aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/argaze/utils/README.md14
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_markers.py82
-rw-r--r--src/argaze/utils/live_tobii_aruco_rois.py4
3 files changed, 64 insertions, 36 deletions
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index 64f7f88..ecb5352 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -60,20 +60,26 @@ python ./src/argaze/utils/explore_tobii_sdcard.py -r RECORDING_PATH
python ./src/argaze/utils/explore_tobii_sdcard.py -s SEGMENT_PATH
```
+- Replay a Tobii Glasses Pro 2 session (replace SEGMENT_PATH) synchronizing video and data together.
+
+```
+python ./src/argaze/utils/replay_tobii_session.py -s SEGMENT_PATH
+```
+
- Export Tobii segment fixations (replace SEGMENT_PATH) into a fixations.json file into the segment folder
```
python ./src/argaze/utils/export_tobii_segment_fixations.py -s SEGMENT_PATH
```
-- Replay a Tobii Glasses Pro 2 session (replace SEGMENT_PATH) synchronizing video and data together.
+- Track ArUco markers into a Tobii camera video segment (replace SEGMENT_PATH). Load a ROI scene (replace ROI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any ROI.
```
-python ./src/argaze/utils/replay_tobii_session.py -s SEGMENT_PATH
+python ./src/argaze/utils/export_tobii_segment_aruco_markers.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -r ROI_SCENE
```
-- Track any 4.5cm Original ArUco marker into calibrated Tobii camera video stream (replace IP_ADDRESS). Load an roi scene (replace ROI_SCENE) .obj file, position it virtually relatively to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any ROI.
+- Track ArUco markers into the Tobii camera video stream (replace IP_ADDRESS). Load a ROI scene (replace ROI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any ROI.
```
-python ./src/argaze/utils/live_tobii_aruco_rois.py -t IP_ADDRESS -c export/tobii_camera.json -m 4.5 -s ROI_SCENE
+python ./src/argaze/utils/live_tobii_aruco_rois.py -t IP_ADDRESS -c export/tobii_camera.json -m 7.5 -r ROI_SCENE
```
diff --git a/src/argaze/utils/export_tobii_segment_aruco_markers.py b/src/argaze/utils/export_tobii_segment_aruco_markers.py
index 11c5c1b..2195169 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_markers.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_markers.py
@@ -20,6 +20,10 @@ def main():
# manage arguments
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
+ parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
+ parser.add_argument('-r', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath')
+ parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary')
+ parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
args = parser.parse_args()
if args.segment_path != None:
@@ -27,44 +31,59 @@ def main():
# Load a tobii segment
tobii_segment = TobiiEntities.TobiiSegment(args.segment_path)
- # create aruco camera
- aruco_camera = ArUcoCamera.ArUcoCamera()
- aruco_camera.load_calibration_file('/Users/Robotron/Developpements/ArGaze/export/tobii_camera.json')
-
- # create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_ARUCO_ORIGINAL', 7.5, aruco_camera) # aruco dictionary, marker length (cm), camera
-
# Load a tobii segment video
tobii_segment_video = tobii_segment.load_video()
print(f'Video duration: {tobii_segment_video.get_duration()}, frame number: {tobii_segment_video.get_frame_number()}, width: {tobii_segment_video.get_width()}, height: {tobii_segment_video.get_height()}')
- # create ROIs 3D scene
+ # Load a tobii segment data
+ tobii_segment_data = tobii_segment.load_data()
+ print(f'Data keys: {tobii_segment_data.keys()}')
+
+ # Access to timestamped gaze position data buffer
+ tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp
+ print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded')
+
+ # Create aruco camera
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+ aruco_camera.load_calibration_file(args.camera_calibration)
+
+ # Create aruco tracker
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
+
+ # Create ROIs 3D scene
roi3D_scene = ROI3DScene.ROI3DScene()
- roi3D_scene.load('/Users/Robotron/Developpements/ArGaze/export/test.obj')
+ roi3D_scene.load(args.roi_scene)
- # replay loop
+ # Video and data replay loop
try:
- last_ts = 0
- for frame_ts, frame in tobii_segment_video.frames():
+ # Iterate on video frames activating video / data synchronisation through vts data buffer
+ for video_ts, video_frame in tobii_segment_video.frames(tobii_segment_data.vts):
+
+ try:
- if frame_ts > last_ts:
+ # Get closest gaze position before video timestamp and remove all gaze positions before
+ closest_gaze_ts, closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
- delay = int((frame_ts - last_ts) / 1000)
+ # Draw video synchronized gaze pointer
+ pointer = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
+ cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
- if cv.waitKey(delay) == 27:
- break
+ # When expected values can't be found
+ except (KeyError, AttributeError, ValueError):
- # track markers with pose estimation and draw them
- aruco_tracker.track(frame.matrix)
- aruco_tracker.draw(frame.matrix)
+ pointer = (0,0)
- # project 3D scenes related to each aruco markers
+ # Track markers with pose estimation and draw them
+ aruco_tracker.track(video_frame.matrix)
+ aruco_tracker.draw(video_frame.matrix)
+
+ # Project 3D scenes related to each aruco markers
if aruco_tracker.get_markers_number():
for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
- # TODO : select different 3D scenes depending on aruco id
+ # TODO : Select different 3D scenes depending on aruco id
marker_rotation = aruco_tracker.get_marker_rotation(i)
marker_translation = aruco_tracker.get_marker_translation(i)
@@ -72,27 +91,30 @@ def main():
roi3D_scene.set_rotation(marker_rotation)
roi3D_scene.set_translation(marker_translation)
- # zero distorsion matrix
+ # Edit Zero distorsion matrix
D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
# DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
# This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable.
roi2D_scene = roi3D_scene.project(aruco_camera.get_K(), D0)
- # check if gaze is inside 2D rois
- #roi2D_scene.inside(pointer)
+ # Check if gaze is inside 2D rois
+ roi2D_scene.inside(pointer)
+
+ # Draw 2D rois
+ roi2D_scene.draw(video_frame.matrix)
- # draw 2D rois
- roi2D_scene.draw(frame.matrix)
+ # Close window using 'Esc' key
+ if cv.waitKey(1) == 27:
+ break
- cv.imshow(f'Segment {tobii_segment.get_id()} video', frame.matrix)
- last_ts = frame_ts
+ cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)
- # exit on 'ctrl+C' interruption
+ # Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
pass
- # stop frame display
+ # Stop frame display
cv.destroyAllWindows()
if __name__ == '__main__':
diff --git a/src/argaze/utils/live_tobii_aruco_rois.py b/src/argaze/utils/live_tobii_aruco_rois.py
index 92e8772..e8ed71f 100644
--- a/src/argaze/utils/live_tobii_aruco_rois.py
+++ b/src/argaze/utils/live_tobii_aruco_rois.py
@@ -23,7 +23,7 @@ def main():
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
- parser.add_argument('-s', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath')
+ parser.add_argument('-r', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath')
parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary')
parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
args = parser.parse_args()
@@ -45,7 +45,7 @@ def main():
aruco_camera.load_calibration_file(args.camera_calibration)
# Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, 7.5, aruco_camera) # aruco dictionary, marker length (cm), camera
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
# Create ROIs 3D scene
roi3D_scene = ROI3DScene.ROI3DScene()