author     Théo de la Hogue    2023-01-11 12:21:16 +0100
committer  Théo de la Hogue    2023-01-11 12:21:16 +0100
commit     a15230396a769292ae8ba71bcfaa82c8032f5710 (patch)
tree       d634ce34b94f667227fc923670bab95a963bc5d6 /src
parent     aaf8bbd56f0b4985b4e2c12454e302ac6a25eef0 (diff)
Updating utils scripts to follow API updates.
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/tobii_segment_arscene_edit.py      |  26
-rw-r--r--  src/argaze/utils/tobii_segment_arscene_export.py    |  17
-rw-r--r--  src/argaze/utils/tobii_segment_aruco_aoi_export.py  | 343
-rw-r--r--  src/argaze/utils/tobii_stream_arscene_display.py    | 142
-rw-r--r--  src/argaze/utils/tobii_stream_aruco_aoi_display.py  | 311
-rw-r--r--  src/argaze/utils/tobii_stream_aruco_cube_display.py | 360
6 files changed, 173 insertions, 1026 deletions
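
The change common to all these scripts is that the old single call ar_scene.project(video_frame.matrix, consistent_markers_number=1, ...) is split into an explicit pose estimation step followed by a projection step, each with its own exception type. A minimal sketch of the new pattern, assuming an ar_scene loaded with ArScene.ArScene.from_json() and video/visualisation frames as used in the diffs below:

    try:

        # Step 1: estimate scene pose from the ArUco markers detected in the frame
        tvec, rmat, _ = ar_scene.estimate_pose(video_frame.matrix)

        # Step 2: project the AOI scene according to the estimated pose
        aoi_scene_projection = ar_scene.project(tvec, rmat, visual_hfov=TobiiSpecifications.VISUAL_HFOV)

        # The projection can then be drawn onto the visualisation frame
        aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))

    # Either step may fail independently
    except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:

        # The scripts below overlay the failure message on the visualisation frame
        print(e)
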
diff --git a/src/argaze/utils/tobii_segment_arscene_edit.py b/src/argaze/utils/tobii_segment_arscene_edit.py
index 2a059aa..b4f5445 100644
--- a/src/argaze/utils/tobii_segment_arscene_edit.py
+++ b/src/argaze/utils/tobii_segment_arscene_edit.py
@@ -141,8 +141,16 @@ def main():
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
- # Track markers with pose estimation
- ar_scene.aruco_tracker.track(video_frame.matrix)
+ try:
+
+ # Estimate scene pose from ArUco markers detected in the frame.
+ tvec, rmat, _ = ar_scene.estimate_pose(video_frame.matrix)
+
+ # Catch exceptions raised by estimate_pose method
+ except ArScene.PoseEstimationFailed as e:
+
+ cv.rectangle(visu_frame.matrix, (0, 100), (550, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 130), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
else:
@@ -154,14 +162,22 @@ def main():
# Copy video frame to edit visualisation on it without disrupting aruco tracking
visu_frame = video_frame.copy()
- # Project scene into frame
- scene_projection = ar_scene.project(video_frame.matrix, consistent_markers_number=1, visual_hfov=TobiiSpecifications.VISUAL_HFOV, pre_tracked_markers=True)
+ try:
+
+ # Project AOI scene into frame according to the estimated pose
+ aoi_scene_projection = ar_scene.project(tvec, rmat, visual_hfov=TobiiSpecifications.VISUAL_HFOV)
+
+ # Catch exceptions raised by project method
+ except ArScene.SceneProjectionFailed as e:
+
+ cv.rectangle(visu_frame.matrix, (0, 100), (550, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 130), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
# Draw tracked markers
ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
# Draw scene projection
- scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
+ aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
# Project 3D scene on each video frame and the visualisation frame
if len(ar_scene.aruco_tracker.tracked_markers) > 0:
diff --git a/src/argaze/utils/tobii_segment_arscene_export.py b/src/argaze/utils/tobii_segment_arscene_export.py
index 3deb7b1..c4e45ea 100644
--- a/src/argaze/utils/tobii_segment_arscene_export.py
+++ b/src/argaze/utils/tobii_segment_arscene_export.py
@@ -145,21 +145,24 @@ def main():
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width*args.borders/100), int(video_frame.height)), (0, 0, 0), -1)
cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - args.borders/100)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
- # Project scene into frame
- scene_projection = ar_scene.project(video_frame.matrix, consistent_markers_number=1, visual_hfov=TobiiSpecifications.VISUAL_HFOV)
+ # Estimate scene pose from ArUco markers detected in the frame.
+ tvec, rmat, _ = ar_scene.estimate_pose(video_frame.matrix)
+
+ # Project AOI scene into frame according to the estimated pose
+ aoi_scene_projection = ar_scene.project(tvec, rmat, visual_hfov=TobiiSpecifications.VISUAL_HFOV)
# Store all projected aoi
- for aoi_name in scene_projection.keys():
+ for aoi_name in aoi_scene_projection.keys():
- projected_aois[aoi_name] = numpy.rint(scene_projection[aoi_name]).astype(int)
+ projected_aois[aoi_name] = numpy.rint(aoi_scene_projection[aoi_name]).astype(int)
# Draw tracked markers
ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
- # Draw scene projection
- scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
+ # Draw AOI
+ aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
- # Catch exceptions raised by project_scene method
+ # Catch exceptions raised by estimate_pose and project methods
except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:
# Draw tracked markers
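
For context, the per-frame projected_aois dictionaries built above can be flattened to CSV at the end of the segment. A hedged sketch of that export step, following the TimeStampedBuffer pattern visible in the deleted script below (the buffer name is a placeholder; video_ts and projected_aois come from the surrounding loop):

    from argaze import DataStructures

    ts_projected_aois = DataStructures.TimeStampedBuffer()

    # One entry per video timestamp, with the per-frame AOI dictionary as value
    ts_projected_aois[video_ts] = projected_aois

    # Flatten the buffer into a pandas dataframe and write it as CSV
    ts_projected_aois.as_dataframe().to_csv('aoi.csv', index=True)
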
diff --git a/src/argaze/utils/tobii_segment_aruco_aoi_export.py b/src/argaze/utils/tobii_segment_aruco_aoi_export.py
deleted file mode 100644
index 2d1013a..0000000
--- a/src/argaze/utils/tobii_segment_aruco_aoi_export.py
+++ /dev/null
@@ -1,343 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import os
-import json
-
-from argaze import DataStructures
-from argaze import GazeFeatures
-from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiData, TobiiVideo, TobiiSpecifications
-from argaze.ArUcoMarkers import *
-from argaze.AreaOfInterest import *
-from argaze.utils import MiscFeatures
-
-import numpy
-import cv2 as cv
-
-def main():
- """
- Track ArUco markers into Tobii Glasses Pro 2 segment video file.
- For each loaded AOI scene .obj file, position the scene virtually relative to each detected ArUco marker and project the scene into the camera frame.
- Export AOI video and data as aruco_aoi.csv and aruco_aoi.mp4 files
- """
-
- # Manage arguments
- parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
- parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)')
- parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
- parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
- parser.add_argument('-md', '--marker_dictionary', metavar='MARKER_DICT', type=ArUcoMarkersDictionary.ArUcoMarkersDictionary, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL, DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
- parser.add_argument('-ms', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)')
- parser.add_argument('-mi', '--marker_id_scene', metavar='MARKER_ID_SCENE', type=json.loads, help='{"marker": "aoi scene filepath"} dictionary')
- parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
- parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
- args = parser.parse_args()
-
- if args.segment_path != None:
-
- # Manage markers id to track
- if args.marker_id_scene == None:
- print(f'Track any Aruco markers from the {args.marker_dictionary.name} dictionary')
- else:
- print(f'Track Aruco markers {list(args.marker_id_scene.keys())} from the {args.marker_dictionary.name} dictionary')
-
- # Manage destination path
- destination_path = '.'
- if args.output != None:
-
- if not os.path.exists(os.path.dirname(args.output)):
-
- os.makedirs(os.path.dirname(args.output))
- print(f'{os.path.dirname(args.output)} folder created')
-
- destination_path = args.output
-
- else:
-
- destination_path = args.segment_path
-
- # Export into a dedicated time range folder
- if args.time_range[1] != None:
- timerange_path = f'[{int(args.time_range[0])}s - {int(args.time_range[1])}s]'
- else:
- timerange_path = f'[all]'
-
- destination_path = f'{destination_path}/{timerange_path}'
-
- if not os.path.exists(destination_path):
-
- os.makedirs(destination_path)
- print(f'{destination_path} folder created')
-
- vs_data_filepath = f'{destination_path}/aoi.csv'
- vs_video_filepath = f'{destination_path}/aoi.mp4'
-
- # Load a tobii segment
- tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1e6), int(args.time_range[1] * 1e6) if args.time_range[1] != None else None)
-
- # Load a tobii segment video
- tobii_segment_video = tobii_segment.load_video()
- print(f'Video properties:\n\tduration: {tobii_segment_video.duration/1e6} s\n\twidth: {tobii_segment_video.width} px\n\theight: {tobii_segment_video.height} px')
-
- # Load a tobii segment data
- tobii_segment_data = tobii_segment.load_data()
-
- print(f'Loaded data count:')
- for name in tobii_segment_data.keys():
- print(f'\t{name}: {len(tobii_segment_data[name])} data')
-
- # Access to video timestamp data buffer
- tobii_ts_vts = tobii_segment_data['VideoTimeStamp']
-
- # Access to timestamped head rotations data buffer
- tobii_ts_head_rotations = tobii_segment_data['Gyroscope']
-
- # Prepare video export in the same format as the segment video
- output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.stream)
-
- # Create aruco camera
- aruco_camera = ArUcoCamera.ArUcoCamera()
-
- # Load calibration file
- if args.camera_calibration != None:
-
- aruco_camera.from_json(args.camera_calibration)
-
- else:
-
- raise UserWarning('.json camera calibration filepath required. Use -c option.')
-
- # Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.marker_dictionary, args.marker_size, aruco_camera)
-
- # Load specific configuration file
- if args.aruco_tracker_configuration != None:
-
- aruco_tracker.load_configuration_file(args.aruco_tracker_configuration)
-
- print(f'ArUcoTracker configuration for {args.marker_dictionary.format} markers detection:')
- aruco_tracker.print_configuration()
-
- # Load an AOI 3D scene for each marker and create an AOI 2D scene and frame when a 'Visualisation_Plan' AOI exists
- aoi3D_scenes = {}
- aoi2D_visu_scenes = {}
- all_aois_names = []
-
- if args.marker_id_scene != None:
-
- for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
-
- marker_id = int(marker_id)
-
- aoi3D_scenes[marker_id] = AOI3DScene.AOI3DScene()
- aoi3D_scenes[marker_id].load(aoi_scene_filepath)
-
- print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}:')
- for aoi in aoi3D_scenes[marker_id].keys():
-
- print(f'\t{aoi}')
-
- # Store aoi name once
- if aoi not in all_aois_names:
- all_aois_names.append(aoi)
-
- def aoi3D_scene_selector(marker_id):
- return aoi3D_scenes.get(marker_id, None)
-
- # Create timestamped buffer to store AOIs and primary time stamp offset
- ts_offset_aois = DataStructures.TimeStampedBuffer()
-
- # Video and data replay loop
- try:
-
- # Initialise progress bar
- MiscFeatures.printProgressBar(0, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
-
- head_moving = False
- head_movement_last = 0.
-
- # Iterate on video frames
- for video_ts, video_frame in tobii_segment_video.frames():
-
- video_ts_ms = video_ts / 1e3
-
- # Copy video frame to edit visualisation on it without disrupting aruco tracking
- visu_frame = video_frame.copy()
-
- # Process video and data frame
- try:
-
- # Get nearest video timestamp
- _, nearest_vts = tobii_ts_vts.get_last_before(video_ts)
-
- # Edit dictionary to store 2D aoi with primary timestamp offset and warning
- all_aoi2D = {
- 'offset': nearest_vts.offset,
- 'warning': None
- }
-
- # Get nearest head rotation before video timestamp and remove all head rotations before
- _, nearest_head_rotation = tobii_ts_head_rotations.pop_last_before(video_ts)
-
- # Calculate head movement considering only head yaw and pitch
- head_movement = numpy.array(nearest_head_rotation.value)
- head_movement_px = head_movement.astype(int)
- head_movement_norm = numpy.linalg.norm(head_movement[0:2])
-
- # Draw movement vector
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2)), (int(visu_frame.width/2) + head_movement_px[1], int(visu_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
-
- # Head movement detection hysteresis
- # TODO : pass the threshold value as argument
- if not head_moving and head_movement_norm > 50:
- head_moving = True
-
- if head_moving and head_movement_norm < 10:
- head_moving = False
-
- # When head is moving, ArUco tracking could return bad pose estimation and so bad AOI scene projection
- if head_moving:
-
- all_aoi2D['warning'] = 'Head is moving'
-
- ts_offset_aois[video_ts] = all_aoi2D
-
- raise UserWarning('Head is moving')
-
- # Hide frame left and right borders before tracking to ignore markers outside focus area
- cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
- cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
-
- # Track markers with pose estimation and draw them
- aruco_tracker.track(video_frame.matrix)
- aruco_tracker.draw_tracked_markers(visu_frame.matrix)
-
- # When no marker is detected, AOI scene projection can't be done
- if len(aruco_tracker.tracked_markers) == 0:
-
- all_aoi2D['warning'] = 'No marker detected'
-
- ts_offset_aois[video_ts] = all_aoi2D
-
- raise UserWarning('No marker detected')
-
- # Store aoi 2D video for further scene merging
- aoi2D_dict = {}
-
- # Project 3D scene on each video frame and the visualisation frame
- for (marker_id, marker) in aruco_tracker.tracked_markers.items():
-
- # Copy 3D scene related to detected marker
- aoi3D_scene = aoi3D_scene_selector(marker_id)
-
- if aoi3D_scene == None:
- continue
-
- # Transform scene into camera referential
- aoi3D_camera = aoi3D_scene.transform(marker.translation, marker.rotation)
-
- # Get aoi inside vision cone field
- cone_vision_height_cm = 200 # cm
- cone_vision_radius_cm = numpy.tan(numpy.deg2rad(TobiiSpecifications.VISUAL_HFOV / 2)) * cone_vision_height_cm
-
- aoi3D_inside, aoi3D_outside = aoi3D_camera.vision_cone(cone_vision_radius_cm, cone_vision_height_cm)
-
- # Keep only aoi inside vision cone field
- aoi3D_scene = aoi3D_scene.copy(exclude=aoi3D_outside.keys())
-
- # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
- # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_video_scene = aoi3D_scene.project(marker.translation, marker.rotation, aruco_camera.K)
-
- # Store each 2D aoi for further scene merging
- for name, aoi in aoi2D_video_scene.items():
-
- if name not in aoi2D_dict.keys():
- aoi2D_dict[name] = []
-
- aoi2D_dict[name].append(aoi.clockwise())
-
- # Merge all 2D aoi into a single 2D scene
- aoi2D_merged_scene = AOI2DScene.AOI2DScene()
- for name, aoi_array in aoi2D_dict.items():
- aoi2D_merged_scene[name] = AOIFeatures.AreaOfInterest(numpy.sum(aoi_array, axis=0) / len(aoi_array))
-
- aoi2D_merged_scene.draw(visu_frame.matrix, (0, 0))
-
- # Store all 2D aoi
- for aoi_name in aoi2D_merged_scene.keys():
-
- all_aoi2D[aoi_name] = numpy.rint(aoi2D_merged_scene[aoi_name]).astype(int)
-
- ts_offset_aois[video_ts] = all_aoi2D
-
- # Warn user when the merged scene is empty
- if len(aoi2D_merged_scene.keys()) == 0:
-
- raise UserWarning('Scene is empty')
-
- # Write warning
- except UserWarning as w:
-
- cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(w), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
-
- # Raised when timestamped buffer is empty
- except KeyError:
- pass
-
- # Draw focus area
- cv.rectangle(visu_frame.matrix, (int(video_frame.width/6), 0), (int(visu_frame.width*(1-1/6)), int(visu_frame.height)), (255, 150, 150), 1)
-
- # Draw center
- cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
-
- # Write segment timing
- cv.rectangle(visu_frame.matrix, (0, 0), (550, 50), (63, 63, 63), -1)
- cv.putText(visu_frame.matrix, f'Segment time: {int(video_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
- if args.window:
-
- # Close window using 'Esc' key
- if cv.waitKey(1) == 27:
- break
-
- # Display visualisation
- cv.imshow(f'Segment {tobii_segment.id} ArUco AOI', visu_frame.matrix)
-
- # Write video
- output_video.write(visu_frame.matrix)
-
- # Update Progress Bar
- progress = video_ts_ms - int(args.time_range[0] * 1e3)
- MiscFeatures.printProgressBar(progress, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
-
- # Exit on 'ctrl+C' interruption
- except KeyboardInterrupt:
- pass
-
- # Stop frame display
- cv.destroyAllWindows()
-
- # End output video file
- output_video.close()
-
- # Print aruco tracking metrics
- print('\nAruco marker tracking metrics')
- try_count, tracked_counts = aruco_tracker.track_metrics
-
- for marker_id, tracked_count in tracked_counts.items():
- print(f'Marker {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
-
- # Export aruco aoi data
- ts_offset_aois.as_dataframe().to_csv(vs_data_filepath, index=True)
- print(f'Aruco AOI data saved into {vs_data_filepath}')
-
- # Notify when the aruco aoi video has been exported
- print(f'Aruco AOI video saved into {vs_video_filepath}')
-
-if __name__ == '__main__':
-
- main() \ No newline at end of file
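
The deleted script above kept only AOI falling inside a vision cone whose radius follows from the camera's horizontal field of view: radius = tan(HFOV / 2) × height. A hedged numeric check of that geometry (the HFOV value here is illustrative only; the real one lives in TobiiSpecifications.VISUAL_HFOV):

    import numpy

    visual_hfov_deg = 82.0       # hypothetical HFOV, for illustration only
    cone_vision_height_cm = 200  # cone depth used in the deleted script

    # Half-angle of the field of view sets the cone radius at a given depth
    cone_vision_radius_cm = numpy.tan(numpy.deg2rad(visual_hfov_deg / 2)) * cone_vision_height_cm

    print(round(cone_vision_radius_cm, 1))  # ~173.9 cm at 200 cm depth for an 82 deg HFOV
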
diff --git a/src/argaze/utils/tobii_stream_arscene_display.py b/src/argaze/utils/tobii_stream_arscene_display.py
new file mode 100644
index 0000000..e364e8e
--- /dev/null
+++ b/src/argaze/utils/tobii_stream_arscene_display.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+
+import argparse
+import os, json
+
+from argaze import *
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import *
+from argaze.AreaOfInterest import *
+from argaze.utils import MiscFeatures
+
+import cv2 as cv
+import numpy
+
+def main():
+ """
+ Track ArUcoScene into Tobii Glasses Pro 2 camera video stream.
+ """
+
+ # Manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
+ parser.add_argument('-p', '--project_path', metavar='ARGAZE_PROJECT', type=str, default=None, help='json argaze project filepath')
+ parser.add_argument('-b', '--borders', metavar='BORDERS', type=float, default=16.666, help='width of left and right border masks (%) outside which markers are not tracked')
+ parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
+ args = parser.parse_args()
+
+ # Create tobii controller (with auto discovery network process if no ip argument is provided)
+ print("Looking for a Tobii Glasses Pro 2 device ...")
+
+ try:
+
+ tobii_controller = TobiiController.TobiiController(args.tobii_ip)
+ print(f'Tobii Glasses Pro 2 device found at {tobii_controller.address} address.')
+
+ except ConnectionError as e:
+
+ print(e)
+ exit()
+
+ # Setup camera at 25 fps to work on Full HD video stream
+ tobii_controller.set_scene_camera_freq_25()
+
+ # Print current configuration
+ print(f'Tobii Glasses Pro 2 configuration:')
+ for key, value in tobii_controller.get_configuration().items():
+ print(f'\t{key}: {value}')
+
+ # Enable tobii data stream
+ tobii_data_stream = tobii_controller.enable_data_stream()
+
+ # Enable tobii video stream
+ tobii_video_stream = tobii_controller.enable_video_stream()
+
+ # Load ar scene
+ ar_scene = ArScene.ArScene.from_json(args.project_path)
+
+ print('\n')
+ print(ar_scene)
+
+ # Start streaming
+ tobii_controller.start_streaming()
+
+ # Live video stream capture loop
+ try:
+
+ # Assess loop performance
+ loop_chrono = MiscFeatures.TimeProbe()
+ fps = 0
+
+ while tobii_video_stream.is_alive():
+
+ # Read video stream
+ video_ts, video_frame = tobii_video_stream.read()
+
+ # Copy video frame to edit visualisation on it without disrupting aruco tracking
+ visu_frame = video_frame.copy()
+
+ # Process video and data frame
+ try:
+
+ # Hide frame left and right borders before tracking to ignore markers outside focus area
+ cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width*args.borders/100), int(video_frame.height)), (0, 0, 0), -1)
+ cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - args.borders/100)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
+
+ # Estimate scene pose from ArUco markers detected in the frame.
+ tvec, rmat, _ = ar_scene.estimate_pose(video_frame.matrix)
+
+ # Project AOI scene into frame according to the estimated pose
+ aoi_scene_projection = ar_scene.project(tvec, rmat, visual_hfov=TobiiSpecifications.VISUAL_HFOV)
+
+ # Draw scene axis
+ ar_scene.draw_axis(visu_frame.matrix)
+
+ # Draw AOI
+ aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
+
+ # Draw tracked markers
+ ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+ # Catch exceptions raised by estimate_pose and project methods
+ except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:
+
+ # Draw tracked markers
+ ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+ cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ # Assess loop performance
+ lap_time, lap_counter, elapsed_time = loop_chrono.lap()
+
+ # Update fps each 10 loops
+ if lap_counter >= 10:
+
+ fps = 1e3 * lap_counter / elapsed_time
+ loop_chrono.restart()
+
+ # Write stream timing
+ cv.rectangle(visu_frame.matrix, (0, 0), (700, 50), (63, 63, 63), -1)
+ cv.putText(visu_frame.matrix, f'Video stream time: {int(video_ts*1e-3)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(visu_frame.matrix, f'Fps: {int(fps)}', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ cv.imshow(f'Stream ArUco AOI', visu_frame.matrix)
+
+ # Close window using 'Esc' key
+ if cv.waitKey(1) == 27:
+ break
+
+ # Exit on 'ctrl+C' interruption
+ except KeyboardInterrupt:
+ pass
+
+ # Stop frame display
+ cv.destroyAllWindows()
+
+ # Stop streaming
+ tobii_controller.stop_streaming()
+
+if __name__ == '__main__':
+
+ main() \ No newline at end of file
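
A hedged invocation of the new script above, with placeholder IP address and project path (the flags match its argparse setup):

    python src/argaze/utils/tobii_stream_arscene_display.py -p my_project.json -t 192.168.1.10 -b 16.666
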
diff --git a/src/argaze/utils/tobii_stream_aruco_aoi_display.py b/src/argaze/utils/tobii_stream_aruco_aoi_display.py
deleted file mode 100644
index 7eb5196..0000000
--- a/src/argaze/utils/tobii_stream_aruco_aoi_display.py
+++ /dev/null
@@ -1,311 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import os, json
-
-from argaze import DataStructures
-from argaze import GazeFeatures
-from argaze.TobiiGlassesPro2 import *
-from argaze.ArUcoMarkers import *
-from argaze.AreaOfInterest import *
-from argaze.utils import MiscFeatures
-
-import cv2 as cv
-import numpy
-
-def main():
- """
- Track any ArUco marker into Tobii Glasses Pro 2 camera video stream.
- For each loaded AOI scene .obj file, position the scene virtually relative to each detected ArUco marker and project the scene into the camera frame.
- """
-
- # Manage arguments
- parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
- parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
- parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
- parser.add_argument('-md', '--marker_dictionary', metavar='MARKER_DICT', type=ArUcoMarkersDictionary.ArUcoMarkersDictionary, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL, DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
- parser.add_argument('-ms', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)')
- parser.add_argument('-mi', '--marker_id_scene', metavar='MARKER_ID_SCENE', type=json.loads, help='{"marker": "aoi scene filepath"} dictionary')
- parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
- args = parser.parse_args()
-
- # Manage markers id to track
- if args.marker_id_scene == None:
- print(f'Track any Aruco markers from the {args.marker_dictionary.name} dictionary')
- else:
- print(f'Track Aruco markers {list(args.marker_id_scene.keys())} from the {args.marker_dictionary.name} dictionary')
-
- # Create tobii controller (with auto discovery network process if no ip argument is provided)
- print("Looking for a Tobii Glasses Pro 2 device ...")
-
- try:
-
- tobii_controller = TobiiController.TobiiController(args.tobii_ip)
- print(f'Tobii Glasses Pro 2 device found at {tobii_controller.address} address.')
-
- except ConnectionError as e:
-
- print(e)
- exit()
-
- # Setup camera at 25 fps to work on Full HD video stream
- tobii_controller.set_scene_camera_freq_25()
-
- # Print current configuration
- print(f'Tobii Glasses Pro 2 configuration:')
- for key, value in tobii_controller.get_configuration().items():
- print(f'\t{key}: {value}')
-
- # Enable tobii data stream
- tobii_data_stream = tobii_controller.enable_data_stream()
-
- # Enable tobii video stream
- tobii_video_stream = tobii_controller.enable_video_stream()
-
- # Create aruco camera
- aruco_camera = ArUcoCamera.ArUcoCamera()
-
- # Load calibration file
- if args.camera_calibration != None:
-
- aruco_camera.load_calibration_file(args.camera_calibration)
-
- else:
-
- raise UserWarning('.json camera calibration filepath required. Use -c option.')
-
- # Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.marker_dictionary, args.marker_size, aruco_camera)
-
- # Load specific configuration file
- if args.aruco_tracker_configuration != None:
-
- aruco_tracker.load_configuration_file(args.aruco_tracker_configuration)
-
- print(f'ArUcoTracker configuration for {args.marker_dictionary.name} markers detection:')
- aruco_tracker.print_configuration()
-
- # Load an AOI 3D scene for each marker and create an AOI 2D scene and frame when a 'Visualisation_Plan' AOI exists
- aoi3D_scenes = {}
- aoi2D_visu_scenes = {}
-
- if args.marker_id_scene != None:
-
- for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
-
- marker_id = int(marker_id)
-
- aoi3D_scenes[marker_id] = AOI3DScene.AOI3DScene()
- aoi3D_scenes[marker_id].load(aoi_scene_filepath)
-
- print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}:')
- for aoi in aoi3D_scenes[marker_id].keys():
-
- print(f'\t{aoi}')
-
- def aoi3D_scene_selector(marker_id):
- return aoi3D_scenes.get(marker_id, None)
-
- # Create timestamped buffer to store AOIs scene in time
- ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
-
- # Init head movement
- head_movement_px = numpy.array((0, 0))
- head_movement_norm = 0
-
- # Init data timestamped in millisecond
- data_ts_ms = 0
-
- # Assess temporal performance
- loop_chrono = MiscFeatures.TimeProbe()
- gyroscope_chrono = MiscFeatures.TimeProbe()
-
- loop_ps = 0
- gyroscope_ps = 0
-
- def data_stream_callback(data_ts, data_object, data_object_type):
-
- nonlocal head_movement_px
- nonlocal head_movement_norm
- nonlocal data_ts_ms
- nonlocal gyroscope_chrono
-
- data_ts_ms = data_ts / 1e3
-
- match data_object_type:
-
- case 'Gyroscope':
-
- # Assess gyroscope stream performance
- gyroscope_chrono.lap()
-
- # Calculate head movement considering only head yaw and pitch
- head_movement = numpy.array(data_object.value)
- head_movement_px = head_movement.astype(int)
- head_movement_norm = numpy.linalg.norm(head_movement[0:2])
-
- tobii_data_stream.reading_callback = data_stream_callback
-
- # Start streaming
- tobii_controller.start_streaming()
-
- # Live video stream capture loop
- try:
-
- # Assess loop performance
- loop_chrono = MiscFeatures.TimeProbe()
- fps = 0
-
- # Detect head movement
- head_moving = False
- head_movement_last = 0.
-
- while tobii_video_stream.is_alive():
-
- # Read video stream
- video_ts, video_frame = tobii_video_stream.read()
- video_ts_ms = video_ts / 1e3
-
- # Copy video frame to edit visualisation on it without disrupting aruco tracking
- visu_frame = video_frame.copy()
-
- # Process video and data frame
- try:
-
- # Head movement detection hysteresis
- # TODO : pass the threshold value as argument
- if not head_moving and head_movement_norm > 50:
- head_moving = True
-
- if head_moving and head_movement_norm < 10:
- head_moving = False
-
- # When head is moving, ArUco tracking could return bad pose estimation and so bad AOI scene projection
- if head_moving:
-
- ts_aois_scenes[round(video_ts_ms)] = AOI2DScene.AOI2DScene()
-
- raise UserWarning('Head is moving')
-
- # Hide frame left and right borders before tracking to ignore markers outside focus area
- cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
- cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
-
- # Track markers with pose estimation and draw them
- aruco_tracker.track(video_frame.matrix)
- aruco_tracker.draw_tracked_markers(visu_frame.matrix)
-
- # When no marker is detected, AOI scene projection can't be done
- if aruco_tracker.tracked_markers_number == 0:
-
- ts_aois_scenes[round(video_ts_ms)] = AOI2DScene.AOI2DScene()
-
- raise UserWarning('No marker detected')
-
- # Store aoi 2D video for further scene merging
- aoi2D_dict = {}
-
- # Project 3D scene on each video frame and the visualisation frame
- for marker_id, marker in aruco_tracker.tracked_markers.items():
-
- # Copy 3D scene related to detected marker
- aoi3D_scene = aoi3D_scene_selector(marker_id)
-
- if aoi3D_scene == None:
- continue
-
- # Transform scene into camera referential
- aoi3D_camera = aoi3D_scene.transform(marker.translation, marker.rotation)
-
- # Get aoi inside vision cone field
- cone_vision_height_cm = 200 # cm
- cone_vision_radius_cm = numpy.tan(numpy.deg2rad(TobiiSpecifications.VISUAL_HFOV / 2)) * cone_vision_height_cm
-
- aoi3D_inside, aoi3D_outside = aoi3D_camera.vision_cone(cone_vision_radius_cm, cone_vision_height_cm)
-
- # Keep only aoi inside vision cone field
- aoi3D_scene = aoi3D_scene.copy(exclude=aoi3D_outside.keys())
-
- # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
- # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_video_scene = aoi3D_scene.project(marker.translation, marker.rotation, aruco_camera.K)
-
- # Store each 2D aoi for further scene merging
- for name, aoi in aoi2D_video_scene.items():
-
- if name not in aoi2D_dict.keys():
- aoi2D_dict[name] = []
-
- aoi2D_dict[name].append(aoi.clockwise())
-
- # Merge all 2D aoi into a single 2D scene
- aoi2D_merged_scene = AOI2DScene.AOI2DScene()
- for name, aoi_array in aoi2D_dict.items():
- aoi2D_merged_scene[name] = AOIFeatures.AreaOfInterest(numpy.sum(aoi_array, axis=0) / len(aoi_array))
-
- aoi2D_merged_scene.draw(visu_frame.matrix)
-
- # Store 2D merged scene at this time in millisecond
- ts_aois_scenes[round(video_ts_ms)] = aoi2D_merged_scene
-
- # Warn user when the merged scene is empty
- if len(aoi2D_merged_scene.keys()) == 0:
-
- raise UserWarning('Scene is empty')
-
- # Write warning
- except UserWarning as w:
-
- cv.rectangle(visu_frame.matrix, (0, 100), (500, 150), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(w), (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
-
- # Assess loop performance
- lap_time, lap_counter, elapsed_time = loop_chrono.lap()
-
- # Update fps each 10 loops
- if lap_counter >= 10:
-
- loop_ps = 1e3 * lap_counter / elapsed_time
- loop_chrono.restart()
-
- # Assess gyroscope streaming performance
- elapsed_time, lap_counter = gyroscope_chrono.end()
- gyroscope_ps = 1e3 * lap_counter / elapsed_time
- gyroscope_chrono.restart()
-
- # Draw head movement vector
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2)), (int(visu_frame.width/2) + head_movement_px[1], int(visu_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
-
- # Draw center
- cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
-
- # Write stream timing
- cv.rectangle(visu_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
- cv.putText(visu_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
- cv.rectangle(visu_frame.matrix, (0, 50), (500, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, f'Gyroscope fps: {int(gyroscope_ps)}', (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
-
- cv.imshow(f'Stream ArUco AOI', visu_frame.matrix)
-
- # Close window using 'Esc' key
- if cv.waitKey(1) == 27:
- break
-
- # Exit on 'ctrl+C' interruption
- except KeyboardInterrupt:
- pass
-
- # Stop frame display
- cv.destroyAllWindows()
-
- # Stop streaming
- tobii_controller.stop_streaming()
-
-if __name__ == '__main__':
-
- main() \ No newline at end of file
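
For reference, the deleted script above gated AOI projection on head motion with a two-threshold hysteresis: the moving state is entered above 50 and left only below 10, so the state cannot flap around a single threshold. A minimal sketch of that logic, with the thresholds hard-coded as in the original:

    head_moving = False

    def update_head_state(head_movement_norm):

        global head_moving

        # Enter the moving state only above the high threshold
        if not head_moving and head_movement_norm > 50:
            head_moving = True

        # Leave it only once motion drops below the low threshold
        if head_moving and head_movement_norm < 10:
            head_moving = False

        return head_moving
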
diff --git a/src/argaze/utils/tobii_stream_aruco_cube_display.py b/src/argaze/utils/tobii_stream_aruco_cube_display.py
deleted file mode 100644
index e310308..0000000
--- a/src/argaze/utils/tobii_stream_aruco_cube_display.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import os, json
-import math
-import threading
-
-from argaze import DataStructures
-from argaze import GazeFeatures
-from argaze.TobiiGlassesPro2 import *
-from argaze.ArUcoMarkers import *
-from argaze.AreaOfInterest import *
-from argaze.utils import MiscFeatures
-
-import cv2 as cv
-import numpy
-
-def make_rotation_matrix(x, y, z):
-
- # Create rotation matrix around x axis
- c = numpy.cos(numpy.deg2rad(x))
- s = numpy.sin(numpy.deg2rad(x))
- Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
-
- # Create rotation matrix around y axis
- c = numpy.cos(numpy.deg2rad(y))
- s = numpy.sin(numpy.deg2rad(y))
- Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
-
- # Create rotation matrix around z axis
- c = numpy.cos(numpy.deg2rad(z))
- s = numpy.sin(numpy.deg2rad(z))
- Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
-
- # Return intrinsic rotation matrix
- return Rx.dot(Ry.dot(Rz))
-
-def main():
- """
- Track ArUcoCube into Tobii Glasses Pro 2 camera video stream.
- """
-
- # Manage arguments
- parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
- parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
- parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
- parser.add_argument('-i', '--imu_calibration', metavar='IMU_CALIB', type=str, default=None, help='json imu calibration filepath')
- parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco cube description filepath')
- parser.add_argument('-s', '--aoi_scene', metavar='AOI_SCENE', type=str, help='obj aoi 3D scene description filepath')
- parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
- args = parser.parse_args()
-
- # Create tobii controller (with auto discovery network process if no ip argument is provided)
- print('\nLooking for a Tobii Glasses Pro 2 device ...')
-
- try:
-
- tobii_controller = TobiiController.TobiiController(args.tobii_ip)
- print(f'Tobii Glasses Pro 2 device found at {tobii_controller.address} address.')
-
- except ConnectionError as e:
-
- print(e)
- exit()
-
- # Setup camera at 25 fps to work on Full HD video stream
- tobii_controller.set_scene_camera_freq_25()
-
- # Print current configuration
- print(f'Tobii Glasses Pro 2 configuration:')
- for key, value in tobii_controller.get_configuration().items():
- print(f'\t{key}: {value}')
-
- # Enable tobii data stream
- tobii_data_stream = tobii_controller.enable_data_stream()
-
- # Enable tobii video stream
- tobii_video_stream = tobii_controller.enable_video_stream()
-
- # Load aruco cube description
- aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube)
- aruco_cube.print_cache()
-
- # Load AOI 3D scene centered onto aruco cube
- aoi3D_scene = AOI3DScene.AOI3DScene()
- aoi3D_scene.load(args.aoi_scene)
-
- print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene related to ArCube:')
- for aoi in aoi3D_scene.keys():
- print(f'\t{aoi}')
-
- # Create aruco camera
- aruco_camera = ArUcoCamera.ArUcoCamera()
-
- # Load calibration file
- if args.camera_calibration != None:
-
- aruco_camera.load_calibration_file(args.camera_calibration)
-
- else:
-
- raise UserWarning('.json camera calibration filepath required. Use -c option.')
-
- # Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_cube.dictionary, aruco_cube.marker_size, aruco_camera)
-
- # Load specific configuration file
- if args.aruco_tracker_configuration != None:
-
- aruco_tracker.load_configuration_file(args.aruco_tracker_configuration)
-
- print(f'\nArUcoTracker configuration for markers detection:')
- aruco_tracker.print_configuration()
-
- # Create tobii imu handler to track head pose changes when aruco cube pose can't be estimated
- # So, the resulting head pose is relative to last pose estimation
- tobii_imu = TobiiInertialMeasureUnit.TobiiInertialMeasureUnit()
-
- # Load optional imu calibration file
- if args.imu_calibration != None:
-
- tobii_imu.load_calibration_file(args.imu_calibration)
-
- # Init tobii imu lock
- tobii_imu_lock = threading.Lock()
-
- # TEST : DIFF ACC
- last_accl = numpy.zeros(3)
-
- # Init data timestamped in millisecond
- data_ts_ms = 0
-
- # Assess temporal performance
- loop_chrono = MiscFeatures.TimeProbe()
- loop_ps = 0
-
- def data_stream_callback(data_ts, data_object, data_object_type):
-
- nonlocal tobii_imu
- nonlocal tobii_imu_lock
- nonlocal data_ts_ms
-
- #TEST
- nonlocal last_accl
-
- data_ts_ms = data_ts / 1e3
-
- # Don't update imu when it is used
- if tobii_imu_lock.locked():
- return
-
- # Lock tobii imu updates
- tobii_imu_lock.acquire()
-
- match data_object_type:
-
- case 'Gyroscope':
-
- data_object = tobii_imu.apply_gyroscope_offset(data_object)
-
- tobii_imu.update_rotation(data_ts, data_object)
-
-
- case 'Accelerometer':
- pass
- '''
- print('raw accelerometer(m/s2)=', data_object.value)
-
- # TEST :
- diff_accl = last_accl - numpy.array(data_object.value)
- last_accl = numpy.array(data_object.value)
- print('\tdiff(cm/s2)=', 100 * numpy.linalg.norm(diff_accl))
-
- # TEST : ignore acceleration double
- if numpy.linalg.norm(diff_accl) > 0.:
-
- data_object = tobii_imu.apply_accelerometer_coefficients(data_object)
-
- print('corrected accelerometer(m/s2)=', data_object.value)
-
- print('current plumb=', tobii_imu.get_plumb())
-
- data_object = tobii_imu.apply_plumb(data_object)
-
- print('corrected accelerometer - gravity(m/s2)=', data_object.value)
- print('\tnorm(cm/s2)=', 100 * numpy.linalg.norm(data_object.value))
-
- tobii_imu.update_translation(data_ts, data_object)
- '''
- # Unlock tobii imu updates
- tobii_imu_lock.release()
-
- tobii_data_stream.reading_callback = data_stream_callback
-
- # Start streaming
- tobii_controller.start_streaming()
-
- # Live video stream capture loop
- try:
-
- # Assess loop performance
- loop_chrono = MiscFeatures.TimeProbe()
- fps = 0
-
- # Track aruco cube pose
- aruco_cube_tvec = numpy.zeros(3)
- aruco_cube_rvec = numpy.zeros(3)
- aruco_cube_success = False
- aruco_cube_validity = False
- aruco_cube_ts_ms = 0
-
- while tobii_video_stream.is_alive():
-
- # Read video stream
- video_ts, video_frame = tobii_video_stream.read()
- video_ts_ms = video_ts / 1e3
-
- # Copy video frame to edit visualisation on it without disrupting aruco tracking
- visu_frame = video_frame.copy()
-
- # Process video and data frame
- try:
-
- # Track markers with pose estimation
- aruco_tracker.track(video_frame.matrix)
- aruco_tracker.draw_tracked_markers(visu_frame.matrix)
-
- # Estimate cube pose from tracked markers
- tvec, rvec, success, validity = aruco_cube.estimate_pose(aruco_tracker.tracked_markers)
-
- # Cube pose estimation succeeds and is validated by at least 2 faces
- if success and validity >= 1:
-
- # Lock tobii imu updates
- tobii_imu_lock.acquire()
-
- # Reset head rotation, translation and translation speed (cm/s)
- # Note : head translation speed is also estimated thanks to the accelerometer sensor (see above)
- tobii_imu.reset_rotation()
- #tobii_imu.reset_translation(translation_speed = (tvec - aruco_cube_tvec) / (video_ts_ms - aruco_cube_ts_ms))
-
- # Create a rotation matrix to transform cube rotation from camera referential to imu referential
- F = make_rotation_matrix(*TobiiInertialMeasureUnit.CAMERA_TO_IMU_ROTATION_VECTOR)
- R, _ = cv.Rodrigues(rvec)
- rvec_flipped, _ = cv.Rodrigues(F.dot(R))
-
- # Update head plumb orientation with flipped cube orientation
- tobii_imu.rotate_plumb(rvec_flipped)
-
- # Unlock tobii imu updates
- tobii_imu_lock.release()
-
- # Store cube pose
- aruco_cube_tvec = tvec
- aruco_cube_rvec = rvec
- aruco_cube_success = success
- aruco_cube_validity = validity
- aruco_cube_ts_ms = video_ts_ms
-
- # Cube pose estimation fails
- elif aruco_cube_success:
-
- # Use tobii glasses inertial sensors to estimate cube pose from last estimated pose
-
- # Translate cube into imu referential
- imu_tvec = aruco_cube_tvec + numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
-
- # Translate cube according head translation
- imu_tvec = imu_tvec + tobii_imu.translation
-
- # Rotate cube around imu origin according head rotation
- imu_rvec = tobii_imu.rotation * numpy.array([-1., -1., 1.])
- imu_R = make_rotation_matrix(*imu_rvec)
- new_imu_tvec = imu_tvec.dot(imu_R)
-
- # Translate back cube into camera referential
- new_tvec = new_imu_tvec - numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
-
- # Rotate cube orientation (supposing cube top is up in )
- imu_rvec = tobii_imu.rotation * numpy.array([1., -1., 1.])
- imu_R = make_rotation_matrix(*imu_rvec)
-
- C, _ = cv.Rodrigues(aruco_cube_rvec)
- C = C.dot(imu_R)
- new_rvec, _ = cv.Rodrigues(C)
- #new_rvec = aruco_cube_rvec
-
- # Set cube pose estimation
- aruco_cube.translation = new_tvec
- aruco_cube.rotation = new_rvec
-
- else:
-
- raise UserWarning('Cube pose estimation fails.')
-
- # Project AOI 3D scene onto camera frame
-
- # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
- # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_scene = aoi3D_scene.project(aruco_cube.translation, aruco_cube.rotation, aruco_camera.K)
-
- # Draw projected scene
- #aoi2D_scene.draw(visu_frame.matrix)
-
- # Draw markers pose estimation
- #aruco_tracker.draw_tracked_markers(visu_frame.matrix)
-
- # Draw cube pose estimation (without camera distorsion)
- aruco_cube.draw(visu_frame.matrix, aruco_camera.K, aruco_camera.D, draw_places=True)
-
- # Warn about cube pose validity
- if not aruco_cube_validity:
-
- raise UserWarning('Cube pose estimation is not validated.')
-
- # Write warning
- except UserWarning as w:
-
- cv.rectangle(visu_frame.matrix, (0, 100), (600, 150), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(w), (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
-
- # Assess loop performance
- lap_time, lap_counter, elapsed_time = loop_chrono.lap()
-
- # Update fps each 10 loops
- if lap_counter >= 10:
-
- loop_ps = 1e3 * lap_counter / elapsed_time
- loop_chrono.restart()
-
- # Draw center
- cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
-
- # Write stream timing
- cv.rectangle(visu_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
- cv.putText(visu_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
- cv.imshow(f'Stream ArUcoCube', visu_frame.matrix)
-
- # Close window using 'Esc' key
- if cv.waitKey(1) == 27:
- break
-
- # Exit on 'ctrl+C' interruption
- except KeyboardInterrupt:
- pass
-
- # Stop frame display
- cv.destroyAllWindows()
-
- # Stop streaming
- tobii_controller.stop_streaming()
-
-if __name__ == '__main__':
-
- main() \ No newline at end of file
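
As a closing note on the deleted cube script, make_rotation_matrix composes intrinsic rotations as Rx · Ry · Rz. Any such composition should be orthonormal with determinant +1, which gives a quick sanity check; a hedged, self-contained sketch reproducing the same composition as the deleted file:

    import numpy

    def make_rotation_matrix(x, y, z):
        # Same intrinsic Rx·Ry·Rz composition as the deleted script above
        cx, sx = numpy.cos(numpy.deg2rad(x)), numpy.sin(numpy.deg2rad(x))
        cy, sy = numpy.cos(numpy.deg2rad(y)), numpy.sin(numpy.deg2rad(y))
        cz, sz = numpy.cos(numpy.deg2rad(z)), numpy.sin(numpy.deg2rad(z))
        Rx = numpy.array([[1, 0, 0], [0, cx, -sx], [0, sx, cx]])
        Ry = numpy.array([[cy, 0, sy], [0, 1, 0], [-sy, 0, cy]])
        Rz = numpy.array([[cz, -sz, 0], [sz, cz, 0], [0, 0, 1]])
        return Rx.dot(Ry.dot(Rz))

    R = make_rotation_matrix(30, 45, 60)

    # A proper rotation matrix is orthonormal ...
    assert numpy.allclose(R.dot(R.T), numpy.eye(3))

    # ... and preserves orientation (determinant +1)
    assert numpy.isclose(numpy.linalg.det(R), 1.0)
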