about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/argaze/utils/tobii_segment_aruco_aoi_export.py68
1 file changed, 44 insertions, 24 deletions
diff --git a/src/argaze/utils/tobii_segment_aruco_aoi_export.py b/src/argaze/utils/tobii_segment_aruco_aoi_export.py
index 7d4931e..bb7d769 100644
--- a/src/argaze/utils/tobii_segment_aruco_aoi_export.py
+++ b/src/argaze/utils/tobii_segment_aruco_aoi_export.py
@@ -6,7 +6,7 @@ import json
from argaze import DataStructures
from argaze import GazeFeatures
-from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiVideo, TobiiSpecifications
+from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiData, TobiiVideo, TobiiSpecifications
from argaze.ArUcoMarkers import *
from argaze.AreaOfInterest import *
from argaze.utils import MiscFeatures
@@ -38,9 +38,9 @@ def main():
# Manage markers id to track
if args.marker_id_scene == None:
- print(f'Track any Aruco markers from the {args.marker_dictionary} dictionary')
+ print(f'Track any Aruco markers from the {args.marker_dictionary.name} dictionary')
else:
- print(f'Track Aruco markers {list(args.marker_id_scene.keys())} from the {args.marker_dictionary} dictionary')
+ print(f'Track Aruco markers {list(args.marker_id_scene.keys())} from the {args.marker_dictionary.name} dictionary')
# Manage destination path
destination_path = '.'
@@ -87,11 +87,14 @@ def main():
for name in tobii_segment_data.keys():
print(f'\t{name}: {len(tobii_segment_data[name])} data')
+ # Access to video timestamp data buffer
+ tobii_ts_vts = tobii_segment_data['VideoTimeStamp']
+
# Access to timestamped head rotations data buffer
tobii_ts_head_rotations = tobii_segment_data['Gyroscope']
# Prepare video exportation at the same format than segment video
- output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.get_stream())
+ output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.stream)
# Create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
@@ -113,12 +116,13 @@ def main():
aruco_tracker.load_configuration_file(args.aruco_tracker_configuration)
- print(f'ArUcoTracker configuration for {aruco_tracker.get_markers_dictionay().get_markers_format()} markers detection:')
+ print(f'ArUcoTracker configuration for {args.marker_dictionary.format} markers detection:')
aruco_tracker.print_configuration()
# Load AOI 3D scene for each marker and create a AOI 2D scene and frame when a 'Visualisation_Plan' AOI exist
aoi3D_scenes = {}
aoi2D_visu_scenes = {}
+ all_aois_names = []
for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
@@ -132,17 +136,21 @@ def main():
print(f'\t{aoi}')
+ # Store aoi name once
+ if aoi not in all_aois_names:
+ all_aois_names.append(aoi)
+
def aoi3D_scene_selector(marker_id):
return aoi3D_scenes.get(marker_id, None)
- # Create timestamped buffer to store AOIs scene in time
- ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
+ # Create timestamped buffer to store AOIs and primary time stamp offset
+ ts_offset_aois = DataStructures.TimeStampedBuffer()
# Video and data replay loop
try:
# Initialise progress bar
- MiscFeatures.printProgressBar(0, tobii_segment_video.get_duration()/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+ MiscFeatures.printProgressBar(0, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
head_moving = False
head_movement_last = 0.
@@ -158,6 +166,9 @@ def main():
# Process video and data frame
try:
+ # Get nearest video timestamp
+ _, nearest_vts = tobii_ts_vts.get_last_before(video_ts)
+
# Get nearest head rotation before video timestamp and remove all head rotations before
_, nearest_head_rotation = tobii_ts_head_rotations.pop_first_until(video_ts)
@@ -180,7 +191,7 @@ def main():
# When head is moving, ArUco tracking could return bad pose estimation and so bad AOI scene projection
if head_moving:
- ts_aois_scenes[round(video_ts_ms)] = AOIFeatures.EmptyAOIScene()
+ ts_offset_aois[round(video_ts_ms)] = AOIFeatures.EmptyAOIScene()
raise UserWarning('Head is moving')
@@ -190,12 +201,12 @@ def main():
# Track markers with pose estimation and draw them
aruco_tracker.track(video_frame.matrix)
- aruco_tracker.draw(visu_frame.matrix)
+ aruco_tracker.draw_tracked_markers(visu_frame.matrix)
# When no marker is detected, no AOI scene projection can't be done
- if aruco_tracker.markers_number() == 0:
+ if len(aruco_tracker.tracked_markers) == 0:
- ts_aois_scenes[round(video_ts_ms)] = AOIFeatures.EmptyAOIScene()
+ ts_offset_aois[round(video_ts_ms)] = AOIFeatures.EmptyAOIScene()
raise UserWarning('No marker detected')
@@ -203,7 +214,7 @@ def main():
aoi2D_dict = {}
# Project 3D scene on each video frame and the visualisation frame
- for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+ for (marker_id, marker) in aruco_tracker.tracked_markers.items():
# Copy 3D scene related to detected marker
aoi3D_scene = aoi3D_scene_selector(marker_id)
@@ -212,7 +223,7 @@ def main():
continue
# Transform scene into camera referential
- aoi3D_camera = aoi3D_scene.transform(aruco_tracker.get_marker_translation(i), aruco_tracker.get_marker_rotation(i))
+ aoi3D_camera = aoi3D_scene.transform(marker.translation, marker.rotation)
# Get aoi inside vision cone field
cone_vision_height_cm = 200 # cm
@@ -225,7 +236,7 @@ def main():
# DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
- aoi2D_video_scene = aoi3D_scene.project(aruco_tracker.get_marker_translation(i), aruco_tracker.get_marker_rotation(i), aruco_camera.get_K())
+ aoi2D_video_scene = aoi3D_scene.project(marker.translation, marker.rotation, aruco_camera.K)
# Store each 2D aoi for further scene merging
for name, aoi in aoi2D_video_scene.items():
@@ -242,8 +253,20 @@ def main():
aoi2D_merged_scene.draw(visu_frame.matrix, (0, 0))
- # Store 2D merged scene at this time in millisecond
- ts_aois_scenes[round(video_ts_ms)] = aoi2D_merged_scene
+ # Edit dictionary with all 2D aoi with primary timestamp offset
+ all_aoi2D = {
+ 'offset': nearest_vts.offset
+ }
+
+ for aoi_name in all_aois_names:
+
+ try:
+ all_aoi2D[aoi_name] = aoi2D_merged_scene[aoi_name]
+ except:
+ all_aoi2D[aoi_name] = None
+
+ # Store all 2D aoi
+ ts_offset_aois[video_ts] = all_aoi2D
# Warn user when the merged scene is empty
if len(aoi2D_merged_scene.keys()) == 0:
@@ -278,14 +301,14 @@ def main():
break
# Display visualisation
- cv.imshow(f'Segment {tobii_segment.get_id()} ArUco AOI', visu_frame.matrix)
+ cv.imshow(f'Segment {tobii_segment.id} ArUco AOI', visu_frame.matrix)
# Write video
output_video.write(visu_frame.matrix)
# Update Progress Bar
progress = video_ts_ms - int(args.time_range[0] * 1e3)
- MiscFeatures.printProgressBar(progress, tobii_segment_video.get_duration()/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+ MiscFeatures.printProgressBar(progress, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
# Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
@@ -299,16 +322,13 @@ def main():
# Print aruco tracking metrics
print('\nAruco marker tracking metrics')
- try_count, tracked_counts, rejected_counts = aruco_tracker.get_track_metrics()
+ try_count, tracked_counts = aruco_tracker.track_metrics
for marker_id, tracked_count in tracked_counts.items():
print(f'Markers {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
- for marker_id, rejected_count in rejected_counts.items():
- print(f'Markers {marker_id} has been rejected in {rejected_count} / {try_count} frames ({round(100 * rejected_count / try_count, 2)} %)')
-
# Export aruco aoi data
- ts_aois_scenes.export_as_csv(vs_data_filepath, exclude=['dimension'])
+ ts_offset_aois.export_as_csv(vs_data_filepath)
print(f'Aruco AOI data saved into {vs_data_filepath}')
# Notify when the aruco aoi video has been exported