about summary refs log tree commit diff
path: root/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/argaze/utils/export_tobii_segment_aruco_visual_scan.py')
-rw-r--r--  src/argaze/utils/export_tobii_segment_aruco_visual_scan.py  100
1 files changed, 76 insertions, 24 deletions
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 4622b00..4f84943 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -14,6 +14,17 @@ import numpy
import cv2 as cv
+aoi_color = {
+ 'Scene_Plan': (127, 127, 127),
+ 'PFD_Plan': (63, 127, 63),
+ 'Attitude_Plan': (0, 255, 0),
+ 'Air_Speed_Plan': (255, 0, 255),
+ 'Vertical_Speed_Plan': (255, 255, 0),
+ 'Localiser_Plan': (0, 0, 255),
+ 'ND_Plan': (127, 63, 63),
+ 'Marker_Plan': (0, 0, 0)
+}
+
def main():
"""
Track any ArUco marker into Tobii Glasses Pro 2 segment video file.
@@ -50,13 +61,15 @@ def main():
os.makedirs(os.path.dirname(args.output))
print(f'{os.path.dirname(args.output)} folder created')
- visual_scan_filepath = f'{args.output}/visual_scan.csv'
- video_filepath = f'{args.output}/fullstream+visu.mp4'
+ vs_data_filepath = f'{args.output}/visual_scan.csv'
+ vs_visu_filepath = f'{args.output}/visual_scan.jpg'
+ vs_video_filepath = f'{args.output}/visual_scan.mp4'
else:
- visual_scan_filepath = f'{args.segment_path}/visual_scan.csv'
- video_filepath = f'{args.segment_path}/fullstream+visu.mp4'
+ vs_data_filepath = f'{args.segment_path}/visual_scan.csv'
+ vs_visu_filepath = f'{args.segment_path}/visual_scan.jpg'
+ vs_video_filepath = f'{args.segment_path}/visual_scan.mp4'
# Load a tobii segment
tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1000000), int(args.time_range[1] * 1000000) if args.time_range[1] != None else None)
@@ -69,12 +82,16 @@ def main():
tobii_segment_data = tobii_segment.load_data()
print(f'Data keys: {tobii_segment_data.keys()}')
- # Access to timestamped gaze position data buffer
+ # Access to timestamped gaze positions data buffer
tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp
print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded')
+ # Access to timestamped gaze 3D positions data buffer
+ #tobii_ts_gaze_3d_positions = tobii_segment_data.gidx_gp3
+ #print(f'{len(tobii_ts_gaze_3d_positions)} gaze 3D positions loaded')
+
# Prepare video exportation at the same format than segment video
- output_video = TobiiVideo.TobiiVideoOutput(video_filepath, tobii_segment_video.get_stream())
+ output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.get_stream())
# Create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
@@ -86,7 +103,7 @@ def main():
# Create AOIs 3D scene
aoi3D_scene = AOI3DScene.AOI3DScene()
aoi3D_scene.load(args.aoi_scene)
- print(f'AOIs names: {aoi3D_scene.areas.keys()}')
+ print(f'AOIs names: {aoi3D_scene.keys()}')
# Create timestamped buffer to store AOIs scene in time
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
@@ -94,6 +111,26 @@ def main():
# Create timestamped buffer to store gaze positions in time
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
+ # Create a visual scan visualisation frame
+ visu_width = 1920
+ visu_height = 1080
+ visu_ratio = visu_height
+ visu_frame = numpy.full((visu_height, visu_width, 3), 255, dtype=numpy.uint8)
+
+ # Project 3D scene on the reference frame
+ # TODO : center projection on a reference AOI
+ # TODO: pass the reference AOI in argument
+ aoi3D_scene.rotation = numpy.asarray([[-numpy.pi, 0.0, 0.0]])
+ aoi3D_scene.translation = numpy.asarray([[25.0, -32.0, 20.0]])
+
+ # Edit a projection matrix for the reference frame
+ K0 = numpy.asarray([[visu_ratio, 0.0, visu_width/2], [0.0, visu_ratio, visu_height/2], [0.0, 0.0, 1.0]])
+
+ aoi2D_visu_scene = aoi3D_scene.project(K0)
+
+ for name, aoi in aoi2D_visu_scene.items():
+ aoi.draw(visu_frame, aoi_color[name])
+
# Video and data replay loop
try:
@@ -109,11 +146,11 @@ def main():
closest_gaze_ts, closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
# Draw gaze position
- gaze_position = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
- cv.circle(video_frame.matrix, gaze_position, 4, (0, 255, 255), -1)
+ video_gaze_pixel = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
+ cv.circle(video_frame.matrix, video_gaze_pixel, 4, (0, 255, 255), -1)
# Store gaze position at this time in millisecond
- ts_gaze_positions[video_ts/1000] = gaze_position
+ ts_gaze_positions[round(video_ts/1000)] = video_gaze_pixel
# Wait for gaze position
except ValueError:
@@ -123,7 +160,7 @@ def main():
aruco_tracker.track(video_frame.matrix)
aruco_tracker.draw(video_frame.matrix)
- # Project 3D scene related to each aruco marker
+ # Project 3D scene on each video frame and the visualisation frame
if aruco_tracker.get_markers_number():
for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
@@ -137,19 +174,30 @@ def main():
aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
aoi3D_scene.translation = aruco_tracker.get_marker_translation(i)
-
- # Edit Zero distorsion matrix
- D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
# DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
- aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
+ aoi2D_video_scene = aoi3D_scene.project(aruco_camera.get_K())
- # Draw 2D scene
- aoi2D_scene.draw(video_frame.matrix, gaze_position)
+ # Draw 2D scene on video frame
+ aoi2D_video_scene.draw(video_frame.matrix, video_gaze_pixel)
# Store 2D scene at this time in millisecond
- ts_aois_scenes[video_ts/1000] = aoi2D_scene
+ ts_aois_scenes[round(video_ts/1000)] = aoi2D_video_scene
+
+ # Draw gaze path
+ for name, aoi in aoi2D_video_scene.items():
+
+ if not aoi.looked(video_gaze_pixel):
+ continue
+
+ ref_aoi = name #'Scene_Plan'
+
+ look_at = aoi2D_video_scene[ref_aoi].look_at(video_gaze_pixel)
+
+ visu_gaze_pixel = aoi2D_visu_scene[ref_aoi].looked_pixel(look_at)
+
+ cv.circle(visu_frame, visu_gaze_pixel, 4, aoi_color[ref_aoi], -1)
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
@@ -158,6 +206,9 @@ def main():
# Display video
cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)
+ # Display visual scan frame
+ cv.imshow(f'Segment {tobii_segment.get_id()} visual scan', visu_frame)
+
# Write video
output_video.write(video_frame.matrix)
@@ -174,18 +225,19 @@ def main():
# End output video file
output_video.close()
-
- print(f'\nAOIs video saved into {video_filepath}')
+ print(f'\nVisual scan video saved into {vs_video_filepath}')
# Build visual scan based on a pointer position
visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes, ts_gaze_positions)
-
print(f'{len(visual_scan.steps())} visual scan steps found')
- # Export visual scan
- visual_scan.export_as_csv(visual_scan_filepath)
+ # Export visual scan data
+ visual_scan.export_as_csv(vs_data_filepath)
+ print(f'Visual scan data saved into {vs_data_filepath}')
- print(f'Visual scan saved into {visual_scan_filepath}')
+ # Export visual scan image
+ cv.imwrite(vs_visu_filepath, visu_frame)
+ print(f'Visual scan image saved into {vs_visu_filepath}')
if __name__ == '__main__':