Diffstat (limited to 'src/argaze/utils/tobii_segment_arscene_export.py')
-rw-r--r--  src/argaze/utils/tobii_segment_arscene_export.py  245
1 file changed, 245 insertions(+), 0 deletions(-)
diff --git a/src/argaze/utils/tobii_segment_arscene_export.py b/src/argaze/utils/tobii_segment_arscene_export.py
new file mode 100644
index 0000000..7bbef59
--- /dev/null
+++ b/src/argaze/utils/tobii_segment_arscene_export.py
@@ -0,0 +1,245 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze import *
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import *
+from argaze.AreaOfInterest import *
+from argaze.utils import MiscFeatures
+
+import cv2 as cv
+import numpy
+
+def make_rotation_matrix(x, y, z):
+
+ # Create rotation matrix around x axis
+ c = numpy.cos(numpy.deg2rad(x))
+ s = numpy.sin(numpy.deg2rad(x))
+ Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
+
+ # Create rotation matrix around y axis
+ c = numpy.cos(numpy.deg2rad(y))
+ s = numpy.sin(numpy.deg2rad(y))
+ Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
+
+ # Create rotation matrix around z axis
+ c = numpy.cos(numpy.deg2rad(z))
+ s = numpy.sin(numpy.deg2rad(z))
+ Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
+
+ # Return intrinsic rotation matrix
+ return Rx.dot(Ry.dot(Rz))
+
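+# For instance, make_rotation_matrix(90, 0, 0) reduces to a single rotation around the x axis
+# and maps the y axis onto the z axis:
+#   make_rotation_matrix(90, 0, 0).dot([0, 1, 0])  # ~> [0., 0., 1.] (up to floating point error)
+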
+def main():
+ """
+    Track ArScene into Tobii Glasses Pro 2 camera video segment and export projected AOI into a .csv file and a visualisation video.
+ """
+
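+    # Typical invocation (paths below are illustrative):
+    #   python tobii_segment_arscene_export.py -s SEGMENT_PATH -p argaze_project.json -t 0 30
+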
+ # Manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
+ parser.add_argument('-t', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)')
+ parser.add_argument('-p', '--project_path', metavar='ARGAZE_PROJECT', type=str, default=None, help='json argaze project filepath')
+ parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
+    parser.add_argument('-w', '--window', default=True, help='enable window display', action=argparse.BooleanOptionalAction)
+ args = parser.parse_args()
+
+    if args.segment_path is not None:
+
+ # Manage destination path
+ destination_path = '.'
+        if args.output is not None:
+
+            # Create the parent folder of the output path if needed
+            output_dirname = os.path.dirname(args.output)
+            if output_dirname and not os.path.exists(output_dirname):
+
+                os.makedirs(output_dirname)
+                print(f'{output_dirname} folder created')
+
+            destination_path = args.output
+
+ else:
+
+ destination_path = args.segment_path
+
+ # Export into a dedicated time range folder
+        if args.time_range[1] is not None:
+            timerange_path = f'[{int(args.time_range[0])}s - {int(args.time_range[1])}s]'
+        else:
+            timerange_path = '[all]'
+
+ destination_path = f'{destination_path}/{timerange_path}'
+
+ if not os.path.exists(destination_path):
+
+ os.makedirs(destination_path)
+ print(f'{destination_path} folder created')
+
+ vs_data_filepath = f'{destination_path}/aoi.csv'
+ vs_video_filepath = f'{destination_path}/aoi.mp4'
+
+        # Load a tobii segment (start and end times are converted from seconds to micro seconds)
+        tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1e6), int(args.time_range[1] * 1e6) if args.time_range[1] is not None else None)
+
+ # Load a tobii segment video
+ tobii_segment_video = tobii_segment.load_video()
+ print(f'\nVideo properties:\n\tduration: {tobii_segment_video.duration/1e6} s\n\twidth: {tobii_segment_video.width} px\n\theight: {tobii_segment_video.height} px')
+
+ # Load a tobii segment data
+ tobii_segment_data = tobii_segment.load_data()
+
+ print(f'\nLoaded data count:')
+ for name in tobii_segment_data.keys():
+ print(f'\t{name}: {len(tobii_segment_data[name])} data')
+
+ # Access to video timestamp data buffer
+ tobii_ts_vts = tobii_segment_data['VideoTimeStamp']
+
+ # Access to timestamped gaze position data buffer
+ tobii_ts_gaze_positions = tobii_segment_data['GazePosition']
+
+ # Prepare video exportation at the same format than segment video
+ output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.stream)
+
+ # Load ar scene
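+    # The ArGaze project file is expected to describe, among other things, the ArUco markers to track and the AOI to project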
+ ar_scene = ArScene.ArScene.from_json(args.project_path)
+
+ print(ar_scene)
+
+ # Create timestamped buffer to store AOIs and primary time stamp offset
+ ts_offset_aois = DataStructures.TimeStampedBuffer()
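+    # Each entry will map a video timestamp to the projected AOI vertices and the nearest video timestamp offset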
+
+ # Video and data replay loop
+ try:
+
+ # Initialise progress bar
+ #MiscFeatures.printProgressBar(0, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+
+ # Iterate on video frames
+ for video_ts, video_frame in tobii_segment_video.frames():
+
+ video_ts_ms = video_ts / 1e3
+
+ # Copy video frame to edit visualisation on it without disrupting aruco tracking
+ visu_frame = video_frame.copy()
+
+ # Prepare to store projected AOI
+ projected_aois = {}
+
+ # Process video and data frame
+ try:
+
+ # Get nearest video timestamp
+ _, nearest_vts = tobii_ts_vts.get_last_before(video_ts)
+
+ projected_aois['offset'] = nearest_vts.offset
+
+ # Hide frame left and right borders before tracking to ignore markers outside focus area
+ cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
+ cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
+
+ # Project scene into frame
+ scene_projection, unconsistencies = ar_scene.project(video_frame.matrix, consistent_markers_number=1, visual_hfov=TobiiSpecifications.VISUAL_HFOV)
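+                    # scene_projection is expected to map each AOI name to its 2D vertices in frame coordinates,
+                    # while unconsistencies is expected to report the distances or angles rejected by marker consistency checking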
+
+                    # DEBUG: print inconsistent distances or angles
+                    for key, value in unconsistencies.items():
+                        print(f'{video_ts}: Inconsistent {key}: {value}')
+
+ # Store all projected aoi
+ for aoi_name in scene_projection.keys():
+
+ projected_aois[aoi_name] = numpy.rint(scene_projection[aoi_name]).astype(int)
+
+ # Draw tracked markers
+ ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+ # Draw scene projection
+ scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
+
+                # Catch warnings raised by the project method
+                except UserWarning as w:
+
+                    projected_aois['comment'] = str(w)
+
+                    # Draw tracked markers
+                    ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+                    if str(w) == 'Pose estimation fails':
+
+                        # Draw AOI scene in black
+                        scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 0, 0))
+
+ cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(w), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ # Raised when timestamped buffer is empty
+                except KeyError:
+
+                    e = 'VideoTimeStamp missing'
+
+ projected_aois['offset'] = 0
+ projected_aois['comment'] = e
+
+ cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 1, cv.LINE_AA)
+
+ # Store projected AOI
+ ts_offset_aois[video_ts] = projected_aois
+
+ # Draw focus area
+                cv.rectangle(visu_frame.matrix, (int(visu_frame.width/6), 0), (int(visu_frame.width*(1 - 1/6)), int(visu_frame.height)), (255, 150, 150), 1)
+
+ # Draw center
+ cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
+ cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
+
+ # Write segment timing
+ cv.rectangle(visu_frame.matrix, (0, 0), (550, 50), (63, 63, 63), -1)
+ cv.putText(visu_frame.matrix, f'Segment time: {int(video_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ if args.window:
+
+ # Close window using 'Esc' key
+ if cv.waitKey(1) == 27:
+ break
+
+ # Display visualisation
+ cv.imshow(f'Segment {tobii_segment.id} ArUco AOI', visu_frame.matrix)
+
+ # Write video
+ output_video.write(visu_frame.matrix)
+
+ # Update Progress Bar
+ progress = video_ts_ms - int(args.time_range[0] * 1e3)
+ #MiscFeatures.printProgressBar(progress, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+
+ # Exit on 'ctrl+C' interruption
+ except KeyboardInterrupt:
+ pass
+
+ # Stop frame display
+ cv.destroyAllWindows()
+
+ # End output video file
+ output_video.close()
+
+ # Print aruco tracking metrics
+ print('\nAruco marker tracking metrics')
+ try_count, tracked_counts = ar_scene.aruco_tracker.track_metrics
+
+ for marker_id, tracked_count in tracked_counts.items():
+            print(f'Marker {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
+
+ # Export aruco aoi data
+ ts_offset_aois.as_dataframe().to_csv(vs_data_filepath, index=True)
+ print(f'Aruco AOI data saved into {vs_data_filepath}')
+
+ # Notify when the aruco aoi video has been exported
+ print(f'Aruco AOI video saved into {vs_video_filepath}')
+
+if __name__ == '__main__':
+
+    main()
\ No newline at end of file