From 5348cf5e1a20706e9bb51a4a5b05fed82173d289 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Mon, 28 Nov 2022 17:36:07 +0100
Subject: Adding a new utils script that uses an ArUco set to analyse a Tobii segment.

---
 src/argaze/utils/tobii_stream_aruco_set_export.py | 346 ++++++++++++++++++++++
 1 file changed, 346 insertions(+)
 create mode 100644 src/argaze/utils/tobii_stream_aruco_set_export.py

diff --git a/src/argaze/utils/tobii_stream_aruco_set_export.py b/src/argaze/utils/tobii_stream_aruco_set_export.py
new file mode 100644
index 0000000..d93658f
--- /dev/null
+++ b/src/argaze/utils/tobii_stream_aruco_set_export.py
@@ -0,0 +1,346 @@
+#!/usr/bin/env python
+
+import argparse
+import os, json
+import math
+import threading
+
+from argaze import DataStructures
+from argaze import GazeFeatures
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import *
+from argaze.AreaOfInterest import *
+from argaze.utils import MiscFeatures
+
+import cv2 as cv
+import numpy
+
+def make_rotation_matrix(x, y, z):
+
+    # Create rotation matrix around x axis
+    c = numpy.cos(numpy.deg2rad(x))
+    s = numpy.sin(numpy.deg2rad(x))
+    Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
+
+    # Create rotation matrix around y axis
+    c = numpy.cos(numpy.deg2rad(y))
+    s = numpy.sin(numpy.deg2rad(y))
+    Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
+
+    # Create rotation matrix around z axis
+    c = numpy.cos(numpy.deg2rad(z))
+    s = numpy.sin(numpy.deg2rad(z))
+    Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
+
+    # Return intrinsic rotation matrix
+    return Rx.dot(Ry.dot(Rz))
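+
+# Illustrative sanity check (a minimal sketch, not called by the pipeline
+# below): the intrinsic x -> y -> z composition above should map the unit z
+# vector onto the x axis when rotating 90° around y.
+def _check_make_rotation_matrix():
+
+    r = make_rotation_matrix(0., 90., 0.)
+    assert numpy.allclose(r.dot(numpy.array([0., 0., 1.])), numpy.array([1., 0., 0.]))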
+
+def main():
+    """
+    Track ArUco set into Tobii Glasses Pro 2 camera video stream.
+    """
+
+    # Manage arguments
+    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+    parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
+    parser.add_argument('-t', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in seconds)')
+    parser.add_argument('-cc', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
+    parser.add_argument('-tc', '--tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
+    parser.add_argument('-as', '--aruco_set', metavar='ARUCO_SET', type=str, help='json aruco set description filepath')
+    parser.add_argument('-ai', '--aoi_scene', metavar='AOI_SCENE', type=str, help='obj aoi 3D scene description filepath')
+    parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
+    parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
+    args = parser.parse_args()
+
+    if args.segment_path != None:
+
+        # Manage destination path
+        destination_path = '.'
+        if args.output != None:
+
+            if not os.path.exists(os.path.dirname(args.output)):
+
+                os.makedirs(os.path.dirname(args.output))
+                print(f'{os.path.dirname(args.output)} folder created')
+
+            destination_path = args.output
+
+        else:
+
+            destination_path = args.segment_path
+
+        # Export into a dedicated time range folder
+        if args.time_range[1] != None:
+            timerange_path = f'[{int(args.time_range[0])}s - {int(args.time_range[1])}s]'
+        else:
+            timerange_path = '[all]'
+
+        destination_path = f'{destination_path}/{timerange_path}'
+
+        if not os.path.exists(destination_path):
+
+            os.makedirs(destination_path)
+            print(f'{destination_path} folder created')
+
+        vs_data_filepath = f'{destination_path}/aoi.csv'
+        vs_video_filepath = f'{destination_path}/aoi.mp4'
+
+        # Load a tobii segment
+        tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1e6), int(args.time_range[1] * 1e6) if args.time_range[1] != None else None)
+
+        # Load a tobii segment video
+        tobii_segment_video = tobii_segment.load_video()
+        print(f'Video properties:\n\tduration: {tobii_segment_video.duration/1e6} s\n\twidth: {tobii_segment_video.width} px\n\theight: {tobii_segment_video.height} px')
+
+        # Load a tobii segment data
+        tobii_segment_data = tobii_segment.load_data()
+
+        print('Loaded data count:')
+        for name in tobii_segment_data.keys():
+            print(f'\t{name}: {len(tobii_segment_data[name])} data')
+
+        # Access to video timestamp data buffer
+        tobii_ts_vts = tobii_segment_data['VideoTimeStamp']
+
+        # Access to timestamped head rotations data buffer
+        tobii_ts_head_rotations = tobii_segment_data['Gyroscope']
+
+        # Prepare video export in the same format as the segment video
+        output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.stream)
+
+        # Create aruco camera
+        aruco_camera = ArUcoCamera.ArUcoCamera()
+
+        # Load calibration file
+        if args.camera_calibration != None:
+
+            aruco_camera.load_calibration_file(args.camera_calibration)
+
+        else:
+
+            raise UserWarning('.json camera calibration filepath required. Use -cc option.')
+
+        # Build aruco set from its description file
+        aruco_set = ArUcoSetFactory.ArUcoSetFactory.make(args.aruco_set)
+
+        print(f'\n{type(aruco_set)} cache: {aruco_set}')
+
+        # Create aruco tracker
+        aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_set.dictionary, aruco_set.marker_size, aruco_camera)
+
+        # Load specific configuration file
+        if args.tracker_configuration != None:
+
+            aruco_tracker.load_configuration_file(args.tracker_configuration)
+
+            print('\nArUcoTracker configuration for markers detection:')
+            aruco_tracker.print_configuration()
+
+        # Load AOI 3D scene centered onto aruco set
+        aoi3D_scene = AOI3DScene.AOI3DScene()
+        aoi3D_scene.load(args.aoi_scene)
+
+        print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene:')
+        for aoi in aoi3D_scene.keys():
+            print(f'\t{aoi}')
+
+        # Create timestamped buffer to store AOIs and primary time stamp offset
+        ts_offset_aois = DataStructures.TimeStampedBuffer()
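+
+        # Sketch of one buffer entry as the loop below fills it (aoi names come
+        # from the loaded .obj scene; corner arrays are rounded to integer pixels):
+        #
+        #   ts_offset_aois[video_ts] = {
+        #       'offset': nearest_vts.offset,
+        #       'warning': None or a string such as 'Head is moving',
+        #       '<aoi name>': projected 2D corner points
+        #   }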
+
+        # Video and data replay loop
+        try:
+
+            # Initialise progress bar
+            #MiscFeatures.printProgressBar(0, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+
+            head_moving = False
+            head_movement_last = 0.
+
+            # Iterate on video frames
+            for video_ts, video_frame in tobii_segment_video.frames():
+
+                video_ts_ms = video_ts / 1e3
+
+                # Copy video frame to edit visualisation on it without disrupting aruco tracking
+                visu_frame = video_frame.copy()
+
+                # Process video and data frame
+                try:
+
+                    # Get nearest video timestamp
+                    _, nearest_vts = tobii_ts_vts.get_last_before(video_ts)
+
+                    # Edit dictionary to store 2D aoi with primary timestamp offset and warning
+                    all_aoi2D = {
+                        'offset': nearest_vts.offset,
+                        'warning': None
+                    }
+
+                    # Get nearest head rotation before video timestamp and remove all head rotations before
+                    _, nearest_head_rotation = tobii_ts_head_rotations.pop_first_until(video_ts)
+
+                    # Calculate head movement considering only head yaw and pitch
+                    head_movement = numpy.array(nearest_head_rotation.value)
+                    head_movement_px = head_movement.astype(int)
+                    head_movement_norm = numpy.linalg.norm(head_movement[0:2])
+
+                    # Draw movement vector
+                    cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2)), (int(visu_frame.width/2) + head_movement_px[1], int(visu_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
+
+                    # Head movement detection hysteresis
+                    # TODO : pass the threshold value as argument
+                    if not head_moving and head_movement_norm > 50:
+                        head_moving = True
+
+                    if head_moving and head_movement_norm < 10:
+                        head_moving = False
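+
+                    # Note: the two thresholds above form a hysteresis band; between
+                    # them the previous state is kept, which avoids the flag
+                    # flickering around a single cut-off value:
+                    #   norm > 50 : head_moving becomes True
+                    #   norm < 10 : head_moving becomes False
+                    #   otherwise : state unchanged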
+
+                    # When head is moving, ArUco tracking could return bad pose estimation and so bad AOI scene projection
+                    if head_moving:
+
+                        all_aoi2D['warning'] = 'Head is moving'
+
+                        ts_offset_aois[video_ts] = all_aoi2D
+
+                        raise UserWarning(all_aoi2D['warning'])
+
+                    # Hide frame left and right borders before tracking to ignore markers outside focus area
+                    cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
+                    cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
+
+                    # Track markers with pose estimation and draw them
+                    aruco_tracker.track(video_frame.matrix)
+                    aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+                    # When no marker is detected, no AOI scene projection can be done
+                    if len(aruco_tracker.tracked_markers) == 0:
+
+                        all_aoi2D['warning'] = 'No marker detected'
+
+                        ts_offset_aois[video_ts] = all_aoi2D
+
+                        raise UserWarning(all_aoi2D['warning'])
+
+                    # Estimate set pose from tracked markers
+                    tvec, rvec, success, validity, invalid = aruco_set.estimate_pose(aruco_tracker.tracked_markers)
+
+                    # Print invalid distances or angles
+                    for key, value in invalid.items():
+                        print(f'{video_ts}: Invalid {key}: {value}.')
+
+                    # When pose estimation fails, ignore AOI scene projection
+                    if not success:
+
+                        # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
+                        # This hack isn't realistic but, as the gaze will mainly focus on centered AOI where the distortion is low, it is acceptable.
+                        aoi2D_video_scene = aoi3D_scene.project(tvec, rvec, aruco_camera.K)
+
+                        # Draw black AOI scene
+                        aoi2D_video_scene.draw(visu_frame.matrix, (0, 0), color=(0, 0, 0))
+
+                        all_aoi2D['warning'] = 'Pose estimation failed'
+
+                        ts_offset_aois[video_ts] = all_aoi2D
+
+                        raise UserWarning(all_aoi2D['warning'])
+
+                    # Consider pose estimation if it is validated by at least 1 face
+                    elif validity >= 1:
+
+                        # Transform scene into camera referential
+                        aoi3D_camera = aoi3D_scene.transform(tvec, rvec)
+
+                        # Get aoi inside vision cone field
+                        cone_vision_height_cm = 200 # cm
+                        cone_vision_radius_cm = numpy.tan(numpy.deg2rad(TobiiSpecifications.VISUAL_HFOV / 2)) * cone_vision_height_cm
+
+                        aoi3D_inside, aoi3D_outside = aoi3D_camera.vision_cone(cone_vision_radius_cm, cone_vision_height_cm)
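+
+                        # Geometry note: the cone apex sits at the camera origin and
+                        # opens along the optical axis, with
+                        #   radius / height = tan(VISUAL_HFOV / 2)
+                        # so AOI falling outside it are assumed to be out of the
+                        # wearer's visual field.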
+
+                        # Keep only aoi inside vision cone field, working on a copy
+                        # so the reference scene isn't shrunk frame after frame
+                        aoi3D_visible_scene = aoi3D_scene.copy(exclude=aoi3D_outside.keys())
+
+                        # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
+                        # This hack isn't realistic but, as the gaze will mainly focus on centered AOI where the distortion is low, it is acceptable.
+                        aoi2D_video_scene = aoi3D_visible_scene.project(tvec, rvec, aruco_camera.K)
+
+                        # Draw AOI scene
+                        aoi2D_video_scene.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
+
+                        # Store all 2D aoi
+                        for aoi_name in aoi2D_video_scene.keys():
+
+                            all_aoi2D[aoi_name] = numpy.rint(aoi2D_video_scene[aoi_name]).astype(int)
+
+                        ts_offset_aois[video_ts] = all_aoi2D
+
+                        # Warn user when the projected scene is empty
+                        if len(aoi2D_video_scene.keys()) == 0:
+
+                            all_aoi2D['warning'] = 'AOI projection is empty'
+
+                            raise UserWarning(all_aoi2D['warning'])
+
+                # Write warning
+                except UserWarning as w:
+
+                    cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
+                    cv.putText(visu_frame.matrix, str(w), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+                # Raised when timestamped buffer is empty
+                except KeyError:
+                    pass
+
+                # Draw focus area
+                cv.rectangle(visu_frame.matrix, (int(video_frame.width/6), 0), (int(visu_frame.width*(1-1/6)), int(visu_frame.height)), (255, 150, 150), 1)
+
+                # Draw center
+                cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
+                cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
+
+                # Write segment timing
+                cv.rectangle(visu_frame.matrix, (0, 0), (550, 50), (63, 63, 63), -1)
+                cv.putText(visu_frame.matrix, f'Segment time: {int(video_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+                if args.window:
+
+                    # Close window using 'Esc' key
+                    if cv.waitKey(1) == 27:
+                        break
+
+                    # Display visualisation
+                    cv.imshow(f'Segment {tobii_segment.id} ArUco AOI', visu_frame.matrix)
+
+                # Write video
+                output_video.write(visu_frame.matrix)
+
+                # Update Progress Bar
+                progress = video_ts_ms - int(args.time_range[0] * 1e3)
+                #MiscFeatures.printProgressBar(progress, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+
+        # Exit on 'ctrl+C' interruption
+        except KeyboardInterrupt:
+            pass
+
+        # Stop frame display
+        cv.destroyAllWindows()
+
+        # End output video file
+        output_video.close()
+
+        # Print aruco tracking metrics
+        print('\nAruco marker tracking metrics')
+        try_count, tracked_counts = aruco_tracker.track_metrics
+
+        for marker_id, tracked_count in tracked_counts.items():
+            print(f'Marker {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
+
+        # Export aruco aoi data
+        ts_offset_aois.as_dataframe().to_csv(vs_data_filepath, index=True)
+        print(f'Aruco AOI data saved into {vs_data_filepath}')
+
+        # Notify when the aruco aoi video has been exported
+        print(f'Aruco AOI video saved into {vs_video_filepath}')
+
+if __name__ == '__main__':
+
+    main()
\ No newline at end of file
--
cgit v1.1