diff options
Diffstat (limited to 'src/argaze/utils/tobii_stream_arcube_display.py')
-rw-r--r-- | src/argaze/utils/tobii_stream_arcube_display.py | 390 |
1 file changed, 390 insertions, 0 deletions
diff --git a/src/argaze/utils/tobii_stream_arcube_display.py b/src/argaze/utils/tobii_stream_arcube_display.py new file mode 100644 index 0000000..d1dc734 --- /dev/null +++ b/src/argaze/utils/tobii_stream_arcube_display.py @@ -0,0 +1,390 @@ +#!/usr/bin/env python + +import argparse +import os, json +import math + +from argaze import DataStructures +from argaze import GazeFeatures +from argaze.TobiiGlassesPro2 import * +from argaze.ArUcoMarkers import * +from argaze.AreaOfInterest import * +from argaze.utils import MiscFeatures + +import cv2 as cv +import numpy + +def make_rotation_matrix(x, y, z): + + # Create rotation matrix around x axis + c = numpy.cos(numpy.deg2rad(x)) + s = numpy.sin(numpy.deg2rad(x)) + Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]]) + + # Create rotation matrix around y axis + c = numpy.cos(numpy.deg2rad(y)) + s = numpy.sin(numpy.deg2rad(y)) + Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]]) + + # Create rotation matrix around z axis + c = numpy.cos(numpy.deg2rad(z)) + s = numpy.sin(numpy.deg2rad(z)) + Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]]) + + # Return intrinsic rotation matrix + return Rx.dot(Ry.dot(Rz)) + +def main(): + """ + Track ArUcoCube into Tobii Glasses Pro 2 camera video stream. 
+ """ + + # Manage arguments + parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) + parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip') + parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath') + parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath') + parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco cube description filepath') + parser.add_argument('-s', '--aoi_scene', metavar='AOI_SCENE', type=str, help='obj aoi 3D scene description filepath') + parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction) + args = parser.parse_args() + + # Create tobii controller (with auto discovery network process if no ip argument is provided) + print('\nLooking for a Tobii Glasses Pro 2 device ...') + + try: + + tobii_controller = TobiiController.TobiiController(args.tobii_ip) + print(f'Tobii Glasses Pro 2 device found at {tobii_controller.address} address.') + + except ConnectionError as e: + + print(e) + exit() + + # Enable tobii data stream + tobii_data_stream = tobii_controller.enable_data_stream() + + # Enable tobii video stream + tobii_video_stream = tobii_controller.enable_video_stream() + + # Load aruco cube description + aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube) + aruco_cube.print_cache() + + # Load AOI 3D scene centered onto aruco cube + aoi3D_scene = AOI3DScene.AOI3DScene() + aoi3D_scene.load(args.aoi_scene) + + print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene related to ArCube:') + for aoi in aoi3D_scene.keys(): + print(f'\t{aoi}') + + # Create aruco camera + aruco_camera = ArUcoCamera.ArUcoCamera() + + # Load calibration file + if args.camera_calibration != 
None: + + aruco_camera.load_calibration_file(args.camera_calibration) + + else: + + raise UserWarning('.json camera calibration filepath required. Use -c option.') + + # Create aruco tracker + aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_cube.dictionary, aruco_cube.marker_size, aruco_camera) + + # Load specific configuration file + if args.aruco_tracker_configuration != None: + + aruco_tracker.load_configuration_file(args.aruco_tracker_configuration) + + print(f'\nArUcoTracker configuration for markers detection:') + aruco_tracker.print_configuration() + + # Init gyroscope processing to track head rotation changes when arcuco cube pose can't be estimated + # So, the resulting head rotation is relative to last pose estimation (it's not an absolute orientation) + last_gyroscope = numpy.zeros(3) + last_gyroscope_ts_ms = 0 + gyroscope_offset = numpy.zeros(3) + gyroscope_offset_smooth = 0.5 + head_rotation = numpy.zeros(3) + + + # Init accelerotmeter processing to track head translation changes when arcuco cube pose can't be estimated + # So, the resulting head translation is relative to last pose estimation (it's not an absolute position) + last_accelerometer = numpy.zeros(3) + last_accelerometer_ts_ms = 0 + accelerometer_offset = numpy.zeros(3) + accelerometer_offset_smooth = 0.2 + earth_gravity = numpy.array([0, -9.81, 0]) + head_plumb = numpy.zeros(3) + head_translation_speed = numpy.zeros(3) + last_head_translation_speed = numpy.zeros(3) + head_translation = numpy.zeros(3) + + # Init data timestamped in millisecond + data_ts_ms = 0 + + # Assess temporal performance + loop_chrono = MiscFeatures.TimeProbe() + loop_ps = 0 + + def data_stream_callback(data_ts, data_object, data_object_type): + + nonlocal data_ts_ms + + data_ts_ms = data_ts / 1e3 + + match data_object_type: + + case 'Gyroscope': + + nonlocal last_gyroscope + nonlocal last_gyroscope_ts_ms + nonlocal gyroscope_offset + nonlocal head_rotation + + # Convert deg/s into deg/ms + current_gyroscope = 
numpy.array(data_object.value) * 1e-3 + + # Init gyroscope derivation and integration + if last_gyroscope_ts_ms == 0: + + last_gyroscope = current_gyroscope + last_gyroscope_ts_ms = data_ts_ms + + # Calculate elapsed time + delta_time = data_ts_ms - last_gyroscope_ts_ms + + # Derivate gyroscope + gyroscope_derivation = (current_gyroscope - last_gyroscope) / delta_time if delta_time > 0 else numpy.zeros(3) + + # Update gyroscope offset smoothly and reset head rotation when gyroscope is stable + if numpy.linalg.norm(gyroscope_derivation) < 1e-5: + + gyroscope_offset = gyroscope_offset * gyroscope_offset_smooth + current_gyroscope * (1 - gyroscope_offset_smooth) + head_rotation = numpy.zeros(3) + + # Integrate gyroscope with offset compensation + head_rotation += (last_gyroscope - gyroscope_offset) * delta_time + + # Store current as last + last_gyroscope = current_gyroscope + last_gyroscope_ts_ms = data_ts_ms + + case 'Accelerometer': + + nonlocal last_accelerometer + nonlocal last_accelerometer_ts_ms + nonlocal accelerometer_offset + nonlocal earth_gravity + nonlocal head_plumb + nonlocal head_translation_speed + nonlocal last_head_translation_speed + nonlocal head_translation + + # Check head plumb as we need to know it to remove earth gravity + + #print('head_plumb=', head_plumb) + #print('numpy.linalg.norm(head_plumb)=', numpy.linalg.norm(head_plumb)) + #print('numpy.linalg.norm(earth_gravity)=', numpy.linalg.norm(earth_gravity)) + + if math.isclose(numpy.linalg.norm(head_plumb), numpy.linalg.norm(earth_gravity), abs_tol=1e-3): + + #print('raw accelerometer=',numpy.array(data_object.value)) + #print('head_plumb=', head_plumb) + + # Remove gravity along head plumb to accelerometer + current_accelerometer = numpy.array(data_object.value) - head_plumb + + # Convert m/s2 into cm/ms2 + current_accelerometer = numpy.array(data_object.value) * 1e-4 + + # Init accelerometer integration + if last_accelerometer_ts_ms == 0: + + last_accelerometer = current_accelerometer + 
last_accelerometer_ts_ms = data_ts_ms + + # Calculate elapsed time + delta_time = data_ts_ms - last_accelerometer_ts_ms + + # Update accelerometer offset smoothly and reset head translation speed when head translation speed is close to zero + # Note : head translation speed is simultaneously estimated thanks to cube pose (see below) + print('numpy.linalg.norm(head_translation_speed)=',numpy.linalg.norm(head_translation_speed)) + + if numpy.linalg.norm(head_translation_speed) < 1e-3: + + accelerometer_offset = accelerometer_offset * accelerometer_offset_smooth + current_accelerometer * (1 - accelerometer_offset_smooth) + print('> accelerometer_offset=',accelerometer_offset) + head_translation_speed = numpy.zeros(3) + + # Integrate accelerometer with offset compensation + head_translation_speed += (last_accelerometer - accelerometer_offset) * delta_time + head_translation += last_head_translation_speed * delta_time + + print('current_accelerometer(cm/ms2)=',current_accelerometer) + print('head_translation_speed(cm/s)=',head_translation_speed) + print('head_translation(cm)=',head_translation) + + # Store current as last + last_accelerometer = current_accelerometer + last_accelerometer_ts_ms = data_ts_ms + last_head_translation_speed = head_translation_speed + + #else: + + # print('no valid head plumb') + + tobii_data_stream.reading_callback = data_stream_callback + + # Start streaming + tobii_controller.start_streaming() + + # Live video stream capture loop + try: + + # Assess loop performance + loop_chrono = MiscFeatures.TimeProbe() + fps = 0 + + # Track aruco cube pose + aruco_cube_tvec = numpy.zeros(3) + aruco_cube_rvec = numpy.zeros(3) + aruco_cube_success = False + aruco_cube_validity = False + aruco_cube_ts_ms = 0 + + while tobii_video_stream.is_alive(): + + # Read video stream + video_ts, video_frame = tobii_video_stream.read() + video_ts_ms = video_ts / 1e3 + + # Copy video frame to edit visualisation on it without disrupting aruco tracking + visu_frame = 
video_frame.copy() + + # Process video and data frame + try: + + # Track markers with pose estimation + aruco_tracker.track(video_frame.matrix) + + # Estimate cube pose from tracked markers + tvec, rvec, success, validity = aruco_cube.estimate_pose(aruco_tracker.get_tracked_markers()) + + # Cube pose estimation succeed and is validated by 2 faces at least + if success and validity >= 2: + + # Reset head rotation, translation and translation speed (cm/s) + # Note : head translation speed is simultaneously estimated thanks to accelerometer sensor (see upward) + head_rotation = numpy.zeros(3) + head_translation = numpy.zeros(3) + head_translation_speed = (tvec - aruco_cube_tvec) / (video_ts_ms - aruco_cube_ts_ms) + + # Rotate head plumb orientation + C, _ = cv.Rodrigues(rvec) + head_plumb = -C.dot(earth_gravity) + + # Store cube pose + aruco_cube_tvec = tvec + aruco_cube_rvec = rvec + aruco_cube_success = success + aruco_cube_validity = validity + aruco_cube_ts_ms = video_ts_ms + + # Cube pose estimation fails + elif aruco_cube_success: + + # Use tobii glasses inertial sensors to estimate cube pose from last estimated pose + + # Translate cube according head translation + new_tvec = aruco_cube_tvec + head_translation + + #if numpy.linalg.norm(head_rotation) > 0: + # print(f'X={head_rotation[0]:3f}, Y={head_rotation[1]:3f}, Z={head_rotation[2]:3f}') + + # Rotate cube around origin according head rotation + R = make_rotation_matrix(*head_rotation) + + # rotate tvec ??? 
+ #new_tvec = aruco_cube_tvec.dot(R.T) + + # rotate rvec + C, _ = cv.Rodrigues(aruco_cube_rvec) + C = C.dot(R) + new_rvec, _ = cv.Rodrigues(C) + + # Set cube pose estimation + aruco_cube.set_pose(tvec = new_tvec, rvec = new_rvec) + + else: + + raise UserWarning('Cube pose estimation fails.') + + # Project AOI 3 scene onto camera frame + + # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it + # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable. + aoi2D_scene = aoi3D_scene.project(aruco_cube_tvec, aruco_cube_rvec, aruco_camera.get_K()) + + # Draw projected scene + aoi2D_scene.draw(visu_frame.matrix) + + # Draw markers pose estimation + aruco_tracker.draw_tracked_markers(visu_frame.matrix) + + # Draw cube pose estimation (without camera distorsion) + aruco_cube.draw(visu_frame.matrix, aruco_camera.get_K(), (0, 0, 0, 0)) + + # Warn about cube pose validity + if not aruco_cube_validity: + + raise UserWarning('Cube pose estimation is not validated.') + + # Write warning + except UserWarning as w: + + cv.rectangle(visu_frame.matrix, (0, 100), (600, 150), (127, 127, 127), -1) + cv.putText(visu_frame.matrix, str(w), (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA) + + # Assess loop performance + lap_time, lap_counter, elapsed_time = loop_chrono.lap() + + # Update fps each 10 loops + if lap_counter >= 10: + + loop_ps = 1e3 * lap_counter / elapsed_time + loop_chrono.restart() + + # Draw center + cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1) + cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1) + + # Write stream timing + cv.rectangle(visu_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1) + cv.putText(visu_frame.matrix, 
f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA) + cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA) + cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA) + + cv.imshow(f'Stream ArUcoCube', visu_frame.matrix) + + # Close window using 'Esc' key + if cv.waitKey(1) == 27: + break + + # Exit on 'ctrl+C' interruption + except KeyboardInterrupt: + pass + + # Stop frame display + cv.destroyAllWindows() + + # Stop streaming + tobii_controller.stop_streaming() + +if __name__ == '__main__': + + main()
\ No newline at end of file |