author     Théo de la Hogue    2022-11-23 09:35:14 +0100
committer  Théo de la Hogue    2022-11-23 09:35:14 +0100
commit     74ff40539b242d43708151009d3018fff238b3e3 (patch)
tree       e7956defb93ae68688fe621e39a5212a600ca606
parent     9fcf0dff3950db157bcf9c1e0f68469682846924 (diff)
download   argaze-74ff40539b242d43708151009d3018fff238b3e3.zip
           argaze-74ff40539b242d43708151009d3018fff238b3e3.tar.gz
           argaze-74ff40539b242d43708151009d3018fff238b3e3.tar.bz2
           argaze-74ff40539b242d43708151009d3018fff238b3e3.tar.xz
Renaming arcube into aruco_cube
-rw-r--r--   src/argaze/utils/tobii_stream_aruco_cube_display.py   360
1 file changed, 360 insertions, 0 deletions
diff --git a/src/argaze/utils/tobii_stream_aruco_cube_display.py b/src/argaze/utils/tobii_stream_aruco_cube_display.py
new file mode 100644
index 0000000..e310308
--- /dev/null
+++ b/src/argaze/utils/tobii_stream_aruco_cube_display.py
@@ -0,0 +1,360 @@
+#!/usr/bin/env python
+
+import argparse
+import os, json
+import math
+import threading
+
+from argaze import DataStructures
+from argaze import GazeFeatures
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import *
+from argaze.AreaOfInterest import *
+from argaze.utils import MiscFeatures
+
+import cv2 as cv
+import numpy
+
+def make_rotation_matrix(x, y, z):
+
+ # Create rotation matrix around x axis
+ c = numpy.cos(numpy.deg2rad(x))
+ s = numpy.sin(numpy.deg2rad(x))
+ Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
+
+ # Create rotation matrix around y axis
+ c = numpy.cos(numpy.deg2rad(y))
+ s = numpy.sin(numpy.deg2rad(y))
+ Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
+
+ # Create rotation matrix around z axis
+ c = numpy.cos(numpy.deg2rad(z))
+ s = numpy.sin(numpy.deg2rad(z))
+ Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
+
+ # Return intrinsic rotation matrix
+ return Rx.dot(Ry.dot(Rz))
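+
+# Illustrative check (not executed): with y = z = 0 only the X rotation applies, so
+# make_rotation_matrix(90, 0, 0).dot(numpy.array([0., 1., 0.])) maps the +Y axis onto +Z (approximately [0., 0., 1.]).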
+
+def main():
+ """
+ Track ArUcoCube into Tobii Glasses Pro 2 camera video stream.
+ """
+
+ # Manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
+ parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
+ parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
+ parser.add_argument('-i', '--imu_calibration', metavar='IMU_CALIB', type=str, default=None, help='json imu calibration filepath')
+ parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco cube description filepath')
+ parser.add_argument('-s', '--aoi_scene', metavar='AOI_SCENE', type=str, help='obj aoi 3D scene description filepath')
+ parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
+ args = parser.parse_args()
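+
+    # Example invocation (hypothetical ip and filepaths, for illustration only):
+    #   python tobii_stream_aruco_cube_display.py -t 192.168.1.10 -c camera.json -ac cube.json -s scene.obj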
+
+ # Create tobii controller (with auto discovery network process if no ip argument is provided)
+ print('\nLooking for a Tobii Glasses Pro 2 device ...')
+
+ try:
+
+ tobii_controller = TobiiController.TobiiController(args.tobii_ip)
+ print(f'Tobii Glasses Pro 2 device found at {tobii_controller.address} address.')
+
+ except ConnectionError as e:
+
+ print(e)
+ exit()
+
+ # Setup camera at 25 fps to work on Full HD video stream
+ tobii_controller.set_scene_camera_freq_25()
+
+    # Print current configuration
+ print(f'Tobii Glasses Pro 2 configuration:')
+ for key, value in tobii_controller.get_configuration().items():
+ print(f'\t{key}: {value}')
+
+ # Enable tobii data stream
+ tobii_data_stream = tobii_controller.enable_data_stream()
+
+ # Enable tobii video stream
+ tobii_video_stream = tobii_controller.enable_video_stream()
+
+ # Load aruco cube description
+ aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube)
+ aruco_cube.print_cache()
+
+ # Load AOI 3D scene centered onto aruco cube
+ aoi3D_scene = AOI3DScene.AOI3DScene()
+ aoi3D_scene.load(args.aoi_scene)
+
+    print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene related to ArUcoCube:')
+ for aoi in aoi3D_scene.keys():
+ print(f'\t{aoi}')
+
+ # Create aruco camera
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+
+ # Load calibration file
+    if args.camera_calibration is not None:
+
+ aruco_camera.load_calibration_file(args.camera_calibration)
+
+ else:
+
+ raise UserWarning('.json camera calibration filepath required. Use -c option.')
+
+ # Create aruco tracker
+ aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_cube.dictionary, aruco_cube.marker_size, aruco_camera)
+
+ # Load specific configuration file
+    if args.aruco_tracker_configuration is not None:
+
+ aruco_tracker.load_configuration_file(args.aruco_tracker_configuration)
+
+    print(f'\nArUcoTracker configuration for marker detection:')
+ aruco_tracker.print_configuration()
+
+    # Create tobii imu handler to track head pose changes when the aruco cube pose can't be estimated
+    # The resulting head pose is therefore relative to the last successful pose estimation
+ tobii_imu = TobiiInertialMeasureUnit.TobiiInertialMeasureUnit()
+
+ # Load optional imu calibration file
+    if args.imu_calibration is not None:
+
+ tobii_imu.load_calibration_file(args.imu_calibration)
+
+ # Init tobii imu lock
+ tobii_imu_lock = threading.Lock()
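+
+    # The lock keeps the data stream callback (below) from updating the IMU state
+    # while the video loop resets its rotation and plumb from a fresh cube pose.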
+
+ # TEST : DIFF ACC
+ last_accl = numpy.zeros(3)
+
+    # Init data timestamp in milliseconds
+ data_ts_ms = 0
+
+ # Assess temporal performance
+ loop_chrono = MiscFeatures.TimeProbe()
+ loop_ps = 0
+
+ def data_stream_callback(data_ts, data_object, data_object_type):
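+        """Handle one incoming Tobii data sample: convert its timestamp to milliseconds and,
+        for gyroscope samples, update the IMU rotation estimate (accelerometer handling is currently disabled)."""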
+
+ nonlocal tobii_imu
+ nonlocal tobii_imu_lock
+ nonlocal data_ts_ms
+
+ #TEST
+ nonlocal last_accl
+
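+        # Assuming Tobii data timestamps are expressed in microseconds, dividing by 1e3 gives milliseconds.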
+ data_ts_ms = data_ts / 1e3
+
+ # Don't update imu when it is used
+ if tobii_imu_lock.locked():
+ return
+
+ # Lock tobii imu updates
+ tobii_imu_lock.acquire()
+
+ match data_object_type:
+
+ case 'Gyroscope':
+
+ data_object = tobii_imu.apply_gyroscope_offset(data_object)
+
+ tobii_imu.update_rotation(data_ts, data_object)
+
+
+ case 'Accelerometer':
+ pass
+ '''
+ print('raw accelerometer(m/s2)=', data_object.value)
+
+ # TEST :
+ diff_accl = last_accl - numpy.array(data_object.value)
+ last_accl = numpy.array(data_object.value)
+ print('\tdiff(cm/s2)=', 100 * numpy.linalg.norm(diff_accl))
+
+ # TEST : ignore acceleration double
+ if numpy.linalg.norm(diff_accl) > 0.:
+
+ data_object = tobii_imu.apply_accelerometer_coefficients(data_object)
+
+ print('corrected accelerometer(m/s2)=', data_object.value)
+
+ print('current plumb=', tobii_imu.get_plumb())
+
+ data_object = tobii_imu.apply_plumb(data_object)
+
+ print('corrected accelerometer - gravity(m/s2)=', data_object.value)
+ print('\tnorm(cm/s2)=', 100 * numpy.linalg.norm(data_object.value))
+
+ tobii_imu.update_translation(data_ts, data_object)
+ '''
+ # Unlock tobii imu updates
+ tobii_imu_lock.release()
+
+ tobii_data_stream.reading_callback = data_stream_callback
+
+ # Start streaming
+ tobii_controller.start_streaming()
+
+ # Live video stream capture loop
+ try:
+
+        # Assess loop performance
+        loop_chrono = MiscFeatures.TimeProbe()
+        loop_ps = 0
+
+ # Track aruco cube pose
+ aruco_cube_tvec = numpy.zeros(3)
+ aruco_cube_rvec = numpy.zeros(3)
+ aruco_cube_success = False
+ aruco_cube_validity = False
+ aruco_cube_ts_ms = 0
+
+ while tobii_video_stream.is_alive():
+
+ # Read video stream
+ video_ts, video_frame = tobii_video_stream.read()
+ video_ts_ms = video_ts / 1e3
+
+ # Copy video frame to edit visualisation on it without disrupting aruco tracking
+ visu_frame = video_frame.copy()
+
+ # Process video and data frame
+ try:
+
+ # Track markers with pose estimation
+ aruco_tracker.track(video_frame.matrix)
+ aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+ # Estimate cube pose from tracked markers
+ tvec, rvec, success, validity = aruco_cube.estimate_pose(aruco_tracker.tracked_markers)
+
+                # Cube pose estimation succeeded and is validated by at least one face
+ if success and validity >= 1:
+
+ # Lock tobii imu updates
+ tobii_imu_lock.acquire()
+
+ # Reset head rotation, translation and translation speed (cm/s)
+                    # Note: head translation speed is also estimated from the accelerometer sensor (see above)
+ tobii_imu.reset_rotation()
+ #tobii_imu.reset_translation(translation_speed = (tvec - aruco_cube_tvec) / (video_ts_ms - aruco_cube_ts_ms))
+
+ # Create a rotation matrix to transform cube rotation from camera referential to imu referential
+ F = make_rotation_matrix(*TobiiInertialMeasureUnit.CAMERA_TO_IMU_ROTATION_VECTOR)
+ R, _ = cv.Rodrigues(rvec)
+ rvec_flipped, _ = cv.Rodrigues(F.dot(R))
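+                    # cv.Rodrigues works in both directions: it expands rvec into the 3x3 matrix R above
+                    # and converts the composed matrix F.R back into a rotation vector here.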
+
+ # Update head plumb orientation with flipped cube orientation
+ tobii_imu.rotate_plumb(rvec_flipped)
+
+ # Unlock tobii imu updates
+ tobii_imu_lock.release()
+
+ # Store cube pose
+ aruco_cube_tvec = tvec
+ aruco_cube_rvec = rvec
+ aruco_cube_success = success
+ aruco_cube_validity = validity
+ aruco_cube_ts_ms = video_ts_ms
+
+                # Cube pose estimation failed
+ elif aruco_cube_success:
+
+ # Use tobii glasses inertial sensors to estimate cube pose from last estimated pose
+
+ # Translate cube into imu referential
+ imu_tvec = aruco_cube_tvec + numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
+
+                    # Translate cube according to head translation
+ imu_tvec = imu_tvec + tobii_imu.translation
+
+                    # Rotate cube around imu origin according to head rotation
+ imu_rvec = tobii_imu.rotation * numpy.array([-1., -1., 1.])
+ imu_R = make_rotation_matrix(*imu_rvec)
+ new_imu_tvec = imu_tvec.dot(imu_R)
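+                    # Note: imu_tvec.dot(imu_R) treats imu_tvec as a row vector, which is equivalent to
+                    # applying imu_R transposed (i.e. the inverse rotation) to a column vector.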
+
+ # Translate back cube into camera referential
+ new_tvec = new_imu_tvec - numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
+
+                    # Rotate cube orientation (supposing cube top is up)
+ imu_rvec = tobii_imu.rotation * numpy.array([1., -1., 1.])
+ imu_R = make_rotation_matrix(*imu_rvec)
+
+ C, _ = cv.Rodrigues(aruco_cube_rvec)
+ C = C.dot(imu_R)
+ new_rvec, _ = cv.Rodrigues(C)
+ #new_rvec = aruco_cube_rvec
+
+ # Set cube pose estimation
+ aruco_cube.translation = new_tvec
+ aruco_cube.rotation = new_rvec
+
+ else:
+
+ raise UserWarning('Cube pose estimation fails.')
+
+                # Project AOI 3D scene onto camera frame
+
+                # DON'T APPLY CAMERA DISTORTION: it would project points that are far from the frame into it.
+                # This hack isn't realistic but, as the gaze mainly focuses on centered AOI where distortion is low, it is acceptable.
+ aoi2D_scene = aoi3D_scene.project(aruco_cube.translation, aruco_cube.rotation, aruco_camera.K)
+
+ # Draw projected scene
+ #aoi2D_scene.draw(visu_frame.matrix)
+
+ # Draw markers pose estimation
+ #aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+
+                # Draw cube pose estimation (without camera distortion)
+ aruco_cube.draw(visu_frame.matrix, aruco_camera.K, aruco_camera.D, draw_places=True)
+
+ # Warn about cube pose validity
+ if not aruco_cube_validity:
+
+ raise UserWarning('Cube pose estimation is not validated.')
+
+ # Write warning
+ except UserWarning as w:
+
+ cv.rectangle(visu_frame.matrix, (0, 100), (600, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(w), (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ # Assess loop performance
+ lap_time, lap_counter, elapsed_time = loop_chrono.lap()
+
+            # Update loops per second every 10 loops
+ if lap_counter >= 10:
+
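+                # Assuming elapsed_time is reported in milliseconds, the 1e3 factor yields loops per second.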
+ loop_ps = 1e3 * lap_counter / elapsed_time
+ loop_chrono.restart()
+
+ # Draw center
+ cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
+ cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
+
+ # Write stream timing
+ cv.rectangle(visu_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
+ cv.putText(visu_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ cv.imshow(f'Stream ArUcoCube', visu_frame.matrix)
+
+ # Close window using 'Esc' key
+ if cv.waitKey(1) == 27:
+ break
+
+ # Exit on 'ctrl+C' interruption
+ except KeyboardInterrupt:
+ pass
+
+ # Stop frame display
+ cv.destroyAllWindows()
+
+ # Stop streaming
+ tobii_controller.stop_streaming()
+
+if __name__ == '__main__':
+
+    main()
\ No newline at end of file