author      Théo de la Hogue   2022-10-24 15:22:10 +0200
committer   Théo de la Hogue   2022-10-24 15:22:10 +0200
commit      520c6b064e646c11ca92bf836952c321757e4743 (patch)
tree        a0a3d92c71c8aae8bc3d08685b3d96bfda10cac1
parent      6def9a746ef0027d83a0986d8e36c08ea546139e (diff)
Working around inertial sensor pose tracking.
-rw-r--r--  src/argaze/utils/tobii_stream_arcube_display.py  174
1 file changed, 125 insertions(+), 49 deletions(-)
diff --git a/src/argaze/utils/tobii_stream_arcube_display.py b/src/argaze/utils/tobii_stream_arcube_display.py
index c77d6e9..09d35f5 100644
--- a/src/argaze/utils/tobii_stream_arcube_display.py
+++ b/src/argaze/utils/tobii_stream_arcube_display.py
@@ -12,7 +12,26 @@ from argaze.utils import MiscFeatures
import cv2 as cv
import numpy
-#import imufusion
+
+def make_rotation_matrix(x, y, z):
+
+ # Create rotation matrix around x axis
+ c = numpy.cos(numpy.deg2rad(x))
+ s = numpy.sin(numpy.deg2rad(x))
+ Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
+
+ # Create rotation matrix around y axis
+ c = numpy.cos(numpy.deg2rad(y))
+ s = numpy.sin(numpy.deg2rad(y))
+ Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
+
+ # Create rotation matrix around z axis
+ c = numpy.cos(numpy.deg2rad(z))
+ s = numpy.sin(numpy.deg2rad(z))
+ Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
+
+ # Return intrinsic rotation matrix
+ return Rx.dot(Ry.dot(Rz))
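For reference, a minimal sanity check of this helper (a sketch that assumes make_rotation_matrix is available in the same module, with illustrative values): a matrix built this way should be orthogonal with determinant 1, and a 90 degree rotation around z should map the x axis onto the y axis.

import numpy

R = make_rotation_matrix(0, 0, 90)

# Orthogonality and unit determinant characterize a proper rotation matrix
assert numpy.allclose(R.dot(R.T), numpy.eye(3))
assert numpy.isclose(numpy.linalg.det(R), 1.0)

# A 90 degree rotation around z maps the x axis onto the y axis
assert numpy.allclose(R.dot(numpy.array([1, 0, 0])), numpy.array([0, 1, 0]))
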
def main():
"""
@@ -24,14 +43,10 @@ def main():
parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
- parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco_cube description filepath')
+ parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco cube description filepath')
parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
- # Load ArUcoCube
- aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube)
- aruco_cube.print_cache()
-
# Create tobii controller (with auto discovery network process if no ip argument is provided)
print('\nLooking for a Tobii Glasses Pro 2 device ...')
@@ -51,6 +66,10 @@ def main():
# Enable tobii video stream
tobii_video_stream = tobii_controller.enable_video_stream()
+ # Load aruco cube description
+ aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube)
+ aruco_cube.print_cache()
+
# Create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
@@ -73,12 +92,15 @@ def main():
print(f'\nArUcoTracker configuration for markers detection:')
aruco_tracker.print_configuration()
-
- # Init head pose estimation
- accelerometer = numpy.array([])
- gyroscope = numpy.array([])
- head_translation = numpy.zeros(3)
- head_rotation = numpy.zeros(3)
+
+ # Init head movement estimation
+ last_accelerometer = numpy.array([])
+ last_accelerometer_ts_ms = 0
+ last_gyroscope = numpy.array([])
+ last_gyroscope_ts_ms = 0
+ gyroscope_drift = numpy.zeros(3)
+ head_translation_speed = numpy.zeros(3)
+ head_rotation_speed = numpy.zeros(3)
gravity = -9.81
# Init data timestamped in millisecond
@@ -90,10 +112,13 @@ def main():
def data_stream_callback(data_ts, data_object, data_object_type):
- nonlocal gyroscope
- nonlocal accelerometer
- nonlocal head_translation
- nonlocal head_rotation
+ nonlocal last_accelerometer
+ nonlocal last_accelerometer_ts_ms
+ nonlocal last_gyroscope
+ nonlocal last_gyroscope_ts_ms
+ nonlocal gyroscope_drift
+ nonlocal head_translation_speed
+ nonlocal head_rotation_speed
nonlocal data_ts_ms
data_ts_ms = data_ts / 1e3
@@ -101,27 +126,48 @@ def main():
match data_object_type:
case 'Accelerometer':
-
- accelerometer = numpy.array(data_object.value) - gravity
- return
+
+ # Convert m/s2 into cm/ms2
+ current_accelerometer = numpy.array(data_object.value) * 1e-4
+
+ # Init accelerometer time integration
+ if last_accelerometer_ts_ms == 0:
+
+ last_accelerometer = current_accelerometer
+ last_accelerometer_ts_ms = data_ts_ms
+
+ # Integrate accelerometer values over time to get head translation speed (cm/ms)
+ else:
+
+ head_translation_speed += (last_accelerometer + current_accelerometer) * (data_ts_ms - last_accelerometer_ts_ms) / 2
+
+ # print(head_translation_speed)
case 'Gyroscope':
- gyroscope = numpy.array(data_object.value)
- return
-
- # Update head pose estimation when all inertial sensors are refreshed
- if accelerometer.size == 3 and gyroscope.size == 3:
- '''
- print('accelerometer:', accelerometer)
- print('gyroscope:', gyroscope)
-
- print('head translation:', head_translation)
- print('head rotation:', head_rotation)
- '''
- # Forget old inertial sensors data
- accelerometer = numpy.array([])
- gyroscope = numpy.array([])
+ # Convert deg/s into deg/ms
+ current_gyroscope = numpy.array(data_object.value) * 1e-3
+
+ # Init gyroscope differentiation
+ if last_gyroscope_ts_ms == 0:
+
+ last_gyroscope = current_gyroscope
+ last_gyroscope_ts_ms = data_ts_ms
+ gyroscope_derivation = 0
+
+ # Differentiate gyroscope signal
+ else:
+ gyroscope_derivation = (current_gyroscope - last_gyroscope) / (data_ts_ms - last_gyroscope_ts_ms)
+
+ # Update gyroscope drift when the head is not moving
+ if numpy.linalg.norm(gyroscope_derivation) < 1e-7:
+
+ gyroscope_drift = current_gyroscope
+
+ # Drift compensation
+ head_rotation_speed = current_gyroscope - gyroscope_drift
+
+ #print(head_rotation_speed)
tobii_data_stream.reading_callback = data_stream_callback
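The two branches of data_stream_callback above boil down to a trapezoidal integration of the accelerometer and a derivative-based drift compensation of the gyroscope. Below is a standalone sketch of both steps; the helper names and the sample values are illustrative only, not part of the script.

import numpy

def integrate_translation_speed(speed, last_accel, current_accel, dt_ms):
    """Trapezoidal step: add the mean of two acceleration samples (cm/ms2) times dt (ms)."""
    return speed + (last_accel + current_accel) * dt_ms / 2

def compensate_gyroscope_drift(current_gyro, last_gyro, dt_ms, drift, threshold=1e-7):
    """When the gyroscope signal is almost constant, take the current reading as the bias to subtract."""
    derivative = (current_gyro - last_gyro) / dt_ms
    if numpy.linalg.norm(derivative) < threshold:
        drift = current_gyro
    return current_gyro - drift, drift

# 1 m/s2 along z (i.e. 1e-4 cm/ms2) applied for 10 ms gives a 0.001 cm/ms speed increment
speed = integrate_translation_speed(numpy.zeros(3), numpy.array([0., 0., 1e-4]), numpy.array([0., 0., 1e-4]), 10.)
print(speed)  # [0. 0. 0.001]

# A constant 0.01 deg/ms reading around x is learned as drift and cancelled out
rotation_speed, drift = compensate_gyroscope_drift(numpy.array([0.01, 0., 0.]), numpy.array([0.01, 0., 0.]), 10., numpy.zeros(3))
print(rotation_speed)  # [0. 0. 0.]
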
@@ -135,6 +181,13 @@ def main():
loop_chrono = MiscFeatures.TimeProbe()
fps = 0
+ # Track aruco cube pose
+ aruco_cube_tvec = numpy.zeros(3)
+ aruco_cube_rvec = numpy.zeros(3)
+ aruco_cube_success = False
+ aruco_cube_validity = False
+ aruco_cube_ts_ms = 0
+
while tobii_video_stream.is_alive():
# Read video stream
@@ -150,28 +203,51 @@ def main():
# Track markers with pose estimation
aruco_tracker.track(video_frame.matrix)
- # Warn user that no markers have been detected
- if aruco_tracker.get_tracked_markers_number() == 0:
-
- raise UserWarning('No marker detected')
-
# Estimate cube pose from tracked markers
tvec, rvec, success, validity = aruco_cube.estimate_pose(aruco_tracker.get_tracked_markers())
- # When cube pose estimation fails, use inertial sensors to offset cube pose
- if not success:
+ # Cube pose estimation succeeded and is validated by at least 2 faces: store cube pose
+ if success and validity >= 2:
+
+ aruco_cube_tvec = tvec
+ aruco_cube_rvec = rvec
+ aruco_cube_success = success
+ aruco_cube_validity = validity
+ aruco_cube_ts_ms = video_ts_ms
+
+ # Cube pose estimation failed: use Tobii glasses inertial sensors to estimate cube pose from the last estimated pose
+ elif aruco_cube_success:
+
+ # Translate cube according to head translation speed
+ #aruco_cube_tvec += head_translation_speed * (video_ts_ms - aruco_cube_ts_ms)
+
+ #print('after:')
+ #print(aruco_cube_tvec)
+
+ # Rotate cube around origin according to head rotation speed
+ R = make_rotation_matrix(* (head_rotation_speed * (video_ts_ms - aruco_cube_ts_ms)))
+ aruco_cube_tvec = aruco_cube_tvec.dot(R.T)
+ aruco_cube_ts_ms = video_ts_ms
+
+ #print('before rotation: ', aruco_cube_tvec)
+ #print(R)
+ #print('after rotation: ', aruco_cube_tvec)
+
+ # Set cube pose estimation
+ aruco_cube.set_pose(tvec = aruco_cube_tvec, rvec = aruco_cube_rvec)
+
+ else:
+
+ raise UserWarning('Cube pose estimation failed.')
- #aruco_cube.offset_pose(tvec=[0, 3, 0], rvec=[0, 0, 0])
- raise UserWarning('Cube pose estimation failed.')
-
# Draw markers pose estimation
#aruco_tracker.draw_tracked_markers(visu_frame.matrix)
- # Draw cube pose estimation
- aruco_cube.draw(visu_frame.matrix, aruco_camera.get_K(), aruco_camera.get_D())
+ # Draw cube pose estimation (without camera distortion)
+ aruco_cube.draw(visu_frame.matrix, aruco_camera.get_K(), (0, 0, 0, 0))
# Warn about cube pose validity
- if not validity:
+ if not aruco_cube_validity:
raise UserWarning('Cube pose estimation is not validated.')
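As a standalone illustration of the fallback above (a sketch that assumes the make_rotation_matrix helper from the top of the script is in scope; the numeric values are illustrative only): while the cube cannot be re-estimated from the markers, its last translation vector is rotated by the head rotation accumulated since the last valid estimation.

import numpy

head_rotation_speed = numpy.array([0., 0.009, 0.])  # deg/ms, a slow rotation around the y axis
last_tvec = numpy.array([0., 0., 50.])              # last cube position, 50 cm in front of the camera
elapsed_ms = 100.                                   # time since the last valid cube pose estimation

# Accumulated head rotation over the elapsed time, applied with the same row-vector convention as the script
R = make_rotation_matrix(*(head_rotation_speed * elapsed_ms))
new_tvec = last_tvec.dot(R.T)
print(new_tvec)  # roughly [0.785 0. 49.994]: the cube drifts along x as the head turns ~0.9 degree around y
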
@@ -189,7 +265,7 @@ def main():
loop_ps = 1e3 * lap_counter / elapsed_time
loop_chrono.restart()
-
+
# Draw center
cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
@@ -201,7 +277,7 @@ def main():
cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
cv.imshow(f'Stream ArUcoCube', visu_frame.matrix)
-
+
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
break