author	Théo de la Hogue	2022-11-23 09:35:40 +0100
committer	Théo de la Hogue	2022-11-23 09:35:40 +0100
commit	214a057db794ced4fed7722912a571297d2a64de (patch)
tree	a0b407607deebd6fb7bcf840082746b6ad768e5b
parent	74ff40539b242d43708151009d3018fff238b3e3 (diff)
Adding a new utils script to show how to use the ArUcoPlan class.
-rw-r--r--	src/argaze/utils/tobii_stream_aruco_plan_display.py (renamed from src/argaze/utils/tobii_stream_arcube_display.py)	101
1 file changed, 51 insertions, 50 deletions
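The commit message describes a utils script that shows how to use the ArUcoPlan class. A minimal sketch of that workflow, kept to the calls visible in the diff below, might look like this (the import path, the ArUcoCamera setup and the file paths are assumptions, not taken from this commit):

import cv2 as cv
from argaze.ArUcoMarkers import ArUcoCamera, ArUcoPlan, ArUcoTracker

# Load the aruco plan description from its json file (hypothetical path)
aruco_plan = ArUcoPlan.ArUcoPlan('aruco_plan.json')
aruco_plan.print_cache()

# Build a tracker from the plan dictionary and marker size
# (camera calibration loading is omitted; the constructor call is an assumption)
aruco_camera = ArUcoCamera.ArUcoCamera()
aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_plan.dictionary, aruco_plan.marker_size, aruco_camera)

# Track markers in a frame and estimate the plan pose from them
frame = cv.imread('frame.png')  # hypothetical input image
aruco_tracker.track(frame)
tvec, rvec, success, validity = aruco_plan.estimate_pose(aruco_tracker.tracked_markers)

# Keep the pose when the estimation succeeds with validity >= 1, then draw it
if success and validity >= 1:
    aruco_plan.translation = tvec
    aruco_plan.rotation = rvec
    aruco_plan.draw(frame, aruco_camera.K, ArUcoCamera.D0, draw_places=True)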
diff --git a/src/argaze/utils/tobii_stream_arcube_display.py b/src/argaze/utils/tobii_stream_aruco_plan_display.py
index 9f4a196..16fc8ef 100644
--- a/src/argaze/utils/tobii_stream_arcube_display.py
+++ b/src/argaze/utils/tobii_stream_aruco_plan_display.py
@@ -37,7 +37,7 @@ def make_rotation_matrix(x, y, z):
def main():
"""
- Track ArUcoCube into Tobii Glasses Pro 2 camera video stream.
+	Track ArUcoPlan in the Tobii Glasses Pro 2 camera video stream.
"""
# Manage arguments
@@ -46,7 +46,7 @@ def main():
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
parser.add_argument('-i', '--imu_calibration', metavar='IMU_CALIB', type=str, default=None, help='json imu calibration filepath')
- parser.add_argument('-ac', '--aruco_cube', metavar='ARUCO_CUBE', type=str, help='json aruco cube description filepath')
+ parser.add_argument('-ac', '--aruco_plan', metavar='ARUCO_PLAN', type=str, help='json aruco plan description filepath')
parser.add_argument('-s', '--aoi_scene', metavar='AOI_SCENE', type=str, help='obj aoi 3D scene description filepath')
parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
@@ -78,15 +78,15 @@ def main():
# Enable tobii video stream
tobii_video_stream = tobii_controller.enable_video_stream()
- # Load aruco cube description
- aruco_cube = ArUcoCube.ArUcoCube(args.aruco_cube)
- aruco_cube.print_cache()
+ # Load aruco plan description
+ aruco_plan = ArUcoPlan.ArUcoPlan(args.aruco_plan)
+ aruco_plan.print_cache()
- # Load AOI 3D scene centered onto aruco cube
+	# Load AOI 3D scene centered on the aruco plan
aoi3D_scene = AOI3DScene.AOI3DScene()
aoi3D_scene.load(args.aoi_scene)
- print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene related to ArCube:')
+	print(f'\nAOI in {os.path.basename(args.aoi_scene)} scene related to ArUcoPlan:')
for aoi in aoi3D_scene.keys():
print(f'\t{aoi}')
@@ -103,7 +103,7 @@ def main():
raise UserWarning('.json camera calibration filepath required. Use -c option.')
# Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_cube.dictionary, aruco_cube.marker_size, aruco_camera)
+ aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_plan.dictionary, aruco_plan.marker_size, aruco_camera)
# Load specific configuration file
if args.aruco_tracker_configuration != None:
@@ -113,7 +113,7 @@ def main():
print(f'\nArUcoTracker configuration for markers detection:')
aruco_tracker.print_configuration()
- # Create tobii imu handler to track head pose changes when arcuco cube pose can't be estimated
+	# Create tobii imu handler to track head pose changes when aruco plan pose can't be estimated
	# So, the resulting head pose is relative to the last pose estimation
tobii_imu = TobiiInertialMeasureUnit.TobiiInertialMeasureUnit()
@@ -203,12 +203,12 @@ def main():
loop_chrono = MiscFeatures.TimeProbe()
fps = 0
- # Track aruco cube pose
- aruco_cube_tvec = numpy.zeros(3)
- aruco_cube_rvec = numpy.zeros(3)
- aruco_cube_success = False
- aruco_cube_validity = False
- aruco_cube_ts_ms = 0
+ # Track aruco plan pose
+ aruco_plan_tvec = numpy.zeros(3)
+ aruco_plan_rvec = numpy.zeros(3)
+ aruco_plan_success = False
+ aruco_plan_validity = False
+ aruco_plan_ts_ms = 0
while tobii_video_stream.is_alive():
@@ -224,12 +224,13 @@ def main():
# Track markers with pose estimation
aruco_tracker.track(video_frame.matrix)
+ #aruco_tracker.draw_tracked_markers(visu_frame.matrix)
- # Estimate cube pose from tracked markers
- tvec, rvec, success, validity = aruco_cube.estimate_pose(aruco_tracker.tracked_markers)
+ # Estimate plan pose from tracked markers
+ tvec, rvec, success, validity = aruco_plan.estimate_pose(aruco_tracker.tracked_markers)
- # Cube pose estimation succeed and is validated by 2 faces at least
- if success and validity >= 2:
+			# Plan pose estimation succeeds and is validated by at least 1 face
+ if success and validity >= 1:
# Lock tobii imu updates
tobii_imu_lock.acquire()
@@ -237,67 +238,67 @@ def main():
# Reset head rotation, translation and translation speed (cm/s)
# Note: head translation speed is also estimated thanks to the accelerometer sensor (see above)
tobii_imu.reset_rotation()
- #tobii_imu.reset_translation(translation_speed = (tvec - aruco_cube_tvec) / (video_ts_ms - aruco_cube_ts_ms))
+ #tobii_imu.reset_translation(translation_speed = (tvec - aruco_plan_tvec) / (video_ts_ms - aruco_plan_ts_ms))
- # Create a rotation matrix to transform cube rotation from camera referential to imu referential
+ # Create a rotation matrix to transform plan rotation from camera referential to imu referential
F = make_rotation_matrix(*TobiiInertialMeasureUnit.CAMERA_TO_IMU_ROTATION_VECTOR)
R, _ = cv.Rodrigues(rvec)
rvec_flipped, _ = cv.Rodrigues(F.dot(R))
- # Update head plumb orientation with flipped cube orientation
+ # Update head plumb orientation with flipped plan orientation
tobii_imu.rotate_plumb(rvec_flipped)
# Unlock tobii imu updates
tobii_imu_lock.release()
- # Store cube pose
- aruco_cube_tvec = tvec
- aruco_cube_rvec = rvec
- aruco_cube_success = success
- aruco_cube_validity = validity
- aruco_cube_ts_ms = video_ts_ms
+ # Store plan pose
+ aruco_plan_tvec = tvec
+ aruco_plan_rvec = rvec
+ aruco_plan_success = success
+ aruco_plan_validity = validity
+ aruco_plan_ts_ms = video_ts_ms
- # Cube pose estimation fails
- elif aruco_cube_success:
+ # Plan pose estimation fails
+ elif aruco_plan_success:
- # Use tobii glasses inertial sensors to estimate cube pose from last estimated pose
+ # Use tobii glasses inertial sensors to estimate plan pose from last estimated pose
- # Translate cube into imu referential
- imu_tvec = aruco_cube_tvec + numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
+ # Translate plan into imu referential
+ imu_tvec = aruco_plan_tvec + numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
- # Translate cube according head translation
+			# Translate plan according to head translation
imu_tvec = imu_tvec + tobii_imu.translation
- # Rotate cube around imu origin according head rotation
+ # Rotate plan around imu origin according head rotation
imu_rvec = tobii_imu.rotation * numpy.array([-1., -1., 1.])
imu_R = make_rotation_matrix(*imu_rvec)
new_imu_tvec = imu_tvec.dot(imu_R)
- # Translate back cube into camera referential
+ # Translate back plan into camera referential
new_tvec = new_imu_tvec - numpy.array(TobiiInertialMeasureUnit.CAMERA_TO_IMU_TRANSLATION_VECTOR)
- # Rotate cube orientation (supposing cube top is up in )
+ # Rotate plan orientation (supposing plan top is up in )
imu_rvec = tobii_imu.rotation * numpy.array([1., -1., 1.])
imu_R = make_rotation_matrix(*imu_rvec)
- C, _ = cv.Rodrigues(aruco_cube_rvec)
+ C, _ = cv.Rodrigues(aruco_plan_rvec)
C = C.dot(imu_R)
new_rvec, _ = cv.Rodrigues(C)
- #new_rvec = aruco_cube_rvec
+ #new_rvec = aruco_plan_rvec
- # Set cube pose estimation
- aruco_cube.translation = new_tvec
- aruco_cube.rotation = new_rvec
+ # Set plan pose estimation
+ aruco_plan.translation = new_tvec
+ aruco_plan.rotation = new_rvec
else:
- raise UserWarning('Cube pose estimation fails.')
+ raise UserWarning('Plan pose estimation fails.')
# Project AOI 3D scene onto camera frame
# DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
# This hack isn't realistic, but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_scene = aoi3D_scene.project(aruco_cube.translation, aruco_cube.rotation, aruco_camera.K)
+ aoi2D_scene = aoi3D_scene.project(aruco_plan.translation, aruco_plan.rotation, aruco_camera.K)
# Draw projected scene
#aoi2D_scene.draw(visu_frame.matrix)
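The projection above deliberately uses only the camera matrix and no distortion coefficients, since points far from the frame would otherwise be warped into it while gaze mostly stays on centered AOI where distortion is low. A standalone sketch of that idea with plain OpenCV, using hypothetical AOI vertices, pose and calibration values instead of the AOI3DScene API:

import numpy
import cv2 as cv

# Hypothetical AOI vertices (N x 3, in the plan referential) and camera matrix
aoi_vertices = numpy.array([[0., 0., 0.], [10., 0., 0.], [10., 10., 0.], [0., 10., 0.]])
K = numpy.array([[1000., 0., 960.], [0., 1000., 540.], [0., 0., 1.]])

# Example plan pose placing the AOI in front of the camera
tvec = numpy.array([0., 0., 50.])
rvec = numpy.zeros(3)

# Project with zero distortion coefficients instead of the calibrated ones
aoi_2d, _ = cv.projectPoints(aoi_vertices, rvec, tvec, K, numpy.zeros((5, 1)))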
@@ -305,13 +306,13 @@ def main():
# Draw markers pose estimation
#aruco_tracker.draw_tracked_markers(visu_frame.matrix)
- # Draw cube pose estimation (without camera distorsion)
- aruco_cube.draw(visu_frame.matrix, aruco_camera.K, aruco_camera.D, draw_faces=False)
+			# Draw plan pose estimation (without camera distortion)
+ aruco_plan.draw(visu_frame.matrix, aruco_camera.K, ArUcoCamera.D0, draw_places=True)
- # Warn about cube pose validity
- if not aruco_cube_validity:
+ # Warn about plan pose validity
+ if not aruco_plan_validity:
- raise UserWarning('Cube pose estimation is not validated.')
+ raise UserWarning('Plan pose estimation is not validated.')
# Write warning
except UserWarning as w:
@@ -338,7 +339,7 @@ def main():
cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.imshow(f'Stream ArUcoCube', visu_frame.matrix)
+ cv.imshow(f'Stream ArUcoPlan', visu_frame.matrix)
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
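For reference, the camera-to-imu orientation handoff used above to bring the plan rotation into the imu referential (F.dot(R) followed by cv.Rodrigues) is a composition of two rotations expressed as rotation vectors. A standalone sketch with hypothetical rotation vectors, using cv.Rodrigues for both conversions instead of the script's make_rotation_matrix helper and the real CAMERA_TO_IMU_ROTATION_VECTOR constant:

import numpy
import cv2 as cv

# Hypothetical rotation vectors (radians): camera-to-imu mounting rotation and plan rotation
camera_to_imu_rvec = numpy.array([0.1, 0.0, 0.0])
plan_rvec = numpy.array([0.0, 0.2, 0.0])

# Convert both to 3x3 rotation matrices, compose them, then go back to a rotation vector
F, _ = cv.Rodrigues(camera_to_imu_rvec)
R, _ = cv.Rodrigues(plan_rvec)
rvec_flipped, _ = cv.Rodrigues(F.dot(R))

# rvec_flipped expresses the plan rotation in the imu referential
print(rvec_flipped.ravel())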