author    Théo de la Hogue    2022-03-22 16:08:28 +0100
committer Théo de la Hogue    2022-03-22 16:08:28 +0100
commit    b33cef417f6427cbfe5e41032ce766494188fabf (patch)
tree      77621d268ffecff8505efa824b0ee6f99ca8a459
parent    880d8426bbf16dd9c91b85b7779eb221ac640cf0 (diff)
Interrupt the while loops using keyboard interruption (KeyboardInterrupt) instead of the 'Esc' key
-rw-r--r--  src/argaze/utils/calibrate_tobii_camera.py               50
-rw-r--r--  src/argaze/utils/track_aruco_rois_with_tobii_glasses.py  142
2 files changed, 102 insertions(+), 90 deletions(-)
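
The change is the same in both scripts: the 'Esc'-key exit (cv.waitKey) is replaced by a try/except block wrapped around the capture loop, so the scripts stop on Ctrl+C and the cleanup code runs in every case. A minimal sketch of the pattern, with a hypothetical read_frame() standing in for the Tobii video thread:

import cv2 as cv

def capture_loop(read_frame):
    # run until the user presses Ctrl+C
    try:
        while True:
            frame = read_frame()
            cv.imshow('Preview', frame)
            cv.waitKey(1)  # cv.imshow needs a periodic waitKey call to refresh its window
    except KeyboardInterrupt:
        pass
    # cleanup runs whether the loop was interrupted or ended some other way
    cv.destroyAllWindows()
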
diff --git a/src/argaze/utils/calibrate_tobii_camera.py b/src/argaze/utils/calibrate_tobii_camera.py
index ea35386..c50b499 100644
--- a/src/argaze/utils/calibrate_tobii_camera.py
+++ b/src/argaze/utils/calibrate_tobii_camera.py
@@ -61,39 +61,43 @@ def main():
expected_markers_number = len(aruco_board.get_ids())
expected_corners_number = (aruco_board.get_size()[0] - 1 ) * (aruco_board.get_size()[1] - 1)
- # capture frame with a fully displayed board
- while True:
+ # running until keyboard interruption
+ try:
- frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
+ while True:
- # track all markers in the board
- aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
+ # capture frame with a fully displayed board
+ frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
- # draw only markers
- aruco_tracker.draw(frame)
+ # track all markers in the board
+ aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
- # draw current calibration data count
- cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
- cv.imshow('Tobii Camera Calibration', frame)
+ # draw only markers
+ aruco_tracker.draw(frame)
- # if all board corners are detected
- if aruco_tracker.get_board_corners_number() == expected_corners_number:
+ # draw current calibration data count
+ cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
+ cv.imshow('Tobii Camera Calibration', frame)
- # draw board corners to notify a capture is done
- aruco_tracker.draw_board(frame)
+ # if all board corners are detected
+ if aruco_tracker.get_board_corners_number() == expected_corners_number:
- # append data
- aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
+ # draw board corners to notify a capture is done
+ aruco_tracker.draw_board(frame)
- cv.imshow(f'Tobii Camera Calibration', frame)
+ # append data
+ aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
- time.sleep(2)
+ cv.imshow(f'Tobii Camera Calibration', frame)
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- break
+ time.sleep(2)
+
+ # exit on keyboard interruption
+ except KeyboardInterrupt:
+ pass
+
+ # stop frame display
+ cv.destroyAllWindows()
# stop tobii objects
tobii_video_thread.stop()
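
For readability, once the patch is applied the capture loop in calibrate_tobii_camera.py reads roughly as follows (reassembled from the '+' lines above; argument parsing and the rest of main() elided):

# running until keyboard interruption
try:
    while True:
        # capture frame with a fully displayed board
        frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()

        # track all markers of the board and draw them
        aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
        aruco_tracker.draw(frame)

        # show current calibration data count
        cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50),
                   cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
        cv.imshow('Tobii Camera Calibration', frame)

        # when all board corners are detected, store a calibration sample
        if aruco_tracker.get_board_corners_number() == expected_corners_number:
            aruco_tracker.draw_board(frame)
            aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(),
                                                aruco_tracker.get_board_corners_ids())
            cv.imshow('Tobii Camera Calibration', frame)
            time.sleep(2)

# exit on keyboard interruption
except KeyboardInterrupt:
    pass

# stop frame display, then stop tobii objects
cv.destroyAllWindows()
tobii_video_thread.stop()
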
diff --git a/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py b/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py
index e7abc05..82e25bc 100644
--- a/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py
+++ b/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py
@@ -33,6 +33,9 @@ def main():
# create tobii controller
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'ArGaze', 1)
+ # calibrate tobii glasses
+ tobii_controller.calibrate()
+
# create tobii data thread
tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
tobii_data_thread.start()
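
With the added calibrate() call, the Tobii setup at the top of main() now runs the glasses calibration before the streaming threads are started; roughly (names as in the script):

# create tobii controller and calibrate the glasses before streaming
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'ArGaze', 1)
tobii_controller.calibrate()

# then start the data thread
tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
tobii_data_thread.start()
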
@@ -56,77 +59,82 @@ def main():
tobii_controller.start_streaming()
# process video frames
+ frame_time = 0
last_frame_time = 0
roi2D_buffer = []
marker_buffer = []
- while True:
-
- frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
-
- # draw tobii gaze
- # TODO : sync gaze data according to frame pts
- gp_data = tobii_data_thread.read_gaze_data(pts)
- if 'TIMESTAMP' in gp_data:
- pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
- cv.circle(frame, pointer, 4, (0, 255, 255), -1)
- else:
- pointer = (0, 0)
-
- # track markers with pose estimation and draw them
- aruco_tracker.track(frame)
- aruco_tracker.draw(frame)
-
- # project 3D scenes related to each aruco marker
- if aruco_tracker.get_markers_number():
-
- for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
-
- # TODO : select different 3D scenes depending on aruco id
-
- marker_rotation = aruco_tracker.get_marker_rotation(i)
- marker_translation = aruco_tracker.get_marker_translation(i)
-
- roi3D_scene.set_rotation(marker_rotation)
- roi3D_scene.set_translation(marker_translation)
-
- # zero distortion matrix
- D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
-
- # DON'T APPLY CAMERA DISTORTION : it projects points which are far from the frame into it
- # This hack isn't realistic, but as the gaze will mainly focus on centered ROIs, where the distortion is low, it is acceptable.
- roi2D_scene = roi3D_scene.project(aruco_camera.getK(), D0)
-
- # check if gaze is inside 2D rois
- roi2D_scene.inside(pointer)
-
- # draw 2D rois
- roi2D_scene.draw(frame)
-
- # store roi2D into buffer
- for roi2D in roi2D_scene:
- roi2D['TIME'] = frame_time
- del roi2D['VERTICES']
- roi2D_buffer.append(roi2D)
-
- # store marker into buffer
- marker = {
- 'TIME': frame_time,
- 'ID': i,
- 'X': marker_translation[0][0],
- 'Y': marker_translation[0][1],
- 'Z': marker_translation[0][2]
- }
- marker_buffer.append(marker)
-
- cv.imshow(f'Live Scene', frame)
-
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- last_frame_time = frame_time
- break
+ # running until keyboard interruption
+ try:
+
+ while True:
+
+ frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
+
+ # draw tobii gaze
+ # TODO : sync gaze data according to frame pts
+ gp_data = tobii_data_thread.read_gaze_data(pts)
+ if 'TIMESTAMP' in gp_data:
+ pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
+ cv.circle(frame, pointer, 4, (0, 255, 255), -1)
+ else:
+ pointer = (0, 0)
+
+ # track markers with pose estimation and draw them
+ aruco_tracker.track(frame)
+ aruco_tracker.draw(frame)
+
+ # project 3D scenes related to each aruco marker
+ if aruco_tracker.get_markers_number():
+
+ for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+
+ # TODO : select different 3D scenes depending on aruco id
+
+ marker_rotation = aruco_tracker.get_marker_rotation(i)
+ marker_translation = aruco_tracker.get_marker_translation(i)
+
+ roi3D_scene.set_rotation(marker_rotation)
+ roi3D_scene.set_translation(marker_translation)
+
+ # zero distortion matrix
+ D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
+ # DON'T APPLY CAMERA DISTORTION : it projects points which are far from the frame into it
+ # This hack isn't realistic, but as the gaze will mainly focus on centered ROIs, where the distortion is low, it is acceptable.
+ roi2D_scene = roi3D_scene.project(aruco_camera.getK(), D0)
+
+ # check if gaze is inside 2D rois
+ roi2D_scene.inside(pointer)
+
+ # draw 2D rois
+ roi2D_scene.draw(frame)
+
+ # store roi2D into buffer
+ for roi2D in roi2D_scene:
+ roi2D['TIME'] = frame_time
+ del roi2D['VERTICES']
+ roi2D_buffer.append(roi2D)
+
+ # store marker into buffer
+ marker = {
+ 'TIME': frame_time,
+ 'ID': i,
+ 'X': marker_translation[0][0],
+ 'Y': marker_translation[0][1],
+ 'Z': marker_translation[0][2]
+ }
+ marker_buffer.append(marker)
+
+ cv.imshow(f'Live Scene', frame)
+
+ # exit on keyboard interruption
+ except KeyboardInterrupt:
+ pass
+
+ # stop frame display
+ cv.destroyAllWindows()
+ last_frame_time = frame_time
# stop tobii objects
tobii_video_thread.stop()
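
The zero-distortion projection hack flagged in the comments above (projecting the ROI scene with the camera matrix K but a null distortion vector, so points far outside the frame are not folded back into it) can be sketched with plain OpenCV. This is only an illustration under assumed values, not the project's ROI3DScene API; object_points, rvec, tvec and K are hypothetical placeholders:

import numpy
import cv2 as cv

# hypothetical 3D ROI corners (metres), marker pose and camera matrix
object_points = numpy.array([[0., 0., 0.], [.1, 0., 0.], [.1, .1, 0.], [0., .1, 0.]])
rvec = numpy.zeros(3)             # marker rotation (from the ArUco tracker in practice)
tvec = numpy.array([0., 0., .5])  # marker translation
K = numpy.array([[800., 0., 320.], [0., 800., 240.], [0., 0., 1.]])

# zero distortion coefficients: points far from the frame stay outside it,
# at the cost of ignoring the (small, near-centre) lens distortion
D0 = numpy.zeros(5)

image_points, _ = cv.projectPoints(object_points, rvec, tvec, K, D0)

The resulting image_points correspond roughly to the projected 2D ROI vertices that the script then draws and tests the gaze pointer against.
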