From baa450b532d36f83214d7bb66ce71b15a45c8c84 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 6 Apr 2022 18:06:08 +0200
Subject: Updating live_tobii_aruco_detection.py script

---
 src/argaze/utils/README.md                     |   4 +-
 src/argaze/utils/live_tobii_aruco_detection.py | 198 -------------------------
 src/argaze/utils/live_tobii_aruco_rois.py      | 134 +++++++++++++++++
 src/argaze/utils/live_tobii_session.py         |   2 +-
 4 files changed, 137 insertions(+), 201 deletions(-)
 delete mode 100644 src/argaze/utils/live_tobii_aruco_detection.py
 create mode 100644 src/argaze/utils/live_tobii_aruco_rois.py

diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index f03302c..f459fcb 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -66,8 +66,8 @@ python ./src/argaze/utils/explore_tobii_sdcard.py -s SEGMENT_PATH
 python ./src/argaze/utils/export_tobii_segment_fixations.py -s SEGMENT_PATH
 ```
 
-- Track any 6cm ArUco marker into calibrated Tobii camera video stream (replace IP_ADDRESS). Load an roi scene (replace ROI_SCENE) .obj file, position it virtually like the detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any ROI. Export all collected datas into an export folder.
+- Track any 4.5cm Original ArUco marker in the calibrated Tobii camera video stream (replace IP_ADDRESS). Load a ROI scene .obj file (replace ROI_SCENE), position it virtually relative to any detected ArUco marker and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any ROI.
 
 ```
-python ./src/argaze/utils/track_aruco_rois_with_tobii_glasses.py -t IP_ADDRESS -c export/tobii_camera.json -s ROI_SCENE -o export/data
+python ./src/argaze/utils/live_tobii_aruco_rois.py -t IP_ADDRESS -c export/tobii_camera.json -m 4.5 -s ROI_SCENE
 ```
diff --git a/src/argaze/utils/live_tobii_aruco_detection.py b/src/argaze/utils/live_tobii_aruco_detection.py
deleted file mode 100644
index 3b3640f..0000000
--- a/src/argaze/utils/live_tobii_aruco_detection.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import os
-
-from argaze.TobiiGlassesPro2 import *
-from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
-from argaze.RegionOfInterest import *
-from argaze.TobiiGlassesPro2 import *
-
-import cv2 as cv
-import pandas
-import matplotlib.pyplot as mpyplot
-import matplotlib.patches as mpatches
-
-def main():
-    """
-    Track any ArUco marker into Tobii Glasses Pro 2 camera video stream.
-    From a loaded ROI scene .obj file, position the scene virtually like the detected ArUco markers and project the scene into camera frame.
-    Then, detect if Tobii gaze point is inside any ROI.
-    Export all collected datas into an export folder for further analysis.
- """ - - # Manage arguments - parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) - parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip') - parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') - parser.add_argument('-s', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath') - parser.add_argument('-o', '--output', metavar='OUT', type=str, default='.', help='destination path') - parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') - parser.add_argument('-m', '--marker_size', metavar='MKR', type=int, default=6, help='aruco marker size (cm)') - args = parser.parse_args() - - # Create tobii controller - tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf') - - # Calibrate tobii glasses - #tobii_controller.calibrate() - - # Create tobii data stream - #tobii_data_stream = TobiiData.TobiiDataStream(tobii_controller) - - # Create tobii video stream - tobii_video_stream = TobiiVideo.TobiiVideoStream(tobii_controller) - - # create aruco camera - aruco_camera = ArUcoCamera.ArUcoCamera() - aruco_camera.load_calibration_file(args.camera_calibration) - - # Create aruco tracker - aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, 7.5, aruco_camera) # aruco dictionary, marker length (cm), camera - - # Create ROIs 3D scene - roi3D_scene = ROI3DScene.ROI3DScene() - roi3D_scene.load(args.roi_scene) - - # Start tobii glasses streaming - tobii_controller.start_streaming() - - # Process video frames - frame_time = 0 - last_frame_time = 0 - roi2D_buffer = [] - marker_buffer = [] - - # tracking loop - try: - - for frame_ts, frame in tobii_video_stream.frames(): - - # close window using 'Esc' key - if cv.waitKey(1) == 27: - break - - # draw tobii gaze - # TODO : sync gaze data according frame pts - gp_data = tobii_data_stream.read_gaze_data(pts) - if 'TIMESTAMP' in gp_data: - pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height)) - cv.circle(frame, pointer, 4, (0, 255, 255), -1) - else: - pointer = (0, 0) - - # track markers with pose estimation and draw them - aruco_tracker.track(frame) - aruco_tracker.draw(frame) - - # project 3D scenes related to each aruco markers - if aruco_tracker.get_markers_number(): - - for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()): - - # TODO : select different 3D scenes depending on aruco id - - marker_rotation = aruco_tracker.get_marker_rotation(i) - marker_translation = aruco_tracker.get_marker_translation(i) - - roi3D_scene.set_rotation(marker_rotation) - roi3D_scene.set_translation(marker_translation) - - # zero distorsion matrix - D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0]) - - # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it - # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable. 
-                    roi2D_scene = roi3D_scene.project(aruco_camera.getK(), D0)
-
-                    # check if gaze is inside 2D rois
-                    roi2D_scene.inside(pointer)
-
-                    # draw 2D rois
-                    roi2D_scene.draw(frame)
-
-                    # store roi2D into buffer
-                    for roi2D in roi2D_scene:
-                        roi2D['TIME'] = frame_time
-                        del roi2D['VERTICES']
-                        roi2D_buffer.append(roi2D)
-
-                    # store marker into buffer
-                    marker = {
-                        'TIME': frame_time,
-                        'ID': i,
-                        'X': marker_translation[0][0],
-                        'Y': marker_translation[0][1],
-                        'Z': marker_translation[0][2]
-                    }
-                    marker_buffer.append(marker)
-
-            cv.imshow('Live Scene', frame)
-
-    # exit on 'ctrl+C' interruption
-    except KeyboardInterrupt:
-        pass
-
-    # stop frame display
-    cv.destroyAllWindows()
-    last_frame_time = frame_time
-
-    # stop tobii glasses streaming
-    tobii_controller.stop_streaming()
-
-    # create a pandas DataFrame for each buffer
-    ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-    gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-    gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
-    data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
-    video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])
-    roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
-    marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])
-
-    # manage export folder
-    if not os.path.exists(args.output):
-        os.makedirs(args.output)
-        print(f'{args.output} folder created')
-
-    # export all data frames
-    ac_dataframe.to_csv(f'{args.output}/accelerometer.csv', index=False)
-    gy_dataframe.to_csv(f'{args.output}/gyroscope.csv', index=False)
-    gp_dataframe.to_csv(f'{args.output}/gaze.csv', index=False)
-    data_pts_dataframe.to_csv(f'{args.output}/data_pts.csv', index=False)
-    video_pts_dataframe.to_csv(f'{args.output}/video_pts.csv', index=False)
-    roi2D_dataframe.to_csv(f'{args.output}/rois.csv', index=False)
-    marker_dataframe.to_csv(f'{args.output}/markers.csv', index=False)
-
-    # edit figure
-    figure = mpyplot.figure(figsize=(int(last_frame_time), 5))
-
-    # plot gaze data
-    subplot = figure.add_subplot(211)
-    subplot.set_title('Gaze')
-
-    subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
-    subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)
-
-    x_patch = mpatches.Patch(color='#276FB6', label='X')
-    y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-    subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-    # plot maker position data
-    subplot = figure.add_subplot(212)
-    subplot.set_title('Marker')
-
-    subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
-    subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)
-
-    x_patch = mpatches.Patch(color='#276FB6', label='X')
-    y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-    subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-    # export figure
-    mpyplot.tight_layout()
-    mpyplot.savefig(f'{args.output}/visualisation.svg')
-    mpyplot.close('all')
-
-if __name__ == '__main__':
-
-    main()
\ No newline at end of file
diff --git a/src/argaze/utils/live_tobii_aruco_rois.py b/src/argaze/utils/live_tobii_aruco_rois.py
new file mode 100644
index 0000000..92e8772
--- /dev/null
+++ b/src/argaze/utils/live_tobii_aruco_rois.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze import DataStructures
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
+from argaze.RegionOfInterest import *
+from argaze.TobiiGlassesPro2 import *
+
+import cv2 as cv
+import numpy
+
+def main():
+    """
+    Track any ArUco marker in the Tobii Glasses Pro 2 camera video stream.
+    From a loaded ROI scene .obj file, position the scene virtually relative to any detected ArUco marker and project the scene into the camera frame.
+    Then, detect if the Tobii gaze point is inside any ROI.
+    """
+
+    # Manage arguments
+    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+    parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
+    parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
+    parser.add_argument('-s', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath')
+    parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary')
+    parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
+    args = parser.parse_args()
+
+    # Create tobii controller
+    tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')
+
+    # Calibrate tobii glasses
+    tobii_controller.calibrate()
+
+    # Enable tobii data stream
+    tobii_data_stream = tobii_controller.enable_data_stream()
+
+    # Enable tobii video stream
+    tobii_video_stream = tobii_controller.enable_video_stream()
+
+    # Create aruco camera
+    aruco_camera = ArUcoCamera.ArUcoCamera()
+    aruco_camera.load_calibration_file(args.camera_calibration)
+
+    # Create aruco tracker
+    aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera) # aruco dictionary, marker length (cm), camera
+
+    # Create ROIs 3D scene
+    roi3D_scene = ROI3DScene.ROI3DScene()
+    roi3D_scene.load(args.roi_scene)
+
+    # Start streaming
+    tobii_controller.start_streaming()
+
+    # Live video stream capture loop
+    try:
+
+        past_gaze_positions = DataStructures.TimeStampedBuffer()
+
+        while tobii_video_stream.is_alive():
+
+            video_ts, video_frame = tobii_video_stream.read()
+
+            try:
+
+                # Read data stream
+                data_stream = tobii_data_stream.read()
+
+                # Store received gaze positions
+                past_gaze_positions.append(data_stream.gidx_l_gp)
+
+                # Get last gaze position before video timestamp and remove all former gaze positions
+                earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)
+
+                # Draw video synchronized gaze pointer
+                pointer = (int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
+                cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
+
+            # When expected values aren't in data stream
+            except (KeyError, AttributeError, ValueError):
+
+                pointer = (0, 0)
+
+            # Track markers with pose estimation and draw them
+            aruco_tracker.track(video_frame.matrix)
+            aruco_tracker.draw(video_frame.matrix)
+
+            # Project 3D scene related to each detected aruco marker
+            if aruco_tracker.get_markers_number():
+
+                for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+
+                    # TODO : Select different 3D scenes depending on aruco id
+
+                    marker_rotation = aruco_tracker.get_marker_rotation(i)
+                    marker_translation = aruco_tracker.get_marker_translation(i)
+
+                    roi3D_scene.set_rotation(marker_rotation)
+                    roi3D_scene.set_translation(marker_translation)
+
+                    # Edit zero distortion matrix
+                    D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
+                    # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
+                    # This hack isn't realistic but, as the gaze will mainly focus on centered ROIs where the distortion is low, it is acceptable.
+                    roi2D_scene = roi3D_scene.project(aruco_camera.get_K(), D0)
+
+                    # Check if gaze is inside 2D ROIs
+                    roi2D_scene.inside(pointer)
+
+                    # Draw 2D ROIs
+                    roi2D_scene.draw(video_frame.matrix)
+
+            # Close window using 'Esc' key
+            if cv.waitKey(1) == 27:
+                break
+
+            cv.imshow('Live Scene', video_frame.matrix)
+
+    # Exit on 'ctrl+C' interruption
+    except KeyboardInterrupt:
+        pass
+
+    # Stop frame display
+    cv.destroyAllWindows()
+
+    # Stop streaming
+    tobii_controller.stop_streaming()
+
+if __name__ == '__main__':
+
+    main()
\ No newline at end of file
diff --git a/src/argaze/utils/live_tobii_session.py b/src/argaze/utils/live_tobii_session.py
index 7226860..f5ef0f4 100644
--- a/src/argaze/utils/live_tobii_session.py
+++ b/src/argaze/utils/live_tobii_session.py
@@ -69,7 +69,7 @@ def main():
 
             cv.imshow(f'Live Tobii Camera', video_frame.matrix)
 
-    # exit on 'ctrl+C' interruption
+    # Exit on 'ctrl+C' interruption
     except KeyboardInterrupt:
         pass
 
--
cgit v1.1