#!/usr/bin/env python

"""
tobii_argaze.py

Author:
    - Théo de la Hogue, theo.de-la-hogue@enac.fr
"""

import os

from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
from argaze.RegionOfInterest import *
from argaze.TobiiGlassesPro2 import *

import cv2 as cv
import pandas
import matplotlib.pyplot as mpyplot
import matplotlib.patches as mpatches

# tobii glasses ip address
ip_address = '192.168.1.10'

# manage export folder
current_folder = os.path.dirname(__file__)
export_folder = os.path.join(current_folder, '_export')
if not os.path.exists(export_folder):
    os.makedirs(export_folder)
    print('\'_export\' folder created')

# create tobii controller
tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)

# create tobii data thread
tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
tobii_data_thread.start()

# create tobii video thread
tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
tobii_video_thread.start()

# create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
aruco_camera.load_calibration_file('tobii_camera.json')

# create aruco tracker
aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionary, marker length (cm), camera

# create ROIs 3D scene
roi3D_scene = ROI3DScene.ROI3DScene()
roi3D_scene.load('roi3D_scene.obj')

# start tobii glasses streaming
tobii_controller.start_streaming()

# process video frames
last_frame_time = 0
roi2D_buffer = []
marker_buffer = []

while True:

    frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()

    # draw tobii gaze
    # TODO : sync gaze data according to frame pts
    gp_data = tobii_data_thread.read_gaze_data(pts)
    if 'TIMESTAMP' in gp_data:
        pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
        cv.circle(frame, pointer, 4, (0, 255, 255), -1)
    else:
        pointer = (0, 0)

    # track markers with pose estimation and draw them
    aruco_tracker.track(frame)
    aruco_tracker.draw(frame)

    # project 3D scene related to each detected aruco marker
    if aruco_tracker.get_markers_number():

        for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):

            # TODO : select different 3D scenes depending on aruco id

            marker_rotation = aruco_tracker.get_marker_rotation(i)
            marker_translation = aruco_tracker.get_marker_translation(i)

            roi3D_scene.set_rotation(marker_rotation)
            roi3D_scene.set_translation(marker_translation)

            # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it.
            # This hack isn't realistic, but as the gaze mainly focuses on centered ROI, where the distortion is low, it is acceptable.
            roi2D_scene = roi3D_scene.project(frame, aruco_camera, False)

            # check if gaze is inside 2D rois
            roi2D_scene.inside(pointer)

            # draw 2D rois
            roi2D_scene.draw(frame)

            # store roi2D into buffer
            for roi2D in roi2D_scene:
                roi2D['TIME'] = frame_time
                del roi2D['VERTICES']
                roi2D_buffer.append(roi2D)

            # store marker into buffer
            marker = {
                'TIME': frame_time,
                'ID': i,
                'X': marker_translation[0][0],
                'Y': marker_translation[0][1],
                'Z': marker_translation[0][2]
            }
            marker_buffer.append(marker)

    cv.imshow('Live Scene', frame)

    # quit on 'Esc' key press
    key = cv.waitKey(1)
    if key == 27:
        cv.destroyAllWindows()
        last_frame_time = frame_time
        break

# stop tobii objects
tobii_video_thread.stop()
tobii_data_thread.stop()

tobii_controller.stop_streaming()
tobii_controller.close()

# create a pandas DataFrame for each buffer
ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])

roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])

# export all data frames
ac_dataframe.to_csv(f'{export_folder}/accelerometer.csv', index=False)
gy_dataframe.to_csv(f'{export_folder}/gyroscope.csv', index=False)
gp_dataframe.to_csv(f'{export_folder}/gaze.csv', index=False)
data_pts_dataframe.to_csv(f'{export_folder}/data_pts.csv', index=False)
video_pts_dataframe.to_csv(f'{export_folder}/video_pts.csv', index=False)
roi2D_dataframe.to_csv(f'{export_folder}/rois.csv', index=False)
marker_dataframe.to_csv(f'{export_folder}/markers.csv', index=False)

# edit figure (width scales with recording duration)
figure = mpyplot.figure(figsize=(int(last_frame_time), 5))

# plot gaze data
subplot = figure.add_subplot(211)
subplot.set_title('Gaze')

subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)

x_patch = mpatches.Patch(color='#276FB6', label='X')
y_patch = mpatches.Patch(color='#9427B6', label='Y')
subplot.legend(handles=[x_patch, y_patch], loc='upper left')

# plot marker position data
subplot = figure.add_subplot(212)
subplot.set_title('Marker')

subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)

x_patch = mpatches.Patch(color='#276FB6', label='X')
y_patch = mpatches.Patch(color='#9427B6', label='Y')
subplot.legend(handles=[x_patch, y_patch], loc='upper left')

# export figure
mpyplot.tight_layout()
mpyplot.savefig(f'{export_folder}/visualisation.svg')
mpyplot.close('all')