Diffstat (limited to 'src/examples')
-rw-r--r--  src/examples/README.md                       |  35
-rw-r--r--  src/examples/export_aruco_markers.py         |  24
-rw-r--r--  src/examples/export_calibration_board.py     |  24
-rw-r--r--  src/examples/tobii_argaze/roi3D_scene.obj    |  67
-rw-r--r--  src/examples/tobii_argaze/scene.blend        | bin 1929540 -> 0 bytes
-rw-r--r--  src/examples/tobii_argaze/tobii_argaze.py    | 180
-rw-r--r--  src/examples/tobii_argaze/tobii_camera.json  |  29
-rw-r--r--  src/examples/tobii_camera_calibration.py     | 113
8 files changed, 0 insertions, 472 deletions
diff --git a/src/examples/README.md b/src/examples/README.md
deleted file mode 100644
index ac24dc1..0000000
--- a/src/examples/README.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# ArGaze examples
-
-_All examples require the argaze package._
-
-## export_aruco_markers.py
-
-Generates ArUco markers to place into a scene.
-
-## export_calibration_board.py
-
-Generates an ArUco board to calibrate a camera.
-
-## tobii_camera_calibration.py
-
-Captures board pictures and finally outputs camera calibration data into an \_export/tobii_camera.json file.
-
-* Print the **A3_board_35cmx25cm_markers_4X4_3cm.pdf** file from the ArUcoMarkers/utils folder on an A3 sheet to get the correct square and marker sizes (3 cm markers).
-* Launch the script.
-* Place the board so that it is entirely visible on screen and move the camera through many configurations (orientation and distance): the script will take pictures automatically. Do this step with good lighting and a clear background.
-* Once enough pictures have been captured (~20), press the Esc key, then wait for the camera calibration to complete.
-* Finally, open the \_export/tobii_camera.json file to check the calibration data: the rms parameter should be between 0 and 1 if the calibration succeeded (lower is better).
-
-## tobii_argaze
-
-Coordinates several tasks to:
-
-* Receive gaze data from Tobii Glasses Pro 2,
-* Detect ArUco markers inside the video stream from Tobii Glasses Pro 2,
-* Synchronise gaze data and ArUco detections using timestamps,
-* Build 3D AR ROIs from the **roi3D_scene.obj** file through ArUco marker pose estimation and project them onto the video frame coordinate system,
-* Check if gaze is inside an ROI (a point-in-polygon test, sketched below),
-* Store the gathered data into CSV files for further analysis.
-
-The **scene.blend** file is a Blender project used to build and export the roi3D_scene.obj file.
-
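The "check if gaze is inside an ROI" step above reduces to a 2D point-in-polygon test once the 3D ROIs have been projected onto the video frame. A minimal sketch of that test with OpenCV, assuming projected ROIs are available as lists of pixel vertices (the actual roi2D_scene.inside implementation may differ):

```python
import cv2 as cv
import numpy as np

def gaze_inside_roi(pointer, vertices):
    """Return True if the gaze pointer (x, y) falls inside the ROI polygon."""
    contour = np.array(vertices, dtype=np.float32)
    point = (float(pointer[0]), float(pointer[1]))
    # pointPolygonTest returns > 0 inside, 0 on an edge, < 0 outside
    return cv.pointPolygonTest(contour, point, measureDist=False) >= 0

# example: a rectangular ROI and two gaze points
roi = [(100, 100), (400, 100), (400, 300), (100, 300)]
print(gaze_inside_roi((250, 200), roi))  # True
print(gaze_inside_roi((50, 50), roi))    # False
```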
diff --git a/src/examples/export_aruco_markers.py b/src/examples/export_aruco_markers.py
deleted file mode 100644
index 2d55931..0000000
--- a/src/examples/export_aruco_markers.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-"""
-export_aruco_markers.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-from argaze.ArUcoMarkers import ArUcoMarkers
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export/markers')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export/markers\' folder created')
-
-# create aruco markers
-aruco_markers = ArUcoMarkers.ArUcoMarkers('DICT_4X4_50')
-
-# export markers
-aruco_markers.export_all(export_folder, 300) # destination folder, dpi
diff --git a/src/examples/export_calibration_board.py b/src/examples/export_calibration_board.py
deleted file mode 100644
index 24d7461..0000000
--- a/src/examples/export_calibration_board.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-"""
-export_calibration_board.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-from argaze.ArUcoMarkers import ArUcoBoard
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create aruco board
-aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # dictionary, 7 columns, 5 rows, 5 cm square size, 3 cm marker size
-
-# export aruco board
-aruco_board.export(export_folder, 50) # destination folder, dpi
diff --git a/src/examples/tobii_argaze/roi3D_scene.obj b/src/examples/tobii_argaze/roi3D_scene.obj
deleted file mode 100644
index d0b7c51..0000000
--- a/src/examples/tobii_argaze/roi3D_scene.obj
+++ /dev/null
@@ -1,67 +0,0 @@
-# Blender v3.0.1 OBJ File: 'scene.blend'
-# www.blender.org
-o Marker_Plan
-v -3.000000 -3.000000 0.000000
-v 3.000000 -3.000000 0.000000
-v -3.000000 3.000000 0.000000
-v 3.000000 3.000000 0.000000
-s off
-f 1 2 4 3
-o Air_Speed_Plan.001
-v -41.971680 -4.745928 -2.684396
-v -39.497086 -4.745928 -2.684396
-v -41.971680 7.846082 -2.684396
-v -39.497086 7.846082 -2.684396
-s off
-f 5 6 8 7
-o Attitude_Plan.005
-v -38.940212 -3.709124 -2.684396
-v -30.117123 -3.709124 -2.684396
-v -38.940212 6.711202 -2.684396
-v -30.117123 6.711202 -2.684396
-s off
-f 9 10 12 11
-o Localiser_Plan.003
-v -38.940212 -7.889488 -2.684396
-v -30.117125 -7.889488 -2.684396
-v -38.940212 -4.223971 -2.684396
-v -30.117125 -4.223971 -2.684396
-s off
-f 13 14 16 15
-o Vertical_Speed_Plan.002
-v -29.570124 -4.718364 -2.684396
-v -26.876801 -4.713788 -2.684396
-v -29.528456 7.846082 -2.684396
-v -26.835133 7.850657 -2.684396
-s off
-f 17 18 20 19
-o PFD_Plan.004
-v -42.908882 -9.217942 -2.684396
-v -26.146378 -9.217942 -2.684396
-v -42.908882 14.918060 -2.684396
-v -26.146378 14.918060 -2.684396
-s off
-f 21 22 24 23
-o ND_Plan.107
-v -22.813946 -9.217942 -2.684396
-v -6.051440 -9.217942 -2.684396
-v -22.813946 14.918060 -2.684396
-v -6.051440 14.918060 -2.684396
-s off
-f 25 26 28 27
-o FCU_Plan.108
-v -6.507059 16.577757 26.295910
-v 50.183128 16.577757 26.295910
-v -6.507059 23.751425 26.295910
-v 50.183128 23.751425 26.295910
-s off
-f 29 30 32 31
-o Exterior_Plan.006
-v -46.568127 34.893536 7.561725
-v 12.047465 39.802032 9.644265
-v -46.951084 38.173790 10.614324
-v 11.661365 43.150181 12.620070
-v 13.887004 62.445206 40.607811
-v -35.566383 52.329830 33.684719
-s off
-f 33 34 36 37 38 35
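Each ROI in roi3D_scene.obj above is a named planar polygon: an `o` record carrying the ROI name, its `v` vertices, and one `f` face referencing them by 1-based global index. A minimal sketch of parsing such a file into named 3D polygons, assuming this is roughly what ROI3DScene.load does (the real loader may differ):

```python
def load_roi3d_scene(path):
    """Parse a Wavefront OBJ file into {roi_name: [(x, y, z), ...]} polygons."""
    vertices = []        # global vertex list (OBJ face indices are 1-based)
    rois = {}
    current_name = None
    with open(path) as obj_file:
        for line in obj_file:
            fields = line.split()
            if not fields or fields[0].startswith('#'):
                continue
            if fields[0] == 'o':                      # start of a named ROI
                current_name = fields[1]
            elif fields[0] == 'v':                    # vertex: x y z
                vertices.append(tuple(float(value) for value in fields[1:4]))
            elif fields[0] == 'f' and current_name:   # face: plain 1-based vertex indices
                rois[current_name] = [vertices[int(index) - 1] for index in fields[1:]]
    return rois

# example
# rois = load_roi3d_scene('roi3D_scene.obj')
# print(list(rois.keys()))  # ['Marker_Plan', 'Air_Speed_Plan.001', ...]
```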
diff --git a/src/examples/tobii_argaze/scene.blend b/src/examples/tobii_argaze/scene.blend
deleted file mode 100644
index e7e5dda..0000000
--- a/src/examples/tobii_argaze/scene.blend
+++ /dev/null
Binary files differ
diff --git a/src/examples/tobii_argaze/tobii_argaze.py b/src/examples/tobii_argaze/tobii_argaze.py
deleted file mode 100644
index 8193a03..0000000
--- a/src/examples/tobii_argaze/tobii_argaze.py
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/env python
-
-"""
-tobii_argaze.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-
-from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
-from argaze.RegionOfInterest import *
-from argaze.TobiiGlassesPro2 import *
-
-import cv2 as cv
-import pandas
-import matplotlib.pyplot as mpyplot
-import matplotlib.patches as mpatches
-
-# tobii glasses ip address
-ip_address = '192.168.1.10'
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create tobii controller
-tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
-
-# create tobii data thread
-tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
-tobii_data_thread.start()
-
-# create tobii video thread
-tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
-tobii_video_thread.start()
-
-# create aruco camera
-aruco_camera = ArUcoCamera.ArUcoCamera()
-aruco_camera.load_calibration_file('tobii_camera.json')
-
-# create aruco tracker
-aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionary, marker length (cm), camera
-
-# create ROIs 3D scene
-roi3D_scene = ROI3DScene.ROI3DScene()
-roi3D_scene.load('roi3D_scene.obj')
-
-# start tobii glasses streaming
-tobii_controller.start_streaming()
-
-# process video frames
-last_frame_time = 0
-roi2D_buffer = []
-marker_buffer = []
-
-while True:
-
- frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
-
- # draw tobii gaze
-	# TODO: sync gaze data according to frame pts
- gp_data = tobii_data_thread.read_gaze_data(pts)
- if 'TIMESTAMP' in gp_data:
- pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
- cv.circle(frame, pointer, 4, (0, 255, 255), -1)
- else:
- pointer = (0, 0)
-
- # track markers with pose estimation and draw them
- aruco_tracker.track(frame)
- aruco_tracker.draw(frame)
-
- # project 3D scenes related to each aruco markers
- if aruco_tracker.get_markers_number():
-
- for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
-
- # TODO : select different 3D scenes depending on aruco id
-
- marker_rotation = aruco_tracker.get_marker_rotation(i)
- marker_translation = aruco_tracker.get_marker_translation(i)
-
- roi3D_scene.set_rotation(marker_rotation)
- roi3D_scene.set_translation(marker_translation)
-
-			# DON'T APPLY CAMERA DISTORTION: distortion would project points that are far outside the frame back into it
-			# This hack isn't realistic, but as the gaze mainly focuses on centered ROIs, where distortion is low, it is acceptable.
- roi2D_scene = roi3D_scene.project(frame, aruco_camera, False)
-
- # check if gaze is inside 2D rois
- roi2D_scene.inside(pointer)
-
- # draw 2D rois
- roi2D_scene.draw(frame)
-
- # store roi2D into buffer
- for roi2D in roi2D_scene:
- roi2D['TIME'] = frame_time
- del roi2D['VERTICES']
- roi2D_buffer.append(roi2D)
-
- # store marker into buffer
- marker = {
- 'TIME': frame_time,
- 'ID': i,
- 'X': marker_translation[0][0],
- 'Y': marker_translation[0][1],
- 'Z': marker_translation[0][2]
- }
- marker_buffer.append(marker)
-
- cv.imshow(f'Live Scene', frame)
-
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- last_frame_time = frame_time
- break
-
-# stop tobii objects
-tobii_video_thread.stop()
-tobii_data_thread.stop()
-
-tobii_controller.stop_streaming()
-tobii_controller.close()
-
-# create a pandas DataFrame for each buffer
-ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
-data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
-video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])
-roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
-marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])
-
-# export all data frames
-ac_dataframe.to_csv(f'{export_folder}/accelerometer.csv', index=False)
-gy_dataframe.to_csv(f'{export_folder}/gyroscope.csv', index=False)
-gp_dataframe.to_csv(f'{export_folder}/gaze.csv', index=False)
-data_pts_dataframe.to_csv(f'{export_folder}/data_pts.csv', index=False)
-video_pts_dataframe.to_csv(f'{export_folder}/video_pts.csv', index=False)
-roi2D_dataframe.to_csv(f'{export_folder}/rois.csv', index=False)
-marker_dataframe.to_csv(f'{export_folder}/markers.csv', index=False)
-
-# edit figure
-figure = mpyplot.figure(figsize=(int(last_frame_time), 5))
-
-# plot gaze data
-subplot = figure.add_subplot(211)
-subplot.set_title('Gaze')
-
-subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
-subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)
-
-x_patch = mpatches.Patch(color='#276FB6', label='X')
-y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-
-# plot marker position data
-subplot = figure.add_subplot(212)
-subplot.set_title('Marker')
-
-subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
-subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)
-
-x_patch = mpatches.Patch(color='#276FB6', label='X')
-y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-# export figure
-mpyplot.tight_layout()
-mpyplot.savefig(f'{export_folder}/visualisation.svg')
-mpyplot.close('all')
\ No newline at end of file
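The "don't apply camera distortion" hack in the projection step above can be reproduced with plain OpenCV by passing zero distortion coefficients to cv.projectPoints, so that points lying far outside the frame are not folded back into it. A sketch under the assumption that the scene projection boils down to cv.projectPoints; the pose and intrinsics below are illustrative only:

```python
import cv2 as cv
import numpy as np

# illustrative intrinsics (real values come from tobii_camera.json)
K = np.array([[567.79, 0.0, 477.23],
              [0.0, 566.29, 288.75],
              [0.0, 0.0, 1.0]])
D_zero = np.zeros(5)  # ignore distortion: keeps far-off-frame points out of the frame

# one ROI quad in marker coordinates (cm) and an illustrative marker pose
roi_3d = np.array([[-3.0, -3.0, 0.0], [3.0, -3.0, 0.0],
                   [3.0, 3.0, 0.0], [-3.0, 3.0, 0.0]])
rvec = np.zeros(3)                   # rotation vector from pose estimation
tvec = np.array([0.0, 0.0, 30.0])    # translation: 30 cm in front of the camera

roi_2d, _ = cv.projectPoints(roi_3d, rvec, tvec, K, D_zero)
print(roi_2d.reshape(-1, 2))         # pixel coordinates of the projected ROI corners
```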
diff --git a/src/examples/tobii_argaze/tobii_camera.json b/src/examples/tobii_argaze/tobii_camera.json
deleted file mode 100644
index b7b5108..0000000
--- a/src/examples/tobii_argaze/tobii_camera.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "rms": 0.2778430441943373,
- "camera matrix": [
- [
- 567.7948916261545,
- 0.0,
- 477.23038710185534
- ],
- [
- 0.0,
- 566.2897424860757,
- 288.75352250724296
- ],
- [
- 0.0,
- 0.0,
- 1.0
- ]
- ],
- "distortion coefficients": [
- [
- 0.07351688052834335,
- -0.18678684802766135,
- 0.001473915039947321,
- 0.0008389464646594935,
- 0.13193649892597786
- ]
- ]
-}
\ No newline at end of file
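tobii_camera.json above only stores the RMS reprojection error, the 3x3 camera matrix and the distortion coefficients. A minimal sketch of reading it back into NumPy arrays with the standard json module (the actual ArUcoCamera.load_calibration_file may differ):

```python
import json
import numpy as np

def load_calibration(path):
    """Read rms, camera matrix K and distortion coefficients D from a calibration file."""
    with open(path) as calibration_file:
        data = json.load(calibration_file)
    K = np.array(data['camera matrix'])             # 3x3 intrinsic matrix
    D = np.array(data['distortion coefficients'])   # 1x5: k1, k2, p1, p2, k3
    return data['rms'], K, D

# example
# rms, K, D = load_calibration('tobii_camera.json')
# print(rms, K.shape, D.shape)
```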
diff --git a/src/examples/tobii_camera_calibration.py b/src/examples/tobii_camera_calibration.py
deleted file mode 100644
index 0e621b5..0000000
--- a/src/examples/tobii_camera_calibration.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python
-
-"""
-tobii_camera_calibration.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-This program:
-	- Captures board pictures in which the whole board is visible
-	- Outputs camera calibration data into a tobii_camera.json file
-
-Reference:
- - https://automaticaddison.com/how-to-perform-pose-estimation-using-an-aruco-marker/
-"""
-import os
-import time
-
-from argaze.TobiiGlassesPro2 import TobiiController, TobiiVideo
-from argaze.ArUcoMarkers import ArUcoBoard, ArUcoTracker, ArUcoCamera
-
-import cv2 as cv
-
-# tobii glasses ip address
-ip_address = '192.168.1.10'
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create tobii controller
-tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
-
-# create tobii video thread
-tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
-tobii_video_thread.start()
-
-# create aruco camera
-aruco_camera = ArUcoCamera.ArUcoCamera()
-
-# create aruco board
-aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # dictionary, 7 columns, 5 rows, 5 cm square size, 3 cm marker size
-
-# create aruco tracker
-aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionary, marker length (cm), camera
-
-# start tobii glasses streaming
-tobii_controller.start_streaming()
-
-print("Camera calibration starts")
-print("Waiting for calibration board...")
-
-frame_width = 0
-frame_height = 0
-
-expected_markers_number = len(aruco_board.get_ids())
-expected_corners_number = (aruco_board.get_size()[0] - 1) * (aruco_board.get_size()[1] - 1)
-
-# capture frame with a full displayed board
-while True:
-
- frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
-
- # track all markers in the board
- aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
-
- # draw only markers
- aruco_tracker.draw(frame)
-
- # draw current calibration data count
- cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
- cv.imshow('Tobii Camera Calibration', frame)
-
- # if all board corners are detected
- if aruco_tracker.get_board_corners_number() == expected_corners_number:
-
- # draw board corners to notify a capture is done
- aruco_tracker.draw_board(frame)
-
- # append data
- aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
-
- cv.imshow(f'Tobii Camera Calibration', frame)
-
- time.sleep(2)
-
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- break
-
-# stop tobii objects
-tobii_video_thread.stop()
-
-tobii_controller.stop_streaming()
-tobii_controller.close()
-
-print('\nCalibrating camera...')
-aruco_camera.calibrate(aruco_board, frame_width, frame_height)
-
-print('\nCalibration succeeded!')
-print(f'\nRMS:\n{aruco_camera.get_rms()}')
-print(f'\nCamera matrix:\n{aruco_camera.get_K()}')
-print(f'\nDistortion coefficients:\n{aruco_camera.get_D()}')
-
-aruco_camera.save_calibration_file(os.path.join(export_folder, 'tobii_camera.json'))
-
-print(f'\nCalibration data exported into tobii_camera.json file')
-
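The final aruco_camera.calibrate call presumably reduces to OpenCV's standard camera calibration once each stored set of detected board corners is paired with the corners' known 3D positions on the board. A sketch of that call shape with cv.calibrateCamera on synthetic data (the board geometry, poses and intrinsics below are made up for illustration; the actual ArUcoCamera implementation may differ):

```python
import cv2 as cv
import numpy as np

# known 3D positions of the board's inner corners (6 x 4 grid, 5 cm squares, z = 0)
grid_x, grid_y = np.meshgrid(np.arange(6, dtype=np.float32), np.arange(4, dtype=np.float32))
object_points = np.zeros((24, 3), np.float32)
object_points[:, 0] = grid_x.ravel() * 5.0
object_points[:, 1] = grid_y.ravel() * 5.0

# synthesize a few 'captures' by projecting the grid with a known camera
K_true = np.array([[570.0, 0.0, 480.0], [0.0, 570.0, 290.0], [0.0, 0.0, 1.0]])
poses = [((0.2, 0.0, 0.0), (-12.0, -8.0, 40.0)),
         ((0.0, 0.3, 0.1), (-15.0, -10.0, 50.0)),
         ((-0.2, 0.2, 0.0), (-10.0, -6.0, 60.0))]
object_list, image_list = [], []
for rvec, tvec in poses:
    projected, _ = cv.projectPoints(object_points, np.array(rvec), np.array(tvec), K_true, np.zeros(5))
    object_list.append(object_points)
    image_list.append(projected.astype(np.float32))

# recover intrinsics and distortion from the corner correspondences
rms, K, D, rvecs, tvecs = cv.calibrateCamera(object_list, image_list, (960, 540), None, None)
print(f'RMS: {rms}')  # near zero on synthetic data; below 1 is good on real captures
print(K)
```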