aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoBoard.py79
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py72
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoMarkers.py71
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoTracker.py157
-rw-r--r--src/argaze/ArUcoMarkers/README.md13
-rw-r--r--src/argaze/ArUcoMarkers/__init__.py1
-rw-r--r--src/argaze/ArUcoMarkers/utils/A3_board_35cmx25cm_markers_4X4_3cm.pdfbin0 -> 127329 bytes
-rw-r--r--src/argaze/ArUcoMarkers/utils/A4_markers_4x4_3cm.pdfbin0 -> 30225 bytes
-rw-r--r--src/argaze/ArUcoMarkers/utils/_board_A3.afdesignbin0 -> 512504 bytes
-rw-r--r--src/argaze/ArUcoMarkers/utils/_markers.afdesignbin0 -> 533859 bytes
-rw-r--r--src/argaze/RegionOfInterest/README.md11
-rw-r--r--src/argaze/RegionOfInterest/ROI2DScene.py40
-rw-r--r--src/argaze/RegionOfInterest/ROI3DScene.py139
-rw-r--r--src/argaze/RegionOfInterest/__init__.py1
-rw-r--r--src/argaze/TobiiGlassesPro2/README.md112
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiController.py25
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiData.py271
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiVideo.py95
-rw-r--r--src/argaze/TobiiGlassesPro2/__init__.py1
-rw-r--r--src/argaze/TobiiGlassesPro2/utils/_calibration_target.afdesignbin0 -> 11982 bytes
-rw-r--r--src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdfbin0 -> 1965 bytes
-rw-r--r--src/argaze/__init__.py0
-rw-r--r--src/examples/README.md35
-rw-r--r--src/examples/export_aruco_markers.py24
-rw-r--r--src/examples/export_calibration_board.py24
-rw-r--r--src/examples/tobii_argaze/roi3D_scene.obj67
-rw-r--r--src/examples/tobii_argaze/scene.blendbin0 -> 1929540 bytes
-rw-r--r--src/examples/tobii_argaze/tobii_argaze.py180
-rw-r--r--src/examples/tobii_argaze/tobii_camera.json29
-rw-r--r--src/examples/tobii_camera_calibration.py113
30 files changed, 1560 insertions, 0 deletions
diff --git a/src/argaze/ArUcoMarkers/ArUcoBoard.py b/src/argaze/ArUcoMarkers/ArUcoBoard.py
new file mode 100644
index 0000000..fa76303
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoBoard.py
@@ -0,0 +1,79 @@
+import numpy
+import cv2 as cv
+import cv2.aruco as aruco
+
+# Built-in ArUco dictionaries from OpenCV library
+ARUCO_DICT = {
+ 'DICT_4X4_50': aruco.DICT_4X4_50,
+ 'DICT_4X4_100': aruco.DICT_4X4_100,
+ 'DICT_4X4_250': aruco.DICT_4X4_250,
+ 'DICT_4X4_1000': aruco.DICT_4X4_1000,
+ 'DICT_5X5_50': aruco.DICT_5X5_50,
+ 'DICT_5X5_100': aruco.DICT_5X5_100,
+ 'DICT_5X5_250': aruco.DICT_5X5_250,
+ 'DICT_5X5_1000': aruco.DICT_5X5_1000,
+ 'DICT_6X6_50': aruco.DICT_6X6_50,
+ 'DICT_6X6_100': aruco.DICT_6X6_100,
+ 'DICT_6X6_250': aruco.DICT_6X6_250,
+ 'DICT_6X6_1000': aruco.DICT_6X6_1000,
+ 'DICT_7X7_50': aruco.DICT_7X7_50,
+ 'DICT_7X7_100': aruco.DICT_7X7_100,
+ 'DICT_7X7_250': aruco.DICT_7X7_250,
+ 'DICT_7X7_1000': aruco.DICT_7X7_1000,
+ 'DICT_ARUCO_ORIGINAL': aruco.DICT_ARUCO_ORIGINAL
+}
+
+class ArUcoBoard():
+
+ # initialisation
+ def __init__(self, aruco_dictionary_name, columns, rows, square_size, marker_size):
+
+ # check aruco dictionary name
+ if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
+ raise NameError(f'Bad ArUco dictionnary name: {aruco_dictionary_name}')
+
+ dict_name_split = aruco_dictionary_name.split('_')
+
+ self.__aruco_dict_format = dict_name_split[1]
+ self.__aruco_dict_number = int(dict_name_split[2])
+
+ # load ArUco dictionary
+ self.__aruco_dict = aruco.Dictionary_get(ARUCO_DICT[aruco_dictionary_name])
+
+ # store property
+ self.__columns = columns
+ self.__rows = rows
+ self.__square_size = square_size # in cm
+ self.__marker_size = marker_size # in cm
+
+ # create board model
+ self.__board = aruco.CharucoBoard_create(self.__columns, self.__rows, self.__square_size/100., self.__marker_size/100., self.__aruco_dict)
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # access to the board model
+ def get_model(self):
+
+ return self.__board
+
+ # access to the board markers ids
+ def get_ids(self):
+
+ return self.__board.ids
+
+ # access to the number of columns and rows
+ def get_size(self):
+
+ return self.__board.getChessboardSize()
+
+ # save a picture of the calibration board
+ def export(self, destination_folder, dpi):
+
+ output_filename = f'board_{self.__columns*self.__square_size}cmx{self.__rows*self.__square_size}cm_markers_{self.__aruco_dict_format}_{self.__marker_size}cm.png'
+
+ dimension = [int(e * self.__board.getSquareLength() * 254 * dpi) for e in self.__board.getChessboardSize()] # NOTE(review): 1 m ≈ 39.37 in, not 254 — the 254 factor (100 * 2.54) looks inverted; confirm intended meters→pixels conversion
+
+ cv.imwrite(f'{destination_folder}/{output_filename}', self.__board.draw(dimension))
+
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
new file mode 100644
index 0000000..163391f
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -0,0 +1,72 @@
+import json
+import numpy
+import cv2.aruco as aruco
+
+class ArUcoCamera():
+
+ # initialisation
+ def __init__(self):
+
+ self.__rms = 0 # root mean square error
+ self.__K = [] # camera matrix (focal lengths and principal point)
+ self.__D = [] # distortion coefficients
+
+ # define calibration data
+ self.__corners_set_number = 0
+ self.__corners_set = []
+ self.__corners_set_ids = []
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # load camera calibration data
+ def load_calibration_file(self, camera_calibration_filepath):
+
+ with open(camera_calibration_filepath) as calibration_file:
+
+ calibration_data = json.load(calibration_file)
+
+ self.__rms = calibration_data['rms']
+ self.__K = numpy.asarray(calibration_data['camera matrix'])
+ self.__D = numpy.asarray(calibration_data['distortion coefficients'])
+
+ def save_calibration_file(self, camera_calibration_filepath):
+
+ calibration_data = {'rms': self.__rms, 'camera matrix': self.__K.tolist(), 'distortion coefficients': self.__D.tolist()}
+
+ with open(camera_calibration_filepath, 'w', encoding='utf-8') as calibration_file:
+
+ json.dump(calibration_data, calibration_file, ensure_ascii=False, indent=4)
+
+ def get_rms(self):
+ return self.__rms
+
+ def get_K(self):
+ return self.__K
+
+ def get_D(self):
+ return self.__D
+
+ def calibrate(self, board, frame_width, frame_height):
+
+ if self.__corners_set_number > 0:
+
+ self.__rms, self.__K, self.__D, r, t = aruco.calibrateCameraCharuco(self.__corners_set, self.__corners_set_ids, board.get_model(), [frame_width, frame_height], None, None)
+
+ def reset_calibration_data(self, corners, corners_ids):
+
+ self.__corners_set_number = 0
+ self.__corners_set = []
+ self.__corners_set_ids = []
+
+ def store_calibration_data(self, corners, corners_ids):
+
+ self.__corners_set_number += 1
+ self.__corners_set.append(corners)
+ self.__corners_set_ids.append(corners_ids)
+
+ def get_calibration_data_count(self):
+
+ return self.__corners_set_number
+
diff --git a/src/argaze/ArUcoMarkers/ArUcoMarkers.py b/src/argaze/ArUcoMarkers/ArUcoMarkers.py
new file mode 100644
index 0000000..1499218
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoMarkers.py
@@ -0,0 +1,71 @@
+import numpy
+import cv2 as cv
+import cv2.aruco as aruco
+
+# Built-in ArUco dictionaries from OpenCV library
+ARUCO_DICT = {
+ 'DICT_4X4_50': aruco.DICT_4X4_50,
+ 'DICT_4X4_100': aruco.DICT_4X4_100,
+ 'DICT_4X4_250': aruco.DICT_4X4_250,
+ 'DICT_4X4_1000': aruco.DICT_4X4_1000,
+ 'DICT_5X5_50': aruco.DICT_5X5_50,
+ 'DICT_5X5_100': aruco.DICT_5X5_100,
+ 'DICT_5X5_250': aruco.DICT_5X5_250,
+ 'DICT_5X5_1000': aruco.DICT_5X5_1000,
+ 'DICT_6X6_50': aruco.DICT_6X6_50,
+ 'DICT_6X6_100': aruco.DICT_6X6_100,
+ 'DICT_6X6_250': aruco.DICT_6X6_250,
+ 'DICT_6X6_1000': aruco.DICT_6X6_1000,
+ 'DICT_7X7_50': aruco.DICT_7X7_50,
+ 'DICT_7X7_100': aruco.DICT_7X7_100,
+ 'DICT_7X7_250': aruco.DICT_7X7_250,
+ 'DICT_7X7_1000': aruco.DICT_7X7_1000,
+ 'DICT_ARUCO_ORIGINAL': aruco.DICT_ARUCO_ORIGINAL
+}
+
+class ArUcoMarkers():
+
+ # initialisation
+ def __init__(self, aruco_dictionary_name):
+
+ # check aruco dictionary name
+ if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
+ raise NameError(f'Bad ArUco dictionnary name: {aruco_dictionary_name}')
+
+ dict_name_split = aruco_dictionary_name.split('_')
+
+ self.__aruco_dict_format = dict_name_split[1]
+ self.__aruco_dict_number = int(dict_name_split[2])
+
+ # load ArUco dictionary
+ self.__aruco_dict = aruco.Dictionary_get(ARUCO_DICT[aruco_dictionary_name])
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # save one marker
+ def export(self, destination_folder, dpi, i):
+
+ if i >= 0 and i < self.__aruco_dict_number:
+
+ output_filename = f'marker_{self.__aruco_dict_format}_{i}.png'
+
+ # create marker
+ marker = numpy.zeros((dpi, dpi, 1), dtype="uint8")
+ aruco.drawMarker(self.__aruco_dict, i, dpi, marker, 1)
+
+ # save marker into destination folder
+ cv.imwrite(f'{destination_folder}/{output_filename}', marker)
+
+ else:
+ raise ValueError(f'Bad ArUco index: {i}')
+
+ # save all markers
+ def export_all(self, destination_folder, dpi):
+
+ for i in range(self.__aruco_dict_number):
+
+ self.export(destination_folder, dpi, i)
+
+
diff --git a/src/argaze/ArUcoMarkers/ArUcoTracker.py b/src/argaze/ArUcoMarkers/ArUcoTracker.py
new file mode 100644
index 0000000..1b05e4a
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoTracker.py
@@ -0,0 +1,157 @@
+import numpy
+import cv2 as cv
+import cv2.aruco as aruco
+
+# Built-in ArUco dictionaries from OpenCV library
+ARUCO_DICT = {
+ 'DICT_4X4_50': aruco.DICT_4X4_50,
+ 'DICT_4X4_100': aruco.DICT_4X4_100,
+ 'DICT_4X4_250': aruco.DICT_4X4_250,
+ 'DICT_4X4_1000': aruco.DICT_4X4_1000,
+ 'DICT_5X5_50': aruco.DICT_5X5_50,
+ 'DICT_5X5_100': aruco.DICT_5X5_100,
+ 'DICT_5X5_250': aruco.DICT_5X5_250,
+ 'DICT_5X5_1000': aruco.DICT_5X5_1000,
+ 'DICT_6X6_50': aruco.DICT_6X6_50,
+ 'DICT_6X6_100': aruco.DICT_6X6_100,
+ 'DICT_6X6_250': aruco.DICT_6X6_250,
+ 'DICT_6X6_1000': aruco.DICT_6X6_1000,
+ 'DICT_7X7_50': aruco.DICT_7X7_50,
+ 'DICT_7X7_100': aruco.DICT_7X7_100,
+ 'DICT_7X7_250': aruco.DICT_7X7_250,
+ 'DICT_7X7_1000': aruco.DICT_7X7_1000,
+ 'DICT_ARUCO_ORIGINAL': aruco.DICT_ARUCO_ORIGINAL
+}
+
+class ArUcoTracker():
+
+ # initialisation
+ def __init__(self, aruco_dictionary_name, marker_length, camera):
+
+ # check aruco dictionary name
+ if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
+ raise NameError(f'Bad ArUco dictionnary name: {aruco_dictionary_name}')
+
+ # load ArUco dictionary
+ self.__aruco_dict = aruco.Dictionary_get(ARUCO_DICT[aruco_dictionary_name])
+
+ # define marker length in centimeter
+ self.__marker_length = marker_length
+
+ # define camera
+ self.__camera = camera
+
+ # setup ArUco detection parameters
+ self.__aruco_param = aruco.DetectorParameters_create()
+ self.__aruco_param.cornerRefinementMethod = aruco.CORNER_REFINE_CONTOUR
+
+ # define tracked markers data
+ self.__markers_corners = []
+ self.__markers_ids = []
+ self.__rvecs = []
+ self.__tvecs = []
+ self.__points = []
+
+ # define tracked board data
+ self.__board = None
+ self.__board_corners_number = 0
+ self.__board_corners = []
+ self.__board_corners_ids = []
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # track ArUco markers in frame
+ def track(self, frame, estimate_pose = True):
+
+ # DON'T MIRROR FRAME : it makes the markers detection to fail
+
+ # detect markers from gray picture
+ gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
+ self.__markers_corners, self.__markers_ids, rejectedPoints = aruco.detectMarkers(gray, self.__aruco_dict, parameters = self.__aruco_param)
+
+ if len(self.__markers_corners) > 0 and estimate_pose:
+
+ # markers pose estimation
+ self.__rvecs, self.__tvecs, self.__points = aruco.estimatePoseSingleMarkers(self.__markers_corners, self.__marker_length, self.__camera.get_K(), self.__camera.get_D())
+
+ else:
+
+ self.__rvecs = []
+ self.__tvecs = []
+ self.__points = []
+
+ # track ArUco markers board in frame setting up the number of detected markers needed to agree detection
+ def track_board(self, frame, board, expected_markers_number):
+
+ # DON'T MIRROR FRAME : it makes the markers detection to fail
+
+ # detect markers from gray picture
+ gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
+ self.__markers_corners, self.__markers_ids, rejectedPoints = aruco.detectMarkers(gray, self.__aruco_dict, parameters = self.__aruco_param)
+
+ # if all board markers are detected
+ if self.get_markers_number() == expected_markers_number:
+
+ self.__board = board
+ self.__board_corners_number, self.__board_corners, self.__board_corners_ids = aruco.interpolateCornersCharuco(self.__markers_corners, self.__markers_ids, gray, self.__board.get_model())
+
+ else:
+
+ self.__board = None
+ self.__board_corners_number = 0
+ self.__board_corners = []
+ self.__board_corners_ids = []
+
+ # draw tracked markers in frame
+ def draw(self, frame):
+
+ # draw detected markers square
+ if len(self.__markers_corners) > 0:
+
+ aruco.drawDetectedMarkers(frame, self.__markers_corners, self.__markers_ids)
+
+ # draw marker axis if pose has been estimated
+ if len(self.__rvecs) > 0:
+
+ for (i, marker_id) in enumerate(self.__markers_ids):
+
+ aruco.drawAxis(frame, self.__camera.get_K(), self.__camera.get_D(), self.__rvecs[i], self.__tvecs[i], self.__marker_length)
+
+ # draw tracked board corners in frame
+ def draw_board(self, frame):
+
+ if self.__board != None:
+
+ cv.drawChessboardCorners(frame, ((self.__board.get_size()[0] - 1 ), (self.__board.get_size()[1] - 1)), self.__board_corners, True)
+
+ # access to tracked markers data
+ def get_markers_number(self):
+ return len(self.__markers_corners)
+
+ def get_markers_ids(self):
+ return self.__markers_ids
+
+ def get_marker_corners(self, i):
+ return self.__markers_corners[i]
+
+ def get_marker_rotation(self, i):
+ return self.__rvecs[i]
+
+ def get_marker_translation(self, i):
+ return self.__tvecs[i]
+
+ def get_marker_points(self, i):
+ return self.__points[i]
+
+ # access to tracked board data
+ def get_board_corners_number(self):
+ return self.__board_corners_number
+
+ def get_board_corners_ids(self):
+ return self.__board_corners_ids
+
+ def get_board_corners(self):
+ return self.__board_corners
+
diff --git a/src/argaze/ArUcoMarkers/README.md b/src/argaze/ArUcoMarkers/README.md
new file mode 100644
index 0000000..f79be36
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/README.md
@@ -0,0 +1,13 @@
+# ArUcoMarkers
+
+_Class interface to work with OpenCV ArUco markers._
+https://docs.opencv.org/4.x/d5/dae/tutorial_aruco_detection.html
+
+## ArUcoBoard.py
+
+## ArUcoCamera.py
+
+## ArUcoMarkers.py
+
+## ArUcoTracker.py
+
diff --git a/src/argaze/ArUcoMarkers/__init__.py b/src/argaze/ArUcoMarkers/__init__.py
new file mode 100644
index 0000000..188e407
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ArUcoBoard', 'ArUcoCamera', 'ArUcoMarkers', 'ArUcoTracker'] \ No newline at end of file
diff --git a/src/argaze/ArUcoMarkers/utils/A3_board_35cmx25cm_markers_4X4_3cm.pdf b/src/argaze/ArUcoMarkers/utils/A3_board_35cmx25cm_markers_4X4_3cm.pdf
new file mode 100644
index 0000000..7725730
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/utils/A3_board_35cmx25cm_markers_4X4_3cm.pdf
Binary files differ
diff --git a/src/argaze/ArUcoMarkers/utils/A4_markers_4x4_3cm.pdf b/src/argaze/ArUcoMarkers/utils/A4_markers_4x4_3cm.pdf
new file mode 100644
index 0000000..412684f
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/utils/A4_markers_4x4_3cm.pdf
Binary files differ
diff --git a/src/argaze/ArUcoMarkers/utils/_board_A3.afdesign b/src/argaze/ArUcoMarkers/utils/_board_A3.afdesign
new file mode 100644
index 0000000..57c6588
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/utils/_board_A3.afdesign
Binary files differ
diff --git a/src/argaze/ArUcoMarkers/utils/_markers.afdesign b/src/argaze/ArUcoMarkers/utils/_markers.afdesign
new file mode 100644
index 0000000..8443b9a
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/utils/_markers.afdesign
Binary files differ
diff --git a/src/argaze/RegionOfInterest/README.md b/src/argaze/RegionOfInterest/README.md
new file mode 100644
index 0000000..faeb714
--- /dev/null
+++ b/src/argaze/RegionOfInterest/README.md
@@ -0,0 +1,11 @@
+# RegionOfInterest
+
+_Class interface to manage ROIs_
+https://en.wikipedia.org/wiki/Region_of_interest#:~:text=A%20region%20of%20interest%20(often,purpose%20of%20measuring%20its%20size.
+
+## ROI2DScene.py
+
+## ROI3DScene.py
+
+
+
diff --git a/src/argaze/RegionOfInterest/ROI2DScene.py b/src/argaze/RegionOfInterest/ROI2DScene.py
new file mode 100644
index 0000000..6e0faf4
--- /dev/null
+++ b/src/argaze/RegionOfInterest/ROI2DScene.py
@@ -0,0 +1,40 @@
+import cv2 as cv
+import matplotlib.path as mpath
+
+class ROI2DScene(list):
+
+ # subclass list
+ def __new__(cls):
+ return super(ROI2DScene, cls).__new__(cls)
+
+ # initialisation
+ def __init__(self):
+ pass
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # check if a (x, y) pointer is inside rois
+ def inside(self, pointer):
+
+ for roi in self:
+
+ roi['POINTER_INSIDE'] = mpath.Path(roi['VERTICES']).contains_points([pointer])[0]
+
+ # draw projected polygons
+ def draw(self, frame):
+
+ for roi in self:
+
+ vertices = roi['VERTICES']
+ inside = roi['POINTER_INSIDE']
+
+ color = (0, 255, 0) if inside else (0, 0, 255)
+
+ if inside:
+ cv.putText(frame, roi['NAME'], (vertices[3][0], vertices[3][1]), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ cv.line(frame, vertices[-1], vertices[0], color, 1)
+ for A, B in zip(vertices, vertices[1:]):
+ cv.line(frame, A, B, color, 1)
diff --git a/src/argaze/RegionOfInterest/ROI3DScene.py b/src/argaze/RegionOfInterest/ROI3DScene.py
new file mode 100644
index 0000000..d292d2a
--- /dev/null
+++ b/src/argaze/RegionOfInterest/ROI3DScene.py
@@ -0,0 +1,139 @@
+import math
+import re
+
+from argaze.RegionOfInterest import ROI2DScene
+
+import numpy
+import cv2 as cv
+import matplotlib.path as mpath
+
+class ROI3DScene(list):
+
+ # subclass list
+ def __new__(cls):
+ return super(ROI3DScene, cls).__new__(cls)
+
+ # initialisation
+ def __init__(self):
+
+ # define rotation and translation matrix
+ self.__rotation = [0, 0, 0]
+ self.__translation = [0, 0, 0]
+
+ # define a zero distortion matrix
+ self.__D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # load scene from .obj file
+ def load(self, obj_filepath):
+
+ # regex rules for .obj file parsing
+ OBJ_RX_DICT = {
+ 'comment': re.compile(r'#(.*)\n'),
+ 'name': re.compile(r'o (\w+)(.*)\n'),
+ 'vertice': re.compile(r'v ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
+ 'face': re.compile(r'f (.*)\n')
+ }
+
+ # regex .obj line parser
+ def __parse_obj_line(line):
+
+ for key, rx in OBJ_RX_DICT.items():
+ match = rx.search(line)
+ if match:
+ return key, match
+
+ # if there are no matches
+ return None, None
+
+ # start parsing
+ try:
+
+ roi3D = {}
+ vertices = []
+ faces = []
+
+ # open the file and read through it line by line
+ with open(obj_filepath, 'r') as file:
+
+ line = file.readline()
+
+ while line:
+
+ # at each line check for a match with a regex
+ key, match = __parse_obj_line(line)
+
+ # extract comment
+ if key == 'comment':
+ pass
+
+ # extract roi3D name
+ elif key == 'name':
+
+ roi3D['NAME'] = str(match.group(1))
+
+ # fill vertices array
+ elif key == 'vertice':
+
+ vertices.append(tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))]))
+
+ # extract roi3D vertice id
+ elif key == 'face':
+
+ roi3D['FACE'] = [int(i) for i in match.group(1).split()]
+
+ # store roi3D dict into scene array
+ self.append(roi3D)
+
+ # clear roi3D dict
+ roi3D = {}
+
+ # go to next line
+ line = file.readline()
+
+ file.close()
+
+ # retrieve all roi3D vertices
+ for roi3D in self:
+ roi3D['VERTICES'] = [ vertices[i-1] for i in roi3D['FACE'] ]
+ roi3D.pop('FACE', None)
+
+ # print scene
+ for roi3D in self:
+ name = roi3D['NAME']
+ vertices = roi3D['VERTICES']
+
+ except IOError:
+ raise IOError(f'File not found: {obj_filepath}')
+
+ def set_rotation(self, rvec):
+
+ self.__rotation = rvec
+
+ def set_translation(self, tvec):
+
+ self.__translation = tvec
+
+ # project 3D scene onto 2D scene through a camera
+ def project(self, frame, camera, apply_distorsion = True):
+
+ roi2D_scene = ROI2DScene.ROI2DScene()
+
+ for roi3D in self:
+
+ vertices_3D = numpy.array(roi3D['VERTICES']).astype('float32')
+
+ vertices_2D, J = cv.projectPoints(vertices_3D, self.__rotation, self.__translation, camera.get_K(), camera.get_D() if apply_distorsion else self.__D0)
+ vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2))
+
+ roi2D = {
+ 'NAME': roi3D['NAME'],
+ 'VERTICES': vertices_2D
+ }
+
+ roi2D_scene.append(roi2D)
+
+ return roi2D_scene
diff --git a/src/argaze/RegionOfInterest/__init__.py b/src/argaze/RegionOfInterest/__init__.py
new file mode 100644
index 0000000..f81f73f
--- /dev/null
+++ b/src/argaze/RegionOfInterest/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ROI2DScene', 'ROI3DScene'] \ No newline at end of file
diff --git a/src/argaze/TobiiGlassesPro2/README.md b/src/argaze/TobiiGlassesPro2/README.md
new file mode 100644
index 0000000..3352274
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/README.md
@@ -0,0 +1,112 @@
+# TobiiGlassesPro2
+
+_This module provides a class interface to handle TobiiGlassesPro2 data and video stream.
+It is based on TobiiGlassesPyController package (https://github.com/ddetommaso/TobiiGlassesPyController)._
+
+## To connect Tobii glasses on Mac OS
+
+* Install DHCP server: brew install isc-dhcp
+
+* Setup DHCP server:
+ * /usr/local/etc/dhcpd.conf
+ * /usr/local/etc/master.conf
+
+* Setup USB LAN Interface:
+ * ip: 192.168.1.1
+ * subnet: 255.255.255.0
+ * router: 192.168.1.254
+
+* Launch Tobii glasses interface to enable USB LAN Interface before to Launch DHCP server.
+
+* Launch DHCP server: sudo /usr/local/sbin/dhcpd
+
+* Kill DHCP server: sudo kill `cat /usr/local/var/dhcpd/dhcpd.pid`
+
+* Watch DHCP server: sudo log stream --info --debug --predicate "process == 'dhcpd'"
+
+## Tobii data structure
+
+Each data frame have the following structure:
+```
+ {
+ 'mems': {
+ 'ac': {
+ 'ts': 1042711827,
+ 's': 0,
+ 'ac': [0.549, -9.868, 2.203]
+ },
+ 'gy': {
+ 'ts': 1042723807,
+ 's': 0,
+ 'gy': [2.94, -2.384, 1.428]
+ }
+ },
+ 'right_eye': {
+ 'pc': {
+ 'ts': 1042743553,
+ 's': 0, 'gidx': 39971,
+ 'pc': [-35.24, -25.51, -31.66],
+ 'eye': 'right'
+ },
+ 'pd': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'pd': 3.72,
+ 'eye': 'right'
+ },
+ 'gd': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'gd': [0.1833, 0.307, 0.9339],
+ 'eye': 'right'
+ }
+ },
+ 'left_eye': {
+ 'pc': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'pc': [29.96, -27.92, -40.9],
+ 'eye': 'left'
+ },
+ 'pd': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'pd': 4.42,
+ 'eye': 'left'
+ },
+ 'gd': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'gd': [0.1528, 0.2977, 0.9423],
+ 'eye': 'left'
+ }
+ },
+ 'gp': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'l': 82832,
+ 'gp': [0.3975, 0.2228]
+ },
+ 'gp3': {
+ 'ts': 1042743553,
+ 's': 0,
+ 'gidx': 39971,
+ 'gp3': [313.64, 542.2, 1728.85]
+ },
+ 'pts': {
+ 'ts': 1042123917,
+ 's': 0,
+ 'pts': 93695, # Presentation Time Stamp (PTS) value to synchronise with live video stream frame PTS
+ 'pv': 7
+ },
+ 'vts': {
+ 'ts': -1
+ }
+ }
+``` \ No newline at end of file
diff --git a/src/argaze/TobiiGlassesPro2/TobiiController.py b/src/argaze/TobiiGlassesPro2/TobiiController.py
new file mode 100644
index 0000000..aafa225
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/TobiiController.py
@@ -0,0 +1,25 @@
+import tobiiglassesctrl
+
+class TobiiController(tobiiglassesctrl.TobiiGlassesController):
+
+ # initialisation
+ def __init__(self, ip_address, project_name, participant_id):
+
+ super().__init__(ip_address, video_scene = True)
+
+ # edit project and participant
+ project_id = super().create_project(project_name)
+ participant_id = super().create_participant(project_id, project_name)
+ '''
+ # start calibration
+ input("Position Tobbi glasses calibration target then presse 'Enter' to start calibration.")
+ calibration_id = super().create_calibration(project_id, participant_id)
+ super().start_calibration(calibration_id)
+
+ if not super().wait_until_calibration_is_done(calibration_id):
+ logging.error('TobiiController.__init__() : Calibration has failed')
+ '''
+
+ # destruction
+ def __del__(self):
+ pass
diff --git a/src/argaze/TobiiGlassesPro2/TobiiData.py b/src/argaze/TobiiGlassesPro2/TobiiData.py
new file mode 100644
index 0000000..6bfc89e
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/TobiiData.py
@@ -0,0 +1,271 @@
+import threading
+import time
+
+class TobiiDataThread(threading.Thread):
+
+ # initialisation
+ def __init__(self, controller):
+
+ threading.Thread.__init__(self)
+ self.stop_event = threading.Event()
+ self.read_lock = threading.Lock()
+
+ self.controller = controller
+
+ self.fps = self.controller.get_et_freq()
+ self.sleep = 1./self.fps
+
+ self.__ac_buffer = [] # accelerometer
+ self.__gy_buffer = [] # gyroscope
+ self.__gp_buffer = [] # gaze point
+ self.__pts_buffer = [] # presentation timestamp
+
+ self.__start_ts = 0
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # extract ac data
+ def __get_ac(self, data):
+
+ ac_value = data['mems']['ac']['ac']
+ ac_ts = data['mems']['ac']['ts']
+ ac_data = {
+ 'TIMESTAMP': ac_ts,
+ 'TIME': (ac_ts - self.__start_ts) / 1000000.,
+ 'X': ac_value[0],
+ 'Y': ac_value[1],
+ 'Z': ac_value[2]
+ }
+
+ return ac_data
+
+ # extract gy data
+ def __get_gy(self, data):
+
+ gy_value = data['mems']['gy']['gy']
+ gy_ts = data['mems']['gy']['ts']
+ gy_data = {
+ 'TIMESTAMP': gy_ts,
+ 'TIME': (gy_ts - self.__start_ts) / 1000000.,
+ 'X': gy_value[0],
+ 'Y': gy_value[1],
+ 'Z': gy_value[2]
+ }
+
+ return gy_data
+
+ # extract gp data
+ def __get_gp(self, data):
+
+ gp_value = data['gp']['gp']
+ gp_ts = data['gp']['ts']
+ gp_data = {
+ 'TIMESTAMP': gp_ts,
+ 'TIME': (gp_ts - self.__start_ts) / 1000000.,
+ 'X': gp_value[0],
+ 'Y': gp_value[1]
+ }
+
+ return gp_data
+
+ # extract pts data
+ def __get_pts(self, data):
+
+ pts_value = data['pts']['pts']
+ pts_ts = data['pts']['ts']
+ pts_data = {
+ 'TIMESTAMP': pts_ts,
+ 'TIME': (pts_ts - self.__start_ts) / 1000000.,
+ 'PTS': pts_value
+ }
+
+ return pts_data
+
+ # thread start
+ def run(self):
+
+ while not self.stop_event.isSet():
+
+ time.sleep(self.sleep)
+
+ self.read_lock.acquire()
+
+ data = self.controller.get_data()
+
+ # store only timestamped datas
+ if 'pts' in data:
+
+ pts_data = data['pts']
+
+ if 'pts' in pts_data:
+
+ ac_ts = data['mems']['ac']['ts']
+ gy_ts = data['mems']['gy']['ts']
+ gp_ts = data['gp']['ts']
+ pts_ts = pts_data['ts']
+
+ # get start timestamp
+ if self.__start_ts == 0:
+
+ # ignore -1 timestamp
+ valid_ts = []
+ for ts in [ac_ts, gy_ts, gp_ts, pts_ts]:
+ if ts > 0:
+ valid_ts.append(ts)
+
+ self.__start_ts = min(valid_ts)
+ #print(f'Tobii Data Frame: __start_ts = {self.__start_ts}')
+
+ #print(f'Tobii Data Frame: ac_ts = {ac_ts}, gy_ts = {gy_ts}, gp_ts = {gp_ts}, pts_ts = {pts_ts}')
+
+ # ignore -1 timestamp and filter repetitions
+
+ if ac_ts != -1:
+ if len(self.__ac_buffer) == 0:
+ self.__ac_buffer.append(self.__get_ac(data))
+ elif ac_ts != self.__ac_buffer[-1]['TIMESTAMP']:
+ self.__ac_buffer.append(self.__get_ac(data))
+
+ if gy_ts != -1:
+ if len(self.__gy_buffer) == 0:
+ self.__gy_buffer.append(self.__get_gy(data))
+ elif gy_ts != self.__gy_buffer[-1]['TIMESTAMP']:
+ self.__gy_buffer.append(self.__get_gy(data))
+
+ if gp_ts != -1:
+ if len(self.__gp_buffer) == 0:
+ self.__gp_buffer.append(self.__get_gp(data))
+ elif gp_ts != self.__gp_buffer[-1]['TIMESTAMP']:
+ self.__gp_buffer.append(self.__get_gp(data))
+
+ if pts_ts != -1:
+ if len(self.__pts_buffer) == 0:
+ self.__pts_buffer.append(self.__get_pts(data))
+ elif pts_ts != self.__pts_buffer[-1]['TIMESTAMP']:
+ self.__pts_buffer.append(self.__get_pts(data))
+
+ self.read_lock.release()
+
+ # read ac data
+ def read_accelerometer_data(self, timestamp):
+
+ if len(self.__ac_buffer):
+
+ self.read_lock.acquire()
+
+ # TODO : find closest timestamp data
+ ac_data = self.__ac_buffer[-1].copy()
+
+ self.read_lock.release()
+
+ return ac_data
+
+ else:
+
+ return {}
+
+ # read ac buffer
+ def read_accelerometer_buffer(self):
+
+ self.read_lock.acquire()
+
+ ac_buffer = self.__ac_buffer.copy()
+
+ self.read_lock.release()
+
+ return ac_buffer
+
+ # read gy data
+ def read_gyroscope_data(self, timestamp):
+
+ if len(self.__gy_buffer):
+
+ self.read_lock.acquire()
+
+ # TODO : find closest timestamp data
+ gy_data = self.__gy_buffer[-1].copy()
+
+ self.read_lock.release()
+
+ return gy_data
+
+ else:
+
+ return {}
+
+ # read gy buffer
+ def read_gyroscope_buffer(self):
+
+ self.read_lock.acquire()
+
+ gy_buffer = self.__gy_buffer.copy()
+
+ self.read_lock.release()
+
+ return gy_buffer
+
+ # read gp data
+ def read_gaze_data(self, timestamp):
+
+ if len(self.__gp_buffer):
+
+ self.read_lock.acquire()
+
+ # TODO : find closest timestamp data
+ gp_data = self.__gp_buffer[-1].copy()
+
+ self.read_lock.release()
+
+ return gp_data
+
+ else:
+
+ return {}
+
+ # read gp buffer
+ def read_gaze_buffer(self):
+
+ self.read_lock.acquire()
+
+ gp_buffer = self.__gp_buffer.copy()
+
+ self.read_lock.release()
+
+ return gp_buffer
+
+ # read pts data
+ def read_pts_data(self, timestamp):
+
+ if len(self.__pts_buffer):
+
+ self.read_lock.acquire()
+
+ # TODO : find closest timestamp data
+ pts_data = self.__pts_buffer[-1].copy()
+
+ self.read_lock.release()
+
+ return pts_data
+
+ else:
+
+ return {}
+
+ # read pts buffer
+ def read_pts_buffer(self):
+
+ self.read_lock.acquire()
+
+ pts_buffer = self.__pts_buffer.copy()
+
+ self.read_lock.release()
+
+ return pts_buffer
+
+ # thread stop
+ def stop(self):
+
+ self.stop_event.set()
+ threading.Thread.join(self)
diff --git a/src/argaze/TobiiGlassesPro2/TobiiVideo.py b/src/argaze/TobiiGlassesPro2/TobiiVideo.py
new file mode 100644
index 0000000..8777a02
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/TobiiVideo.py
@@ -0,0 +1,95 @@
+import threading
+
+import av
+import numpy
+
+class TobiiVideoThread(threading.Thread):
+
+ # initialisation
+ def __init__(self, controller):
+
+ threading.Thread.__init__(self)
+ self.stop_event = threading.Event()
+ self.read_lock = threading.Lock()
+
+ self.controller = controller
+
+ self.fps = self.controller.get_video_freq()
+
+ self.read_lock.acquire()
+
+ self.__frame = numpy.zeros((1, 1, 3), numpy.uint8)
+ self.__width = 0
+ self.__height = 0
+ self.__pts_buffer = []
+
+ self.read_lock.release()
+
+ # destruction
+ def __del__(self):
+ pass
+
+ # thread start
+ def run(self):
+
+ # start Tobii glasses stream capture
+ self.__container = av.open(f'rtsp://{self.controller.get_address()}:8554/live/scene', options={'rtsp_transport': 'tcp'})
+ self.__stream = self.__container.streams.video[0]
+
+ for f in self.__container.decode(self.__stream):
+
+ if self.stop_event.isSet():
+ break
+
+ self.read_lock.acquire()
+
+ self.__frame = f.to_ndarray(format='bgr24')
+ self.__width = f.width
+ self.__height = f.height
+ self.__pts_buffer.append({'TIME':f.time, 'PTS': f.pts})
+
+ #print(f'Tobii Video Frame: pts = {f.pts}, time = {f.time}, format = {f.width}, {f.height}')
+
+ self.read_lock.release()
+
+ # read frame
+ def read(self) :
+
+ # if stopped, return blank frame
+ if self.stop_event.isSet():
+ return numpy.zeros((1, 1, 3), numpy.uint8)
+
+ # else
+ self.read_lock.acquire()
+
+ frame_copy = self.__frame.copy()
+ width_copy = self.__width
+ height_copy = self.__height
+
+ if len(self.__pts_buffer):
+ time_copy = self.__pts_buffer[-1]['TIME']
+ pts_copy = self.__pts_buffer[-1]['PTS']
+ else:
+ time_copy = -1
+ pts_copy = -1
+
+ self.read_lock.release()
+
+ return frame_copy, width_copy, height_copy, time_copy, pts_copy
+
+ # read pts buffer
+ def read_pts_buffer(self):
+
+ self.read_lock.acquire()
+
+ pts_buffer = self.__pts_buffer.copy()
+
+ self.read_lock.release()
+
+ return pts_buffer
+
+ # thread stop
+ def stop(self):
+
+ self.stop_event.set()
+ threading.Thread.join(self)
diff --git a/src/argaze/TobiiGlassesPro2/__init__.py b/src/argaze/TobiiGlassesPro2/__init__.py
new file mode 100644
index 0000000..3884106
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/__init__.py
@@ -0,0 +1 @@
+__all__ = ['TobiiController', 'TobiiData', 'TobiiVideo'] \ No newline at end of file
diff --git a/src/argaze/TobiiGlassesPro2/utils/_calibration_target.afdesign b/src/argaze/TobiiGlassesPro2/utils/_calibration_target.afdesign
new file mode 100644
index 0000000..5578e0b
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/utils/_calibration_target.afdesign
Binary files differ
diff --git a/src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf b/src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf
new file mode 100644
index 0000000..dfdbe0a
--- /dev/null
+++ b/src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf
Binary files differ
diff --git a/src/argaze/__init__.py b/src/argaze/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/argaze/__init__.py
diff --git a/src/examples/README.md b/src/examples/README.md
new file mode 100644
index 0000000..ac24dc1
--- /dev/null
+++ b/src/examples/README.md
@@ -0,0 +1,35 @@
+# ArGaze examples
+
+_All examples require the argaze package._
+
+## export_aruco_markers.py
+
+Generates ArUco markers to place into a scene.
+
+## export_calibration_board.py
+
+Generates ArUco board to calibrate a camera.
+
+## tobii_camera_calibration.py
+
+Captures board pictures and finally outputs camera calibration data into an \_export/tobii_camera.json file.
+
+* Print the **A3_board_35cmx25cm_markers_4X4_3cm.pdf** file from ArUcoMarkers/utils folder on a A3 sheet to get the correct square and markers length (3 cm).
+* Launch the script.
+* Place the board in order to view it entirely on screen and move the camera in many configurations (orientation and distance) : the script will automatically take pictures. Do this step with a good lighting and a clear background.
+* Once enough pictures have been captured (~20), press Esc key then, wait for the camera calibration processing.
+* Finally, open \_export/tobii_camera.json file to see the calibration data: the rms parameter should be between 0. and 1. if the calibration succeeded (lower is better).
+
+## tobii_argaze
+
+Coordinates several tasks to :
+
+* Receive gaze data from Tobii glasses Pro 2,
+* Detect ArUco markers inside video stream from Tobii glasses Pro 2,
+* Synchronise gaze data and ArUco detection using timestamps,
+* Build 3D AR ROIs from **roi3D_scene.obj** file through ArUco marker pose estimation and project them onto video frame coordinate system,
+* Check if gaze is inside an ROI,
+* Store gathered data into unified csv file for further analysis.
+
+The **scene.blend** file is a Blender project to build and export roi3D_scene.obj file.
+
diff --git a/src/examples/export_aruco_markers.py b/src/examples/export_aruco_markers.py
new file mode 100644
index 0000000..2d55931
--- /dev/null
+++ b/src/examples/export_aruco_markers.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+"""
+export_markers.py
+
+Author:
+ - Théo de la Hogue, theo.de-la-hogue@enac.fr
+
+"""
+import os
+from argaze.ArUcoMarkers import ArUcoMarkers
+
+# manage export folder
+current_folder = os.path.dirname(__file__)
+export_folder = os.path.join(current_folder, '_export/markers')
+if not os.path.exists(export_folder):
+ os.makedirs(export_folder)
+ print(f'\'_export/markers\' folder created')
+
+# create aruco markers
+aruco_markers = ArUcoMarkers.ArUcoMarkers('DICT_4X4_50')
+
+# export markers
+aruco_markers.export_all(export_folder, 300) # destination folder, dpi
diff --git a/src/examples/export_calibration_board.py b/src/examples/export_calibration_board.py
new file mode 100644
index 0000000..24d7461
--- /dev/null
+++ b/src/examples/export_calibration_board.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+"""
+export_board.py
+
+Author:
+ - Théo de la Hogue, theo.de-la-hogue@enac.fr
+
+"""
+import os
+from argaze.ArUcoMarkers import ArUcoBoard
+
+# manage export folder
+current_folder = os.path.dirname(__file__)
+export_folder = os.path.join(current_folder, '_export')
+if not os.path.exists(export_folder):
+ os.makedirs(export_folder)
+ print(f'\'_export\' folder created')
+
+# create aruco board
+aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # 7 columns, 5 rows, square size (cm), marker size (cm)
+
+# export aruco board
+aruco_board.export(export_folder, 50) # destination folder, dpi
diff --git a/src/examples/tobii_argaze/roi3D_scene.obj b/src/examples/tobii_argaze/roi3D_scene.obj
new file mode 100644
index 0000000..d0b7c51
--- /dev/null
+++ b/src/examples/tobii_argaze/roi3D_scene.obj
@@ -0,0 +1,67 @@
+# Blender v3.0.1 OBJ File: 'scene.blend'
+# www.blender.org
+o Marker_Plan
+v -3.000000 -3.000000 0.000000
+v 3.000000 -3.000000 0.000000
+v -3.000000 3.000000 0.000000
+v 3.000000 3.000000 0.000000
+s off
+f 1 2 4 3
+o Air_Speed_Plan.001
+v -41.971680 -4.745928 -2.684396
+v -39.497086 -4.745928 -2.684396
+v -41.971680 7.846082 -2.684396
+v -39.497086 7.846082 -2.684396
+s off
+f 5 6 8 7
+o Attitude_Plan.005
+v -38.940212 -3.709124 -2.684396
+v -30.117123 -3.709124 -2.684396
+v -38.940212 6.711202 -2.684396
+v -30.117123 6.711202 -2.684396
+s off
+f 9 10 12 11
+o Localiser_Plan.003
+v -38.940212 -7.889488 -2.684396
+v -30.117125 -7.889488 -2.684396
+v -38.940212 -4.223971 -2.684396
+v -30.117125 -4.223971 -2.684396
+s off
+f 13 14 16 15
+o Vertical_Speed_Plan.002
+v -29.570124 -4.718364 -2.684396
+v -26.876801 -4.713788 -2.684396
+v -29.528456 7.846082 -2.684396
+v -26.835133 7.850657 -2.684396
+s off
+f 17 18 20 19
+o PFD_Plan.004
+v -42.908882 -9.217942 -2.684396
+v -26.146378 -9.217942 -2.684396
+v -42.908882 14.918060 -2.684396
+v -26.146378 14.918060 -2.684396
+s off
+f 21 22 24 23
+o ND_Plan.107
+v -22.813946 -9.217942 -2.684396
+v -6.051440 -9.217942 -2.684396
+v -22.813946 14.918060 -2.684396
+v -6.051440 14.918060 -2.684396
+s off
+f 25 26 28 27
+o FCU_Plan.108
+v -6.507059 16.577757 26.295910
+v 50.183128 16.577757 26.295910
+v -6.507059 23.751425 26.295910
+v 50.183128 23.751425 26.295910
+s off
+f 29 30 32 31
+o Exterior_Plan.006
+v -46.568127 34.893536 7.561725
+v 12.047465 39.802032 9.644265
+v -46.951084 38.173790 10.614324
+v 11.661365 43.150181 12.620070
+v 13.887004 62.445206 40.607811
+v -35.566383 52.329830 33.684719
+s off
+f 33 34 36 37 38 35
diff --git a/src/examples/tobii_argaze/scene.blend b/src/examples/tobii_argaze/scene.blend
new file mode 100644
index 0000000..e7e5dda
--- /dev/null
+++ b/src/examples/tobii_argaze/scene.blend
Binary files differ
diff --git a/src/examples/tobii_argaze/tobii_argaze.py b/src/examples/tobii_argaze/tobii_argaze.py
new file mode 100644
index 0000000..8193a03
--- /dev/null
+++ b/src/examples/tobii_argaze/tobii_argaze.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python
+
+"""
+tobii_argaze.py
+
+Author:
+ - Théo de la Hogue, theo.de-la-hogue@enac.fr
+
+"""
+import os
+
+from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
+from argaze.RegionOfInterest import *
+from argaze.TobiiGlassesPro2 import *
+
+import cv2 as cv
+import pandas
+import matplotlib.pyplot as mpyplot
+import matplotlib.patches as mpatches
+
+# tobii glasses ip address
+ip_address = '192.168.1.10'
+
+# manage export folder
+current_folder = os.path.dirname(__file__)
+export_folder = os.path.join(current_folder, '_export')
+if not os.path.exists(export_folder):
+ os.makedirs(export_folder)
+ print(f'\'_export\' folder created')
+
+# create tobii controller
+tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
+
+# create tobii data thread
+tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
+tobii_data_thread.start()
+
+# create tobii video thread
+tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
+tobii_video_thread.start()
+
+# create aruco camera
+aruco_camera = ArUcoCamera.ArUcoCamera()
+aruco_camera.load_calibration_file('tobii_camera.json')
+
+# create aruco tracker
+aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionaries, marker length (cm), camera
+
+# create ROIs 3D scene
+roi3D_scene = ROI3DScene.ROI3DScene()
+roi3D_scene.load('roi3D_scene.obj')
+
+# start tobii glasses streaming
+tobii_controller.start_streaming()
+
+# process video frames
+last_frame_time = 0
+roi2D_buffer = []
+marker_buffer = []
+
+while True:
+
+ frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
+
+ # draw tobii gaze
+ # TODO : sync gaze data according frame pts
+ gp_data = tobii_data_thread.read_gaze_data(pts)
+ if 'TIMESTAMP' in gp_data:
+ pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
+ cv.circle(frame, pointer, 4, (0, 255, 255), -1)
+ else:
+ pointer = (0, 0)
+
+ # track markers with pose estimation and draw them
+ aruco_tracker.track(frame)
+ aruco_tracker.draw(frame)
+
+ # project 3D scenes related to each aruco markers
+ if aruco_tracker.get_markers_number():
+
+ for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+
+ # TODO : select different 3D scenes depending on aruco id
+
+ marker_rotation = aruco_tracker.get_marker_rotation(i)
+ marker_translation = aruco_tracker.get_marker_translation(i)
+
+ roi3D_scene.set_rotation(marker_rotation)
+ roi3D_scene.set_translation(marker_translation)
+
+ # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
+ # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distortion is low, it is acceptable.
+ roi2D_scene = roi3D_scene.project(frame, aruco_camera, False)
+
+ # check if gaze is inside 2D rois
+ roi2D_scene.inside(pointer)
+
+ # draw 2D rois
+ roi2D_scene.draw(frame)
+
+ # store roi2D into buffer
+ for roi2D in roi2D_scene:
+ roi2D['TIME'] = frame_time
+ del roi2D['VERTICES']
+ roi2D_buffer.append(roi2D)
+
+ # store marker into buffer
+ marker = {
+ 'TIME': frame_time,
+ 'ID': i,
+ 'X': marker_translation[0][0],
+ 'Y': marker_translation[0][1],
+ 'Z': marker_translation[0][2]
+ }
+ marker_buffer.append(marker)
+
+ cv.imshow(f'Live Scene', frame)
+
+ # quit on 'Esc' command
+ key = cv.waitKey(1)
+ if key == 27:
+ cv.destroyAllWindows()
+ last_frame_time = frame_time
+ break
+
+# stop tobii objects
+tobii_video_thread.stop()
+tobii_data_thread.stop()
+
+tobii_controller.stop_streaming()
+tobii_controller.close()
+
+# create a pandas DataFrame for each buffer
+ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
+gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
+gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
+data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
+video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])
+roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
+marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])
+
+# export all data frames
+ac_dataframe.to_csv(f'{export_folder}/accelerometer.csv', index=False)
+gy_dataframe.to_csv(f'{export_folder}/gyroscope.csv', index=False)
+gp_dataframe.to_csv(f'{export_folder}/gaze.csv', index=False)
+data_pts_dataframe.to_csv(f'{export_folder}/data_pts.csv', index=False)
+video_pts_dataframe.to_csv(f'{export_folder}/video_pts.csv', index=False)
+roi2D_dataframe.to_csv(f'{export_folder}/rois.csv', index=False)
+marker_dataframe.to_csv(f'{export_folder}/markers.csv', index=False)
+
+# edit figure
+figure = mpyplot.figure(figsize=(int(last_frame_time), 5))
+
+# plot gaze data
+subplot = figure.add_subplot(211)
+subplot.set_title('Gaze')
+
+subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
+subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)
+
+x_patch = mpatches.Patch(color='#276FB6', label='X')
+y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
+subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
+
+
+# plot maker position data
+subplot = figure.add_subplot(212)
+subplot.set_title('Marker')
+
+subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
+subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)
+
+x_patch = mpatches.Patch(color='#276FB6', label='X')
+y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
+subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
+
+# export figure
+mpyplot.tight_layout()
+mpyplot.savefig(f'{export_folder}/visualisation.svg')
+mpyplot.close('all') \ No newline at end of file
diff --git a/src/examples/tobii_argaze/tobii_camera.json b/src/examples/tobii_argaze/tobii_camera.json
new file mode 100644
index 0000000..b7b5108
--- /dev/null
+++ b/src/examples/tobii_argaze/tobii_camera.json
@@ -0,0 +1,29 @@
+{
+ "rms": 0.2778430441943373,
+ "camera matrix": [
+ [
+ 567.7948916261545,
+ 0.0,
+ 477.23038710185534
+ ],
+ [
+ 0.0,
+ 566.2897424860757,
+ 288.75352250724296
+ ],
+ [
+ 0.0,
+ 0.0,
+ 1.0
+ ]
+ ],
+ "distortion coefficients": [
+ [
+ 0.07351688052834335,
+ -0.18678684802766135,
+ 0.001473915039947321,
+ 0.0008389464646594935,
+ 0.13193649892597786
+ ]
+ ]
+} \ No newline at end of file
diff --git a/src/examples/tobii_camera_calibration.py b/src/examples/tobii_camera_calibration.py
new file mode 100644
index 0000000..0e621b5
--- /dev/null
+++ b/src/examples/tobii_camera_calibration.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+
+"""
+tobii_camera_calibration.py
+
+Author:
+ - Théo de la Hogue, theo.de-la-hogue@enac.fr
+
+This program:
+ - Captures board pictures with a full displayed board inside
+ - Outputs camera calibration data into a camera.json file
+
+Reference:
+ - https://automaticaddison.com/how-to-perform-pose-estimation-using-an-aruco-marker/
+"""
+import os
+import time
+
+from TobiiGlassesPro2 import TobiiController, TobiiVideo
+from ArUcoMarkers import ArUcoBoard, ArUcoTracker, ArUcoCamera
+
+import cv2 as cv
+
+# tobii glasses ip address
+ip_address = '192.168.1.10'
+
+# manage export folder
+current_folder = os.path.dirname(__file__)
+export_folder = os.path.join(current_folder, '_export')
+if not os.path.exists(export_folder):
+ os.makedirs(export_folder)
+ print(f'\'_export\' folder created')
+
+# create tobii controller
+tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
+
+# create tobii video thread
+tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
+tobii_video_thread.start()
+
+# create aruco camera
+aruco_camera = ArUcoCamera.ArUcoCamera()
+
+# create aruco board
+aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # 7 columns, 5 rows, square size (cm), marker size (cm)
+
+# create aruco tracker
+aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionaries, marker length (cm), camera
+
+# start tobii glasses streaming
+tobii_controller.start_streaming()
+
+print("Camera calibration starts")
+print("Waiting for calibration board...")
+
+frame_width = 0
+frame_height = 0
+
+expected_markers_number = len(aruco_board.get_ids())
+expected_corners_number = (aruco_board.get_size()[0] - 1 ) * (aruco_board.get_size()[1] - 1)
+
+# capture frame with a full displayed board
+while True:
+
+ frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
+
+ # track all markers in the board
+ aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
+
+ # draw only markers
+ aruco_tracker.draw(frame)
+
+ # draw current calibration data count
+ cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
+ cv.imshow('Tobii Camera Calibration', frame)
+
+ # if all board corners are detected
+ if aruco_tracker.get_board_corners_number() == expected_corners_number:
+
+ # draw board corners to notify a capture is done
+ aruco_tracker.draw_board(frame)
+
+ # append data
+ aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
+
+ cv.imshow(f'Tobii Camera Calibration', frame)
+
+ time.sleep(2)
+
+ # quit on 'Esc' command
+ key = cv.waitKey(1)
+ if key == 27:
+ cv.destroyAllWindows()
+ break
+
+# stop tobii objects
+tobii_video_thread.stop()
+
+tobii_controller.stop_streaming()
+tobii_controller.close()
+
+print('\nCalibrating camera...')
+aruco_camera.calibrate(aruco_board, frame_width, frame_height)
+
+print('\nCalibration succeeded!')
+print(f'\nRMS:\n{aruco_camera.get_rms()}')
+print(f'\nCamera matrix:\n{aruco_camera.get_K()}')
+print(f'\nDistortion coefficients:\n{aruco_camera.get_D()}')
+
+aruco_camera.save_calibration_file(os.path.join(export_folder, 'tobii_camera.json'))
+
+print(f'\nCalibration data exported into tobii_camera.json file')
+