aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--README.md50
-rw-r--r--setup.py2
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoBoard.py20
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py20
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoMarkers.py28
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoTracker.py43
-rw-r--r--src/argaze/ArUcoMarkers/README.md14
-rw-r--r--src/argaze/ArUcoMarkers/__init__.py4
-rw-r--r--src/argaze/RegionOfInterest/README.md9
-rw-r--r--src/argaze/RegionOfInterest/ROI2DScene.py18
-rw-r--r--src/argaze/RegionOfInterest/ROI3DScene.py43
-rw-r--r--src/argaze/RegionOfInterest/__init__.py4
-rw-r--r--src/argaze/TobiiGlassesPro2/README.md161
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiController.py35
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiData.py96
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiVideo.py18
-rw-r--r--src/argaze/TobiiGlassesPro2/__init__.py4
-rw-r--r--src/argaze/TobiiGlassesPro2/utils/A4_calibration_target.pdf (renamed from src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf)bin1965 -> 1965 bytes
-rw-r--r--src/argaze/__init__.py4
-rw-r--r--src/argaze/utils/README.md31
-rw-r--r--src/argaze/utils/__init__.py4
-rw-r--r--src/argaze/utils/calibrate_tobii_camera.py118
-rw-r--r--src/argaze/utils/data/roi3D_scene.obj (renamed from src/examples/tobii_argaze/roi3D_scene.obj)0
-rw-r--r--src/argaze/utils/data/tobii_camera.json (renamed from src/examples/tobii_argaze/tobii_camera.json)0
-rw-r--r--src/argaze/utils/export_aruco_markers.py31
-rw-r--r--src/argaze/utils/export_calibration_board.py35
-rw-r--r--src/argaze/utils/track_aruco_rois_with_tobii_glasses.py194
-rw-r--r--src/examples/README.md35
-rw-r--r--src/examples/export_aruco_markers.py24
-rw-r--r--src/examples/export_calibration_board.py24
-rw-r--r--src/examples/tobii_argaze/scene.blendbin1929540 -> 0 bytes
-rw-r--r--src/examples/tobii_argaze/tobii_argaze.py180
-rw-r--r--src/examples/tobii_camera_calibration.py113
34 files changed, 757 insertions, 606 deletions
diff --git a/.gitignore b/.gitignore
index ce6fad6..70115cb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,5 +2,6 @@
__pycache__
_export
dist
+doc
*.egg-info
*.blend1 \ No newline at end of file
diff --git a/README.md b/README.md
index eee7721..902f796 100644
--- a/README.md
+++ b/README.md
@@ -1,32 +1,54 @@
-# ArGaze library
+An open-source python toolkit to deal with gaze tracking and analysis in Augmented Reality (AR) environment.
-_An open-source python toolkit to deal with gaze tracking and analysis in Augmented Reality (AR) environnement._
+## Architecture
+
+The ArGaze toolkit is divided in submodules dedicated to various purposes:
+
+* ArUcoMarkers: ArUco markers generator, tracking, camera calibration, ...
+* RegionOfInterest: Region Of Interest (ROI) scene management for 2D and 3D environment.
+* TobiiGlassesPro2: A gaze tracking device interface.
+* utils: Collection of ready-to-use high level feature scripts based on ArGaze toolkit.
## Installation
-In the ArGaze root folder:
+Consider that all inline commands below need to be executed in the ArGaze root folder.
+
+- Build package:
```
python -m build
```
-Then,
+- Then, install package (replace VERSION by what has been built into dist folder):
```
-pip install dist/argaze-VERSION.whl
+pip install ./dist/argaze-VERSION.whl
```
-(Replace VERSION by what will be built into dist folder)
+As an ArGaze library developer, you should prefer to install the package in developer mode to test live code changes:
-## Architecture
+```
+pip install -e .
+```
-The ArGaze toolkit is divided in submodules dedicated to various purposes:
+## Documentation
-* ArUcoMarkers: ArUco markers generator, traking, camera calibration, ...
-* RegionOfInterest: Region Of Interest (ROI) scene management for 2D and 3D environment.
-* TobiiGlassesPro2: a gaze tracking device interface.
-* _..._
+To generate html documentation using [pdoc](https://pdoc.dev/),
+
+- Install 'pdoc' package:
+
+```
+pip install pdoc
+```
-## Examples
+- Then, build documentation into doc folder:
-The ArGaze toolkit provides a set of python scripts to undertand how the submodules can be used together.
+```
+pdoc -o ./doc ./src/argaze/
+```
+
+As an ArGaze library developer, you should prefer to create a local html server to watch live documentation changes:
+
+```
+pdoc ./src/argaze/
+``` \ No newline at end of file
diff --git a/setup.py b/setup.py
index 6b32df2..efaa0d3 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ setup(
packages=find_packages(where='src'),
python_requires='>=3.6, <4',
- install_requires=['opencv-python', 'opencv-contrib-python', 'av', 'rtsp', 'tobiiglassesctrl'],
+ install_requires=['opencv-python', 'opencv-contrib-python', 'numpy', 'av', 'rtsp', 'tobiiglassesctrl'],
project_urls={
'Bug Reports': 'https://git.recherche.enac.fr/projects/argaze/issues',
diff --git a/src/argaze/ArUcoMarkers/ArUcoBoard.py b/src/argaze/ArUcoMarkers/ArUcoBoard.py
index fa76303..ce4abd8 100644
--- a/src/argaze/ArUcoMarkers/ArUcoBoard.py
+++ b/src/argaze/ArUcoMarkers/ArUcoBoard.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
import numpy
import cv2 as cv
import cv2.aruco as aruco
@@ -24,9 +26,10 @@ ARUCO_DICT = {
}
class ArUcoBoard():
+ """Calibration chess board with ArUco markers inside."""
- # initialisation
- def __init__(self, aruco_dictionary_name, columns, rows, square_size, marker_size):
+ def __init__(self, aruco_dictionary_name: str, columns: int, rows: int, square_size: float, marker_size: float):
+ """Create columnsxrows chess board with ArUco markers type at given sizes in centimeters."""
# check aruco dictionary name
if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
@@ -49,27 +52,24 @@ class ArUcoBoard():
# create board model
self.__board = aruco.CharucoBoard_create(self.__columns, self.__rows, self.__square_size/100., self.__marker_size/100., self.__aruco_dict)
- # destruction
def __del__(self):
pass
- # access to the board model
def get_model(self):
-
+ """Get the board model."""
return self.__board
- # access to the board markers ids
def get_ids(self):
-
+ """Get board markers ids."""
return self.__board.ids
- # access to the number of columns and rows
def get_size(self):
+ """Get numbers of columns and rows."""
return self.__board.getChessboardSize()
- # save a picture of the calibration board
- def export(self, destination_folder, dpi):
+ def export(self, destination_folder: str, dpi: int):
+ """Save a picture of the calibration board."""
output_filename = f'board_{self.__columns*self.__square_size}cmx{self.__rows*self.__square_size}cm_markers_{self.__aruco_dict_format}_{self.__marker_size}cm.png'
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index 163391f..cc9632b 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -1,11 +1,16 @@
+#!/usr/bin/env python
+
import json
import numpy
import cv2.aruco as aruco
class ArUcoCamera():
+ """Camera with optical parameters."""
- # initialisation
def __init__(self):
+ """Define optical parameters with:
+ - camera matrix as K,
+ - camera distortion coefficients as D."""
self.__rms = 0 # root mean square error
self.__K = [] # camera matrix (focal lengths and principal point)
@@ -16,12 +21,11 @@ class ArUcoCamera():
self.__corners_set = []
self.__corners_set_ids = []
- # destruction
def __del__(self):
pass
- # load camera calibration data
def load_calibration_file(self, camera_calibration_filepath):
+ """Load optical parameters from .json file."""
with open(camera_calibration_filepath) as calibration_file:
@@ -32,6 +36,7 @@ class ArUcoCamera():
self.__D = numpy.asarray(calibration_data['distortion coefficients'])
def save_calibration_file(self, camera_calibration_filepath):
+ """Save optical parameters into .json file."""
calibration_data = {'rms': self.__rms, 'camera matrix': self.__K.tolist(), 'distortion coefficients': self.__D.tolist()}
@@ -40,33 +45,40 @@ class ArUcoCamera():
json.dump(calibration_data, calibration_file, ensure_ascii=False, indent=4)
def get_rms(self):
+ """Get Root Mean Square (rms) error."""
return self.__rms
def get_K(self):
+ """Get camera matrix."""
return self.__K
def get_D(self):
+ """Get camera distortion coefficients."""
return self.__D
def calibrate(self, board, frame_width, frame_height):
+ """Retrieve camera optical parameters from stored calibration data."""
if self.__corners_set_number > 0:
self.__rms, self.__K, self.__D, r, t = aruco.calibrateCameraCharuco(self.__corners_set, self.__corners_set_ids, board.get_model(), [frame_width, frame_height], None, None)
- def reset_calibration_data(self, corners, corners_ids):
+ def reset_calibration_data(self):
+ """Clear all calibration data."""
self.__corners_set_number = 0
self.__corners_set = []
self.__corners_set_ids = []
def store_calibration_data(self, corners, corners_ids):
+ """Store calibration data."""
self.__corners_set_number += 1
self.__corners_set.append(corners)
self.__corners_set_ids.append(corners_ids)
def get_calibration_data_count(self):
+ """Get how much calibration data are stored."""
return self.__corners_set_number
diff --git a/src/argaze/ArUcoMarkers/ArUcoMarkers.py b/src/argaze/ArUcoMarkers/ArUcoMarkers.py
index 1499218..9a5c519 100644
--- a/src/argaze/ArUcoMarkers/ArUcoMarkers.py
+++ b/src/argaze/ArUcoMarkers/ArUcoMarkers.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
import numpy
import cv2 as cv
import cv2.aruco as aruco
@@ -24,9 +26,28 @@ ARUCO_DICT = {
}
class ArUcoMarkers():
+ """Markers factory."""
- # initialisation
def __init__(self, aruco_dictionary_name):
+ """Define which markers library to edit:
+ - DICT_4X4_50
+ - DICT_4X4_100
+ - DICT_4X4_250
+ - DICT_4X4_1000
+ - DICT_5X5_50
+ - DICT_5X5_100
+ - DICT_5X5_250
+ - DICT_5X5_1000
+ - DICT_6X6_50
+ - DICT_6X6_100
+ - DICT_6X6_250
+ - DICT_6X6_1000
+ - DICT_7X7_50
+ - DICT_7X7_100
+ - DICT_7X7_250
+ - DICT_7X7_1000
+ - DICT_ARUCO_ORIGINAL
+ """
# check aruco dictionary name
if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
@@ -40,12 +61,11 @@ class ArUcoMarkers():
# load ArUco dictionary
self.__aruco_dict = aruco.Dictionary_get(ARUCO_DICT[aruco_dictionary_name])
- # destruction
def __del__(self):
pass
- # save one marker
def export(self, destination_folder, dpi, i):
+ """Save one marker into a .png file."""
if i >= 0 and i < self.__aruco_dict_number:
@@ -61,8 +81,8 @@ class ArUcoMarkers():
else:
raise ValueError(f'Bad ArUco index: {i}')
- # save all markers
def export_all(self, destination_folder, dpi):
+ """Save all library markers into separated .png files."""
for i in range(self.__aruco_dict_number):
diff --git a/src/argaze/ArUcoMarkers/ArUcoTracker.py b/src/argaze/ArUcoMarkers/ArUcoTracker.py
index 1b05e4a..f5adb4c 100644
--- a/src/argaze/ArUcoMarkers/ArUcoTracker.py
+++ b/src/argaze/ArUcoMarkers/ArUcoTracker.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
import numpy
import cv2 as cv
import cv2.aruco as aruco
@@ -24,9 +26,28 @@ ARUCO_DICT = {
}
class ArUcoTracker():
+ """Track ArUco markers into a frame."""
- # initialisation
def __init__(self, aruco_dictionary_name, marker_length, camera):
+ """Define which markers library to track and their size:
+ - DICT_4X4_50
+ - DICT_4X4_100
+ - DICT_4X4_250
+ - DICT_4X4_1000
+ - DICT_5X5_50
+ - DICT_5X5_100
+ - DICT_5X5_250
+ - DICT_5X5_1000
+ - DICT_6X6_50
+ - DICT_6X6_100
+ - DICT_6X6_250
+ - DICT_6X6_1000
+ - DICT_7X7_50
+ - DICT_7X7_100
+ - DICT_7X7_250
+ - DICT_7X7_1000
+ - DICT_ARUCO_ORIGINAL
+ """
# check aruco dictionary name
if ARUCO_DICT.get(aruco_dictionary_name, None) is None:
@@ -58,12 +79,11 @@ class ArUcoTracker():
self.__board_corners = []
self.__board_corners_ids = []
- # destruction
def __del__(self):
pass
- # track ArUco markers in frame
def track(self, frame, estimate_pose = True):
+ """Track ArUco markers in frame."""
# DON'T MIRROR FRAME : it makes the markers detection to fail
@@ -82,8 +102,8 @@ class ArUcoTracker():
self.__tvecs = []
self.__points = []
- # track ArUco markers board in frame setting up the number of detected markers needed to agree detection
def track_board(self, frame, board, expected_markers_number):
+ """Track ArUco markers board in frame setting up the number of detected markers needed to agree detection."""
# DON'T MIRROR FRAME : it makes the markers detection to fail
@@ -104,8 +124,8 @@ class ArUcoTracker():
self.__board_corners = []
self.__board_corners_ids = []
- # draw tracked markers in frame
def draw(self, frame):
+ """Draw tracked markers in frame."""
# draw detected markers square
if len(self.__markers_corners) > 0:
@@ -119,39 +139,46 @@ class ArUcoTracker():
aruco.drawAxis(frame, self.__camera.get_K(), self.__camera.get_D(), self.__rvecs[i], self.__tvecs[i], self.__marker_length)
- # draw tracked board corners in frame
def draw_board(self, frame):
+ """Draw tracked board corners in frame."""
if self.__board != None:
cv.drawChessboardCorners(frame, ((self.__board.get_size()[0] - 1 ), (self.__board.get_size()[1] - 1)), self.__board_corners, True)
- # access to tracked markers data
def get_markers_number(self):
+ """Get tracked markers number."""
return len(self.__markers_corners)
def get_markers_ids(self):
+ """Get tracked markers identifiers."""
return self.__markers_ids
def get_marker_corners(self, i):
+ """Get marker i corners."""
return self.__markers_corners[i]
def get_marker_rotation(self, i):
+ """Get marker i rotation vector."""
return self.__rvecs[i]
def get_marker_translation(self, i):
+ """Get marker i translation vector."""
return self.__tvecs[i]
def get_marker_points(self, i):
+ """Get marker i points."""
return self.__points[i]
- # access to tracked board data
def get_board_corners_number(self):
+ """Get tracked board corners number."""
return self.__board_corners_number
def get_board_corners_ids(self):
+ """Get tracked board corners identifiers."""
return self.__board_corners_ids
def get_board_corners(self):
+ """Get tracked board corners."""
return self.__board_corners
diff --git a/src/argaze/ArUcoMarkers/README.md b/src/argaze/ArUcoMarkers/README.md
index 205ec91..f882cce 100644
--- a/src/argaze/ArUcoMarkers/README.md
+++ b/src/argaze/ArUcoMarkers/README.md
@@ -1,13 +1,7 @@
-# ArUcoMarkers
+Class interface to work with [OpenCV ArUco markers](https://docs.opencv.org/4.x/d5/dae/tutorial_aruco_detection.html).
-_Class interface to work with [OpenCV ArUco markers](https://docs.opencv.org/4.x/d5/dae/tutorial_aruco_detection.html)._
+## Utils
+Print **A3_board_35cmx25cm_markers_4X4_3cm.pdf** onto A3 paper sheet to get board at expected dimensions.
-## ArUcoBoard.py
-
-## ArUcoCamera.py
-
-## ArUcoMarkers.py
-
-## ArUcoTracker.py
-
+Print **A4_markers_4x4_3cm.pdf** onto A4 paper sheet to get markers at expected dimensions. \ No newline at end of file
diff --git a/src/argaze/ArUcoMarkers/__init__.py b/src/argaze/ArUcoMarkers/__init__.py
index 188e407..1d214be 100644
--- a/src/argaze/ArUcoMarkers/__init__.py
+++ b/src/argaze/ArUcoMarkers/__init__.py
@@ -1 +1,5 @@
+"""
+.. include:: README.md
+"""
+__docformat__ = "restructuredtext"
__all__ = ['ArUcoBoard', 'ArUcoCamera', 'ArUcoMarkers', 'ArUcoTracker'] \ No newline at end of file
diff --git a/src/argaze/RegionOfInterest/README.md b/src/argaze/RegionOfInterest/README.md
index 6cff14e..7c22479 100644
--- a/src/argaze/RegionOfInterest/README.md
+++ b/src/argaze/RegionOfInterest/README.md
@@ -1,11 +1,4 @@
-# RegionOfInterest
-
-_Class interface to manage [ROIs](https://en.wikipedia.org/wiki/Region_of_interest)._
-
-
-## ROI2DScene.py
-
-## ROI3DScene.py
+Class interface to manage [ROIs](https://en.wikipedia.org/wiki/Region_of_interest).
diff --git a/src/argaze/RegionOfInterest/ROI2DScene.py b/src/argaze/RegionOfInterest/ROI2DScene.py
index 6e0faf4..d025cb2 100644
--- a/src/argaze/RegionOfInterest/ROI2DScene.py
+++ b/src/argaze/RegionOfInterest/ROI2DScene.py
@@ -1,29 +1,37 @@
+#!/usr/bin/env python
+
import cv2 as cv
import matplotlib.path as mpath
class ROI2DScene(list):
+ """List of ROI3D dictionary.
+ ```
+ {
+ 'NAME': str,
+ 'VERTICES': array of (x, y) tuples,
+ 'POINTER_INSIDE': bool
+ }
+ ```
+ """
- # subclass list
def __new__(cls):
return super(ROI2DScene, cls).__new__(cls)
- # initialisation
def __init__(self):
pass
- # destruction
def __del__(self):
pass
- # check if a (x, y) pointer is inside rois
def inside(self, pointer):
+ """Check if a (x, y) pointer is inside ROIs."""
for roi in self:
roi['POINTER_INSIDE'] = mpath.Path(roi['VERTICES']).contains_points([pointer])[0]
- # draw projected polygons
def draw(self, frame):
+ """Draw ROI polygons on frame."""
for roi in self:
diff --git a/src/argaze/RegionOfInterest/ROI3DScene.py b/src/argaze/RegionOfInterest/ROI3DScene.py
index d292d2a..cf55e63 100644
--- a/src/argaze/RegionOfInterest/ROI3DScene.py
+++ b/src/argaze/RegionOfInterest/ROI3DScene.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
import math
import re
@@ -8,27 +10,29 @@ import cv2 as cv
import matplotlib.path as mpath
class ROI3DScene(list):
+ """List of ROI3D dictionary.
+ ```
+ {
+ 'NAME': str,
+ 'VERTICES': array of (x, y, z) tuples
+ }
+ ```
+ """
- # subclass list
def __new__(cls):
return super(ROI3DScene, cls).__new__(cls)
- # initialisation
def __init__(self):
# define rotation and translation matrix
self.__rotation = [0, 0, 0]
self.__translation = [0, 0, 0]
- # define a zero distorsion matrix
- self.__D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
-
- # destruction
def __del__(self):
pass
- # load scen from .obj file
- def load(self, obj_filepath):
+ def load(self, obj_filepath: str):
+ """Load ROI3D scene from .obj file."""
# regex rules for .obj file parsing
OBJ_RX_DICT = {
@@ -101,24 +105,20 @@ class ROI3DScene(list):
roi3D['VERTICES'] = [ vertices[i-1] for i in roi3D['FACE'] ]
roi3D.pop('FACE', None)
- # print scene
- for roi3D in self:
- name = roi3D['NAME']
- vertices = roi3D['VERTICES']
-
except IOError:
raise IOError(f'File not found: {obj_filepath}')
- def set_rotation(self, rvec):
-
+ def set_rotation(self, rvec: list):
+ """Set scene rotation vector."""
self.__rotation = rvec
- def set_translation(self, tvec):
-
+ def set_translation(self, tvec: list):
+ """Set scene translation vector."""
self.__translation = tvec
- # project 3D scene onto 2D scene through a camera
- def project(self, frame, camera, apply_distorsion = True):
+ def project(self, K, D):
+ """Project 3D scene onto 2D scene according to optical parameters.
+ **Returns:** ROI2DScene"""
roi2D_scene = ROI2DScene.ROI2DScene()
@@ -126,12 +126,13 @@ class ROI3DScene(list):
vertices_3D = numpy.array(roi3D['VERTICES']).astype('float32')
- vertices_2D, J = cv.projectPoints(vertices_3D, self.__rotation, self.__translation, camera.get_K(), camera.get_D() if apply_distorsion else self.__D0)
+ vertices_2D, J = cv.projectPoints(vertices_3D, self.__rotation, self.__translation, K, D)
vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2))
roi2D = {
'NAME': roi3D['NAME'],
- 'VERTICES': vertices_2D
+ 'VERTICES': vertices_2D,
+ 'POINTER_INSIDE': False
}
roi2D_scene.append(roi2D)
diff --git a/src/argaze/RegionOfInterest/__init__.py b/src/argaze/RegionOfInterest/__init__.py
index f81f73f..8c16ff2 100644
--- a/src/argaze/RegionOfInterest/__init__.py
+++ b/src/argaze/RegionOfInterest/__init__.py
@@ -1 +1,5 @@
+"""
+.. include:: README.md
+"""
+__docformat__ = "restructuredtext"
__all__ = ['ROI2DScene', 'ROI3DScene'] \ No newline at end of file
diff --git a/src/argaze/TobiiGlassesPro2/README.md b/src/argaze/TobiiGlassesPro2/README.md
index 378deb5..fcacf41 100644
--- a/src/argaze/TobiiGlassesPro2/README.md
+++ b/src/argaze/TobiiGlassesPro2/README.md
@@ -1,113 +1,76 @@
-# TobiiGlassesPro2
+Class interface to handle TobiiGlassesPro2 data and video stream.
+It is based on [TobiiGlassesPyController package](https://github.com/ddetommaso/TobiiGlassesPyController).
-_This module provides a class interface to handle TobbiGlassesPro2 data and video stream.
-It is based on [TobiiGlassesPyController package](https://github.com/ddetommaso/TobiiGlassesPyController)._
+## Utils
-## To connect Tobii glasses on Mac OS
+Print **A4_calibration_target.pdf** onto A4 paper sheet to get markers at expected dimensions.
-* Install DHCP server: brew install isc-dhcp
+## Local network configuration
+
+If the Tobii Glasses aren't connected to a router, here is how to configure a local DHCP server to enable device connection.
+
+* Install DHCP server:
+```
+brew install isc-dhcp
+```
* Setup DHCP server:
- * /usr/local/etc/dhcpd.conf
- * /usr/local/etc/master.conf
-* Setup USB LAN Interface:
- * ip: 192.168.1.1
- * subnet: 255.255.255.0
- * router: 192.168.1.254
+**/usr/local/etc/dhcpd.conf**
+```
+# NECESSARY TO BE A DHCP SERVER
+authoritative;
-* Launch Tobii glasses interface to enable USB LAN Interface before to Launch DHCP server.
+# DHCP CONFIGURATION INFORMATION
+default-lease-time 43200;
+max-lease-time 86400;
+server-name "dhcpserver.robotron.lan";
-* Launch DHCP server: sudo /usr/local/sbin/dhcpd
+# DNS SERVERS DHCP WILL PUSH TO CLIENTS
+option domain-name-servers 192.168.1.1;
-* Kill DHCP server: sudo kill `cat /usr/local/var/dhcpd/dhcpd.pid`
+# SEARCH DOMAINS DHCP WILL PUSH TO CLIENTS
+option domain-name "robotron.lan";
-* Watch DHCP server: sudo log stream --info --debug --predicate "process == 'dhcpd'"
+# DHCP STATIC IP ASSIGNMENTS FILE
+include "/usr/local/etc/master.conf";
-## Tobii data structure
+# SUBNET FOR IP ADDRESSES MANUALLY/STATICALLY ASSIGNED ONLY
+subnet 192.168.1.0 netmask 255.255.255.0 {
+ option broadcast-address 192.168.1.255;
+ option subnet-mask 255.255.255.0;
+ option routers 192.168.1.254;
+}
+```
-Each data frame have the following structure:
+**/usr/local/etc/master.conf**
```
-{
- 'mems': {
- 'ac': {
- 'ts': 1042711827,
- 's': 0,
- 'ac': [0.549, -9.868, 2.203]
- },
- 'gy': {
- 'ts': 1042723807,
- 's': 0,
- 'gy': [2.94, -2.384, 1.428]
- }
- },
- 'right_eye': {
- 'pc': {
- 'ts': 1042743553,
- 's': 0, 'gidx': 39971,
- 'pc': [-35.24, -25.51, -31.66],
- 'eye': 'right'
- },
- 'pd': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'pd': 3.72,
- 'eye': 'right'
- },
- 'gd': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'gd': [0.1833, 0.307, 0.9339],
- 'eye': 'right'
- }
- },
- 'left_eye': {
- 'pc': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'pc': [29.96, -27.92, -40.9],
- 'eye': 'left'
- },
- 'pd': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'pd': 4.42,
- 'eye': 'left'
- },
- 'gd': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'gd': [0.1528, 0.2977, 0.9423],
- 'eye': 'left'
- }
- },
- 'gp': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'l': 82832,
- 'gp': [0.3975, 0.2228]
- },
- 'gp3': {
- 'ts': 1042743553,
- 's': 0,
- 'gidx': 39971,
- 'gp3': [313.64, 542.2, 1728.85]
- },
- 'pts': {
- 'ts': 1042123917,
- 's': 0,
- 'pts': 93695,
- 'pv': 7
- },
- 'vts': {
- 'ts': -1
- }
-}
-``` \ No newline at end of file
+# Static IP assignments
+## SUBNET - 192.168.1.0/24
+host tobiiglasses { hardware ethernet 74:fe:48:34:7c:92; fixed-address 192.168.1.10; }
+```
+Replace 74:fe:48:34:7c:92 by the correct MAC address.
+
+* Setup USB LAN Interface:
+```
+ip: 192.168.1.1
+subnet: 255.255.255.0
+router: 192.168.1.254
+```
+
+* Launch Tobii glasses interface to enable USB LAN Interface before launching the DHCP server.
+
+* Launch DHCP server:
+```
+sudo /usr/local/sbin/dhcpd
+```
+* Kill DHCP server:
+```
+sudo kill `cat /usr/local/var/dhcpd/dhcpd.pid`
+```
+
+* Watch DHCP server activity:
+```
+sudo log stream --info --debug --predicate "process == 'dhcpd'"
+```
diff --git a/src/argaze/TobiiGlassesPro2/TobiiController.py b/src/argaze/TobiiGlassesPro2/TobiiController.py
index aafa225..10af5fe 100644
--- a/src/argaze/TobiiGlassesPro2/TobiiController.py
+++ b/src/argaze/TobiiGlassesPro2/TobiiController.py
@@ -1,24 +1,33 @@
+#!/usr/bin/env python
+
import tobiiglassesctrl
class TobiiController(tobiiglassesctrl.TobiiGlassesController):
+ """As TobiiController inherits from TobiiGlassesPyController, here is its [code](https://github.com/ddetommaso/TobiiGlassesPyController/blob/master/tobiiglassesctrl/controller.py)."""
+
+ project_id = None
+ """Project identifier."""
+
+ participant_id = None
+ """Participant identifier."""
+
+ calibration_id = None
+ """Calibration identifier."""
- # initialisation
def __init__(self, ip_address, project_name, participant_id):
+ """Create a project, a participant and start calibration."""
super().__init__(ip_address, video_scene = True)
- # edit project and participant
- project_id = super().create_project(project_name)
- participant_id = super().create_participant(project_id, project_name)
- '''
- # start calibration
- input("Position Tobbi glasses calibration target then presse 'Enter' to start calibration.")
- calibration_id = super().create_calibration(project_id, participant_id)
- super().start_calibration(calibration_id)
-
- if not super().wait_until_calibration_is_done(calibration_id):
- ogging.error('TobiiController.__init__() : Calibration has failed')
- '''
+ self.project_id = super().create_project(project_name)
+ self.participant_id = super().create_participant(project_id, project_name)
+
+ input('Position Tobbi glasses calibration target then presse \'Enter\' to start calibration.')
+ self.calibration_id = super().create_calibration(self.project_id, self.participant_id)
+ super().start_calibration(self.calibration_id)
+
+ if not super().wait_until_calibration_is_done(self.calibration_id):
+ raise Error('Tobii calibration failed')
# destruction
def __del__(self):
diff --git a/src/argaze/TobiiGlassesPro2/TobiiData.py b/src/argaze/TobiiGlassesPro2/TobiiData.py
index 6bfc89e..8e0a8b3 100644
--- a/src/argaze/TobiiGlassesPro2/TobiiData.py
+++ b/src/argaze/TobiiGlassesPro2/TobiiData.py
@@ -1,12 +1,18 @@
+#!/usr/bin/env python
+
import threading
import time
+from argaze.TobiiGlassesPro2 import TobiiController
+
class TobiiDataThread(threading.Thread):
+ """Handle data reception in a separate thread."""
- # initialisation
- def __init__(self, controller):
+ def __init__(self, controller: TobiiController.TobiiController):
+ """Initialise thread super class and prepare data reception."""
threading.Thread.__init__(self)
+
self.stop_event = threading.Event()
self.read_lock = threading.Lock()
@@ -15,18 +21,16 @@ class TobiiDataThread(threading.Thread):
self.fps = self.controller.get_et_freq()
self.sleep = 1./self.fps
- self.__ac_buffer = [] # accelerometer
- self.__gy_buffer = [] # gyroscope
- self.__gp_buffer = [] # gaze point
- self.__pts_buffer = [] # presentation timestamp
+ self.__ac_buffer = []
+ self.__gy_buffer = []
+ self.__gp_buffer = []
+ self.__pts_buffer = []
self.__start_ts = 0
- # destruction
def __del__(self):
pass
- # extract ac data
def __get_ac(self, data):
ac_value = data['mems']['ac']['ac']
@@ -41,7 +45,6 @@ class TobiiDataThread(threading.Thread):
return ac_data
- # extract gy data
def __get_gy(self, data):
gy_value = data['mems']['gy']['gy']
@@ -56,7 +59,6 @@ class TobiiDataThread(threading.Thread):
return gy_data
- # extract gp data
def __get_gp(self, data):
gp_value = data['gp']['gp']
@@ -70,7 +72,6 @@ class TobiiDataThread(threading.Thread):
return gp_data
- # extract pts data
def __get_pts(self, data):
pts_value = data['pts']['pts']
@@ -83,8 +84,8 @@ class TobiiDataThread(threading.Thread):
return pts_data
- # thread start
def run(self):
+ """Data reception function."""
while not self.stop_event.isSet():
@@ -148,8 +149,19 @@ class TobiiDataThread(threading.Thread):
self.read_lock.release()
- # read ac data
- def read_accelerometer_data(self, timestamp):
+ def read_accelerometer_data(self, timestamp: int = -1):
+ """Get accelerometer data at a given timestamp.
+ **Returns:** accelerometer dictionary
+ ```
+ {
+ 'TIMESTAMP': int,
+ 'TIME': int,
+ 'X': float,
+ 'Y': float,
+ 'Z': float
+ }
+ ```
+ """
if len(self.__ac_buffer):
@@ -166,8 +178,9 @@ class TobiiDataThread(threading.Thread):
return {}
- # read ac buffer
def read_accelerometer_buffer(self):
+ """Get accelerometer data buffer.
+ **Returns:** accelerometer dictionary array"""
self.read_lock.acquire()
@@ -177,8 +190,19 @@ class TobiiDataThread(threading.Thread):
return ac_buffer
- # read gy data
- def read_gyroscope_data(self, timestamp):
+ def read_gyroscope_data(self, timestamp: int = -1):
+ """Get gyroscope data at a given timestamp.
+ **Returns:** gyroscope dictionary
+ ```
+ {
+ 'TIMESTAMP': int,
+ 'TIME': int,
+ 'X': float,
+ 'Y': float,
+ 'Z': float
+ }
+ ```
+ """
if len(self.__gy_buffer):
@@ -195,8 +219,9 @@ class TobiiDataThread(threading.Thread):
return {}
- # read gy buffer
def read_gyroscope_buffer(self):
+ """Get gyroscope data buffer.
+ **Returns:** gyroscope dictionary array"""
self.read_lock.acquire()
@@ -206,8 +231,18 @@ class TobiiDataThread(threading.Thread):
return gy_buffer
- # read gp data
- def read_gaze_data(self, timestamp):
+ def read_gaze_data(self, timestamp: int = -1):
+ """Get gaze data at a given timestamp.
+ **Returns:** gaze dictionary
+ ```
+ {
+ 'TIMESTAMP': int,
+ 'TIME': int,
+ 'X': float,
+ 'Y': float
+ }
+ ```
+ """
if len(self.__gp_buffer):
@@ -224,8 +259,9 @@ class TobiiDataThread(threading.Thread):
return {}
- # read gp buffer
def read_gaze_buffer(self):
+ """Get gaze data buffer.
+ **Returns:** gaze dictionary array"""
self.read_lock.acquire()
@@ -235,8 +271,17 @@ class TobiiDataThread(threading.Thread):
return gp_buffer
- # read pts data
- def read_pts_data(self, timestamp):
+ def read_pts_data(self, timestamp: int = -1):
+ """Get Presentation Time Stamp (pts) data at a given timestamp.
+ **Returns:** pts dictionary
+ ```
+ {
+ 'TIMESTAMP': int,
+ 'TIME': int,
+ 'PTS': int
+ }
+ ```
+ """
if len(self.__pts_buffer):
@@ -253,8 +298,9 @@ class TobiiDataThread(threading.Thread):
return {}
- # read pts buffer
def read_pts_buffer(self):
+ """Get Presentation Time Stamp (pts) data buffer.
+ **Returns:** pts dictionary array"""
self.read_lock.acquire()
@@ -264,8 +310,8 @@ class TobiiDataThread(threading.Thread):
return pts_buffer
- # thread stop
def stop(self):
+ """Stop data reception definitively."""
self.stop_event.set()
threading.Thread.join(self)
diff --git a/src/argaze/TobiiGlassesPro2/TobiiVideo.py b/src/argaze/TobiiGlassesPro2/TobiiVideo.py
index 8777a02..babc30b 100644
--- a/src/argaze/TobiiGlassesPro2/TobiiVideo.py
+++ b/src/argaze/TobiiGlassesPro2/TobiiVideo.py
@@ -1,12 +1,15 @@
+#!/usr/bin/env python
+
import threading
import av
import numpy
class TobiiVideoThread(threading.Thread):
+ """Handle video camera stream capture in a separate thread."""
- # initialisation
def __init__(self, controller):
+ """Initialise thread super class and prepare camera video stream reception."""
threading.Thread.__init__(self)
self.stop_event = threading.Event()
@@ -25,12 +28,11 @@ class TobiiVideoThread(threading.Thread):
self.read_lock.release()
- # destruction
def __del__(self):
pass
- # thread start
def run(self):
+ """Video camera stream capture function."""
# start Tobii glasses stream capture
self.__container = av.open(f'rtsp://{self.controller.get_address()}:8554/live/scene', options={'rtsp_transport': 'tcp'})
@@ -48,12 +50,11 @@ class TobiiVideoThread(threading.Thread):
self.__height = f.height
self.__pts_buffer.append({'TIME':f.time, 'PTS': f.pts})
- #print(f'Tobii Video Frame: pts = {f.pts}, time = {f.time}, format = {f.width}, {f.height}')
-
self.read_lock.release()
- # read frame
def read(self) :
+ """Read video frame.
+ **Returns:** frame, frame width, frame height, frame time, frame pts."""
# if stopped, return blank frame
if self.stop_event.isSet():
@@ -77,8 +78,9 @@ class TobiiVideoThread(threading.Thread):
return frame_copy, width_copy, height_copy, time_copy, pts_copy
- # read pts buffer
+
def read_pts_buffer(self):
+ """Get Presentation Time Stamp data buffer."""
self.read_lock.acquire()
@@ -88,8 +90,8 @@ class TobiiVideoThread(threading.Thread):
return pts_buffer
- # thread stop
def stop(self):
+ """Stop video camera stream capture definitively."""
self.stop_event.set()
threading.Thread.join(self)
diff --git a/src/argaze/TobiiGlassesPro2/__init__.py b/src/argaze/TobiiGlassesPro2/__init__.py
index 3884106..7d712c6 100644
--- a/src/argaze/TobiiGlassesPro2/__init__.py
+++ b/src/argaze/TobiiGlassesPro2/__init__.py
@@ -1 +1,5 @@
+"""
+.. include:: README.md
+"""
+__docformat__ = "restructuredtext"
__all__ = ['TobiiController', 'TobiiData', 'TobiiVideo'] \ No newline at end of file
diff --git a/src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf b/src/argaze/TobiiGlassesPro2/utils/A4_calibration_target.pdf
index dfdbe0a..dfdbe0a 100644
--- a/src/argaze/TobiiGlassesPro2/utils/calibration_target_A4.pdf
+++ b/src/argaze/TobiiGlassesPro2/utils/A4_calibration_target.pdf
Binary files differ
diff --git a/src/argaze/__init__.py b/src/argaze/__init__.py
index e69de29..0252f36 100644
--- a/src/argaze/__init__.py
+++ b/src/argaze/__init__.py
@@ -0,0 +1,4 @@
+"""
+.. include:: ../../README.md
+"""
+__docformat__ = "restructuredtext" \ No newline at end of file
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
new file mode 100644
index 0000000..d0b6024
--- /dev/null
+++ b/src/argaze/utils/README.md
@@ -0,0 +1,31 @@
+Collection of command-line high level features based on ArGaze toolkit.
+
+Use --help to get arguments documentation.
+
+## Ready-to-use commands
+
+Note that all the commands below need to be executed from the ArGaze root folder.
+
+- Export 50 4x4 markers at 300 dpi into an export folder:
+
+```
+python ./src/argaze/utils/export_aruco_markers.py -o export
+```
+
+- Export a 7 columns and 5 rows calibration board with 5cm squares and 3cm markers inside at 50 dpi into an export folder:
+
+```
+python ./src/argaze/utils/export_calibration_board.py 7 5 5 3 -o export
+```
+
+- Calibrate Tobii Glasses Pro 2 camera (replace IP_ADDRESS) using a 7 columns and 5 rows calibration board with 5cm squares and 3cm markers inside. Then, export its optical parameters into a tobii_camera.json file:
+
+```
+python ./src/argaze/utils/calibrate_tobii_camera.py 7 5 5 3 -t IP_ADDRESS -o tobii_camera.json
+```
+
+- Track any 6cm ArUco marker into Tobii camera video stream. Load a roi3D_scene.obj file, position it virtually like the detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any ROI. Export all collected data into an export folder.
+
+```
+python ./src/argaze/utils/track_aruco_rois_with_tobii_glasses.py -c data/tobii_camera.json -s data/roi3D_scene.obj -o export
+``` \ No newline at end of file
diff --git a/src/argaze/utils/__init__.py b/src/argaze/utils/__init__.py
new file mode 100644
index 0000000..1448ed6
--- /dev/null
+++ b/src/argaze/utils/__init__.py
@@ -0,0 +1,4 @@
+"""
+.. include:: README.md
+"""
+__docformat__ = "restructuredtext" \ No newline at end of file
diff --git a/src/argaze/utils/calibrate_tobii_camera.py b/src/argaze/utils/calibrate_tobii_camera.py
new file mode 100644
index 0000000..ea35386
--- /dev/null
+++ b/src/argaze/utils/calibrate_tobii_camera.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+import time
+
+from argaze.TobiiGlassesPro2 import TobiiController, TobiiVideo
+from argaze.ArUcoMarkers import ArUcoBoard, ArUcoTracker, ArUcoCamera
+
+import cv2 as cv
+
+def main():
+ """
+ Captures board pictures and finally outputs camera calibration data into a .json file.
+
+    - Export and print a calibration board using the export_calibration_board.py utility.
+ - Place the calibration board in order to view it entirely on screen and move the camera in many configurations (orientation and distance) : the script will automatically take pictures. Do this step with a good lighting and a clear background.
+ - Once enough pictures have been captured (~20), press Esc key then, wait for the camera calibration processing.
+    - Finally, check rms parameter: it should be between 0. and 1. if the calibration succeeded (lower is better).
+
+ ### Reference:
+ - [Camera calibration using ArUco marker tutorial](https://automaticaddison.com/how-to-perform-camera-calibration-using-opencv/)
+ """
+
+ # manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('columns', metavar='COLS_NUMBER', type=int, default=7, help='number of columns')
+ parser.add_argument('rows', metavar='ROWS_NUMBER', type=int, default=5, help='number of rows')
+ parser.add_argument('square_size', metavar='SQUARE_SIZE', type=int, default=5, help='square size (cm)')
+ parser.add_argument('marker_size', metavar='MARKER_SIZE', type=int, default=5, help='marker size (cm)')
+ parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
+ parser.add_argument('-o', '--output', metavar='OUT', type=str, default='.', help='destination filepath')
+ parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_4X4_50', help='aruco marker dictionnary')
+ args = parser.parse_args()
+
+ # create tobii controller
+ tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'ArGaze', 1)
+
+ # create tobii video thread
+ tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
+ tobii_video_thread.start()
+
+ # create aruco camera
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+
+ # create aruco board
+ aruco_board = ArUcoBoard.ArUcoBoard(args.dictionary, args.columns, args.rows, args.square_size, args.marker_size)
+
+ # create aruco tracker
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
+
+ # start tobii glasses streaming
+ tobii_controller.start_streaming()
+
+ print("Camera calibration starts")
+ print("Waiting for calibration board...")
+
+ frame_width = 0
+ frame_height = 0
+
+ expected_markers_number = len(aruco_board.get_ids())
+ expected_corners_number = (aruco_board.get_size()[0] - 1 ) * (aruco_board.get_size()[1] - 1)
+
+ # capture frame with a full displayed board
+ while True:
+
+ frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
+
+ # track all markers in the board
+ aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
+
+ # draw only markers
+ aruco_tracker.draw(frame)
+
+ # draw current calibration data count
+ cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
+ cv.imshow('Tobii Camera Calibration', frame)
+
+ # if all board corners are detected
+ if aruco_tracker.get_board_corners_number() == expected_corners_number:
+
+ # draw board corners to notify a capture is done
+ aruco_tracker.draw_board(frame)
+
+ # append data
+ aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
+
+ cv.imshow(f'Tobii Camera Calibration', frame)
+
+ time.sleep(2)
+
+ # quit on 'Esc' command
+ key = cv.waitKey(1)
+ if key == 27:
+ cv.destroyAllWindows()
+ break
+
+ # stop tobii objects
+ tobii_video_thread.stop()
+
+ tobii_controller.stop_streaming()
+ tobii_controller.close()
+
+ print('\nCalibrating camera...')
+ aruco_camera.calibrate(aruco_board, frame_width, frame_height)
+
+ print('\nCalibration succeeded!')
+ print(f'\nRMS:\n{aruco_camera.get_rms()}')
+ print(f'\nCamera matrix:\n{aruco_camera.get_K()}')
+ print(f'\nDistortion coefficients:\n{aruco_camera.get_D()}')
+
+ aruco_camera.save_calibration_file(args.output)
+
+ print(f'\nCalibration data exported into {args.output} file')
+
+if __name__ == '__main__':
+
+ main()
diff --git a/src/examples/tobii_argaze/roi3D_scene.obj b/src/argaze/utils/data/roi3D_scene.obj
index d0b7c51..d0b7c51 100644
--- a/src/examples/tobii_argaze/roi3D_scene.obj
+++ b/src/argaze/utils/data/roi3D_scene.obj
diff --git a/src/examples/tobii_argaze/tobii_camera.json b/src/argaze/utils/data/tobii_camera.json
index b7b5108..b7b5108 100644
--- a/src/examples/tobii_argaze/tobii_camera.json
+++ b/src/argaze/utils/data/tobii_camera.json
diff --git a/src/argaze/utils/export_aruco_markers.py b/src/argaze/utils/export_aruco_markers.py
new file mode 100644
index 0000000..e76b95e
--- /dev/null
+++ b/src/argaze/utils/export_aruco_markers.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze.ArUcoMarkers import ArUcoMarkers
+
+def main():
+ """Generates ArUco markers to place into a scene."""
+
+ # manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__)
+ parser.add_argument('-o', '--output', metavar='OUT', type=str, default='.', help='destination path')
+ parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_4X4_50', help='aruco marker dictionnary')
+ parser.add_argument('-r', '--resolution', metavar='RES', type=int, default=300, help='picture resolution in dpi')
+ args = parser.parse_args()
+
+ # manage destination folder
+ if not os.path.exists(args.output):
+ os.makedirs(args.output)
+ print(f'{args.output} folder created')
+
+ # create aruco markers
+ aruco_markers = ArUcoMarkers.ArUcoMarkers(args.dictionary)
+
+ # export markers
+ aruco_markers.export_all(args.output, args.resolution)
+
+if __name__ == '__main__':
+
+ main() \ No newline at end of file
diff --git a/src/argaze/utils/export_calibration_board.py b/src/argaze/utils/export_calibration_board.py
new file mode 100644
index 0000000..2bcdef2
--- /dev/null
+++ b/src/argaze/utils/export_calibration_board.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze.ArUcoMarkers import ArUcoBoard
+
+def main():
+ """Generates ArUco board to calibrate a camera."""
+
+ # manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__)
+ parser.add_argument('columns', metavar='COLS_NUMBER', type=int, default=7, help='number of columns')
+ parser.add_argument('rows', metavar='ROWS_NUMBER', type=int, default=5, help='number of rows')
+ parser.add_argument('square_size', metavar='SQUARE_SIZE', type=int, default=5, help='square size (cm)')
+ parser.add_argument('marker_size', metavar='MARKER_SIZE', type=int, default=5, help='marker size (cm)')
+ parser.add_argument('-o', '--output', metavar='OUT', type=str, default='.', help='destination path')
+ parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_4X4_50', help='aruco marker dictionnary')
+ parser.add_argument('-r', '--resolution', metavar='RES', type=int, default=50, help='picture resolution in dpi')
+ args = parser.parse_args()
+
+ # manage destination folder
+ if not os.path.exists(args.output):
+ os.makedirs(args.output)
+ print(f'{args.output} folder created')
+
+ # create aruco board
+ aruco_board = ArUcoBoard.ArUcoBoard(args.dictionary, args.columns, args.rows, args.square_size, args.marker_size)
+
+ # export aruco board
+ aruco_board.export(args.output, args.resolution)
+
+if __name__ == '__main__':
+
+ main() \ No newline at end of file
diff --git a/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py b/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py
new file mode 100644
index 0000000..6091b89
--- /dev/null
+++ b/src/argaze/utils/track_aruco_rois_with_tobii_glasses.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
+from argaze.RegionOfInterest import *
+from argaze.TobiiGlassesPro2 import *
+
+import cv2 as cv
+import pandas
+import matplotlib.pyplot as mpyplot
+import matplotlib.patches as mpatches
+
+def main():
+ """
+ Track any ArUco marker into Tobii Glasses Pro 2 camera video stream.
+ From a loaded ROI scene .obj file, position the scene virtually like the detected ArUco markers and project the scene into camera frame.
+ Then, detect if Tobii gaze point is inside any ROI.
+    Export all collected data into an export folder for further analysis.
+ """
+
+ # manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
+ parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
+ parser.add_argument('-s', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath')
+ parser.add_argument('-o', '--output', metavar='OUT', type=str, default='.', help='destination path')
+ parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_4X4_50', help='aruco marker dictionnary')
+ parser.add_argument('-m', '--marker_size', metavar='MKR', type=int, default=6, help='aruco marker size (cm)')
+ args = parser.parse_args()
+
+ # create tobii controller
+ tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'ArGaze', 1)
+
+ # create tobii data thread
+ tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
+ tobii_data_thread.start()
+
+ # create tobii video thread
+ tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
+ tobii_video_thread.start()
+
+ # create aruco camera
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+ aruco_camera.load_calibration_file(args.camera_calibration)
+
+ # create aruco tracker
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, 6, aruco_camera) # aruco dictionaries, marker length (cm), camera
+
+ # create ROIs 3D scene
+ roi3D_scene = ROI3DScene.ROI3DScene()
+ roi3D_scene.load(args.roi_scene)
+
+ # start tobii glasses streaming
+ tobii_controller.start_streaming()
+
+ # process video frames
+ last_frame_time = 0
+ roi2D_buffer = []
+ marker_buffer = []
+
+ while True:
+
+ frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
+
+ # draw tobii gaze
+ # TODO : sync gaze data according frame pts
+ gp_data = tobii_data_thread.read_gaze_data(pts)
+ if 'TIMESTAMP' in gp_data:
+ pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
+ cv.circle(frame, pointer, 4, (0, 255, 255), -1)
+ else:
+ pointer = (0, 0)
+
+ # track markers with pose estimation and draw them
+ aruco_tracker.track(frame)
+ aruco_tracker.draw(frame)
+
+ # project 3D scenes related to each aruco markers
+ if aruco_tracker.get_markers_number():
+
+ for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+
+ # TODO : select different 3D scenes depending on aruco id
+
+ marker_rotation = aruco_tracker.get_marker_rotation(i)
+ marker_translation = aruco_tracker.get_marker_translation(i)
+
+ roi3D_scene.set_rotation(marker_rotation)
+ roi3D_scene.set_translation(marker_translation)
+
+ # zero distorsion matrix
+ D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
+ # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
+ # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable.
+ roi2D_scene = roi3D_scene.project(aruco_camera.getK(), D0)
+
+ # check if gaze is inside 2D rois
+ roi2D_scene.inside(pointer)
+
+ # draw 2D rois
+ roi2D_scene.draw(frame)
+
+ # store roi2D into buffer
+ for roi2D in roi2D_scene:
+ roi2D['TIME'] = frame_time
+ del roi2D['VERTICES']
+ roi2D_buffer.append(roi2D)
+
+ # store marker into buffer
+ marker = {
+ 'TIME': frame_time,
+ 'ID': i,
+ 'X': marker_translation[0][0],
+ 'Y': marker_translation[0][1],
+ 'Z': marker_translation[0][2]
+ }
+ marker_buffer.append(marker)
+
+ cv.imshow(f'Live Scene', frame)
+
+ # quit on 'Esc' command
+ key = cv.waitKey(1)
+ if key == 27:
+ cv.destroyAllWindows()
+ last_frame_time = frame_time
+ break
+
+ # stop tobii objects
+ tobii_video_thread.stop()
+ tobii_data_thread.stop()
+
+ tobii_controller.stop_streaming()
+ tobii_controller.close()
+
+ # create a pandas DataFrame for each buffer
+ ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
+ gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
+ gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
+ data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
+ video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])
+ roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
+ marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])
+
+ # manage export folder
+ if not os.path.exists(args.output):
+ os.makedirs(args.output)
+ print(f'{args.output} folder created')
+
+ # export all data frames
+ ac_dataframe.to_csv(f'{args.output}/accelerometer.csv', index=False)
+ gy_dataframe.to_csv(f'{args.output}/gyroscope.csv', index=False)
+ gp_dataframe.to_csv(f'{args.output}/gaze.csv', index=False)
+ data_pts_dataframe.to_csv(f'{args.output}/data_pts.csv', index=False)
+ video_pts_dataframe.to_csv(f'{args.output}/video_pts.csv', index=False)
+ roi2D_dataframe.to_csv(f'{args.output}/rois.csv', index=False)
+ marker_dataframe.to_csv(f'{args.output}/markers.csv', index=False)
+
+ # edit figure
+ figure = mpyplot.figure(figsize=(int(last_frame_time), 5))
+
+ # plot gaze data
+ subplot = figure.add_subplot(211)
+ subplot.set_title('Gaze')
+
+ subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
+ subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)
+
+ x_patch = mpatches.Patch(color='#276FB6', label='X')
+ y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
+ subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
+
+
+    # plot maker position data  -> marker position data
+ subplot = figure.add_subplot(212)
+ subplot.set_title('Marker')
+
+ subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
+ subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)
+
+ x_patch = mpatches.Patch(color='#276FB6', label='X')
+ y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
+ subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
+
+ # export figure
+ mpyplot.tight_layout()
+ mpyplot.savefig(f'{args.output}/visualisation.svg')
+ mpyplot.close('all')
+
+if __name__ == '__main__':
+
+ main() \ No newline at end of file
diff --git a/src/examples/README.md b/src/examples/README.md
deleted file mode 100644
index ac24dc1..0000000
--- a/src/examples/README.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# ArGaze examples
-
-_All examples requires argaze package._
-
-## export_aruco_markers.py
-
-Generates ArUco markers to place into a scene.
-
-## export_calibration_board.py
-
-Generates ArUco board to calibrate a camera.
-
-## tobii_camera_calibration.py
-
-Captures board pictures and finally outputs camera calibration data into an \_export/tobii_camera.json file.
-
-* Print the **A3_board_35cmx25cm_markers_4X4_3cm.pdf** file from ArUcoMarkers/utils folder on a A3 sheet to get the correct square and markers length (3 cm).
-* Launch the script.
-* Place the board in order to view it entirely on screen and move the camera in many configurations (orientation and distance) : the script will automatically take pictures. Do this step with a good lighting and a clear background.
-* Once enough pictures have been captured (~20), press Esc key then, wait for the camera calibration processing.
-* Finally, open \_export/tobii_camera.json file to see the calibration data : the rms parameter should be between 0. and 1. if the calibration suceeded (lower is better).
-
-## tobii_argaze
-
-Coordinates several tasks to :
-
-* Receive gaze data from Tobii glasses Pro 2,
-* Detect ArUco markers inside video stream from Tobii glasses Pro 2,
-* Synchronise gaze data and ArUcos detection using timestamp,
-* Build 3D AR ROIs from **roi3D_scene.obj** file through ArUcos markers pose estimation and project them onto video frame coordinate system,
-* Check if gaze is inside an ROI,
-* Store gathered data into unified csv file for further analysis.
-
-The **scene.blend** file is a Blender project to build and export roi3D_scene.obj file.
-
diff --git a/src/examples/export_aruco_markers.py b/src/examples/export_aruco_markers.py
deleted file mode 100644
index 2d55931..0000000
--- a/src/examples/export_aruco_markers.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-"""
-export_markers.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-from argaze.ArUcoMarkers import ArUcoMarkers
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export/markers')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export/markers\' folder created')
-
-# create aruco markers
-aruco_markers = ArUcoMarkers.ArUcoMarkers('DICT_4X4_50')
-
-# export markers
-aruco_markers.export_all(export_folder, 300) # destination folder, dpi
diff --git a/src/examples/export_calibration_board.py b/src/examples/export_calibration_board.py
deleted file mode 100644
index 24d7461..0000000
--- a/src/examples/export_calibration_board.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-"""
-export_board.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-from argaze.ArUcoMarkers import ArUcoBoard
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create aruco board
-aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # 7 columns, 5 rows, square size (cm), marker size (cm)
-
-# export aruco board
-aruco_board.export(export_folder, 50) # destination folder, dpi
diff --git a/src/examples/tobii_argaze/scene.blend b/src/examples/tobii_argaze/scene.blend
deleted file mode 100644
index e7e5dda..0000000
--- a/src/examples/tobii_argaze/scene.blend
+++ /dev/null
Binary files differ
diff --git a/src/examples/tobii_argaze/tobii_argaze.py b/src/examples/tobii_argaze/tobii_argaze.py
deleted file mode 100644
index 8193a03..0000000
--- a/src/examples/tobii_argaze/tobii_argaze.py
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/env python
-
-"""
-tobii_argaze.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-"""
-import os
-
-from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
-from argaze.RegionOfInterest import *
-from argaze.TobiiGlassesPro2 import *
-
-import cv2 as cv
-import pandas
-import matplotlib.pyplot as mpyplot
-import matplotlib.patches as mpatches
-
-# tobii glasses ip address
-ip_address = '192.168.1.10'
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create tobii controller
-tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
-
-# create tobii data thread
-tobii_data_thread = TobiiData.TobiiDataThread(tobii_controller)
-tobii_data_thread.start()
-
-# create tobii video thread
-tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
-tobii_video_thread.start()
-
-# create aruco camera
-aruco_camera = ArUcoCamera.ArUcoCamera()
-aruco_camera.load_calibration_file('tobii_camera.json')
-
-# create aruco tracker
-aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionaries, marker length (cm), camera
-
-# create ROIs 3D scene
-roi3D_scene = ROI3DScene.ROI3DScene()
-roi3D_scene.load('roi3D_scene.obj')
-
-# start tobii glasses streaming
-tobii_controller.start_streaming()
-
-# process video frames
-last_frame_time = 0
-roi2D_buffer = []
-marker_buffer = []
-
-while True:
-
- frame, frame_width, frame_height, frame_time, pts = tobii_video_thread.read()
-
- # draw tobii gaze
- # TODO : sync gaze data according frame pts
- gp_data = tobii_data_thread.read_gaze_data(pts)
- if 'TIMESTAMP' in gp_data:
- pointer = (int(gp_data['X'] * frame_width), int(gp_data['Y'] * frame_height))
- cv.circle(frame, pointer, 4, (0, 255, 255), -1)
- else:
- pointer = (0, 0)
-
- # track markers with pose estimation and draw them
- aruco_tracker.track(frame)
- aruco_tracker.draw(frame)
-
- # project 3D scenes related to each aruco markers
- if aruco_tracker.get_markers_number():
-
- for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
-
- # TODO : select different 3D scenes depending on aruco id
-
- marker_rotation = aruco_tracker.get_marker_rotation(i)
- marker_translation = aruco_tracker.get_marker_translation(i)
-
- roi3D_scene.set_rotation(marker_rotation)
- roi3D_scene.set_translation(marker_translation)
-
- # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
- # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable.
- roi2D_scene = roi3D_scene.project(frame, aruco_camera, False)
-
- # check if gaze is inside 2D rois
- roi2D_scene.inside(pointer)
-
- # draw 2D rois
- roi2D_scene.draw(frame)
-
- # store roi2D into buffer
- for roi2D in roi2D_scene:
- roi2D['TIME'] = frame_time
- del roi2D['VERTICES']
- roi2D_buffer.append(roi2D)
-
- # store marker into buffer
- marker = {
- 'TIME': frame_time,
- 'ID': i,
- 'X': marker_translation[0][0],
- 'Y': marker_translation[0][1],
- 'Z': marker_translation[0][2]
- }
- marker_buffer.append(marker)
-
- cv.imshow(f'Live Scene', frame)
-
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- last_frame_time = frame_time
- break
-
-# stop tobii objects
-tobii_video_thread.stop()
-tobii_data_thread.stop()
-
-tobii_controller.stop_streaming()
-tobii_controller.close()
-
-# create a pandas DataFrame for each buffer
-ac_dataframe = pandas.DataFrame(tobii_data_thread.read_accelerometer_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-gy_dataframe = pandas.DataFrame(tobii_data_thread.read_gyroscope_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y', 'Z'])
-gp_dataframe = pandas.DataFrame(tobii_data_thread.read_gaze_buffer(), columns=['TIMESTAMP', 'TIME', 'X', 'Y'])
-data_pts_dataframe = pandas.DataFrame(tobii_data_thread.read_pts_buffer(), columns=['TIMESTAMP', 'TIME', 'PTS'])
-video_pts_dataframe = pandas.DataFrame(tobii_video_thread.read_pts_buffer(), columns=['TIME', 'PTS'])
-roi2D_dataframe = pandas.DataFrame(roi2D_buffer, columns=['TIME', 'NAME', 'POINTER_INSIDE'])
-marker_dataframe = pandas.DataFrame(marker_buffer, columns=['TIME', 'ID', 'X', 'Y', 'Z'])
-
-# export all data frames
-ac_dataframe.to_csv(f'{export_folder}/accelerometer.csv', index=False)
-gy_dataframe.to_csv(f'{export_folder}/gyroscope.csv', index=False)
-gp_dataframe.to_csv(f'{export_folder}/gaze.csv', index=False)
-data_pts_dataframe.to_csv(f'{export_folder}/data_pts.csv', index=False)
-video_pts_dataframe.to_csv(f'{export_folder}/video_pts.csv', index=False)
-roi2D_dataframe.to_csv(f'{export_folder}/rois.csv', index=False)
-marker_dataframe.to_csv(f'{export_folder}/markers.csv', index=False)
-
-# edit figure
-figure = mpyplot.figure(figsize=(int(last_frame_time), 5))
-
-# plot gaze data
-subplot = figure.add_subplot(211)
-subplot.set_title('Gaze')
-
-subplot = gp_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (normalized)', legend=False)
-subplot = gp_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (normalized)', legend=False)
-
-x_patch = mpatches.Patch(color='#276FB6', label='X')
-y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-
-# plot maker position data
-subplot = figure.add_subplot(212)
-subplot.set_title('Marker')
-
-subplot = marker_dataframe.plot(x='TIME', y='X', xlim=(0, last_frame_time), ax=subplot, color='#276FB6', xlabel='Time (s)', ylabel='X (cm)', legend=False)
-subplot = marker_dataframe.plot(x='TIME', y='Y', xlim=(0, last_frame_time), ax=subplot.twinx(), color='#9427B6', xlabel='Time (s)', ylabel='Y (cm)', legend=False)
-
-x_patch = mpatches.Patch(color='#276FB6', label='X')
-y_speed_patch = mpatches.Patch(color='#9427B6', label='Y')
-subplot.legend(handles=[x_patch, y_speed_patch], loc='upper left')
-
-# export figure
-mpyplot.tight_layout()
-mpyplot.savefig(f'{export_folder}/visualisation.svg')
-mpyplot.close('all') \ No newline at end of file
diff --git a/src/examples/tobii_camera_calibration.py b/src/examples/tobii_camera_calibration.py
deleted file mode 100644
index 0e621b5..0000000
--- a/src/examples/tobii_camera_calibration.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python
-
-"""
-tobii_camera_calibration.py
-
-Author:
- - Théo de la Hogue, theo.de-la-hogue@enac.fr
-
-This program:
- - Captures board pictures with a full displayed board inside
- - Outputs camera calibration data into a camera.json file
-
-Reference:
- - https://automaticaddison.com/how-to-perform-pose-estimation-using-an-aruco-marker/
-"""
-import os
-import time
-
-from TobiiGlassesPro2 import TobiiController, TobiiVideo
-from ArUcoMarkers import ArUcoBoard, ArUcoTracker, ArUcoCamera
-
-import cv2 as cv
-
-# tobii glasses ip address
-ip_address = '192.168.1.10'
-
-# manage export folder
-current_folder = os.path.dirname(__file__)
-export_folder = os.path.join(current_folder, '_export')
-if not os.path.exists(export_folder):
- os.makedirs(export_folder)
- print(f'\'_export\' folder created')
-
-# create tobii controller
-tobii_controller = TobiiController.TobiiController(ip_address, 'ArGaze', 1)
-
-# create tobii video thread
-tobii_video_thread = TobiiVideo.TobiiVideoThread(tobii_controller)
-tobii_video_thread.start()
-
-# create aruco camera
-aruco_camera = ArUcoCamera.ArUcoCamera()
-
-# create aruco board
-aruco_board = ArUcoBoard.ArUcoBoard('DICT_4X4_50', 7, 5, 5, 3) # 7 columns, 5 rows, square size (cm), marker size (cm)
-
-# create aruco tracker
-aruco_tracker = ArUcoTracker.ArUcoTracker('DICT_4X4_50', 6, aruco_camera) # aruco dictionaries, marker length (cm), camera
-
-# start tobii glasses streaming
-tobii_controller.start_streaming()
-
-print("Camera calibration starts")
-print("Waiting for calibration board...")
-
-frame_width = 0
-frame_height = 0
-
-expected_markers_number = len(aruco_board.get_ids())
-expected_corners_number = (aruco_board.get_size()[0] - 1 ) * (aruco_board.get_size()[1] - 1)
-
-# capture frame with a full displayed board
-while True:
-
- frame, frame_width, frame_height, frame_time, frame_pts = tobii_video_thread.read()
-
- # track all markers in the board
- aruco_tracker.track_board(frame, aruco_board, expected_markers_number)
-
- # draw only markers
- aruco_tracker.draw(frame)
-
- # draw current calibration data count
- cv.putText(frame, f'Capture: {aruco_camera.get_calibration_data_count()}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
- cv.imshow('Tobii Camera Calibration', frame)
-
- # if all board corners are detected
- if aruco_tracker.get_board_corners_number() == expected_corners_number:
-
- # draw board corners to notify a capture is done
- aruco_tracker.draw_board(frame)
-
- # append data
- aruco_camera.store_calibration_data(aruco_tracker.get_board_corners(), aruco_tracker.get_board_corners_ids())
-
- cv.imshow(f'Tobii Camera Calibration', frame)
-
- time.sleep(2)
-
- # quit on 'Esc' command
- key = cv.waitKey(1)
- if key == 27:
- cv.destroyAllWindows()
- break
-
-# stop tobii objects
-tobii_video_thread.stop()
-
-tobii_controller.stop_streaming()
-tobii_controller.close()
-
-print('\nCalibrating camera...')
-aruco_camera.calibrate(aruco_board, frame_width, frame_height)
-
-print('\nCalibration succeeded!')
-print(f'\nRMS:\n{aruco_camera.get_rms()}')
-print(f'\nCamera matrix:\n{aruco_camera.get_K()}')
-print(f'\nDistortion coefficients:\n{aruco_camera.get_D()}')
-
-aruco_camera.save_calibration_file(os.join(export_folder,'tobii_camera.json'))
-
-print(f'\nCalibration data exported into tobii_camera.json file')
-