path: root/src
author    Théo de la Hogue    2023-02-15 18:16:54 +0100
committer Théo de la Hogue    2023-02-15 18:16:54 +0100
commit    9ce64a6c47156fe28e484633e2c8932c682fbf85 (patch)
tree      f291d9edb63e46775bd01836ccf464daeb422851 /src
parent    70145ed6af27153e76dcb695d5116c6cd194b004 (diff)
Major rewrite to allow management of multiple ArScenes with the new ArFeatures module.
Diffstat (limited to 'src')
-rw-r--r--src/argaze.test/ArScene.py8
-rw-r--r--src/argaze.test/ArUcoMarkers/ArUcoDetector.py105
-rw-r--r--src/argaze.test/ArUcoMarkers/ArUcoTracker.py105
-rw-r--r--src/argaze.test/utils/scene.json2
-rw-r--r--src/argaze/ArFeatures.py (renamed from src/argaze/ArScene.py)182
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py8
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoDetector.py303
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoScene.py59
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoTracker.py267
-rw-r--r--src/argaze/ArUcoMarkers/README.md4
-rw-r--r--src/argaze/ArUcoMarkers/__init__.py2
-rw-r--r--src/argaze/AreaOfInterest/AOI3DScene.py2
-rw-r--r--src/argaze/AreaOfInterest/AOIFeatures.py14
-rw-r--r--src/argaze/GazeAnalysis/DispersionBasedGazeMovementIdentifier.py4
-rw-r--r--src/argaze/TobiiGlassesPro2/TobiiEntities.py4
-rw-r--r--src/argaze/__init__.py2
-rw-r--r--src/argaze/utils/README.md4
-rw-r--r--src/argaze/utils/tobii_camera_calibrate.py18
-rw-r--r--src/argaze/utils/tobii_segment_arscene_edit.py18
-rw-r--r--src/argaze/utils/tobii_segment_arscene_export.py26
-rw-r--r--src/argaze/utils/tobii_stream_arscene_display.py16
21 files changed, 611 insertions, 542 deletions
diff --git a/src/argaze.test/ArScene.py b/src/argaze.test/ArScene.py
index 449530b..5180c12 100644
--- a/src/argaze.test/ArScene.py
+++ b/src/argaze.test/ArScene.py
@@ -31,10 +31,10 @@ class TestArSceneClass(unittest.TestCase):
self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_camera.K, [[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]]))
self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_camera.D, [-1.0, -0.5, 0.0, 0.5, 1.0]))
- # Check ArUco tracker
- self.assertEqual(ar_scene.aruco_tracker.tracking_data.cornerRefinementMethod, 3)
- self.assertEqual(ar_scene.aruco_tracker.tracking_data.aprilTagQuadSigma, 2)
- self.assertEqual(ar_scene.aruco_tracker.tracking_data.aprilTagDeglitch, 1)
+ # Check ArUco detector
+ self.assertEqual(ar_scene.aruco_detector.detection_parameters.cornerRefinementMethod, 3)
+ self.assertEqual(ar_scene.aruco_detector.detection_parameters.aprilTagQuadSigma, 2)
+ self.assertEqual(ar_scene.aruco_detector.detection_parameters.aprilTagDeglitch, 1)
# Check ArUco scene
self.assertEqual(ar_scene.angle_tolerance, 1.0)
diff --git a/src/argaze.test/ArUcoMarkers/ArUcoDetector.py b/src/argaze.test/ArUcoMarkers/ArUcoDetector.py
new file mode 100644
index 0000000..ab29024
--- /dev/null
+++ b/src/argaze.test/ArUcoMarkers/ArUcoDetector.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+import unittest
+import os
+import math
+
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoCamera, ArUcoDetector, ArUcoBoard
+
+import cv2 as cv
+import numpy
+
+class TestDetectionParametersClass(unittest.TestCase):
+ """Test DetectionParameters class."""
+
+ def test_from_json(self):
+ """Test DetectionParameters creation from json file."""
+
+ # Edit detection parameters file path
+ current_directory = os.path.dirname(os.path.abspath(__file__))
+ json_filepath = os.path.join(current_directory, 'utils/detecter.json')
+
+ # Load detection parameters
+ detection_parameters = ArUcoDetector.DetectionParameters.from_json(json_filepath)
+
+ # Check data
+ self.assertEqual(detection_parameters.cornerRefinementMethod, 3)
+ self.assertEqual(detection_parameters.aprilTagQuadSigma, 2)
+ self.assertEqual(detection_parameters.aprilTagDeglitch, 1)
+
+ # Check bad data access fails
+ with self.assertRaises(AttributeError):
+
+ detection_parameters.unknown_data = 1
+
+class TestArUcoDetectorClass(unittest.TestCase):
+ """Test ArUcoDetector class."""
+
+ def test_new(self):
+ """Test ArUcoDetector creation."""
+
+ aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+ aruco_detector = ArUcoDetector.ArUcoDetector(aruco_dictionary, 3, aruco_camera)
+
+ # Check ArUcoDetector creation
+ self.assertEqual(aruco_detector.detected_markers_number, 0)
+ self.assertEqual(aruco_detector.detected_markers, {})
+
+ def test_detect(self):
+ """Test detect method."""
+
+ aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+ aruco_detector = ArUcoDetector.ArUcoDetector(aruco_dictionary, 3, aruco_camera)
+
+ # Load a Full HD picture to test ArUco marker detection
+ current_directory = os.path.dirname(os.path.abspath(__file__))
+ frame = cv.imread(os.path.join(current_directory, 'utils/full_hd_marker.png'))
+
+ # Check ArUcoMarker detection
+ aruco_detector.detect(frame)
+
+ self.assertEqual(aruco_detector.detected_markers_number, 1)
+
+ self.assertEqual(aruco_detector.detected_markers[0].dictionary, aruco_dictionary)
+ self.assertEqual(aruco_detector.detected_markers[0].identifier, 0)
+ self.assertEqual(aruco_detector.detected_markers[0].size, 3)
+
+ # Check corner positions with -/+ 10 pixels precision
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].corners[0][0].astype(int), numpy.array([3823, 2073]), decimal=-1))
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].corners[0][1].astype(int), numpy.array([4177, 2073]), decimal=-1))
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].corners[0][2].astype(int), numpy.array([4177, 2427]), decimal=-1))
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].corners[0][3].astype(int), numpy.array([3823, 2427]), decimal=-1))
+
+ # Check marker translation with -/+ 0.1 cm precision and rotation with -/+ 0.001 radian precision
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].translation, numpy.array([33.87, 19.05, 0.]), decimal=1))
+ self.assertIsNone(numpy.testing.assert_almost_equal(aruco_detector.detected_markers[0].rotation, numpy.array([math.pi, 0., 0.]), decimal=3))
+
+ # Check detect metrics
+ detect_count, markers_count = aruco_detector.detection_metrics
+ self.assertEqual(detect_count, 1)
+ self.assertEqual(markers_count[0], 1)
+
+ def test_detect_board(self):
+ """Test detect board method."""
+
+ aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
+ aruco_board = ArUcoBoard.ArUcoBoard(aruco_dictionary, 7, 5, 5, 3)
+ aruco_camera = ArUcoCamera.ArUcoCamera()
+ aruco_detector = ArUcoDetector.ArUcoDetector(aruco_dictionary, 3, aruco_camera)
+
+ # Load a Full HD picture to test ArUco marker board detection
+ current_directory = os.path.dirname(os.path.abspath(__file__))
+ frame = cv.imread(os.path.join(current_directory, 'utils/full_hd_board.png'))
+
+ # Check ArUcoMarker board detection
+ aruco_detector.detect_board(frame, aruco_board, aruco_board.markers_number)
+
+ self.assertEqual(aruco_detector.board_corners_number, aruco_board.corners_number)
+ self.assertEqual(len(aruco_detector.board_corners), 24)
+ self.assertEqual(len(aruco_detector.board_corners_identifier), 24)
+
+if __name__ == '__main__':
+
+ unittest.main() \ No newline at end of file
diff --git a/src/argaze.test/ArUcoMarkers/ArUcoTracker.py b/src/argaze.test/ArUcoMarkers/ArUcoTracker.py
deleted file mode 100644
index 87373ea..0000000
--- a/src/argaze.test/ArUcoMarkers/ArUcoTracker.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import unittest
-import os
-import math
-
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoCamera, ArUcoTracker, ArUcoBoard
-
-import cv2 as cv
-import numpy
-
-class TestTrackingDataClass(unittest.TestCase):
- """Test TrackingData class."""
-
- def test_from_json(self):
- """Test TrackingData creation from json file."""
-
- # Edit traking data file path
- current_directory = os.path.dirname(os.path.abspath(__file__))
- json_filepath = os.path.join(current_directory, 'utils/tracker.json')
-
- # Load project
- tracking_data = ArUcoTracker.TrackingData.from_json(json_filepath)
-
- # Check data
- self.assertEqual(tracking_data.cornerRefinementMethod, 3)
- self.assertEqual(tracking_data.aprilTagQuadSigma, 2)
- self.assertEqual(tracking_data.aprilTagDeglitch, 1)
-
- # Check bad data access fails
- with self.assertRaises(AttributeError):
-
- tracking_data.unknown_data = 1
-
-class TestArUcoTrackerClass(unittest.TestCase):
- """Test ArUcoTracker class."""
-
- def test_new(self):
- """Test ArUcoTracker creation."""
-
- aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
- aruco_camera = ArUcoCamera.ArUcoCamera()
- aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_dictionary, 3, aruco_camera)
-
- # Check ArUcoTracker creation
- self.assertEqual(aruco_tracker.tracked_markers_number, 0)
- self.assertEqual(aruco_tracker.tracked_markers, {})
-
- def test_track(self):
- """Test track method."""
-
- aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
- aruco_camera = ArUcoCamera.ArUcoCamera()
- aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_dictionary, 3, aruco_camera)
-
- # Load picture Full HD to test ArUcoMarker tracking
- current_directory = os.path.dirname(os.path.abspath(__file__))
- frame = cv.imread(os.path.join(current_directory, 'utils/full_hd_marker.png'))
-
- # Check ArUcoMarker tracking
- aruco_tracker.track(frame)
-
- self.assertEqual(aruco_tracker.tracked_markers_number, 1)
-
- self.assertEqual(aruco_tracker.tracked_markers[0].dictionary, aruco_dictionary)
- self.assertEqual(aruco_tracker.tracked_markers[0].identifier, 0)
- self.assertEqual(aruco_tracker.tracked_markers[0].size, 3)
-
- # Check corner positions with -/+ 10 pixels precision
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].corners[0][0].astype(int), numpy.array([3823, 2073]), decimal=-1))
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].corners[0][1].astype(int), numpy.array([4177, 2073]), decimal=-1))
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].corners[0][2].astype(int), numpy.array([4177, 2427]), decimal=-1))
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].corners[0][3].astype(int), numpy.array([3823, 2427]), decimal=-1))
-
- # Check marker translation with -/+ 0.1 cm precision and rotation with -/+ 0.001 radian precision
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].translation, numpy.array([33.87, 19.05, 0.]), decimal=1))
- self.assertIsNone(numpy.testing.assert_almost_equal(aruco_tracker.tracked_markers[0].rotation, numpy.array([math.pi, 0., 0.]), decimal=3))
-
- # Check track metrics
- track_count, markers_count = aruco_tracker.track_metrics
- self.assertEqual(track_count, 1)
- self.assertEqual(markers_count[0], 1)
-
- def test_track_board(self):
- """Test track board method."""
-
- aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
- aruco_board = ArUcoBoard.ArUcoBoard(aruco_dictionary, 7, 5, 5, 3)
- aruco_camera = ArUcoCamera.ArUcoCamera()
- aruco_tracker = ArUcoTracker.ArUcoTracker(aruco_dictionary, 3, aruco_camera)
-
- # Load picture Full HD to test ArUcoMarker board tracking
- current_directory = os.path.dirname(os.path.abspath(__file__))
- frame = cv.imread(os.path.join(current_directory, 'utils/full_hd_board.png'))
-
- # Check ArUcoMarker board tracking
- aruco_tracker.track_board(frame, aruco_board, aruco_board.markers_number)
-
- self.assertEqual(aruco_tracker.board_corners_number, aruco_board.corners_number)
- self.assertEqual(len(aruco_tracker.board_corners), 24)
- self.assertEqual(len(aruco_tracker.board_corners_identifier), 24)
-
-if __name__ == '__main__':
-
- unittest.main() \ No newline at end of file
diff --git a/src/argaze.test/utils/scene.json b/src/argaze.test/utils/scene.json
index 4e1cc20..25aacc8 100644
--- a/src/argaze.test/utils/scene.json
+++ b/src/argaze.test/utils/scene.json
@@ -33,7 +33,7 @@
1.0
]
},
- "aruco_tracker": {
+ "aruco_detector": {
"cornerRefinementMethod": 3,
"aprilTagQuadSigma": 2,
"aprilTagDeglitch": 1
diff --git a/src/argaze/ArScene.py b/src/argaze/ArFeatures.py
index 184f6d6..25fd10d 100644
--- a/src/argaze/ArScene.py
+++ b/src/argaze/ArFeatures.py
@@ -10,19 +10,76 @@ from argaze.AreaOfInterest import *
import numpy
+ArEnvironmentType = TypeVar('ArEnvironment', bound="ArEnvironment")
+# Type definition for type annotation convenience
+
ArSceneType = TypeVar('ArScene', bound="ArScene")
# Type definition for type annotation convenience
AOI2DSceneType = TypeVar('AOI2DScene', bound="AOI2DScene")
# Type definition for type annotation convenience
+@dataclass
+class ArEnvironment():
+ """Define an Augmented Reality environment based ArUco marker detection."""
+
+ name: str
+ """Environement name."""
+
+ aruco_detector: ArUcoDetector.ArUcoDetector = field(init=False, default_factory=ArUcoDetector.ArUcoDetector)
+ """ArUco detecor."""
+
+ def __init__(self, **kwargs):
+
+ self.name = kwargs.pop('name')
+
+ self.aruco_detector = ArUcoDetector.ArUcoDetector(**kwargs.pop('aruco_detector'))
+
+ self.__scenes = {}
+ for name, scene_kwargs in kwargs.items():
+
+ self.__scenes[name] = ArScene(self, **scene_kwargs)
+
+ def __getitem__(self, name) -> ArSceneType:
+ """Get an ArScene of the environment."""
+
+ return self.__scenes[name]
+
+ @classmethod
+ def from_json(self, json_filepath: str) -> ArEnvironmentType:
+ """Load ArEnvironment from .json file."""
+
+ with open(json_filepath) as configuration_file:
+
+ return ArEnvironment(**json.load(configuration_file))
+
+ def __str__(self) -> str:
+ """String display"""
+
+ output = f'ArUcoDetector:\n{self.aruco_detector}\n'
+
+ for name, scene in self.__scenes.items():
+ output += f'\"{name}\" ArScene:\n{scene}\n'
+
+ return output
+
+ def items(self) -> Tuple[str, ArSceneType]:
+ """Iterate over scenes."""
+
+ return self.__scenes.items()
+
+ def keys(self) -> list[str]:
+ """Get scenes name."""
+
+ return self.__scenes.keys()
+
class PoseEstimationFailed(Exception):
"""Exception raised by ArScene project method when the pose can't be estimated due to unconsistencies."""
def __init__(self, message, unconsistencies=None):
super().__init__(message)
-
+
self.unconsistencies = unconsistencies
class SceneProjectionFailed(Exception):
@@ -34,22 +91,7 @@ class SceneProjectionFailed(Exception):
@dataclass
class ArScene():
- """Define an Augmented Reality environnement thanks to ArUco markers and project it onto incoming frames."""
-
- name: str
- """Project name."""
-
- aruco_dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary = field(init=False, default_factory=ArUcoMarkersDictionary.ArUcoMarkersDictionary)
- """ArUco markers dictionary."""
-
- aruco_marker_size: float = field(init=False)
- """Size of ArUco markers in centimeter."""
-
- aruco_camera: ArUcoCamera.ArUcoCamera = field(init=False, default_factory=ArUcoCamera.ArUcoCamera)
- """ArUco camera ..."""
-
- aruco_tracker: ArUcoTracker.ArUcoTracker = field(init=False, default_factory=ArUcoTracker.ArUcoTracker)
- """ArUco tracker ..."""
+ """Define an Augmented Reality scene based ArUco markers and AOI scenes."""
aruco_scene: ArUcoScene.ArUcoScene = field(init=False, default_factory=ArUcoScene.ArUcoScene)
"""ArUco scene ..."""
@@ -69,15 +111,9 @@ class ArScene():
aruco_aoi: dict
"""Dictionary of AOI defined by list of markers identifier and markers corners index tuples."""
- def __init__(self, **kwargs):
-
- self.aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(kwargs.pop('aruco_dictionary'))
+ def __init__(self, ar_environment: ArEnvironment, **kwargs):
- self.aruco_marker_size = kwargs.pop('aruco_marker_size')
-
- self.aruco_camera = ArUcoCamera.ArUcoCamera(**kwargs.pop('aruco_camera'))
-
- self.aruco_tracker = ArUcoTracker.ArUcoTracker(self.aruco_dictionary, self.aruco_marker_size, self.aruco_camera, **kwargs.pop('aruco_tracker'))
+ self.__ar_environment = ar_environment
# Check aruco_scene value type
aruco_scene_value = kwargs.pop('aruco_scene')
@@ -85,9 +121,9 @@ class ArScene():
# str: relative path to .obj file
if type(aruco_scene_value) == str:
- aruco_scene_value = os.path.join(self.__current_directory, aruco_scene_value)
+ aruco_scene_value = os.path.join(os.getcwd(), aruco_scene_value)
- self.aruco_scene = ArUcoScene.ArUcoScene(self.aruco_dictionary, self.aruco_marker_size, aruco_scene_value)
+ self.aruco_scene = ArUcoScene.ArUcoScene(self.__ar_environment.aruco_detector.dictionary, self.__ar_environment.aruco_detector.marker_size, aruco_scene_value)
# Check aoi_scene value type
aoi_scene_value = kwargs.pop('aoi_scene')
@@ -95,7 +131,7 @@ class ArScene():
# str: relative path to .obj file
if type(aoi_scene_value) == str:
- obj_filepath = os.path.join(self.__current_directory, aoi_scene_value)
+ obj_filepath = os.path.join(os.getcwd(), aoi_scene_value)
self.aoi_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath)
# dict: all AOI
@@ -118,69 +154,48 @@ class ArScene():
# Estimate pose from axis markers
aruco_axis_names = []
for marker_id in markers_id:
- aruco_axis_names.append(f'{self.aruco_dictionary.name}#{marker_id}')
+ aruco_axis_names.append(f'{ar_environment.aruco_detector.dictionary.name}#{marker_id}')
aruco_axis_string[axis_name] = aruco_axis_names
self.aruco_axis = aruco_axis_string
- # Preprocess a default whole scene projection to speed up further aruco aoi processings
- _, tvec, rvec, K = self.whole_pose()
- self.__default_whole_scene_projection = self.aoi_scene.project(tvec, rvec, K)
-
- # DEBUG
- print('self.__default_whole_scene_projection:\n', self.__default_whole_scene_projection )
+ # Preprocess the orthogonal projection to speed up further ArUco AOI processing
+ self.__orthogonal_projection_cache = self.orthogonal_projection
@classmethod
def from_json(self, json_filepath: str) -> ArSceneType:
- """Load ArGaze project from .json file."""
+ """Load ArScene from .json file."""
with open(json_filepath) as configuration_file:
- # Store current directory to allow relative path loading
- self.__current_directory = os.path.dirname(os.path.abspath(json_filepath))
-
return ArScene(**json.load(configuration_file))
def __str__(self) -> str:
"""String display"""
- output = ''
- output += f'\nArUcoCamera: {self.aruco_camera}'
- output += f'\n\nArUcoTracker tracking data: {self.aruco_tracker.tracking_data}'
- output += f'\n\nArUcoScene: {self.aruco_scene}'
- output += f'\n\nAOIScene: {self.aoi_scene}'
- output += '\n'
+ output = f'ArUcoScene:\n{self.aruco_scene}\n'
+ output += f'AOIScene:\n{self.aoi_scene}\n'
return output
- def whole_pose(self, width: float = 1., height: float = 0.) -> Tuple[numpy.array, numpy.array, numpy.array, numpy.array]:
- """Edit translation vector, rotation vector and camera intrinsic parameters to project the whole scene into a frame.
+ @property
+ def orthogonal_projection(self) -> AOI2DSceneType:
+ """Orthogonal projection of the aoi whole scene."""
- * **Arguments:**
- - frame width
- - frame height: optional, if None the height will be setup according given width and scene dimensions ratio.
- """
scene_size = self.aoi_scene.size
- print('scene_size=', scene_size)
- frame_size = numpy.array([width, scene_size[1]/scene_size[0]*width if height == 0. else height])
- print('frame_size=', frame_size)
-
- # Center, step back and rotate camera to get whole scene into field of view
+ # Center, step back and rotate pose to get whole scene into field of view
tvec = self.aoi_scene.center*[-1, 1, 0] + [0, 0, scene_size[1]]
- print('tvec=', tvec)
-
rvec = numpy.array([[-numpy.pi, 0.0, 0.0]])
# Edit intrinsic camera parameter to capture whole scene
- K = numpy.array([[frame_size[1], 0.0, frame_size[0]/2], [0.0, frame_size[1], frame_size[1]/2], [0.0, 0.0, 1.0]])
- print('K=', K)
+ K = numpy.array([[scene_size[1]/scene_size[0], 0.0, 0.5], [0.0, 1., 0.5], [0.0, 0.0, 1.0]])
- return frame_size, tvec, rvec, K
+ return self.aoi_scene.project(tvec, rvec, K)
- def estimate_pose(self, frame) -> Tuple[numpy.array, numpy.array, dict]:
- """Estimate scene pose from ArUco markers into frame.
+ def estimate_pose(self, detected_markers) -> Tuple[numpy.array, numpy.array, dict]:
+ """Estimate scene pose from detected ArUco markers.
* **Returns:**
- scene translation vector
@@ -188,14 +203,12 @@ class ArScene():
- dict of markers used to estimate the pose
"""
- self.aruco_tracker.track(frame)
-
# Pose estimation fails when no marker is detected
- if len(self.aruco_tracker.tracked_markers) == 0:
+ if len(detected_markers) == 0:
raise PoseEstimationFailed('No marker detected')
- scene_markers, _ = self.aruco_scene.filter_markers(self.aruco_tracker.tracked_markers)
+ scene_markers, _ = self.aruco_scene.filter_markers(detected_markers)
# Pose estimation fails when no marker belongs to the scene
if len(scene_markers) == 0:
@@ -243,7 +256,7 @@ class ArScene():
return tvec, rmat, consistent_markers
def project(self, tvec: numpy.array, rvec: numpy.array, visual_hfov=0) -> AOI2DSceneType:
- """Project AOI scene into frame according estimated pose and optionals horizontal fieald of view clipping angle.
+ """Project AOI scene according estimated pose and optional horizontal field of view clipping angle.
* **Arguments:**
- translation vector
@@ -270,7 +283,7 @@ class ArScene():
aoi_scene_copy = self.aoi_scene.copy()
- aoi_scene_projection = aoi_scene_copy.project(tvec, rvec, self.aruco_camera.K)
+ aoi_scene_projection = aoi_scene_copy.project(tvec, rvec, self.__ar_environment.aruco_detector.camera.K)
# Warn user when the projected scene is empty
if len(aoi_scene_projection) == 0:
@@ -279,23 +292,14 @@ class ArScene():
return aoi_scene_projection
- def project_aruco_aoi(self, frame) -> AOI2DSceneType:
- """Edit AOI scene from ArUco markers into frame as defined in aruco_aoi dictionary."""
-
- self.aruco_tracker.track(frame, estimate_pose=False)
+ def build_aruco_aoi_scene(self, detected_markers) -> AOI2DSceneType:
+ """Build AOI scene from ArUco markers into frame as defined in aruco_aoi dictionary."""
# AOI projection fails when no marker is detected
- if len(self.aruco_tracker.tracked_markers) == 0:
+ if len(detected_markers) == 0:
raise SceneProjectionFailed('No marker detected')
- scene_markers, _ = self.aruco_scene.filter_markers(self.aruco_tracker.tracked_markers)
-
- # AOI projection fails when no marker belongs to the scene
- if len(scene_markers) == 0:
-
- raise SceneProjectionFailed('No marker belongs to the scene')
-
aruco_aoi_scene = {}
for aruco_aoi_name, aoi in self.aruco_aoi.items():
@@ -304,7 +308,15 @@ class ArScene():
aoi_corners = []
for corner in ["upper_left_corner", "upper_right_corner", "lower_right_corner", "lower_left_corner"]:
- aoi_corners.append(self.aruco_tracker.tracked_markers[aoi[corner]["marker_identifier"]].corners[0][aoi[corner]["marker_corner_index"]])
+ marker_identifier = aoi[corner]["marker_identifier"]
+
+ try:
+
+ aoi_corners.append(detected_markers[marker_identifier].corners[0][aoi[corner]["marker_corner_index"]])
+
+ except Exception as e:
+
+ raise SceneProjectionFailed(f'Missing marker #{e} to build ArUco AOI scene')
aruco_aoi_scene[aruco_aoi_name] = AOIFeatures.AreaOfInterest(aoi_corners)
@@ -313,7 +325,7 @@ class ArScene():
if aruco_aoi_name != inner_aoi_name:
- aoi_corners = [numpy.array(aruco_aoi_scene[aruco_aoi_name].outter_axis(inner)) for inner in self.__default_whole_scene_projection[inner_aoi_name]]
+ aoi_corners = [numpy.array(aruco_aoi_scene[aruco_aoi_name].outter_axis(inner)) for inner in self.__orthogonal_projection_cache[inner_aoi_name]]
aruco_aoi_scene[inner_aoi_name] = AOIFeatures.AreaOfInterest(aoi_corners)
return AOI2DScene.AOI2DScene(aruco_aoi_scene)
@@ -321,11 +333,11 @@ class ArScene():
def draw_axis(self, frame):
"""Draw scene axis into frame."""
- self.aruco_scene.draw_axis(frame, self.aruco_camera.K, self.aruco_camera.D)
+ self.aruco_scene.draw_axis(frame, self.__ar_environment.aruco_detector.camera.K, self.__ar_environment.aruco_detector.camera.D)
def draw_places(self, frame):
"""Draw scene places into frame."""
- self.aruco_scene.draw_places(frame, self.aruco_camera.K, self.aruco_camera.D)
+ self.aruco_scene.draw_places(frame, self.__ar_environment.aruco_detector.camera.K, self.__ar_environment.aruco_detector.camera.D)
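A minimal usage sketch of the new ArFeatures API introduced above (not part of the commit): the environment owns a single ArUcoDetector shared by every ArScene. The JSON and image paths are hypothetical placeholders and the local variable names are illustrative only.
```
#!/usr/bin/env python

import cv2 as cv

from argaze import ArFeatures

# Hypothetical configuration file following the ArEnvironment constructor:
# a "name" key, an "aruco_detector" section, then one section per ArScene.
ar_environment = ArFeatures.ArEnvironment.from_json('environment.json')

# Hypothetical camera frame; any BGR image showing the scene markers would do.
frame = cv.imread('camera_frame.png')

# Detect markers once, then estimate their pose for all scenes of the environment.
ar_environment.aruco_detector.detect(frame)
ar_environment.aruco_detector.estimate_pose()

for scene_name, ar_scene in ar_environment.items():

    try:

        tvec, rvec, _ = ar_scene.estimate_pose(ar_environment.aruco_detector.detected_markers)
        aoi_scene_projection = ar_scene.project(tvec, rvec)

        print(f'{scene_name}: {len(aoi_scene_projection)} AOI projected')

    except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:

        print(f'{scene_name}: {e}')
```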
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index c535523..eea4393 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -46,10 +46,10 @@ class CalibrationData():
def __str__(self) -> str:
"""String display"""
- output = f'\n\trms: {self.rms}'
- output += f'\n\tdimensions: {self.dimensions}'
- output += f'\n\tK: {self.K}'
- output += f'\n\tD: {self.D}'
+ output = f'\trms: {self.rms}\n'
+ output += f'\tdimensions: {self.dimensions}\n'
+ output += f'\tK: {self.K}\n'
+ output += f'\tD: {self.D}\n'
return output
diff --git a/src/argaze/ArUcoMarkers/ArUcoDetector.py b/src/argaze/ArUcoMarkers/ArUcoDetector.py
new file mode 100644
index 0000000..86bcbbf
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoDetector.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+
+from typing import TypeVar, Tuple
+from dataclasses import dataclass, field
+import json
+from collections import Counter
+
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoMarker, ArUcoCamera
+
+import numpy
+import cv2 as cv
+import cv2.aruco as aruco
+
+ArUcoMarkerDictionaryType = TypeVar('ArUcoMarkerDictionary', bound="ArUcoMarkerDictionary")
+# Type definition for type annotation convenience
+
+ArUcoMarkerType = TypeVar('ArUcoMarker', bound="ArUcoMarker")
+# Type definition for type annotation convenience
+
+ArUcoCameraType = TypeVar('ArUcoCamera', bound="ArUcoCamera")
+# Type definition for type annotation convenience
+
+DetectorParametersType = TypeVar('DetectorParameters', bound="DetectorParameters")
+# Type definition for type annotation convenience
+
+ArUcoDetectorType = TypeVar('ArUcoDetector', bound="ArUcoDetector")
+# Type definition for type annotation convenience
+
+class DetectorParameters():
+ """Define ArUco marker detector parameters.
+
+ .. note:: More details on [opencv page](https://docs.opencv.org/4.x/d1/dcd/structcv_1_1aruco_1_1DetectorParameters.html)
+ """
+
+ __parameters = aruco.DetectorParameters_create()
+ __parameters_names = [
+ 'adaptiveThreshConstant',
+ 'adaptiveThreshWinSizeMax',
+ 'adaptiveThreshWinSizeMin',
+ 'adaptiveThreshWinSizeStep',
+ 'aprilTagCriticalRad',
+ 'aprilTagDeglitch',
+ 'aprilTagMaxLineFitMse',
+ 'aprilTagMaxNmaxima',
+ 'aprilTagMinClusterPixels',
+ 'aprilTagMinWhiteBlackDiff',
+ 'aprilTagQuadDecimate',
+ 'aprilTagQuadSigma',
+ 'cornerRefinementMaxIterations',
+ 'cornerRefinementMethod',
+ 'cornerRefinementMinAccuracy',
+ 'cornerRefinementWinSize',
+ 'markerBorderBits',
+ 'minMarkerPerimeterRate',
+ 'maxMarkerPerimeterRate',
+ 'minMarkerDistanceRate',
+ 'detectInvertedMarker',
+ 'errorCorrectionRate',
+ 'maxErroneousBitsInBorderRate',
+ 'minCornerDistanceRate',
+ 'minDistanceToBorder',
+ 'minOtsuStdDev',
+ 'perspectiveRemoveIgnoredMarginPerCell',
+ 'perspectiveRemovePixelPerCell',
+ 'polygonalApproxAccuracyRate'
+ ]
+
+ def __init__(self, **kwargs):
+
+ for parameter, value in kwargs.items():
+
+ setattr(self.__parameters, parameter, value)
+
+ self.__dict__.update(kwargs)
+
+ def __setattr__(self, parameter, value):
+
+ setattr(self.__parameters, parameter, value)
+
+ def __getattr__(self, parameter):
+
+ return getattr(self.__parameters, parameter)
+
+ @classmethod
+ def from_json(self, json_filepath) -> DetectorParametersType:
+ """Load detector parameters from .json file."""
+
+ with open(json_filepath) as configuration_file:
+
+ return DetectorParameters(**json.load(configuration_file))
+
+ def __str__(self, print_all=False) -> str:
+ """Detector paremeters string representation."""
+
+ output = ''
+
+ for parameter in self.__parameters_names:
+
+ if parameter in self.__dict__.keys():
+
+ output += f'\t*{parameter}: {getattr(self.__parameters, parameter)}\n'
+
+ elif print_all:
+
+ output += f'\t{parameter}: {getattr(self.__parameters, parameter)}\n'
+
+ return output
+
+ @property
+ def internal(self):
+ return self.__parameters
+
+class ArUcoDetector():
+ """ArUco markers detector."""
+
+ dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary = field(init=False, default_factory=ArUcoMarkersDictionary.ArUcoMarkersDictionary)
+ """ArUco markers dictionary to detect."""
+
+ marker_size: float = field(init=False)
+ """Size of ArUco markers to detect in centimeter."""
+
+ camera: ArUcoCamera.ArUcoCamera = field(init=False, default_factory=ArUcoCamera.ArUcoCamera)
+ """ArUco camera ..."""
+
+ def __init__(self, **kwargs):
+
+ self.dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(kwargs.pop('dictionary'))
+ self.marker_size = kwargs.pop('marker_size')
+ self.camera = ArUcoCamera.ArUcoCamera(**kwargs.pop('camera'))
+
+ # Init detector parameters
+ self.__parameters = DetectorParameters(**kwargs.pop('parameters'))
+
+ # Init detected markers data
+ self.__detected_markers = {}
+ self.__detected_markers_corners = []
+ self.__detected_markers_ids = []
+
+ # Init detected board data
+ self.__board = None
+ self.__board_corners_number = 0
+ self.__board_corners = []
+ self.__board_corners_ids = []
+
+ # Init detect metrics data
+ self.__detection_count = 0
+ self.__detected_ids = []
+
+ @classmethod
+ def from_json(self, json_filepath: str) -> ArUcoDetectorType:
+ """Load ArUcoDetector setup from .json file."""
+
+ with open(json_filepath) as configuration_file:
+
+ return ArUcoDetector(**json.load(configuration_file))
+
+ def __str__(self) -> str:
+ """String display"""
+
+ output = f'Camera:\n{self.camera}\n'
+ output += f'Parameters:\n{self.__parameters}\n'
+
+ return output
+
+ @property
+ def parameters(self):
+ """ArUco marker detector parameters."""
+
+ return self.__parameters
+
+ def detect(self, frame):
+ """Detect all ArUco markers into a frame.
+
+ .. danger:: DON'T MIRROR FRAME
+ It makes the marker detection fail.
+ """
+
+ # Reset detected markers data
+ self.__detected_markers, self.__detected_markers_corners, self.__detected_markers_ids = {}, [], []
+
+ # Detect markers into gray picture
+ self.__detected_markers_corners, self.__detected_markers_ids, _ = aruco.detectMarkers(cv.cvtColor(frame, cv.COLOR_BGR2GRAY), self.dictionary.markers, parameters = self.__parameters.internal)
+
+ # Are there any detected markers?
+ if len(self.__detected_markers_corners) > 0:
+
+ # Gather detected markers data and update metrics
+ self.__detection_count += 1
+
+ for i, marker_id in enumerate(self.__detected_markers_ids.T[0]):
+
+ marker = ArUcoMarker.ArUcoMarker(self.dictionary, marker_id, self.marker_size)
+
+ marker.corners = self.__detected_markers_corners[i]
+
+ # No pose estimation: call estimate_pose to get one
+ marker.translation = numpy.empty([0])
+ marker.rotation = numpy.empty([0])
+ marker.points = numpy.empty([0])
+
+ self.__detected_markers[marker_id] = marker
+
+ self.__detected_ids.append(marker_id)
+
+ def estimate_pose(self):
+ """Estimate pose of current detected markers."""
+
+ # Are there any detected markers?
+ if len(self.__detected_markers_corners) > 0:
+
+ markers_rvecs, markers_tvecs, markers_points = aruco.estimatePoseSingleMarkers(self.__detected_markers_corners, self.marker_size, numpy.array(self.camera.K), numpy.array(self.camera.D))
+
+ for i, marker_id in enumerate(self.__detected_markers_ids.T[0]):
+
+ marker = self.__detected_markers[marker_id]
+
+ marker.translation = markers_tvecs[i][0]
+ marker.rotation, _ = cv.Rodrigues(markers_rvecs[i][0])
+ marker.points = markers_points.reshape(4, 3)
+
+ @property
+ def detected_markers(self) -> dict[ArUcoMarkerType]:
+ """Access to detected markers dictionary."""
+
+ return self.__detected_markers
+
+ @property
+ def detected_markers_number(self) -> int:
+ """Return detected markers number."""
+
+ return len(list(self.__detected_markers.keys()))
+
+ def draw_detected_markers(self, frame):
+ """Draw traked markers."""
+
+ for marker_id, marker in self.__detected_markers.items():
+
+ marker.draw(frame, self.camera.K, self.camera.D)
+
+ def detect_board(self, frame, board, expected_markers_number):
+ """Detect ArUco markers board in frame setting up the number of detected markers needed to agree detection.
+
+ .. danger:: DON'T MIRROR FRAME
+ It makes the marker detection fail.
+ """
+
+ # detect markers from gray picture
+ gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
+ self.__detected_markers_corners, self.__detected_markers_ids, _ = aruco.detectMarkers(gray, self.dictionary.markers, parameters = self.__parameters.internal)
+
+ # if all board markers are detected
+ if len(self.__detected_markers_corners) == expected_markers_number:
+
+ self.__board = board
+ self.__board_corners_number, self.__board_corners, self.__board_corners_ids = aruco.interpolateCornersCharuco(self.__detected_markers_corners, self.__detected_markers_ids, gray, self.__board.model)
+
+ else:
+
+ self.__board = None
+ self.__board_corners_number = 0
+ self.__board_corners = []
+ self.__board_corners_ids = []
+
+ def draw_board(self, frame):
+ """Draw detected board corners in frame."""
+
+ if self.__board != None:
+
+ cv.drawChessboardCorners(frame, ((self.__board.size[0] - 1 ), (self.__board.size[1] - 1)), self.__board_corners, True)
+
+ def reset_detection_metrics(self):
+ """Enable marker detection metrics."""
+
+ self.__detection_count = 0
+ self.__detected_ids = []
+
+ @property
+ def detection_metrics(self) -> Tuple[int, dict]:
+ """Get marker detection metrics.
+ * **Returns:**
+ - number of detect() function calls
+ - dict with the number of detections for each marker identifier"""
+
+ return self.__detection_count, Counter(self.__detected_ids)
+
+ @property
+ def board_corners_number(self) -> int:
+ """Get detected board corners number."""
+
+ return self.__board_corners_number
+
+ @property
+ def board_corners_identifier(self) -> list[int]:
+ """Get detected board corners identifier."""
+
+ return self.__board_corners_ids
+
+ @property
+ def board_corners(self) -> list:
+ """Get detected board corners."""
+
+ return self.__board_corners
+
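A short sketch of the detection workflow that replaces ArUcoTracker.track() (not part of the commit). Construction mirrors the new unit test above; the image path is a placeholder. Note that detect() no longer estimates marker pose implicitly: estimate_pose() has to be called afterwards.
```
#!/usr/bin/env python

import cv2 as cv

from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoCamera, ArUcoDetector

aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')
aruco_camera = ArUcoCamera.ArUcoCamera()
aruco_detector = ArUcoDetector.ArUcoDetector(aruco_dictionary, 3, aruco_camera)

# Placeholder picture containing at least one DICT_ARUCO_ORIGINAL marker.
frame = cv.imread('full_hd_marker.png')

# Detection and pose estimation are now two separate steps.
aruco_detector.detect(frame)
aruco_detector.estimate_pose()

for marker_id, marker in aruco_detector.detected_markers.items():

    print(f'marker #{marker_id}: translation={marker.translation}')

# Detection metrics: number of detect() calls and per-marker detection counts.
detect_count, markers_count = aruco_detector.detection_metrics
print(detect_count, dict(markers_count))
```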
diff --git a/src/argaze/ArUcoMarkers/ArUcoScene.py b/src/argaze/ArUcoMarkers/ArUcoScene.py
index 0bdefa6..a54c100 100644
--- a/src/argaze/ArUcoMarkers/ArUcoScene.py
+++ b/src/argaze/ArUcoMarkers/ArUcoScene.py
@@ -36,7 +36,7 @@ class Place():
"""ArUco marker linked to the place."""
class ArUcoScene():
- """Define abstract class to handle group of ArUco markers as one unique spatial entity and estimate its pose."""
+ """Handle group of ArUco markers as one unique spatial entity and estimate its pose."""
def __init__(self, dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary, marker_size: float, data_places: dict | str = None):
"""Define scene attributes."""
@@ -173,7 +173,6 @@ class ArUcoScene():
return list(self.__identifier_cache.keys())
-
def __make_rotation_matrix(self, x, y, z):
# Create rotation matrix around x axis
@@ -310,8 +309,8 @@ class ArUcoScene():
except IOError:
raise IOError(f'File not found: {obj_filepath}')
- def filter_markers(self, tracked_markers) -> Tuple[dict, dict]:
- """Sort markers belonging to the scene from a given tracked markers list (cf ArUcoTracker.track()).
+ def filter_markers(self, detected_markers) -> Tuple[dict, dict]:
+ """Sort markers belonging to the scene from a given detected markers list (cf ArUcoDetector.detect()).
* **Returns:**
- dict of markers belonging to this scene
@@ -321,7 +320,7 @@ class ArUcoScene():
scene_markers = {}
remaining_markers = {}
- for (marker_id, marker) in tracked_markers.items():
+ for (marker_id, marker) in detected_markers.items():
try:
name = self.__identifier_cache[marker_id]
@@ -518,24 +517,32 @@ class ArUcoScene():
for name, place in self.__places.items():
- T = self.__places[name].translation
- R = self.__places[name].rotation
-
- # Draw place axis
- axisPoints = (T + numpy.float32([R.dot([l/2, 0, 0]), R.dot([0, l/2, 0]), R.dot([0, 0, l/2]), R.dot([0, 0, 0])])).reshape(-1, 3)
- axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
- axisPoints = axisPoints.astype(int)
-
- cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
- cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
- cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
-
- # Draw place
- placePoints = (T + numpy.float32([R.dot([-l, -l, 0]), R.dot([l, -l, 0]), R.dot([l, l, 0]), R.dot([-l, l, 0])])).reshape(-1, 3)
- placePoints, _ = cv.projectPoints(placePoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
- placePoints = placePoints.astype(int)
-
- cv.line(frame, tuple(placePoints[0].ravel()), tuple(placePoints[1].ravel()), (f,f,f), 3)
- cv.line(frame, tuple(placePoints[1].ravel()), tuple(placePoints[2].ravel()), (f,f,f), 3)
- cv.line(frame, tuple(placePoints[2].ravel()), tuple(placePoints[3].ravel()), (f,f,f), 3)
- cv.line(frame, tuple(placePoints[3].ravel()), tuple(placePoints[0].ravel()), (f,f,f), 3)
+ try:
+
+ T = self.__places[name].translation
+ R = self.__places[name].rotation
+
+ # Draw place axis
+ axisPoints = (T + numpy.float32([R.dot([l/2, 0, 0]), R.dot([0, l/2, 0]), R.dot([0, 0, l/2]), R.dot([0, 0, 0])])).reshape(-1, 3)
+ axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
+ axisPoints = axisPoints.astype(int)
+
+ cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
+ cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
+ cv.line(frame, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
+
+ # Draw place
+ placePoints = (T + numpy.float32([R.dot([-l, -l, 0]), R.dot([l, -l, 0]), R.dot([l, l, 0]), R.dot([-l, l, 0])])).reshape(-1, 3)
+ placePoints, _ = cv.projectPoints(placePoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
+ placePoints = placePoints.astype(int)
+
+ cv.line(frame, tuple(placePoints[0].ravel()), tuple(placePoints[1].ravel()), (f,f,f), 3)
+ cv.line(frame, tuple(placePoints[1].ravel()), tuple(placePoints[2].ravel()), (f,f,f), 3)
+ cv.line(frame, tuple(placePoints[2].ravel()), tuple(placePoints[3].ravel()), (f,f,f), 3)
+ cv.line(frame, tuple(placePoints[3].ravel()), tuple(placePoints[0].ravel()), (f,f,f), 3)
+
+ except cv.error as e:
+
+ print('ArUcoScene.draw_places: ', e)
+ print('T: ', T)
+ print('R: ', R)
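A brief sketch of the renamed filter_markers() entry point (not part of the commit), showing how scene markers are now separated from other detected markers; the .obj path is a placeholder and the empty dict stands in for ArUcoDetector.detected_markers.
```
#!/usr/bin/env python

from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoScene

aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_ARUCO_ORIGINAL')

# Placeholder scene description; a dict of places could be passed instead of a path.
aruco_scene = ArUcoScene.ArUcoScene(aruco_dictionary, 3, 'scene_markers.obj')

# Normally ArUcoDetector.detected_markers after a detect() call; empty here.
detected_markers = {}

scene_markers, remaining_markers = aruco_scene.filter_markers(detected_markers)

print(f'{len(scene_markers)} markers belong to the scene, {len(remaining_markers)} do not')
```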
diff --git a/src/argaze/ArUcoMarkers/ArUcoTracker.py b/src/argaze/ArUcoMarkers/ArUcoTracker.py
deleted file mode 100644
index 37c567e..0000000
--- a/src/argaze/ArUcoMarkers/ArUcoTracker.py
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/usr/bin/env python
-
-from typing import TypeVar, Tuple
-import json
-from collections import Counter
-
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoMarker, ArUcoCamera
-
-import numpy
-import cv2 as cv
-import cv2.aruco as aruco
-
-ArUcoMarkerType = TypeVar('ArUcoMarker', bound="ArUcoMarker")
-# Type definition for type annotation convenience
-
-TrackingDataType = TypeVar('TrackingData', bound="TrackingData")
-# Type definition for type annotation convenience
-
-DetectorParametersType = TypeVar('')
-
-class TrackingData():
- """Define ArUco marker tracking data.
-
- .. note:: More details on [opencv page](https://docs.opencv.org/4.x/d1/dcd/structcv_1_1aruco_1_1DetectorParameters.html)
- """
-
- __detector_parameters = aruco.DetectorParameters_create()
- __detector_parameters_names = [
- 'adaptiveThreshConstant',
- 'adaptiveThreshWinSizeMax',
- 'adaptiveThreshWinSizeMin',
- 'adaptiveThreshWinSizeStep',
- 'aprilTagCriticalRad',
- 'aprilTagDeglitch',
- 'aprilTagMaxLineFitMse',
- 'aprilTagMaxNmaxima',
- 'aprilTagMinClusterPixels',
- 'aprilTagMinWhiteBlackDiff',
- 'aprilTagQuadDecimate',
- 'aprilTagQuadSigma',
- 'cornerRefinementMaxIterations',
- 'cornerRefinementMethod',
- 'cornerRefinementMinAccuracy',
- 'cornerRefinementWinSize',
- 'markerBorderBits',
- 'minMarkerPerimeterRate',
- 'maxMarkerPerimeterRate',
- 'minMarkerDistanceRate',
- 'detectInvertedMarker',
- 'errorCorrectionRate',
- 'maxErroneousBitsInBorderRate',
- 'minCornerDistanceRate',
- 'minDistanceToBorder',
- 'minOtsuStdDev',
- 'perspectiveRemoveIgnoredMarginPerCell',
- 'perspectiveRemovePixelPerCell',
- 'polygonalApproxAccuracyRate'
- ]
-
- def __init__(self, **kwargs):
-
- for parameter, value in kwargs.items():
-
- setattr(self.__detector_parameters, parameter, value)
-
- self.__dict__.update(kwargs)
-
- def __setattr__(self, parameter, value):
-
- setattr(self.__detector_parameters, parameter, value)
-
- def __getattr__(self, parameter):
-
- return getattr(self.__detector_parameters, parameter)
-
- @classmethod
- def from_json(self, json_filepath) -> TrackingDataType:
- """Load tracking data from .json file."""
-
- with open(json_filepath) as configuration_file:
-
- return TrackingData(**json.load(configuration_file))
-
- def __str__(self, print_all=False) -> str:
- """Tracking data string representation."""
-
- output = ''
-
- for parameter in self.__detector_parameters_names:
-
- if parameter in self.__dict__.keys():
-
- output += f'\n\t*{parameter}: {getattr(self.__detector_parameters, parameter)}'
-
- elif print_all:
-
- output += f'\n\t{parameter}: {getattr(self.__detector_parameters, parameter)}'
-
- return output
-
- @property
- def internal(self):
- return self.__detector_parameters
-
-class ArUcoTracker():
- """Track ArUco markers into a frame."""
-
- def __init__(self, dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary, marker_size: float, camera: ArUcoCamera.ArUcoCamera, **kwargs):
- """Define which markers dictionary and size to track and camera."""
-
- self.__dictionary = dictionary
- self.__marker_size = marker_size
- self.__camera = camera
-
- # Init tracking data
- self.__tracking_data = TrackingData(**kwargs)
-
- # init tracked markers data
- self.__tracked_markers = {}
-
- # define tracked board data
- self.__board = None
- self.__board_corners_number = 0
- self.__board_corners = []
- self.__board_corners_ids = []
-
- # define track metrics data
- self.__track_count = 0
- self.__tracked_ids = []
-
- @property
- def marker_size(self) -> float:
- """ArUco marker length to track in centimeter."""
-
- return self.__marker_size
-
- @property
- def tracking_data(self):
- """ArUco marker tracking data."""
-
- return self.__tracking_data
-
- def track(self, frame, estimate_pose = True):
- """Track ArUco markers in frame.
-
- .. danger:: DON'T MIRROR FRAME
- It makes the markers detection to fail.
- """
-
- self.__tracked_markers = {}
- markers_corners, markers_ids, markers_rvecs, markers_tvecs, markers_points = [], [], [], [], []
-
- # Track markers into gray picture
- markers_corners, markers_ids, _ = aruco.detectMarkers(cv.cvtColor(frame, cv.COLOR_BGR2GRAY), self.__dictionary.markers, parameters = self.__tracking_data.internal)
-
- if len(markers_corners) > 0:
-
- # Pose estimation is optional
- if estimate_pose:
-
- markers_rvecs, markers_tvecs, markers_points = aruco.estimatePoseSingleMarkers(markers_corners, self.__marker_size, numpy.array(self.__camera.K), numpy.array(self.__camera.D))
-
- # Gather tracked markers data and update metrics
- self.__track_count += 1
-
- for i, marker_id in enumerate(markers_ids.T[0]):
-
- marker = ArUcoMarker.ArUcoMarker(self.__dictionary, marker_id, self.__marker_size)
-
- marker.corners = markers_corners[i]
-
- if estimate_pose:
- marker.translation = markers_tvecs[i][0]
- marker.rotation, _ = cv.Rodrigues(markers_rvecs[i][0])
- marker.points = markers_points.reshape(4, 3)
- else:
- marker.translation = numpy.empty([0])
- marker.rotation = numpy.empty([0])
- marker.points = numpy.empty([0])
-
- self.__tracked_markers[marker_id] = marker
-
- self.__tracked_ids.append(marker_id)
-
- @property
- def tracked_markers(self) -> dict[ArUcoMarkerType]:
- """Access to tracked markers dictionary."""
-
- return self.__tracked_markers
-
- @property
- def tracked_markers_number(self) -> int:
- """Return tracked markers number."""
-
- return len(list(self.__tracked_markers.keys()))
-
- def draw_tracked_markers(self, frame):
- """Draw traked markers."""
-
- for marker_id, marker in self.__tracked_markers.items():
-
- marker.draw(frame, self.__camera.K, self.__camera.D)
-
- def track_board(self, frame, board, expected_markers_number):
- """Track ArUco markers board in frame setting up the number of detected markers needed to agree detection.
-
- .. danger:: DON'T MIRROR FRAME
- It makes the markers detection to fail.
- """
-
- # detect markers from gray picture
- gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
- markers_corners, markers_ids, _ = aruco.detectMarkers(gray, self.__dictionary.markers, parameters = self.__tracking_data.internal)
-
- # if all board markers are detected
- if len(markers_corners) == expected_markers_number:
-
- self.__board = board
- self.__board_corners_number, self.__board_corners, self.__board_corners_ids = aruco.interpolateCornersCharuco(markers_corners, markers_ids, gray, self.__board.model)
-
- else:
-
- self.__board = None
- self.__board_corners_number = 0
- self.__board_corners = []
- self.__board_corners_ids = []
-
- def draw_board(self, frame):
- """Draw tracked board corners in frame."""
-
- if self.__board != None:
-
- cv.drawChessboardCorners(frame, ((self.__board.size[0] - 1 ), (self.__board.size[1] - 1)), self.__board_corners, True)
-
- def reset_track_metrics(self):
- """Enable marker tracking metrics."""
-
- self.__track_count = 0
- self.__tracked_ids = []
-
- @property
- def track_metrics(self) -> Tuple[int, dict]:
- """Get marker tracking metrics.
- * **Returns:**
- - number of track function call
- - dict with number of tracking detection for each marker identifier"""
-
- return self.__track_count, Counter(self.__tracked_ids)
-
- @property
- def board_corners_number(self) -> int:
- """Get tracked board corners number."""
-
- return self.__board_corners_number
-
- @property
- def board_corners_identifier(self) -> list[int]:
- """Get tracked board corners identifier."""
-
- return self.__board_corners_ids
-
- @property
- def board_corners(self) -> list:
- """Get tracked board corners."""
-
- return self.__board_corners
-
diff --git a/src/argaze/ArUcoMarkers/README.md b/src/argaze/ArUcoMarkers/README.md
index 821a582..bdc8f9e 100644
--- a/src/argaze/ArUcoMarkers/README.md
+++ b/src/argaze/ArUcoMarkers/README.md
@@ -2,7 +2,7 @@ Class interface to work with [OpenCV ArUco markers](https://docs.opencv.org/4.x/
## ArUco markers dictionary
-To work with ArUco markers, you need to choose a marker dictionary which have specific the format, the numbers of markers or the difference between each markers to avoid error in tracking.
+To work with ArUco markers, you need to choose a marker dictionary, which has a specific format, number of markers and difference between markers, in order to avoid detection errors.
Here is more [about ArUco markers dictionaries](https://docs.opencv.org/3.4/d9/d6a/group__aruco.html#gac84398a9ed9dd01306592dd616c2c975)
## Utils
@@ -11,4 +11,4 @@ Print **A3_board_35cmx25cm_markers_4X4_3cm.pdf** onto A3 paper sheet to get boar
Print **A4_markers_4x4_3cm.pdf** onto A4 paper sheet to get markers at expected dimensions.
-Load **tracker_configuration.json** file with argaze utils **tobii_segment_aruco_aoi_export.py** script with -p option. This is an example file to illustrate how to setup [ArUco markers detection parameters](https://docs.opencv.org/4.x/d1/dcd/structcv_1_1aruco_1_1DetectorParameters.html). \ No newline at end of file
+Load **detecter_configuration.json** file with argaze utils **tobii_segment_aruco_aoi_export.py** script with -p option. This is an example file to illustrate how to setup [ArUco markers detection parameters](https://docs.opencv.org/4.x/d1/dcd/structcv_1_1aruco_1_1DetectorParameters.html). \ No newline at end of file
diff --git a/src/argaze/ArUcoMarkers/__init__.py b/src/argaze/ArUcoMarkers/__init__.py
index f5b9ca5..fa769f7 100644
--- a/src/argaze/ArUcoMarkers/__init__.py
+++ b/src/argaze/ArUcoMarkers/__init__.py
@@ -2,4 +2,4 @@
.. include:: README.md
"""
__docformat__ = "restructuredtext"
-__all__ = ['ArUcoMarkersDictionary', 'ArUcoMarker', 'ArUcoBoard', 'ArUcoCamera', 'ArUcoTracker', 'ArUcoScene'] \ No newline at end of file
+__all__ = ['ArUcoMarkersDictionary', 'ArUcoMarker', 'ArUcoBoard', 'ArUcoCamera', 'ArUcoDetector', 'ArUcoScene'] \ No newline at end of file
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index 142738f..a4ca015 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -202,7 +202,7 @@ class AOI3DScene(AOIFeatures.AOIScene):
for name, aoi3D in self.items():
- vertices_2D, J = cv.projectPoints(aoi3D.astype(numpy.float32), R, T, numpy.array(K),numpy.array(D))
+ vertices_2D, J = cv.projectPoints(aoi3D.astype(numpy.float32), R, T, numpy.array(K), numpy.array(D))
aoi2D = vertices_2D.reshape((len(vertices_2D), 2)).view(AOIFeatures.AreaOfInterest)
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 3dcccfd..ea57866 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -246,6 +246,20 @@ class AOIScene():
return output
+ def __mul__(self, scale_vector) -> AOISceneType:
+ """Scale scene by a vector."""
+
+ assert(len(scale_vector) == self.__dimension)
+
+ for name, area in self.__areas.items():
+
+ self.__areas[name] = self.__areas[name] * scale_vector
+
+ return self
+
+ # Allow n * scene operation
+ __rmul__ = __mul__
+
def items(self) -> Tuple[str, AreaOfInterest]:
"""Iterate over areas."""
diff --git a/src/argaze/GazeAnalysis/DispersionBasedGazeMovementIdentifier.py b/src/argaze/GazeAnalysis/DispersionBasedGazeMovementIdentifier.py
index 51286ee..6b5ec09 100644
--- a/src/argaze/GazeAnalysis/DispersionBasedGazeMovementIdentifier.py
+++ b/src/argaze/GazeAnalysis/DispersionBasedGazeMovementIdentifier.py
@@ -136,7 +136,7 @@ class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier):
# Store unvalid gaze positions to count them
unvalid_gaze_positions = GazeFeatures.TimeStampedGazePositions()
- # Keep track of last valid timestamp
+ # Keep track of the last valid timestamp
ts_last_valid = ts_current
for ts_next, gaze_position_next in self.__ts_gaze_positions.items():
@@ -148,7 +148,7 @@ class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier):
valid_gaze_positions[ts_next] = gaze_position_next
- # Keep track of last valid timestamp
+ # Keep track of the last valid timestamp
ts_last_valid = ts_next
# Store non valid position
diff --git a/src/argaze/TobiiGlassesPro2/TobiiEntities.py b/src/argaze/TobiiGlassesPro2/TobiiEntities.py
index 404f6d0..12c4032 100644
--- a/src/argaze/TobiiGlassesPro2/TobiiEntities.py
+++ b/src/argaze/TobiiGlassesPro2/TobiiEntities.py
@@ -162,13 +162,13 @@ class TobiiRecording:
@property
def eyetracker_samples(self) -> int:
- """Get numbers of recorded eye tracker samples."""
+ """Get numbers of recorded eye detecter samples."""
return self.__et_samples
@property
def eyetracker_samples_valid(self) -> int:
- """Get numbers of recorded eye tracker valid samples."""
+ """Get numbers of recorded eye detecter valid samples."""
return self.__et_samples_valid
diff --git a/src/argaze/__init__.py b/src/argaze/__init__.py
index f02a31a..d74219d 100644
--- a/src/argaze/__init__.py
+++ b/src/argaze/__init__.py
@@ -2,4 +2,4 @@
.. include:: ../../README.md
"""
__docformat__ = "restructuredtext"
-__all__ = ['utils','ArUcoMarkers','AreaOfInterest','GazeFeatures','DataStructures','GazeAnalysis','ArScene','TobiiGlassesPro2'] \ No newline at end of file
+__all__ = ['utils','ArUcoMarkers','AreaOfInterest','ArFeatures','GazeFeatures','DataStructures','GazeAnalysis','TobiiGlassesPro2'] \ No newline at end of file
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index 9be98f4..e105753 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -84,13 +84,13 @@ python ./src/argaze/utils/tobii_segment_gaze_movements_export.py -s SEGMENT_PATH
# Tobii with ArUco
-Track ArUco markers (-md MARKER_DICT -ms MARKER_SIZE) into Tobii camera video stream (-t IP_ADDRESS). Load aoi scene .obj file related to each marker (-mi MARKER_ID, PATH_TO_AOI_SCENE), position each scene virtually relatively to its detected ArUco markers then project the scene into camera frame:
+Detect ArUco markers (-md MARKER_DICT -ms MARKER_SIZE) in the Tobii camera video stream (-t IP_ADDRESS). Load the aoi scene .obj file related to each marker (-mi MARKER_ID, PATH_TO_AOI_SCENE), position each scene virtually relative to its detected ArUco markers, then project the scene into the camera frame:
```
python ./src/argaze/utils/tobii_stream_aruco_aoi_display.py -t IP_ADDRESS -c export/tobii_camera.json -md MARKER_DICT -ms MARKER_SIZE -mi '{"MARKER_ID":"PATH_TO_AOI_SCENE.obj",...}'
```
-Track ArUco markers (-md MARKER_DICT -ms MARKER_SIZE) into a Tobii camera video segment (-s SEGMENT_PATH) into a time range selection (-r IN OUT). Load aoi scene .obj file related to each marker (-mi MARKER_ID, PATH_TO_AOI_SCENE), position each scene virtually relatively to its detected ArUco markers then project the scene into camera frame. Export aoi video and data as a aruco_aoi.csv, aruco_aoi.mp4 files:
+Detect ArUco markers (-md MARKER_DICT -ms MARKER_SIZE) in a Tobii camera video segment (-s SEGMENT_PATH) within a time range selection (-r IN OUT). Load the aoi scene .obj file related to each marker (-mi MARKER_ID, PATH_TO_AOI_SCENE), position each scene virtually relative to its detected ArUco markers, then project the scene into the camera frame. Export aoi video and data as aruco_aoi.csv and aruco_aoi.mp4 files:
```
python ./src/argaze/utils/tobii_segment_aruco_aoi_export.py -s SEGMENT_PATH -c export/tobii_camera.json -md MARKER_DICT -ms MARKER_SIZE -mi '{"MARKER_ID":"PATH_TO_AOI_SCENE.obj",...}' -r IN OUT
```
diff --git a/src/argaze/utils/tobii_camera_calibrate.py b/src/argaze/utils/tobii_camera_calibrate.py
index 6cfcd80..65b177e 100644
--- a/src/argaze/utils/tobii_camera_calibrate.py
+++ b/src/argaze/utils/tobii_camera_calibrate.py
@@ -5,7 +5,7 @@ import os
import time
from argaze.TobiiGlassesPro2 import TobiiController, TobiiVideo
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoBoard, ArUcoTracker, ArUcoCamera
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoBoard, ArUcoDetector, ArUcoCamera
import cv2 as cv
@@ -63,8 +63,8 @@ def main():
# Create aruco board
aruco_board = ArUcoBoard.ArUcoBoard(args.dictionary, args.columns, args.rows, args.square_size, args.marker_size)
- # Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
+ # Create aruco detector
+ aruco_detector = ArUcoDetector.ArUcoDetector(args.dictionary, args.marker_size, aruco_camera)
# Start tobii glasses streaming
tobii_controller.start_streaming()
@@ -83,24 +83,24 @@ def main():
# capture frame with a full displayed board
video_ts, video_frame = tobii_video_stream.read()
- # track all markers in the board
- aruco_tracker.track_board(video_frame.matrix, aruco_board, expected_markers_number)
+ # detect all markers in the board
+ aruco_detector.detect_board(video_frame.matrix, aruco_board, expected_markers_number)
# draw only markers
- aruco_tracker.draw_tracked_markers(video_frame.matrix)
+ aruco_detector.draw_detected_markers(video_frame.matrix)
# draw current calibration data count
cv.putText(video_frame.matrix, f'Capture: {aruco_camera.calibration_data_count}', (50, 50), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
cv.imshow('Tobii Camera Calibration', video_frame.matrix)
# if all board corners are detected
- if aruco_tracker.board_corners_number == expected_corners_number:
+ if aruco_detector.board_corners_number == expected_corners_number:
# draw board corners to notify a capture is done
- aruco_tracker.draw_board(video_frame.matrix)
+ aruco_detector.draw_board(video_frame.matrix)
# append data
- aruco_camera.store_calibration_data(aruco_tracker.board_corners, aruco_tracker.board_corners_identifier)
+ aruco_camera.store_calibration_data(aruco_detector.board_corners, aruco_detector.board_corners_identifier)
cv.imshow('Tobii Camera Calibration', video_frame.matrix)
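For readers following the rename, the calibration capture step above reduces to a small detection routine. A minimal sketch, assuming the ArUcoBoard, ArUcoCamera and ArUcoDetector instances and the expected marker/corner counts are prepared as in tobii_camera_calibrate.py (the standalone helper function below is illustrative only, not part of the repository):
```
def capture_calibration_frame(frame, aruco_detector, aruco_camera, aruco_board,
                              expected_markers_number, expected_corners_number):
    """Detect the calibration board in one frame and store its corners when fully visible."""

    # detect all markers of the board (formerly track_board)
    aruco_detector.detect_board(frame, aruco_board, expected_markers_number)

    # draw only markers
    aruco_detector.draw_detected_markers(frame)

    # if all board corners are detected, keep this capture
    if aruco_detector.board_corners_number == expected_corners_number:

        # draw board corners to notify a capture is done
        aruco_detector.draw_board(frame)

        # append data
        aruco_camera.store_calibration_data(aruco_detector.board_corners,
                                            aruco_detector.board_corners_identifier)

    return aruco_camera.calibration_data_count
```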
diff --git a/src/argaze/utils/tobii_segment_arscene_edit.py b/src/argaze/utils/tobii_segment_arscene_edit.py
index b4f5445..b8a5745 100644
--- a/src/argaze/utils/tobii_segment_arscene_edit.py
+++ b/src/argaze/utils/tobii_segment_arscene_edit.py
@@ -137,7 +137,7 @@ def main():
last_frame_index = frame_index
last_frame = video_frame.copy()
- # Hide frame left and right borders before tracking to ignore markers outside focus area
+ # Hide frame left and right borders before detection to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - 1/6)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
@@ -159,7 +159,7 @@ def main():
# Edit fake gaze position from pointer
gaze_position = GazeFeatures.GazePosition(pointer, precision=2)
- # Copy video frame to edit visualisation on it with out disrupting aruco tracking
+ # Copy video frame to edit visualisation on it without disrupting aruco detection
visu_frame = video_frame.copy()
try:
@@ -173,20 +173,20 @@ def main():
cv.rectangle(visu_frame.matrix, (0, 100), (550, 150), (127, 127, 127), -1)
cv.putText(visu_frame.matrix, str(e), (20, 130), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
- # Draw tracked markers
- ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+ # Draw detected markers
+ ar_scene.aruco_detector.draw_detected_markers(visu_frame.matrix)
# Draw scene projection
aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
# Project 3D scene on each video frame and the visualisation frame
- if len(ar_scene.aruco_tracker.tracked_markers) > 0:
+ if len(ar_scene.aruco_detector.detected_markers) > 0:
# Write detected marker ids
- cv.putText(visu_frame.matrix, f'Detected markers: {list(ar_scene.aruco_tracker.tracked_markers.keys())}', (20, visu_frame.height - 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(visu_frame.matrix, f'Detected markers: {list(ar_scene.aruco_detector.detected_markers.keys())}', (20, visu_frame.height - 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
# Update selected marker id by left_clicking on marker
- for (marker_id, marker) in ar_scene.aruco_tracker.tracked_markers.items():
+ for (marker_id, marker) in ar_scene.aruco_detector.detected_markers.items():
marker_aoi = marker.corners.reshape(4, 2).view(AOIFeatures.AreaOfInterest)
@@ -198,7 +198,7 @@ def main():
try:
# Retrieve marker index
- selected_marker = ar_scene.aruco_tracker.tracked_markers[selected_marker_id]
+ selected_marker = ar_scene.aruco_detector.detected_markers[selected_marker_id]
marker_x, marker_y = selected_marker.center
'''
@@ -358,7 +358,7 @@ def main():
if key_pressed == 27:
break
- # Reload tracker configuration on 'c' key
+ # Reload detector configuration on 'c' key
if key_pressed == 99:
load_configuration_file()
force_update = True
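The marker-selection logic above boils down to viewing each detected marker's corners as an AreaOfInterest. A minimal sketch of that pattern, assuming detected_markers and AOIFeatures.AreaOfInterest behave as shown in this hunk (the helper function itself is not from the repository):
```
from argaze.AreaOfInterest import AOIFeatures


def detected_marker_aois(aruco_detector):
    """Map each detected marker identifier to an AreaOfInterest built from its 4 corners."""

    marker_aois = {}

    for marker_id, marker in aruco_detector.detected_markers.items():

        # view the 4 (x, y) corner points as an AreaOfInterest
        marker_aois[marker_id] = marker.corners.reshape(4, 2).view(AOIFeatures.AreaOfInterest)

    return marker_aois
```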
diff --git a/src/argaze/utils/tobii_segment_arscene_export.py b/src/argaze/utils/tobii_segment_arscene_export.py
index c4e45ea..cc180a2 100644
--- a/src/argaze/utils/tobii_segment_arscene_export.py
+++ b/src/argaze/utils/tobii_segment_arscene_export.py
@@ -16,7 +16,7 @@ import numpy
def main():
"""
- Track ArUcoScene into Tobii Glasses Pro 2 camera video record.
+ Detect ArUcoScene in Tobii Glasses Pro 2 camera video record.
"""
# Manage arguments
@@ -24,7 +24,7 @@ def main():
parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
parser.add_argument('-t', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)')
parser.add_argument('-p', '--project_path', metavar='ARGAZE_PROJECT', type=str, default=None, help='json argaze project filepath')
- parser.add_argument('-b', '--borders', metavar='BORDERS', type=float, default=16.666, help='define left and right borders mask (%) to not track aruco out of these borders')
+ parser.add_argument('-b', '--borders', metavar='BORDERS', type=float, default=16.666, help='define left and right borders mask (%) so ArUco markers are not detected outside these borders')
parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
@@ -127,7 +127,7 @@ def main():
# TODO: Get video fps to adapt
next_video_ts = video_ts + 40000
- # Copy video frame to edit visualisation on it without disrupting aruco tracking
+ # Copy video frame to edit visualisation on it without disrupting aruco detection
visu_frame = video_frame.copy()
# Prepare to store projected AOI
@@ -141,7 +141,7 @@ def main():
projected_aois['offset'] = nearest_vts.offset
- # Hide frame left and right borders before tracking to ignore markers outside focus area
+ # Hide frame left and right borders before detection to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width*args.borders/100), int(video_frame.height)), (0, 0, 0), -1)
cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - args.borders/100)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
@@ -156,8 +156,8 @@ def main():
projected_aois[aoi_name] = numpy.rint(aoi_scene_projection[aoi_name]).astype(int)
- # Draw tracked markers
- ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+ # Draw detected markers
+ ar_scene.aruco_detector.draw_detected_markers(visu_frame.matrix)
# Draw AOI
aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
@@ -165,8 +165,8 @@ def main():
# Catch exceptions raised by estimate_pose and project methods
except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:
- # Draw tracked markers
- ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+ # Draw detected markers
+ ar_scene.aruco_detector.draw_detected_markers(visu_frame.matrix)
if str(e) == 'Unconsistent marker poses':
@@ -264,12 +264,12 @@ def main():
# End output video file
output_video.close()
- # Print aruco tracking metrics
- print('\n\nAruco marker tracking metrics')
- try_count, tracked_counts = ar_scene.aruco_tracker.track_metrics
+ # Print aruco detection metrics
+ print('\n\nAruco marker detection metrics')
+ try_count, detected_counts = ar_scene.aruco_detector.detection_metrics
- for marker_id, tracked_count in tracked_counts.items():
- print(f'\tMarkers {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
+ for marker_id, detected_count in detected_counts.items():
+ print(f'\tMarker {marker_id} has been detected in {detected_count} / {try_count} frames ({round(100 * detected_count / try_count, 2)} %)')
# Export aruco aoi data
ts_offset_aois.to_json(aoi_json_filepath)
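The renamed detection_metrics property yields a try count plus a per-marker detection count. A small reporting sketch, assuming it behaves exactly as used in the script above (the standalone function is illustrative only):
```
def print_detection_metrics(aruco_detector):
    """Print how often each ArUco marker was detected over the processed frames."""

    try_count, detected_counts = aruco_detector.detection_metrics

    for marker_id, detected_count in detected_counts.items():

        ratio = round(100 * detected_count / try_count, 2)
        print(f'\tMarker {marker_id} has been detected in {detected_count} / {try_count} frames ({ratio} %)')
```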
diff --git a/src/argaze/utils/tobii_stream_arscene_display.py b/src/argaze/utils/tobii_stream_arscene_display.py
index 336de1f..dabaa9b 100644
--- a/src/argaze/utils/tobii_stream_arscene_display.py
+++ b/src/argaze/utils/tobii_stream_arscene_display.py
@@ -14,14 +14,14 @@ import numpy
def main():
"""
- Track ArUcoScene into Tobii Glasses Pro 2 camera video stream.
+ Detect ArUcoScene in Tobii Glasses Pro 2 camera video stream.
"""
# Manage arguments
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default=None, help='tobii glasses ip')
parser.add_argument('-p', '--project_path', metavar='ARGAZE_PROJECT', type=str, default=None, help='json argaze project filepath')
- parser.add_argument('-b', '--borders', metavar='BORDERS', type=float, default=16.666, help='define left and right borders mask (%) to not track aruco out of these borders')
+ parser.add_argument('-b', '--borders', metavar='BORDERS', type=float, default=16.666, help='define left and right borders mask (%) so ArUco markers are not detected outside these borders')
parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
@@ -73,10 +73,10 @@ def main():
# Read video stream
video_ts, video_frame = tobii_video_stream.read()
- # Copy video frame to edit visualisation on it without disrupting aruco tracking
+ # Copy video frame to edit visualisation on it without disrupting aruco detection
visu_frame = video_frame.copy()
- # Hide frame left and right borders before tracking to ignore markers outside focus area
+ # Hide frame left and right borders before detection to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width*args.borders/100), int(video_frame.height)), (0, 0, 0), -1)
cv.rectangle(video_frame.matrix, (int(video_frame.width*(1 - args.borders/100)), 0), (int(video_frame.width), int(video_frame.height)), (0, 0, 0), -1)
@@ -98,14 +98,14 @@ def main():
# Draw AOI
aoi_scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
- # Draw tracked markers
- ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+ # Draw detected markers
+ ar_scene.aruco_detector.draw_detected_markers(visu_frame.matrix)
# Catch exceptions raised by estimate_pose and project methods
except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:
- # Draw tracked markers
- ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
+ # Draw detected markers
+ ar_scene.aruco_detector.draw_detected_markers(visu_frame.matrix)
cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
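The border mask applied before detection in both the segment and streaming scripts is plain OpenCV rectangle filling. A minimal sketch, assuming a frame object exposing matrix, width and height like the Tobii video frames above (the helper name is not from the repository):
```
import cv2 as cv


def mask_borders(frame, borders_percent=16.666):
    """Hide frame left and right borders before detection to ignore markers outside the focus area."""

    left = int(frame.width * borders_percent / 100)
    right = int(frame.width * (1 - borders_percent / 100))

    # fill both border bands with black so no markers are detected there
    cv.rectangle(frame.matrix, (0, 0), (left, int(frame.height)), (0, 0, 0), -1)
    cv.rectangle(frame.matrix, (right, 0), (int(frame.width), int(frame.height)), (0, 0, 0), -1)
```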