author     Théo de la Hogue    2023-09-04 22:03:46 +0200
committer  Théo de la Hogue    2023-09-04 22:03:46 +0200
commit     f4d60a6cd1e1d8810cf4b9ad7f63a8718069f73a (patch)
tree       e8b40f39f80bf7dab027748da9a34b0732a52e76
parent     9215d4724912e3eb4a5673ba87cc736abe58be40 (diff)
First work on new AR pipeline architecture. Class renaming and replacing.
-rw-r--r--  docs/user_guide/ar_environment/environment_exploitation.md  8
-rw-r--r--  docs/user_guide/ar_environment/environment_setup.md  10
-rw-r--r--  docs/user_guide/aruco_markers/introduction.md  2
-rw-r--r--  docs/user_guide/aruco_markers/markers_scene_description.md  34
-rw-r--r--  docs/user_guide/utils/demonstrations_scripts.md  2
-rw-r--r--  mkdocs.yml  6
-rw-r--r--  src/argaze.test/ArFeatures.py  75
-rw-r--r--  src/argaze.test/ArUcoMarkers/ArUcoCamera.py  74
-rw-r--r--  src/argaze.test/ArUcoMarkers/ArUcoScene.py  92
-rw-r--r--  src/argaze.test/ArUcoMarkers/utils/aruco_camera.json (renamed from src/argaze.test/utils/environment.json)  6
-rw-r--r--  src/argaze.test/ArUcoMarkers/utils/scene.obj  2
-rw-r--r--  src/argaze/ArFeatures.py  505
-rw-r--r--  src/argaze/ArUcoMarkers/ArUcoCamera.py  264
-rw-r--r--  src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py  717
-rw-r--r--  src/argaze/ArUcoMarkers/ArUcoScene.py  757
-rw-r--r--  src/argaze/ArUcoMarkers/__init__.py  2
-rw-r--r--  src/argaze/utils/aruco_markers_scene_export.py  10
-rw-r--r--  src/argaze/utils/demo_augmented_reality_run.py  45
-rw-r--r--  src/argaze/utils/demo_environment/aoi_3d_scene.obj  2
-rw-r--r--  src/argaze/utils/demo_environment/aruco_markers_group.obj (renamed from src/argaze/utils/demo_environment/aruco_scene.obj)  2
-rw-r--r--  src/argaze/utils/demo_environment/demo_augmented_reality_setup.json  82
-rw-r--r--  src/argaze/utils/demo_gaze_analysis_run.py  2
22 files changed, 1375 insertions, 1324 deletions
diff --git a/docs/user_guide/ar_environment/environment_exploitation.md b/docs/user_guide/ar_environment/environment_exploitation.md
index 28d61b9..9e4b236 100644
--- a/docs/user_guide/ar_environment/environment_exploitation.md
+++ b/docs/user_guide/ar_environment/environment_exploitation.md
@@ -1,19 +1,19 @@
Environment exploitation
========================
-Once loaded, [ArEnvironment](../../argaze.md/#argaze.ArFeatures.ArEnvironment) assets can be exploited as illustrated below:
+Once loaded, [ArCamera](../../argaze.md/#argaze.ArFeatures.ArCamera) assets can be exploited as illustrated below:
```python
# Access to AR environment ArUco detector, passing it an image where to detect ArUco markers
-ar_environment.aruco_detector.detect_markers(image)
+ar_camera.aruco_detector.detect_markers(image)
# Access to an AR environment scene
-my_first_scene = ar_environment.scenes['my first AR scene']
+my_first_scene = ar_camera.scenes['my first AR scene']
try:
# Try to estimate AR scene pose from detected markers
- tvec, rmat, consistent_markers = my_first_scene.estimate_pose(ar_environment.aruco_detector.detected_markers)
+ tvec, rmat, consistent_markers = my_first_scene.estimate_pose(ar_camera.aruco_detector.detected_markers)
# Project AR scene into camera image according to the estimated pose
# Optional visual_hfov argument is set to 160° to clip AOI scene according to a cone of vision
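The hunk above stops before the projection call. As an aside for review (not part of the patch), here is a minimal sketch of how that snippet likely continues with the renamed `ArCamera` class, based on the `ArScene.project` signature changed later in this commit; the 160° value comes from the comment above:

``` python
    # Project AR scene layers into the camera image according to the estimated pose,
    # clipping the AOI scene against a 160° horizontal field of view
    for layer_name, layer_projection in my_first_scene.project(tvec, rmat, visual_hfov=160):

        # layer_projection is the AOI 2D scene of this layer projected into the camera image
        ...
```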
diff --git a/docs/user_guide/ar_environment/environment_setup.md b/docs/user_guide/ar_environment/environment_setup.md
index f18cc61..1f26d26 100644
--- a/docs/user_guide/ar_environment/environment_setup.md
+++ b/docs/user_guide/ar_environment/environment_setup.md
@@ -1,9 +1,9 @@
Environment Setup
=================
-[ArEnvironment](../../argaze.md/#argaze.ArFeatures.ArEnvironment) setup is loaded from JSON file format.
+[ArCamera](../../argaze.md/#argaze.ArFeatures.ArCamera) setup is loaded from a JSON file.
-Each [ArEnvironment](../../argaze.md/#argaze.ArFeatures.ArEnvironment) defines a unique [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector.ArUcoDetector) dedicated to detection of markers from a specific [ArUcoMarkersDictionary](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersDictionary) and with a given size. However, it is possible to load multiple [ArScene](../../argaze.md/#argaze.ArFeatures.ArScene) into a same [ArEnvironment](../../argaze.md/#argaze.ArFeatures.ArEnvironment).
+Each [ArCamera](../../argaze.md/#argaze.ArFeatures.ArCamera) defines a unique [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector.ArUcoDetector) dedicated to the detection of markers from a specific [ArUcoMarkersDictionary](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersDictionary) and with a given size. However, it is possible to load multiple [ArScene](../../argaze.md/#argaze.ArFeatures.ArScene) into the same [ArCamera](../../argaze.md/#argaze.ArFeatures.ArCamera).
Here is a JSON environment file example where it is assumed that the mentioned .obj files are located relative to the environment file on disk.
@@ -54,13 +54,13 @@ Here is JSON environment file example where it is assumed that mentioned .obj fi
},
"scenes": {
"my first AR scene" : {
- "aruco_scene": "./first_scene/markers.obj",
+ "aruco_markers_group": "./first_scene/markers.obj",
"aoi_scene": "./first_scene/aoi.obj",
"angle_tolerance": 15.0,
"distance_tolerance": 2.54
},
"my second AR scene" : {
- "aruco_scene": "./second_scene/markers.obj",
+ "aruco_markers_group": "./second_scene/markers.obj",
"aoi_scene": "./second_scene/aoi.obj",
"angle_tolerance": 15.0,
"distance_tolerance": 2.54
@@ -73,5 +73,5 @@ Here is JSON environment file example where it is assumed that mentioned .obj fi
from argaze import ArFeatures
# Load AR environment
-ar_environment = ArFeatures.ArEnvironment.from_json('./environment.json')
+ar_camera = ArFeatures.ArCamera.from_json('./environment.json')
```
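Elsewhere in this commit the concrete loading entry point moves to the new `ArUcoMarkers.ArUcoCamera` class (see the unit test added below), while the abstract `ArFeatures.ArCamera.from_json` is left unimplemented. A hedged sketch of the equivalent call, assuming the same JSON setup file:

``` python
from argaze.ArUcoMarkers import ArUcoCamera

# Load the same JSON setup through the concrete ArUco-based camera class
aruco_camera = ArUcoCamera.ArUcoCamera.from_json('./environment.json')
```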
diff --git a/docs/user_guide/aruco_markers/introduction.md b/docs/user_guide/aruco_markers/introduction.md
index dc8d4cb..9d78de0 100644
--- a/docs/user_guide/aruco_markers/introduction.md
+++ b/docs/user_guide/aruco_markers/introduction.md
@@ -12,4 +12,4 @@ The ArGaze [ArUcoMarkers submodule](../../argaze.md/#argaze.ArUcoMarkers) eases
* [ArUcoBoard](../../argaze.md/#argaze.ArUcoMarkers.ArUcoBoard)
* [ArUcoOpticCalibrator](../../argaze.md/#argaze.ArUcoMarkers.ArUcoOpticCalibrator)
* [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector)
-* [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) \ No newline at end of file
+* [ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) \ No newline at end of file
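As a pointer for the rest of this commit, the renamed class keeps the same import pattern as before (illustrative only):

``` python
# ArUcoScene becomes ArUcoMarkersGroup in this commit
from argaze.ArUcoMarkers import ArUcoMarkersGroup
```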
diff --git a/docs/user_guide/aruco_markers/markers_scene_description.md b/docs/user_guide/aruco_markers/markers_scene_description.md
index e1cd651..c6dbf31 100644
--- a/docs/user_guide/aruco_markers/markers_scene_description.md
+++ b/docs/user_guide/aruco_markers/markers_scene_description.md
@@ -1,11 +1,11 @@
Markers scene description
=========================
-The ArGaze toolkit provides [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) class to describe where [ArUcoMarkers](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarker) are placed into a 3D model.
+The ArGaze toolkit provides the [ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) class to describe where [ArUcoMarkers](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarker) are placed in a 3D model.
-![ArUco scene](../../img/aruco_scene.png)
+![ArUco scene](../../img/aruco_markers_group.png)
-[ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) is useful to:
+[ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) is useful to:
* filter markers that belong to this predefined scene,
* check the consistency of detected markers according to the place where each marker is expected to be,
@@ -37,16 +37,16 @@ f 5//2 6//2 8//2 7//2
...
```
-Here is a sample of code to show the loading of an [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) OBJ file description:
+Here is a code sample showing how to load an [ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) OBJ file description:
``` python
-from argaze.ArUcoMarkers import ArUcoScene
+from argaze.ArUcoMarkers import ArUcoMarkersGroup
# Create an ArUco scene from a OBJ file description
-aruco_scene = ArUcoScene.ArUcoScene.from_obj('./markers.obj')
+aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj('./markers.obj')
# Print loaded marker places
-for place_id, place in aruco_scene.places.items():
+for place_id, place in aruco_markers_group.places.items():
print(f'place {place_id} for marker: ', place.marker.identifier)
print(f'place {place_id} translation: ', place.translation)
@@ -55,7 +55,7 @@ for place_id, place in aruco_scene.places.items():
### from JSON
-[ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) description can also be written in a JSON file format.
+[ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) description can also be written in a JSON file format.
``` json
{
@@ -83,13 +83,13 @@ for place_id, place in aruco_scene.places.items():
Here is a more advanced usage where an ArUco scene is built from markers detected in an image:
``` python
-from argaze.ArUcoMarkers import ArUcoScene
+from argaze.ArUcoMarkers import ArUcoMarkersGroup
# Assuming markers have been detected and their pose estimated thanks to ArUcoDetector
...
# Build ArUco scene from detected markers
-aruco_scene = ArUcoScene.ArUcoScene(aruco_detector.marker_size, aruco_detector.dictionary, aruco_detector.detected_markers)
+aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup(aruco_detector.marker_size, aruco_detector.dictionary, aruco_detector.detected_markers)
```
## Markers filtering
@@ -97,7 +97,7 @@ aruco_scene = ArUcoScene.ArUcoScene(aruco_detector.marker_size, aruco_detector.d
Considering markers are detected, here is how to filter them to consider only those which belong to the scene:
``` python
-scene_markers, remaining_markers = aruco_scene.filter_markers(aruco_detector.detected_markers)
+scene_markers, remaining_markers = aruco_markers_group.filter_markers(aruco_detector.detected_markers)
```
## Marker poses consistency
@@ -106,12 +106,12 @@ Then, scene markers poses can be validated by verifying their spatial consistenc
``` python
# Check scene markers consistency with 10° angle tolerance and 1 cm distance tolerance
-consistent_markers, unconsistent_markers, unconsistencies = aruco_scene.check_markers_consistency(scene_markers, 10, 1)
+consistent_markers, unconsistent_markers, unconsistencies = aruco_markers_group.check_markers_consistency(scene_markers, 10, 1)
```
## Scene pose estimation
-Several approaches are available to perform [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) pose estimation from markers belonging to the scene.
+Several approaches are available to perform [ArUcoMarkersGroup](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarkersGroup) pose estimation from markers belonging to the scene.
The first approach considers that scene pose can be estimated **from a single marker pose**:
@@ -120,20 +120,20 @@ The first approach considers that scene pose can be estimated **from a single ma
marker_id, marker = consistent_markers.popitem()
# Estimate scene pose from a single marker
-tvec, rmat = self.aruco_scene.estimate_pose_from_single_marker(marker)
+tvec, rmat = self.aruco_markers_group.estimate_pose_from_single_marker(marker)
```
The second approach considers that scene pose can be estimated by **averaging several marker poses**:
``` python
# Estimate scene pose from all consistent scene markers
-tvec, rmat = self.aruco_scene.estimate_pose_from_markers(consistent_markers)
+tvec, rmat = self.aruco_markers_group.estimate_pose_from_markers(consistent_markers)
```
The third approach is only available when ArUco markers are placed in such a configuration that it is possible to **define orthogonal axes**:
``` python
-tvec, rmat = self.aruco_scene.estimate_pose_from_axis_markers(origin_marker, horizontal_axis_marker, vertical_axis_marker)
+tvec, rmat = self.aruco_markers_group.estimate_pose_from_axis_markers(origin_marker, horizontal_axis_marker, vertical_axis_marker)
```
## Scene exportation
@@ -142,5 +142,5 @@ As ArUco scene can be exported to OBJ file description to import it into most 3D
``` python
# Export an ArUco scene as OBJ file description
-aruco_scene.to_obj('markers.obj')
+aruco_markers_group.to_obj('markers.obj')
```
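Putting the renamed calls of this page together, here is a hedged end-to-end sketch (not part of the patch) that loads a markers group, filters detected markers, checks their consistency and estimates the group pose; it assumes an `aruco_detector` whose markers have already been detected and pose-estimated:

``` python
from argaze.ArUcoMarkers import ArUcoMarkersGroup

# Describe where markers are placed, from an OBJ file
aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj('./markers.obj')

# Keep only the detected markers that belong to the group
scene_markers, remaining_markers = aruco_markers_group.filter_markers(aruco_detector.detected_markers)

# Validate marker poses with a 10° angle tolerance and a 1 cm distance tolerance
consistent_markers, unconsistent_markers, unconsistencies = aruco_markers_group.check_markers_consistency(scene_markers, 10, 1)

# Estimate the group pose by averaging all consistent marker poses
tvec, rmat = aruco_markers_group.estimate_pose_from_markers(consistent_markers)
```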
diff --git a/docs/user_guide/utils/demonstrations_scripts.md b/docs/user_guide/utils/demonstrations_scripts.md
index 5d2d760..4f73092 100644
--- a/docs/user_guide/utils/demonstrations_scripts.md
+++ b/docs/user_guide/utils/demonstrations_scripts.md
@@ -19,7 +19,7 @@ python ./src/argaze/utils/demo_gaze_analysis_run.py ./src/argaze/utils/demo_envi
## Augmented reality pipeline demonstration
-Load ArEnvironment from **demo_augmented_reality_setup.json** file then, detect ArUco markers into a demo video source and estimate environment pose.
+Load ArCamera from the **demo_augmented_reality_setup.json** file, then detect ArUco markers in a demo video source and estimate the environment pose.
```shell
python ./src/argaze/utils/demo_augmented_reality_run.py ./src/argaze/utils/demo_environment/demo_augmented_reality_setup.json -s ./src/argaze/utils/demo_environment/demo.mov
diff --git a/mkdocs.yml b/mkdocs.yml
index ceee03d..4681f20 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -37,9 +37,9 @@ nav:
# - user_guide/areas_of_interest/aoi_matching.md
# - user_guide/areas_of_interest/heatmap.md
# - Augmented Reality environment:
-# - user_guide/ar_environment/introduction.md
-# - user_guide/ar_environment/environment_setup.md
-# - user_guide/ar_environment/environment_exploitation.md
+# - user_guide/ar_camera/introduction.md
+# - user_guide/ar_camera/environment_setup.md
+# - user_guide/ar_camera/environment_exploitation.md
# - Gaze Analysis:
# - user_guide/gaze_analysis/introduction.md
# - user_guide/gaze_analysis/gaze_position.md
diff --git a/src/argaze.test/ArFeatures.py b/src/argaze.test/ArFeatures.py
deleted file mode 100644
index 765e9cf..0000000
--- a/src/argaze.test/ArFeatures.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-
-""" """
-
-__author__ = "Théo de la Hogue"
-__credits__ = []
-__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
-__license__ = "BSD"
-
-import unittest
-import os
-
-from argaze import ArFeatures
-
-import numpy
-
-class TestArEnvironmentClass(unittest.TestCase):
- """Test ArEnvironment class."""
-
- def test_from_json(self):
- """Test ArEnvironment creation from json file."""
-
- # Edit test environment file path
- current_directory = os.path.dirname(os.path.abspath(__file__))
- json_filepath = os.path.join(current_directory, 'utils/environment.json')
-
- # Load test environment
- ar_environment = ArFeatures.ArEnvironment.from_json(json_filepath)
-
- # Check environment meta data
- self.assertEqual(ar_environment.name, "TestEnvironment")
-
- # Check ArUco detector
- self.assertEqual(ar_environment.aruco_detector.dictionary.name, "DICT_ARUCO_ORIGINAL")
- self.assertEqual(ar_environment.aruco_detector.marker_size, 3.0)
- self.assertEqual(ar_environment.aruco_detector.parameters.cornerRefinementMethod, 3)
- self.assertEqual(ar_environment.aruco_detector.parameters.aprilTagQuadSigma, 2)
- self.assertEqual(ar_environment.aruco_detector.parameters.aprilTagDeglitch, 1)
-
- # Check ArUco detector optic parameters
- self.assertEqual(ar_environment.aruco_detector.optic_parameters.rms, 1.0)
- self.assertIsNone(numpy.testing.assert_array_equal(ar_environment.aruco_detector.optic_parameters.dimensions, [1920, 1080]))
- self.assertIsNone(numpy.testing.assert_array_equal(ar_environment.aruco_detector.optic_parameters.K, [[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]]))
- self.assertIsNone(numpy.testing.assert_array_equal(ar_environment.aruco_detector.optic_parameters.D, [-1.0, -0.5, 0.0, 0.5, 1.0]))
-
- # Check environment scenes
- self.assertEqual(len(ar_environment.scenes), 2)
- self.assertIsNone(numpy.testing.assert_array_equal(list(ar_environment.scenes.keys()), ["TestSceneA", "TestSceneB"]))
-
- # Load test scene
- ar_scene = ar_environment.scenes["TestSceneA"]
-
- # Check Aruco scene
- self.assertEqual(len(ar_scene.aruco_scene.places), 2)
- self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_scene.places[0].translation, [1, 0, 0]))
- self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_scene.places[0].rotation, [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]]))
- self.assertEqual(ar_scene.aruco_scene.places[0].marker.identifier, 0)
-
- self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_scene.places[1].translation, [0, 1, 0]))
- self.assertIsNone(numpy.testing.assert_array_almost_equal(ar_scene.aruco_scene.places[1].rotation, [[0.,0.,1.],[0., 1.,0.],[-1.,0.,0.]]))
- self.assertEqual(ar_scene.aruco_scene.places[1].marker.identifier, 1)
-
- # Check AOI scene
- self.assertEqual(len(ar_scene.aoi_scene.items()), 1)
- self.assertEqual(ar_scene.aoi_scene['Test'].points_number, 4)
- self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aoi_scene['Test'].size, [1., 1., 0.]))
-
- # Check ArScene
- self.assertEqual(ar_scene.angle_tolerance, 1.0)
- self.assertEqual(ar_scene.distance_tolerance, 2.0)
-
-
-if __name__ == '__main__':
-
- unittest.main() \ No newline at end of file
diff --git a/src/argaze.test/ArUcoMarkers/ArUcoCamera.py b/src/argaze.test/ArUcoMarkers/ArUcoCamera.py
new file mode 100644
index 0000000..6145f40
--- /dev/null
+++ b/src/argaze.test/ArUcoMarkers/ArUcoCamera.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+""" """
+
+__author__ = "Théo de la Hogue"
+__credits__ = []
+__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
+__license__ = "BSD"
+
+import unittest
+import os
+
+from argaze.ArUcoMarkers import ArUcoCamera
+
+import numpy
+
+class TestArUcoCameraClass(unittest.TestCase):
+ """Test ArUcoCamera class."""
+
+ def test_from_json(self):
+ """Test ArUcoCamera creation from json file."""
+
+ # Edit test aruco camera file path
+ current_directory = os.path.dirname(os.path.abspath(__file__))
+ json_filepath = os.path.join(current_directory, 'utils/aruco_camera.json')
+
+ # Load test aruco camera
+ aruco_camera = ArUcoCamera.ArUcoCamera.from_json(json_filepath)
+
+ # Check aruco camera meta data
+ self.assertEqual(aruco_camera.name, "TestArUcoCamera")
+
+ # Check ArUco detector
+ self.assertEqual(aruco_camera.aruco_detector.dictionary.name, "DICT_ARUCO_ORIGINAL")
+ self.assertEqual(aruco_camera.aruco_detector.marker_size, 3.0)
+ self.assertEqual(aruco_camera.aruco_detector.parameters.cornerRefinementMethod, 3)
+ self.assertEqual(aruco_camera.aruco_detector.parameters.aprilTagQuadSigma, 2)
+ self.assertEqual(aruco_camera.aruco_detector.parameters.aprilTagDeglitch, 1)
+
+ # Check ArUco detector optic parameters
+ self.assertEqual(aruco_camera.aruco_detector.optic_parameters.rms, 1.0)
+ self.assertIsNone(numpy.testing.assert_array_equal(aruco_camera.aruco_detector.optic_parameters.dimensions, [1920, 1080]))
+ self.assertIsNone(numpy.testing.assert_array_equal(aruco_camera.aruco_detector.optic_parameters.K, [[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]]))
+ self.assertIsNone(numpy.testing.assert_array_equal(aruco_camera.aruco_detector.optic_parameters.D, [-1.0, -0.5, 0.0, 0.5, 1.0]))
+
+ # Check camera scenes
+ self.assertEqual(len(aruco_camera.scenes), 2)
+ self.assertIsNone(numpy.testing.assert_array_equal(list(aruco_camera.scenes.keys()), ["TestSceneA", "TestSceneB"]))
+
+ # Load test scene
+ ar_scene = aruco_camera.scenes["TestSceneA"]
+
+ # Check Aruco scene
+ self.assertEqual(len(ar_scene.aruco_markers_group.places), 2)
+ self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_markers_group.places[0].translation, [1, 0, 0]))
+ self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_markers_group.places[0].rotation, [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]]))
+ self.assertEqual(ar_scene.aruco_markers_group.places[0].marker.identifier, 0)
+
+ self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aruco_markers_group.places[1].translation, [0, 1, 0]))
+ self.assertIsNone(numpy.testing.assert_array_almost_equal(ar_scene.aruco_markers_group.places[1].rotation, [[0.,0.,1.],[0., 1.,0.],[-1.,0.,0.]]))
+ self.assertEqual(ar_scene.aruco_markers_group.places[1].marker.identifier, 1)
+
+ # Check AOI scene
+ self.assertEqual(len(ar_scene.aoi_scene.items()), 1)
+ self.assertEqual(ar_scene.aoi_scene['Test'].points_number, 4)
+ self.assertIsNone(numpy.testing.assert_array_equal(ar_scene.aoi_scene['Test'].size, [1., 1., 0.]))
+
+ # Check ArScene
+ self.assertEqual(ar_scene.angle_tolerance, 1.0)
+ self.assertEqual(ar_scene.distance_tolerance, 2.0)
+
+if __name__ == '__main__':
+
+ unittest.main() \ No newline at end of file
diff --git a/src/argaze.test/ArUcoMarkers/ArUcoScene.py b/src/argaze.test/ArUcoMarkers/ArUcoScene.py
index f334542..628eac5 100644
--- a/src/argaze.test/ArUcoMarkers/ArUcoScene.py
+++ b/src/argaze.test/ArUcoMarkers/ArUcoScene.py
@@ -11,12 +11,12 @@ import unittest
import os
import math
-from argaze.ArUcoMarkers import ArUcoScene, ArUcoMarker
+from argaze.ArUcoMarkers import ArUcoMarkersGroup, ArUcoMarker
import cv2 as cv
import numpy
-class TestArUcoSceneClass(unittest.TestCase):
+class TestArUcoMarkersGroupClass(unittest.TestCase):
def new_from_obj(self):
@@ -25,7 +25,7 @@ class TestArUcoSceneClass(unittest.TestCase):
obj_filepath = os.path.join(current_directory, 'utils/scene.obj')
# Load file
- self.aruco_scene = ArUcoScene.ArUcoScene.from_obj(obj_filepath)
+ self.aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj(obj_filepath)
def new_from_json(self):
@@ -34,7 +34,7 @@ class TestArUcoSceneClass(unittest.TestCase):
json_filepath = os.path.join(current_directory, 'utils/scene.json')
# Load file
- self.aruco_scene = ArUcoScene.ArUcoScene.from_json(json_filepath)
+ self.aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_json(json_filepath)
def setup_markers(self):
@@ -47,56 +47,56 @@ class TestArUcoSceneClass(unittest.TestCase):
}
# Prepare scene markers and remaining markers
- self.scene_markers, self.remaining_markers = self.aruco_scene.filter_markers(self.detected_markers)
+ self.scene_markers, self.remaining_markers = self.aruco_markers_group.filter_markers(self.detected_markers)
def test_new_from_obj(self):
- """Test ArUcoScene creation."""
+ """Test ArUcoMarkersGroup creation."""
self.new_from_obj()
self.setup_markers()
- # Check ArUcoScene creation
- self.assertEqual(len(self.aruco_scene.places), 3)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.identifiers, [0, 1, 2]))
- self.assertEqual(self.aruco_scene.marker_size, 1.)
+ # Check ArUcoMarkersGroup creation
+ self.assertEqual(len(self.aruco_markers_group.places), 3)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.identifiers, [0, 1, 2]))
+ self.assertEqual(self.aruco_markers_group.marker_size, 1.)
- self.assertEqual(self.aruco_scene.places[0].marker.identifier, 0)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[0].translation, [0., 0., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[0].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[0].marker.identifier, 0)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[0].translation, [0., 0., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[0].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
- self.assertEqual(self.aruco_scene.places[1].marker.identifier, 1)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[1].translation, [10., 10., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[1].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[1].marker.identifier, 1)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[1].translation, [10., 10., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[1].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
- self.assertEqual(self.aruco_scene.places[2].marker.identifier, 2)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[2].translation, [0., 10., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[2].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[2].marker.identifier, 2)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[2].translation, [0., 10., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[2].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
def test_new_from_json(self):
- """Test ArUcoScene creation."""
+ """Test ArUcoMarkersGroup creation."""
self.new_from_json()
self.setup_markers()
- # Check ArUcoScene creation
- self.assertEqual(len(self.aruco_scene.places), 3)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.identifiers, [0, 1, 2]))
- self.assertEqual(self.aruco_scene.marker_size, 1.)
+ # Check ArUcoMarkersGroup creation
+ self.assertEqual(len(self.aruco_markers_group.places), 3)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.identifiers, [0, 1, 2]))
+ self.assertEqual(self.aruco_markers_group.marker_size, 1.)
- self.assertEqual(self.aruco_scene.places[0].marker.identifier, 0)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[0].translation, [0., 0., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[0].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[0].marker.identifier, 0)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[0].translation, [0., 0., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[0].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
- self.assertEqual(self.aruco_scene.places[1].marker.identifier, 1)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[1].translation, [10., 10., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[1].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[1].marker.identifier, 1)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[1].translation, [10., 10., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[1].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
- self.assertEqual(self.aruco_scene.places[2].marker.identifier, 2)
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[2].translation, [0., 10., 0.]))
- self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_scene.places[2].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
+ self.assertEqual(self.aruco_markers_group.places[2].marker.identifier, 2)
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[2].translation, [0., 10., 0.]))
+ self.assertIsNone(numpy.testing.assert_array_equal(self.aruco_markers_group.places[2].rotation, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
def test_filter_markers(self):
- """Test ArUcoScene markers filtering."""
+ """Test ArUcoMarkersGroup markers filtering."""
self.new_from_obj()
self.setup_markers()
@@ -105,11 +105,11 @@ class TestArUcoSceneClass(unittest.TestCase):
self.assertEqual(len(self.scene_markers), 3)
self.assertEqual(len(self.remaining_markers), 1)
- self.assertIsNone(numpy.testing.assert_array_equal(list(self.scene_markers.keys()), self.aruco_scene.identifiers))
+ self.assertIsNone(numpy.testing.assert_array_equal(list(self.scene_markers.keys()), self.aruco_markers_group.identifiers))
self.assertIsNone(numpy.testing.assert_array_equal(list(self.remaining_markers.keys()), [3]))
def test_check_markers_consistency(self):
- """Test ArUcoScene markers consistency checking."""
+ """Test ArUcoMarkersGroup markers consistency checking."""
self.new_from_obj()
self.setup_markers()
@@ -125,7 +125,7 @@ class TestArUcoSceneClass(unittest.TestCase):
self.scene_markers[2].rotation = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
# Check consistency
- consistent_markers, unconsistent_markers, unconsistencies = self.aruco_scene.check_markers_consistency(self.scene_markers, 1, 1)
+ consistent_markers, unconsistent_markers, unconsistencies = self.aruco_markers_group.check_markers_consistency(self.scene_markers, 1, 1)
# Check consistent markers, unconsistent markers and unconsistencies
self.assertEqual(len(consistent_markers), 3)
@@ -133,13 +133,13 @@ class TestArUcoSceneClass(unittest.TestCase):
self.assertEqual(len(unconsistencies['rotation']), 0)
self.assertEqual(len(unconsistencies['translation']), 0)
- self.assertIsNone(numpy.testing.assert_array_equal(list(consistent_markers.keys()), self.aruco_scene.identifiers))
+ self.assertIsNone(numpy.testing.assert_array_equal(list(consistent_markers.keys()), self.aruco_markers_group.identifiers))
# Edit unconsistent marker poses
self.scene_markers[2].translation = numpy.array([5., 15., 5.])
# Check consistency
- consistent_markers, unconsistent_markers, unconsistencies = self.aruco_scene.check_markers_consistency(self.scene_markers, 1, 1)
+ consistent_markers, unconsistent_markers, unconsistencies = self.aruco_markers_group.check_markers_consistency(self.scene_markers, 1, 1)
# Check consistent markers, unconsistent markers and unconsistencies
self.assertEqual(len(consistent_markers), 2)
@@ -153,7 +153,7 @@ class TestArUcoSceneClass(unittest.TestCase):
self.assertIsNone(numpy.testing.assert_array_equal(list(unconsistencies['translation']['1/2'].keys()), ['current', 'expected']))
def test_estimate_pose_from_single_marker(self):
- """Test ArUcoScene pose estimation from single marker."""
+ """Test ArUcoMarkersGroup pose estimation from single marker."""
self.new_from_obj()
self.setup_markers()
@@ -163,13 +163,13 @@ class TestArUcoSceneClass(unittest.TestCase):
self.scene_markers[0].rotation = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
# Estimate pose
- tvec, rmat = self.aruco_scene.estimate_pose_from_single_marker(self.scene_markers[0])
+ tvec, rmat = self.aruco_markers_group.estimate_pose_from_single_marker(self.scene_markers[0])
self.assertIsNone(numpy.testing.assert_array_equal(tvec, [1., 1., 5.]))
self.assertIsNone(numpy.testing.assert_array_equal(rmat, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
def test_estimate_pose_from_markers(self):
- """Test ArUcoScene pose estimation from markers."""
+ """Test ArUcoMarkersGroup pose estimation from markers."""
self.new_from_obj()
self.setup_markers()
@@ -185,14 +185,14 @@ class TestArUcoSceneClass(unittest.TestCase):
self.scene_markers[2].rotation = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
# Estimate pose
- tvec, rmat = self.aruco_scene.estimate_pose_from_markers(self.scene_markers)
+ tvec, rmat = self.aruco_markers_group.estimate_pose_from_markers(self.scene_markers)
self.assertIsNone(numpy.testing.assert_array_equal(tvec, [1., 1., 5.]))
self.assertIsNone(numpy.testing.assert_array_equal(rmat, [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]))
- @unittest.skip("ArUcoScene estimate_pose_from_axis_markers method is broken.")
+ @unittest.skip("ArUcoMarkersGroup estimate_pose_from_axis_markers method is broken.")
def test_estimate_pose_from_axis_markers(self):
- """Test ArUcoScene pose estimation from axis markers."""
+ """Test ArUcoMarkersGroup pose estimation from axis markers."""
self.new_from_obj()
self.setup_markers()
@@ -208,7 +208,7 @@ class TestArUcoSceneClass(unittest.TestCase):
self.scene_markers[2].rotation = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
# Estimate pose
- tvec, rmat = self.aruco_scene.estimate_pose_from_axis_markers(self.scene_markers[2], self.scene_markers[1], self.scene_markers[0])
+ tvec, rmat = self.aruco_markers_group.estimate_pose_from_axis_markers(self.scene_markers[2], self.scene_markers[1], self.scene_markers[0])
self.assertIsNone(numpy.testing.assert_array_equal(tvec, [1., 1., 5.]))
self.assertIsNone(numpy.testing.assert_array_equal(rmat, [[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]]))
diff --git a/src/argaze.test/utils/environment.json b/src/argaze.test/ArUcoMarkers/utils/aruco_camera.json
index df1c771..7648916 100644
--- a/src/argaze.test/utils/environment.json
+++ b/src/argaze.test/ArUcoMarkers/utils/aruco_camera.json
@@ -1,5 +1,5 @@
{
- "name": "TestEnvironment",
+ "name": "TestArUcoCamera",
"aruco_detector": {
"dictionary": {
"name": "DICT_ARUCO_ORIGINAL"
@@ -44,7 +44,7 @@
},
"scenes": {
"TestSceneA" : {
- "aruco_scene": {
+ "aruco_markers_group": {
"marker_size": 3.0,
"dictionary": {
"name": "DICT_ARUCO_ORIGINAL"
@@ -65,7 +65,7 @@
"distance_tolerance": 2.0
},
"TestSceneB" : {
- "aruco_scene": {
+ "aruco_markers_group": {
"marker_size": 3.0,
"dictionary": {
"name": "DICT_ARUCO_ORIGINAL"
diff --git a/src/argaze.test/ArUcoMarkers/utils/scene.obj b/src/argaze.test/ArUcoMarkers/utils/scene.obj
index 16c22a0..c233da2 100644
--- a/src/argaze.test/ArUcoMarkers/utils/scene.obj
+++ b/src/argaze.test/ArUcoMarkers/utils/scene.obj
@@ -1,4 +1,4 @@
-# .OBJ file for ArUcoScene unitary test
+# .OBJ file for ArUcoMarkersGroup unitary test
o DICT_ARUCO_ORIGINAL#0_Marker
v -0.500000 -0.500000 0.000000
v 0.500000 -0.500000 0.000000
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index f68fe12..b7ac48c 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-"""Manage AR environement assets."""
+"""ArGaze pipeline assets."""
__author__ = "Théo de la Hogue"
__credits__ = []
@@ -17,7 +17,6 @@ import threading
import time
from argaze import DataStructures, GazeFeatures
-from argaze.ArUcoMarkers import *
from argaze.AreaOfInterest import *
from argaze.GazeAnalysis import *
@@ -33,7 +32,7 @@ ArFrameType = TypeVar('ArFrame', bound="ArFrame")
ArSceneType = TypeVar('ArScene', bound="ArScene")
# Type definition for type annotation convenience
-ArEnvironmentType = TypeVar('ArEnvironment', bound="ArEnvironment")
+ArCameraType = TypeVar('ArCamera', bound="ArCamera")
# Type definition for type annotation convenience
class PoseEstimationFailed(Exception):
@@ -49,7 +48,7 @@ class PoseEstimationFailed(Exception):
class SceneProjectionFailed(Exception):
"""
- Exception raised by ArEnvironment detect_and_project method when the scene can't be projected.
+ Exception raised by ArCamera detect_and_project method when the scene can't be projected.
"""
def __init__(self, message):
@@ -567,7 +566,7 @@ class ArFrame():
scan_path: GazeFeatures.ScanPath = field(default_factory=GazeFeatures.ScanPath)
scan_path_analyzers: dict = field(default_factory=dict)
heatmap: AOIFeatures.Heatmap = field(default_factory=AOIFeatures.Heatmap)
- background: numpy.array = field(default_factory=numpy.array)
+ background: numpy.array = field(default_factory=lambda : numpy.array([]))
layers: dict = field(default_factory=dict)
log: bool = field(default=False)
image_parameters: dict = field(default_factory=DEFAULT_ARFRAME_IMAGE_PARAMETERS)
@@ -1016,11 +1015,19 @@ class ArFrame():
draw_gaze_position: [GazeFeatures.GazePosition.draw](argaze.md/#argaze.GazeFeatures.GazePosition.draw) parameters (if None, no gaze position is drawn)
"""
+ print('type', type(self))
+
+ print('ArFrame.image 1')
+
# Use image_parameters attribute if no parameters
if background_weight is None and heatmap_weight is None and draw_scan_path is None and draw_layers is None and draw_gaze_position is None:
+ print('ArFrame.image 2')
+
return self.image(**self.image_parameters)
+ print('ArFrame.image 3')
+
# Lock frame exploitation
self.__look_lock.acquire()
@@ -1066,38 +1073,30 @@ class ArFrame():
# Unlock frame exploitation
self.__look_lock.release()
+ print('ArFrame.image', image.shape)
+
return image
@dataclass
class ArScene():
"""
- Define an Augmented Reality scene with ArUcoMarkers, ArLayers and ArFrames inside.
+ Define an abstract Augmented Reality scene with ArLayers and ArFrames inside.
Parameters:
name: name of the scene
- aruco_scene: ArUco markers 3D scene description used to estimate scene pose from detected markers: see [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function below.
-
layers: dictionary of ArLayers to project once the pose is estimated: see [project][argaze.ArFeatures.ArScene.project] function below.
frames: dictionary to ArFrames to project once the pose is estimated: see [project][argaze.ArFeatures.ArScene.project] function below.
- aruco_axis: Optional dictionary to define orthogonal axis where each axis is defined by list of 3 markers identifier (first is origin). \
- This pose estimation strategy is used by [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function when at least 3 markers are detected.
-
- aruco_aoi: Optional dictionary of AOI defined by list of markers identifier and markers corners index tuples: see [build_aruco_aoi_scene][argaze.ArFeatures.ArScene.build_aruco_aoi_scene] function below.
-
angle_tolerance: Optional angle error tolerance to validate marker pose in degree used into [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function.
distance_tolerance: Optional distance error tolerance to validate marker pose in centimeter used into [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function.
"""
name: str
- aruco_scene: ArUcoScene.ArUcoScene = field(default_factory=ArUcoScene.ArUcoScene)
layers: dict = field(default_factory=dict)
frames: dict = field(default_factory=dict)
- aruco_axis: dict = field(default_factory=dict)
- aruco_aoi: dict = field(default_factory=dict)
angle_tolerance: float = field(default=0.)
distance_tolerance: float = field(default=0.)
@@ -1130,7 +1129,6 @@ class ArScene():
"""
output = f'parent:\n{self.parent.name}\n'
- output += f'ArUcoScene:\n{self.aruco_scene}\n'
if len(self.layers):
output += f'ArLayers:\n'
@@ -1157,8 +1155,15 @@ class ArScene():
self.__parent = parent
@classmethod
- def from_dict(self, scene_data, working_directory: str = None) -> ArSceneType:
+ def from_dict(self, scene_data: dict, working_directory: str = None) -> ArSceneType:
+ """
+ Load ArScene from dictionary.
+ Parameters:
+ scene_data: dictionary
+ working_directory: folder path where to load files when a dictionary value is a relative filepath.
+ """
+
# Load name
try:
@@ -1168,27 +1173,6 @@ class ArScene():
new_scene_name = None
- # Load aruco scene
- try:
-
- # Check aruco_scene value type
- aruco_scene_value = scene_data.pop('aruco_scene')
-
- # str: relative path to .obj file
- if type(aruco_scene_value) == str:
-
- aruco_scene_value = os.path.join(working_directory, aruco_scene_value)
- new_aruco_scene = ArUcoScene.ArUcoScene.from_obj(aruco_scene_value)
-
- # dict:
- else:
-
- new_aruco_scene = ArUcoScene.ArUcoScene(**aruco_scene_value)
-
- except KeyError:
-
- new_aruco_scene = None
-
# Load layers
new_layers = {}
@@ -1272,70 +1256,20 @@ class ArScene():
pass
- return ArScene(new_scene_name, new_aruco_scene, new_layers, new_frames, **scene_data)
+ return ArScene(new_scene_name, new_layers, new_frames, **scene_data)
- def estimate_pose(self, detected_markers) -> Tuple[numpy.array, numpy.array, str, dict]:
- """Estimate scene pose from detected ArUco markers.
+ def estimate_pose(self, detected_features) -> Tuple[numpy.array, numpy.array]:
+ """Define abstract estimate scene pose method.
+
+ Parameters:
+ detected_features: any features detected by parent ArCamera that will help in scene pose estimation.
Returns:
- scene translation vector
- scene rotation matrix
- pose estimation strategy
- dict of markers used to estimate the pose
+ tvec: scene translation vector
+ rvec: scene rotation matrix
"""
- # Pose estimation fails when no marker is detected
- if len(detected_markers) == 0:
-
- raise PoseEstimationFailed('No marker detected')
-
- scene_markers, _ = self.aruco_scene.filter_markers(detected_markers)
-
- # Pose estimation fails when no marker belongs to the scene
- if len(scene_markers) == 0:
-
- raise PoseEstimationFailed('No marker belongs to the scene')
-
- # Estimate scene pose from unique marker transformations
- elif len(scene_markers) == 1:
-
- marker_id, marker = scene_markers.popitem()
- tvec, rmat = self.aruco_scene.estimate_pose_from_single_marker(marker)
-
- return tvec, rmat, 'estimate_pose_from_single_marker', {marker_id: marker}
-
- # Try to estimate scene pose from 3 markers defining an orthogonal axis
- elif len(scene_markers) >= 3 and len(self.aruco_axis) > 0:
-
- for axis_name, axis_markers in self.aruco_axis.items():
-
- try:
-
- origin_marker = scene_markers[axis_markers['origin_marker']]
- horizontal_axis_marker = scene_markers[axis_markers['horizontal_axis_marker']]
- vertical_axis_marker = scene_markers[axis_markers['vertical_axis_marker']]
-
- tvec, rmat = self.aruco_scene.estimate_pose_from_axis_markers(origin_marker, horizontal_axis_marker, vertical_axis_marker)
-
- return tvec, rmat, 'estimate_pose_from_axis_markers', {origin_marker.identifier: origin_marker, horizontal_axis_marker.identifier: horizontal_axis_marker, vertical_axis_marker.identifier: vertical_axis_marker}
-
- except:
- pass
-
- raise PoseEstimationFailed('No marker axis')
-
- # Otherwise, check markers consistency
- consistent_markers, unconsistent_markers, unconsistencies = self.aruco_scene.check_markers_consistency(scene_markers, self.angle_tolerance, self.distance_tolerance)
-
- # Pose estimation fails when no marker passes consistency checking
- if len(consistent_markers) == 0:
-
- raise PoseEstimationFailed('Unconsistent marker poses', unconsistencies)
-
- # Otherwise, estimate scene pose from all consistent markers pose
- tvec, rmat = self.aruco_scene.estimate_pose_from_markers(consistent_markers)
-
- return tvec, rmat, 'estimate_pose_from_markers', consistent_markers
+ raise NotImplementedError('estimate_pose() method not implemented')
def project(self, tvec: numpy.array, rvec: numpy.array, visual_hfov: float = 0.) -> Tuple[str, AOI2DScene.AOI2DScene]:
"""Project layers according estimated pose and optional horizontal field of view clipping angle.
@@ -1374,52 +1308,6 @@ class ArScene():
# Project layer aoi scene
yield name, aoi_scene_copy.project(tvec, rvec, self.parent.aruco_detector.optic_parameters.K)
- def build_aruco_aoi_scene(self, detected_markers) -> AOI2DScene.AOI2DScene:
- """
- Build AOI scene from detected ArUco markers as defined in aruco_aoi dictionary.
-
- Returns:
- aoi_2d_scene: built AOI 2D scene
- """
-
- # ArUco aoi must be defined
- assert(self.aruco_aoi)
-
- # AOI projection fails when no marker is detected
- if len(detected_markers) == 0:
-
- raise SceneProjectionFailed('No marker detected')
-
- aruco_aoi_scene = {}
-
- for aruco_aoi_name, aoi in self.aruco_aoi.items():
-
- # Each aoi's corner is defined by a marker's corner
- aoi_corners = []
- for corner in ["upper_left_corner", "upper_right_corner", "lower_right_corner", "lower_left_corner"]:
-
- marker_identifier = aoi[corner]["marker_identifier"]
-
- try:
-
- aoi_corners.append(detected_markers[marker_identifier].corners[0][aoi[corner]["marker_corner_index"]])
-
- except Exception as e:
-
- raise SceneProjectionFailed(f'Missing marker #{e} to build ArUco AOI scene')
-
- aruco_aoi_scene[aruco_aoi_name] = AOIFeatures.AreaOfInterest(aoi_corners)
-
- # Then each inner aoi is projected from the current aruco aoi
- for inner_aoi_name, inner_aoi in self.aoi_3d_scene.items():
-
- if aruco_aoi_name != inner_aoi_name:
-
- aoi_corners = [numpy.array(aruco_aoi_scene[aruco_aoi_name].outter_axis(inner)) for inner in self.__orthogonal_projection_cache[inner_aoi_name]]
- aruco_aoi_scene[inner_aoi_name] = AOIFeatures.AreaOfInterest(aoi_corners)
-
- return AOI2DScene.AOI2DScene(aruco_aoi_scene)
-
def draw_axis(self, image: numpy.array):
"""
Draw scene axis into image.
@@ -1428,212 +1316,95 @@ class ArScene():
image: where to draw
"""
- self.aruco_scene.draw_axis(image, self.parent.aruco_detector.optic_parameters.K, self.parent.aruco_detector.optic_parameters.D)
-
- def draw_places(self, image: numpy.array):
- """
- Draw scene places into image.
-
- Parameters:
- image: where to draw
- """
-
- self.aruco_scene.draw_places(image, self.parent.aruco_detector.optic_parameters.K, self.parent.aruco_detector.optic_parameters.D)
-
-# Define default ArEnvironment image_paremeters values
-DEFAULT_ARENVIRONMENT_IMAGE_PARAMETERS = {
- "draw_detected_markers": {
- "color": (0, 255, 0),
- "draw_axes": {
- "thickness": 3
- }
- }
-}
+ raise NotImplementedError('draw_axis() method not implemented')
@dataclass
-class ArEnvironment():
+class ArCamera(ArFrame):
"""
- Define Augmented Reality environment based on ArUco marker detection.
+ Define an abstract Augmented Reality camera as an ArFrame with ArScenes inside.
Parameters:
- name: environment name
- aruco_detector: ArUco marker detector
- camera_frame: where to project scenes
- scenes: all environment scenes
+ scenes: all scenes to project into camera frame
"""
- name: str
- aruco_detector: ArUcoDetector.ArUcoDetector = field(default_factory=ArUcoDetector.ArUcoDetector)
- camera_frame: ArFrame = field(default_factory=ArFrame)
scenes: dict = field(default_factory=dict)
- image_parameters: dict = field(default_factory=DEFAULT_ARENVIRONMENT_IMAGE_PARAMETERS)
def __post_init__(self):
- # Setup camera frame parent attribute
- if self.camera_frame is not None:
-
- self.camera_frame.parent = self
+ # Init ArFrame
+ super().__post_init__()
# Setup scenes parent attribute
for name, scene in self.scenes.items():
scene.parent = self
- # Init a lock to share AOI scene projections into camera frame between multiple threads
- self.__camera_frame_lock = threading.Lock()
-
- # Define public timestamp buffer to store ignored gaze positions
- self.ignored_gaze_positions = GazeFeatures.TimeStampedGazePositions()
-
- @classmethod
- def from_dict(self, environment_data, working_directory: str = None) -> ArEnvironmentType:
-
- new_environment_name = environment_data.pop('name')
-
- try:
- new_detector_data = environment_data.pop('aruco_detector')
-
- new_aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(**new_detector_data.pop('dictionary'))
- new_marker_size = new_detector_data.pop('marker_size')
-
- # Check optic_parameters value type
- optic_parameters_value = new_detector_data.pop('optic_parameters')
-
- # str: relative path to .json file
- if type(optic_parameters_value) == str:
-
- optic_parameters_value = os.path.join(working_directory, optic_parameters_value)
- new_optic_parameters = ArUcoOpticCalibrator.OpticParameters.from_json(optic_parameters_value)
-
- # dict:
- else:
-
- new_optic_parameters = ArUcoOpticCalibrator.OpticParameters(**optic_parameters_value)
-
- # Check detector parameters value type
- detector_parameters_value = new_detector_data.pop('parameters')
-
- # str: relative path to .json file
- if type(detector_parameters_value) == str:
-
- detector_parameters_value = os.path.join(working_directory, detector_parameters_value)
- new_aruco_detector_parameters = ArUcoDetector.DetectorParameters.from_json(detector_parameters_value)
-
- # dict:
- else:
-
- new_aruco_detector_parameters = ArUcoDetector.DetectorParameters(**detector_parameters_value)
-
- new_aruco_detector = ArUcoDetector.ArUcoDetector(new_aruco_dictionary, new_marker_size, new_optic_parameters, new_aruco_detector_parameters)
-
- except KeyError:
-
- new_aruco_detector = None
-
- # Load camera frame as large as aruco dectector optic parameters
- try:
-
- camera_frame_data = environment_data.pop('camera_frame')
-
- # Create camera frame
- new_camera_frame = ArFrame.from_dict(camera_frame_data, working_directory)
-
- # Setup camera frame
- new_camera_frame.name = new_environment_name
- new_camera_frame.size = new_optic_parameters.dimensions
- new_camera_frame.background = numpy.zeros((new_optic_parameters.dimensions[1], new_optic_parameters.dimensions[0], 3)).astype(numpy.uint8)
-
- except KeyError:
-
- new_camera_frame = None
-
- # Build scenes
- new_scenes = {}
- for scene_name, scene_data in environment_data.pop('scenes').items():
-
- # Append name
- scene_data['name'] = scene_name
-
- # Create new scene
- new_scene = ArScene.from_dict(scene_data, working_directory)
-
- # Append new scene
- new_scenes[scene_name] = new_scene
-
- # Setup expected aoi of each camera frame layer aoi scan path with the aoi of corresponding scene layer
- if new_camera_frame is not None:
-
- for camera_frame_layer_name, camera_frame_layer in new_camera_frame.layers.items():
+ # Setup expected aoi of each layer aoi scan path with the aoi of corresponding scene layer
+ for layer_name, layer in self.layers.items():
- if camera_frame_layer.aoi_scan_path is not None:
+ if layer.aoi_scan_path is not None:
- all_aoi_list = []
+ all_aoi_list = []
- for scene_name, scene in new_scenes.items():
+ for scene_name, scene in self.scenes.items():
- try:
+ try:
- scene_layer = scene.layers[camera_frame_layer_name]
+ scene_layer = scene.layers[layer_name]
- all_aoi_list.extend(list(scene_layer.aoi_scene.keys()))
+ all_aoi_list.extend(list(scene_layer.aoi_scene.keys()))
- except KeyError:
+ except KeyError:
- continue
+ continue
- camera_frame_layer.aoi_scan_path.expected_aois = all_aoi_list
+ layer.aoi_scan_path.expected_aois = all_aoi_list
- # Load environment image parameters
- try:
+ # Init a lock to share scene projections into camera frame between multiple threads
+ self._frame_lock = threading.Lock()
- new_environment_image_parameters = environment_data.pop('image_parameters')
+ # Define public timestamp buffer to store ignored gaze positions
+ self.ignored_gaze_positions = GazeFeatures.TimeStampedGazePositions()
+
+ def __str__(self) -> str:
+ """
+ Returns:
+ String representation
+ """
- except KeyError:
+ output = f'Name:\n{self.name}\n'
- new_environment_image_parameters = DEFAULT_ARENVIRONMENT_IMAGE_PARAMETERS
+ for name, scene in self.scenes.items():
+ output += f'\"{name}\" ArScene:\n{scene}\n'
- # Create new environment
- return ArEnvironment(new_environment_name, \
- new_aruco_detector, \
- new_camera_frame, \
- new_scenes, \
- new_environment_image_parameters \
- )
+ return output
@classmethod
- def from_json(self, json_filepath: str) -> ArEnvironmentType:
+ def from_dict(self, camera_data: dict, working_directory: str = None) -> ArCameraType:
"""
- Load ArEnvironment from .json file.
+ Load ArCamera from dictionary.
Parameters:
- json_filepath: path to json file
+ camera_data: dictionary
+ working_directory: folder path where to load files when a dictionary value is a relative filepath.
"""
- with open(json_filepath) as configuration_file:
-
- environment_data = json.load(configuration_file)
- working_directory = os.path.dirname(json_filepath)
-
- return ArEnvironment.from_dict(environment_data, working_directory)
+ raise NotImplementedError('from_dict() method not implemented')
- def __str__(self) -> str:
- """
- Returns:
- String representation
+ @classmethod
+ def from_json(self, json_filepath: str) -> ArCameraType:
"""
+ Load ArCamera from .json file.
- output = f'Name:\n{self.name}\n'
- output += f'ArUcoDetector:\n{self.aruco_detector}\n'
-
- for name, scene in self.scenes.items():
- output += f'\"{name}\" ArScene:\n{scene}\n'
+ Parameters:
+ json_filepath: path to json file
+ """
- return output
+ raise NotImplementedError('from_json() method not implemented')
@property
def frames(self):
- """Iterate over all environment scenes frames"""
+ """Iterate over all camera scene frames"""
# For each scene
for scene_name, scene in self.scenes.items():
@@ -1644,84 +1415,18 @@ class ArEnvironment():
yield frame
def detect_and_project(self, image: numpy.array) -> Tuple[float, dict]:
- """Detect environment aruco markers from image and project scenes into camera frame.
-
- Returns:
- - detection_time: aruco marker detection time in ms
- - exceptions: dictionary with exception raised per scene
- """
-
- # Detect aruco markers
- detection_time = self.aruco_detector.detect_markers(image)
-
- # Lock camera frame exploitation
- self.__camera_frame_lock.acquire()
-
- # Fill camera frame background with image
- self.camera_frame.background = image
-
- # Clear former layers projection into camera frame
- for came_layer_name, camera_layer in self.camera_frame.layers.items():
-
- camera_layer.aoi_scene = AOI2DScene.AOI2DScene()
-
- # Store exceptions for each scene
- exceptions = {}
-
- # Project each aoi 3d scene into camera frame
- for scene_name, scene in self.scenes.items():
-
- ''' TODO: Enable aruco_aoi processing
- if scene.aruco_aoi:
-
- try:
-
- # Build AOI scene directly from detected ArUco marker corners
- self.camera_frame.aoi_2d_scene |= scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers)
-
- except SceneProjectionFailed:
-
- pass
- '''
-
- try:
-
- # Estimate scene markers poses
- self.aruco_detector.estimate_markers_pose(scene.aruco_scene.identifiers)
-
- # Estimate scene pose from detected scene markers
- tvec, rmat, _, _ = scene.estimate_pose(self.aruco_detector.detected_markers)
-
- # Project scene into camera frame according estimated pose
- for layer_name, layer_projection in scene.project(tvec, rmat):
-
- try:
+ """Detect AR features from image and project scenes into camera frame."""
- self.camera_frame.layers[layer_name].aoi_scene |= layer_projection
-
- except KeyError:
-
- pass
-
- # Store exceptions and continue
- except Exception as e:
-
- exceptions[scene_name] = e
-
- # Unlock camera frame exploitation
- self.__camera_frame_lock.release()
-
- # Return dection time and exceptions
- return detection_time, exceptions
+ raise NotImplementedError('detect_and_project() method not implemented')
def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition):
- """Project timestamped gaze position into each frame.
+ """Project timestamped gaze position into each scene frame.
!!! warning detect_and_project method needs to be called first.
"""
# Can't use camera frame when it is locked
- if self.__camera_frame_lock.locked():
+ if self._frame_lock.locked():
# TODO: Store ignored timestamped gaze positions for further projections
# PB: This would imply to also store frame projections !!!
@@ -1730,12 +1435,12 @@ class ArEnvironment():
return
# Lock camera frame exploitation
- self.__camera_frame_lock.acquire()
+ self._frame_lock.acquire()
# Project gaze position into camera frame
- yield self.camera_frame, self.camera_frame.look(timestamp, gaze_position)
+ yield self, self.look(timestamp, gaze_position)
- # Project gaze position into each frame if possible
+ # Project gaze position into each scene frame if possible
for frame in self.frames:
# Is there an AOI inside camera frame layers projection which its name equals to a frame name?
@@ -1761,7 +1466,7 @@ class ArEnvironment():
pass
# Unlock camera frame exploitation
- self.__camera_frame_lock.release()
+ self._frame_lock.release()
def map(self):
"""Project camera frame background into frames background.
@@ -1770,11 +1475,11 @@ class ArEnvironment():
"""
# Can't use camera frame when it is locked
- if self.__camera_frame_lock.locked():
+ if self._frame_lock.locked():
return
# Lock camera frame exploitation
- self.__camera_frame_lock.acquire()
+ self._frame_lock.acquire()
# Project image into each frame if possible
for frame in self.frames:
@@ -1798,43 +1503,19 @@ class ArEnvironment():
pass
# Unlock camera frame exploitation
- self.__camera_frame_lock.release()
-
- def image(self, draw_detected_markers: dict = None):
- """Get camera frame projections with ArUco detection visualisation.
+ self._frame_lock.release()
- Parameters:
- image: image where to draw
- draw_detected_markers: ArucoMarker.draw parameters (if None, no marker drawn)
+ def image(self, **frame_image_parameters) -> numpy.array:
+ """
+ Get camera frame image.
"""
- # Use image_parameters attribute if no parameters
- if draw_detected_markers is None:
-
- return self.image(**self.image_parameters)
-
- # Can't use camera frame when it is locked
- if self.__camera_frame_lock.locked():
- return
-
- # Lock camera frame exploitation
- self.__camera_frame_lock.acquire()
-
- # Get camera frame image
- image = self.camera_frame.image()
-
- # Draw detected markers if required
- if draw_detected_markers is not None:
-
- self.aruco_detector.draw_detected_markers(image, draw_detected_markers)
-
- # Unlock camera frame exploitation
- self.__camera_frame_lock.release()
+ print('ArCamera.image')
- return image
+ return super().image(**frame_image_parameters)
def to_json(self, json_filepath):
- """Save environment to .json file."""
+ """Save camera to .json file."""
with open(json_filepath, 'w', encoding='utf-8') as file:
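The reworked ArCamera.look() above is a generator that yields the camera frame first, then every scene frame the gaze can be mapped into. A minimal consumption sketch (not part of this patch) is given below; it assumes an ArUcoCamera loaded from the demo configuration file, that detect_and_project() has already been called on a camera image, that GazeFeatures.GazePosition accepts an (x, y) pixel tuple and that frames expose a name attribute.

```python
import time

from argaze import GazeFeatures
from argaze.ArUcoMarkers import ArUcoCamera

# Placeholder configuration path: adapt to your own environment file
ar_camera = ArUcoCamera.ArUcoCamera.from_json('./src/argaze/utils/demo_environment/demo_augmented_reality_setup.json')

# Timestamp in milliseconds and a gaze position in camera image pixels
timestamp = int(time.time() * 1e3)
gaze_position = GazeFeatures.GazePosition((512, 384))

# look() yields (frame, gaze analysis) pairs: the camera frame itself,
# then each scene frame whose projected AOI contains the gaze position
for frame, look_data in ar_camera.look(timestamp, gaze_position):

    print(f'Gaze position projected into frame: {frame.name}')
```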
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
new file mode 100644
index 0000000..453c18b
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python
+
+"""ArCamera based of ArUco markers technology."""
+
+__author__ = "Théo de la Hogue"
+__credits__ = []
+__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
+__license__ = "BSD"
+
+from typing import TypeVar, Tuple
+from dataclasses import dataclass, field
+import json
+import os
+
+from argaze import ArFeatures, DataStructures
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoDetector, ArUcoOpticCalibrator, ArUcoScene
+from argaze.AreaOfInterest import AOI2DScene
+
+import cv2
+import numpy
+
+ArUcoCameraType = TypeVar('ArUcoCamera', bound="ArUcoCamera")
+# Type definition for type annotation convenience
+
+# Define default ArUcoCamera image_parameters values
+DEFAULT_ARUCOCAMERA_IMAGE_PARAMETERS = {
+ "draw_detected_markers": {
+ "color": (0, 255, 0),
+ "draw_axes": {
+ "thickness": 3
+ }
+ }
+}
+
+@dataclass
+class ArUcoCamera(ArFeatures.ArCamera):
+ """
+ Define an ArCamera based on ArUco marker detection.
+
+ aruco_detector: ArUco marker detector
+ """
+
+ aruco_detector: ArUcoDetector.ArUcoDetector = field(default_factory=ArUcoDetector.ArUcoDetector)
+
+ def __post_init__(self):
+
+ super().__post_init__()
+
+ # Camera frame size should be equal to optic parameters dimensions
+ assert(self.size == self.aruco_detector.optic_parameters.dimensions)
+
+ def __str__(self) -> str:
+ """
+ Returns:
+ String representation
+ """
+
+ output = super().__str__()
+ output += f'ArUcoDetector:\n{self.aruco_detector}\n'
+
+ return output
+
+ @classmethod
+ def from_dict(self, aruco_camera_data, working_directory: str = None) -> ArUcoCameraType:
+ """
+ Load ArUcoCamera from dictionary.
+
+ Parameters:
+ aruco_camera_data: dictionary
+ working_directory: folder path where to load files when a dictionary value is a relative filepath.
+ """
+
+ # Load ArUco detector
+ try:
+ new_detector_data = aruco_camera_data.pop('aruco_detector')
+
+ new_aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(**new_detector_data.pop('dictionary'))
+ new_marker_size = new_detector_data.pop('marker_size')
+
+ # Check optic_parameters value type
+ optic_parameters_value = new_detector_data.pop('optic_parameters')
+
+ # str: relative path to .json file
+ if type(optic_parameters_value) == str:
+
+ optic_parameters_value = os.path.join(working_directory, optic_parameters_value)
+ new_optic_parameters = ArUcoOpticCalibrator.OpticParameters.from_json(optic_parameters_value)
+
+ # dict:
+ else:
+
+ new_optic_parameters = ArUcoOpticCalibrator.OpticParameters(**optic_parameters_value)
+
+ # Check detector parameters value type
+ detector_parameters_value = new_detector_data.pop('parameters')
+
+ # str: relative path to .json file
+ if type(detector_parameters_value) == str:
+
+ detector_parameters_value = os.path.join(working_directory, detector_parameters_value)
+ new_aruco_detector_parameters = ArUcoDetector.DetectorParameters.from_json(detector_parameters_value)
+
+ # dict:
+ else:
+
+ new_aruco_detector_parameters = ArUcoDetector.DetectorParameters(**detector_parameters_value)
+
+ new_aruco_detector = ArUcoDetector.ArUcoDetector(new_aruco_dictionary, new_marker_size, new_optic_parameters, new_aruco_detector_parameters)
+
+ except KeyError:
+
+ new_aruco_detector = None
+
+ # Load ArUcoScenes
+ new_scenes = {}
+ for aruco_scene_name, aruco_scene_data in aruco_camera_data.pop('scenes').items():
+
+ # Append name
+ aruco_scene_data['name'] = aruco_scene_name
+
+ # Create new aruco scene
+ new_aruco_scene = ArUcoScene.ArUcoScene.from_dict(aruco_scene_data, working_directory)
+
+ # Append new scene
+ new_scenes[aruco_scene_name] = new_aruco_scene
+
+ # Load image parameters
+ try:
+
+ new_image_parameters = aruco_camera_data.pop('image_parameters')
+
+ except KeyError:
+
+ new_image_parameters = {**ArFeatures.DEFAULT_ARFRAME_IMAGE_PARAMETERS, **DEFAULT_ARUCOCAMERA_IMAGE_PARAMETERS}
+
+ # Get values of temporary ar frame created from aruco_camera_data
+ temp_ar_frame_values = DataStructures.as_dict(ArFeatures.ArFrame.from_dict(aruco_camera_data, working_directory))
+
+ # Remove image parameters from temporary ar frame values
+ temp_ar_frame_values.pop('image_parameters')
+
+ # Create new aruco camera using temporary ar frame values
+ return ArUcoCamera(aruco_detector=new_aruco_detector, scenes=new_scenes, image_parameters=new_image_parameters, **temp_ar_frame_values)
+
+ @classmethod
+ def from_json(self, json_filepath: str) -> ArUcoCameraType:
+ """
+ Load ArUcoCamera from .json file.
+
+ Parameters:
+ json_filepath: path to json file
+ """
+
+ with open(json_filepath) as configuration_file:
+
+ aruco_camera_data = json.load(configuration_file)
+ working_directory = os.path.dirname(json_filepath)
+
+ return ArUcoCamera.from_dict(aruco_camera_data, working_directory)
+
+ def detect_and_project(self, image: numpy.array) -> Tuple[float, dict]:
+ """Detect environment aruco markers from image and project scenes into camera frame.
+
+ Returns:
+ - detection_time: aruco marker detection time in ms
+ - exceptions: dictionary with exception raised per scene
+ """
+
+ # Detect aruco markers
+ detection_time = self.aruco_detector.detect_markers(image)
+
+ # Lock camera frame exploitation
+ self._frame_lock.acquire()
+
+ # Fill camera frame background with image
+ self.background = image
+
+ # Clear former layers projection into camera frame
+ for layer_name, layer in self.layers.items():
+
+ layer.aoi_scene = AOI2DScene.AOI2DScene()
+
+ # Store exceptions for each scene
+ exceptions = {}
+
+ # Project each aoi 3d scene into camera frame
+ for scene_name, scene in self.scenes.items():
+
+ ''' TODO: Enable aruco_aoi processing
+ if scene.aruco_aoi:
+
+ try:
+
+ # Build AOI scene directly from detected ArUco marker corners
+ self.camera_frame.aoi_2d_scene |= scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers)
+
+ except SceneProjectionFailed:
+
+ pass
+ '''
+
+ try:
+
+ # Estimate scene markers poses
+ self.aruco_detector.estimate_markers_pose(scene.aruco_markers_group.identifiers)
+
+ # Estimate scene pose from detected scene markers
+ tvec, rmat, _, _ = scene.estimate_pose(self.aruco_detector.detected_markers)
+
+ # Project scene into camera frame according to estimated pose
+ for layer_name, layer_projection in scene.project(tvec, rmat):
+
+ try:
+
+ self.layers[layer_name].aoi_scene |= layer_projection
+
+ except KeyError:
+
+ pass
+
+ # Store exceptions and continue
+ except Exception as e:
+
+ exceptions[scene_name] = e
+
+ # Unlock camera frame exploitation
+ self._frame_lock.release()
+
+ # Return detection time and exceptions
+ return detection_time, exceptions
+
+ def image(self, draw_detected_markers: dict = None, **frame_image_parameters):
+ """Get camera frame projections with ArUco detection visualisation.
+
+ Parameters:
+ draw_detected_markers: ArUcoMarker.draw parameters (if None, no marker is drawn)
+ frame_image_parameters: extra parameters forwarded to ArFrame.image
+ """
+
+ # Can't use camera frame when it is locked
+ if self._frame_lock.locked():
+ return
+
+ # Lock camera frame exploitation
+ self._frame_lock.acquire()
+
+ # Get camera frame image
+ image = super().image(**frame_image_parameters)
+
+ # Draw detected markers if required
+ if draw_detected_markers is not None:
+
+ self.aruco_detector.draw_detected_markers(image, draw_detected_markers)
+
+ # Unlock camera frame exploitation
+ self._frame_lock.release()
+
+ return image
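A minimal end-to-end sketch of the new ArUcoCamera class follows (not part of the patch). It assumes the demo configuration file shipped in src/argaze/utils/demo_environment, a webcam at device index 0 whose resolution matches the declared camera frame size, and the default marker drawing parameters defined above.

```python
import cv2

from argaze.ArUcoMarkers import ArUcoCamera

# Placeholder path and device index: adapt to your own setup
aruco_camera = ArUcoCamera.ArUcoCamera.from_json('./src/argaze/utils/demo_environment/demo_augmented_reality_setup.json')

video_capture = cv2.VideoCapture(0)

while video_capture.isOpened():

    success, video_image = video_capture.read()

    if not success:
        break

    # Detect ArUco markers then project each scene into the camera frame
    detection_time, exceptions = aruco_camera.detect_and_project(video_image)

    # Get the camera frame image, drawing detected markers with the default style
    visualisation = aruco_camera.image(draw_detected_markers={'color': (0, 255, 0), 'draw_axes': {'thickness': 3}})

    # image() returns None while the camera frame is locked by another thread
    if visualisation is not None:

        cv2.putText(visualisation, f'Detection time: {detection_time} ms', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
        cv2.imshow('ArUcoCamera', visualisation)

    # Quit on Esc key
    if cv2.waitKey(1) == 27:
        break

video_capture.release()
cv2.destroyAllWindows()
```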
diff --git a/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py b/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py
new file mode 100644
index 0000000..bdcf70c
--- /dev/null
+++ b/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py
@@ -0,0 +1,717 @@
+#!/usr/bin/env python
+
+""" """
+
+__author__ = "Théo de la Hogue"
+__credits__ = []
+__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
+__license__ = "BSD"
+
+from typing import TypeVar, Tuple
+from dataclasses import dataclass, field
+import json
+import math
+import itertools
+import re
+
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoMarker, ArUcoOpticCalibrator
+
+import numpy
+import cv2 as cv
+import cv2.aruco as aruco
+
+T0 = numpy.array([0., 0., 0.])
+"""Define no translation vector."""
+
+R0 = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
+"""Define no rotation matrix."""
+
+ArUcoMarkersGroupType = TypeVar('ArUcoMarkersGroup', bound="ArUcoMarkersGroup")
+# Type definition for type annotation convenience
+
+def make_rotation_matrix(x, y, z):
+
+ # Create rotation matrix around x axis
+ c = numpy.cos(numpy.deg2rad(x))
+ s = numpy.sin(numpy.deg2rad(x))
+ Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
+
+ # Create rotation matrix around y axis
+ c = numpy.cos(numpy.deg2rad(y))
+ s = numpy.sin(numpy.deg2rad(y))
+ Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
+
+ # Create rotation matrix around z axis
+ c = numpy.cos(numpy.deg2rad(z))
+ s = numpy.sin(numpy.deg2rad(z))
+ Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
+
+ # Return intrinsic rotation matrix
+ return Rx.dot(Ry.dot(Rz))
+
+def is_rotation_matrix(R):
+
+ Rt = numpy.transpose(R)
+ shouldBeIdentity = numpy.dot(Rt, R)
+ I = numpy.identity(3, dtype = R.dtype)
+ n = numpy.linalg.norm(I - shouldBeIdentity)
+
+ return n < 1e-3
+
+def make_euler_rotation_vector(R):
+
+ assert(is_rotation_matrix(R))
+
+ sy = math.sqrt(R[0,0] * R[0,0] + R[1,0] * R[1,0])
+
+ singular = sy < 1e-6
+
+ if not singular :
+ x = math.atan2(R[2,1] , R[2,2])
+ y = math.atan2(-R[2,0], sy)
+ z = math.atan2(R[1,0], R[0,0])
+ else :
+ x = math.atan2(-R[1,2], R[1,1])
+ y = math.atan2(-R[2,0], sy)
+ z = 0
+
+ return numpy.array([numpy.rad2deg(x), numpy.rad2deg(y), numpy.rad2deg(z)])
+
+@dataclass(frozen=True)
+class Place():
+ """Define a place as a pose and a marker."""
+
+ translation: numpy.array
+ """Position in group referential."""
+
+ rotation: numpy.array
+ """Rotation in group referential."""
+
+ marker: dict
+ """ArUco marker linked to the place."""
+
+@dataclass
+class ArUcoMarkersGroup():
+ """Handle group of ArUco markers as one unique spatial entity and estimate its pose."""
+
+ marker_size: float = field(default=0.)
+ """Expected size of all markers in the group."""
+
+ dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary = field(default_factory=ArUcoMarkersDictionary.ArUcoMarkersDictionary)
+ """Expected dictionary of all markers in the group."""
+
+ places: dict = field(default_factory=dict)
+ """Expected markers place"""
+
+ def __post_init__(self):
+ """Init group pose and places pose."""
+
+ # Init pose data
+ self._translation = numpy.zeros(3)
+ self._rotation = numpy.zeros(3)
+
+ # Normalize places data
+ new_places = {}
+
+ for identifier, data in self.places.items():
+
+ # Convert string identifier to int value
+ if type(identifier) == str:
+
+ identifier = int(identifier)
+
+ # Get translation vector
+ tvec = numpy.array(data.pop('translation')).astype(numpy.float32)
+
+ # Check rotation value shape
+ rvalue = numpy.array(data.pop('rotation')).astype(numpy.float32)
+
+ # Rotation matrix
+ if rvalue.shape == (3, 3):
+
+ rmat = rvalue
+
+ # Rotation vector (expected in degree)
+ elif rvalue.shape == (3,):
+
+ rmat = make_rotation_matrix(rvalue[0], rvalue[1], rvalue[2]).astype(numpy.float32)
+
+ else:
+
+ raise ValueError(f'Bad rotation value: {rvalue}')
+
+ assert(is_rotation_matrix(rmat))
+
+ new_marker = ArUcoMarker.ArUcoMarker(self.dictionary, identifier, self.marker_size)
+
+ new_places[identifier] = Place(tvec, rmat, new_marker)
+
+ # else places are configured using detected markers
+ elif isinstance(data, ArUcoMarker.ArUcoMarker):
+
+ new_places[identifier] = Place(data.translation, data.rotation, data)
+
+ # else places are already at expected format
+ elif (type(identifier) == int) and isinstance(data, Place):
+
+ new_places[identifier] = data
+
+ self.places = new_places
+
+ # Init place consistency
+ self.init_places_consistency()
+
+ @classmethod
+ def from_obj(self, obj_filepath: str) -> ArUcoMarkersGroupType:
+ """Load ArUco markers group from .obj file.
+
+ !!! note
+ Expected object (o) name format: <DICTIONARY>#<IDENTIFIER>_Marker
+
+ !!! note
+ All markers have to belong to the same dictionary.
+
+ !!! note
+ Marker normal vectors (vn) expected.
+
+ """
+
+ new_marker_size = 0
+ new_dictionary = None
+ new_places = {}
+
+ # Regex rules for .obj file parsing
+ OBJ_RX_DICT = {
+ 'object': re.compile(r'o (.*)#([0-9]+)_(.*)\n'),
+ 'vertice': re.compile(r'v ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
+ 'normal': re.compile(r'vn ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
+ 'face': re.compile(r'f ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+)\n'),
+ 'comment': re.compile(r'#(.*)\n') # keep comment regex after object regex because the # is used in object string too
+ }
+
+ # Regex .obj line parser
+ def __parse_obj_line(line):
+
+ for key, rx in OBJ_RX_DICT.items():
+ match = rx.search(line)
+ if match:
+ return key, match
+
+ # If there are no matches
+ return None, None
+
+ # Start parsing
+ try:
+
+ identifier = None
+ vertices = []
+ normals = {}
+ faces = {}
+
+ # Open the file and read through it line by line
+ with open(obj_filepath, 'r') as file:
+
+ line = file.readline()
+
+ while line:
+
+ # At each line check for a match with a regex
+ key, match = __parse_obj_line(line)
+
+ # Extract comment
+ if key == 'comment':
+ pass
+
+ # Extract marker dictionary and identifier
+ elif key == 'object':
+
+ dictionary = str(match.group(1))
+ identifier = int(match.group(2))
+ last = str(match.group(3))
+
+ # Init new group dictionary with first dictionary name
+ if new_dictionary == None:
+
+ new_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(dictionary)
+
+ # Check all others marker dictionary are equal to new group dictionary
+ elif dictionary != new_dictionary.name:
+
+ raise NameError(f'Marker {identifier} dictionary is not {new_dictionary.name}')
+
+ # Fill vertices array
+ elif key == 'vertice':
+
+ vertices.append(tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))]))
+
+ # Extract normal to calculate rotation matrix
+ elif key == 'normal':
+
+ normals[identifier] = tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))])
+
+ # Extract vertice ids
+ elif key == 'face':
+
+ faces[identifier] = [int(match.group(1)), int(match.group(3)), int(match.group(5)), int(match.group(7))]
+
+ # Go to next line
+ line = file.readline()
+
+ file.close()
+
+ # Retrieve marker vertices from face vertex ids
+ for identifier, face in faces.items():
+
+ # Gather place corners from counter clockwise ordered face vertices
+ corners = numpy.array([ vertices[i-1] for i in face ])
+
+ # Edit translation (Tp) allowing to move world axis (W) at place axis (P)
+ Tp = corners.mean(axis=0)
+
+ # Edit place axis from corners positions
+ place_x_axis = corners[1:3].mean(axis=0) - Tp
+ place_x_axis_norm = numpy.linalg.norm(place_x_axis)
+ place_x_axis = place_x_axis / place_x_axis_norm
+
+ place_y_axis = corners[2:4].mean(axis=0) - Tp
+ place_y_axis_norm = numpy.linalg.norm(place_y_axis)
+ place_y_axis = place_y_axis / place_y_axis_norm
+
+ place_z_axis = normals[identifier]
+
+ # Edit rotation (Rp) allowing to transform world axis (W) into place axis (P)
+ W = numpy.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
+ P = numpy.array([place_x_axis, place_y_axis, place_z_axis])
+ Rp = W.dot(P.T)
+
+ # Check axis size: they should be almost equal
+ if math.isclose(place_x_axis_norm, place_y_axis_norm, rel_tol=1e-3):
+
+ current_marker_size = place_x_axis_norm*2
+
+ # Check that all markers size are almost equal
+ if new_marker_size > 0:
+
+ if not math.isclose(current_marker_size, new_marker_size, rel_tol=1e-3):
+
+ raise ValueError('Markers size should be almost equal.')
+
+ new_marker_size = current_marker_size
+
+ # Create a new place related to a new marker
+ new_marker = ArUcoMarker.ArUcoMarker(new_dictionary, identifier, new_marker_size)
+ new_places[identifier] = Place(Tp, Rp, new_marker)
+
+ except IOError:
+ raise IOError(f'File not found: {obj_filepath}')
+
+ return ArUcoMarkersGroup(new_marker_size, new_dictionary, new_places)
+
+ @classmethod
+ def from_json(self, json_filepath: str) -> ArUcoMarkersGroupType:
+ """Load ArUco markers group from .json file."""
+
+ new_marker_size = 0
+ new_dictionary = None
+ new_places = {}
+
+ with open(json_filepath) as configuration_file:
+
+ data = json.load(configuration_file)
+
+ new_marker_size = data.pop('marker_size')
+ new_dictionary = data.pop('dictionary')
+ new_places = data.pop('places')
+
+ return ArUcoMarkersGroup(new_marker_size, new_dictionary, new_places)
+
+ def __str__(self) -> str:
+ """String display"""
+
+ output = f'\n\tDictionary: {self.dictionary}'
+
+ output += f'\n\tMarker size: {self.marker_size} cm'
+
+ output += '\n\n\tPlaces:'
+ for identifier, place in self.places.items():
+ output += f'\n\t\t- {identifier}:'
+ output += f'\n{place.translation}'
+ output += f'\n{place.rotation}'
+
+ output += '\n\n\tAngle cache:'
+ for A_identifier, A_angle_cache in self.__rotation_cache.items():
+ for B_identifier, angle in A_angle_cache.items():
+ output += f'\n\t\t- {A_identifier}/{B_identifier}: [{angle[0]:3f} {angle[1]:3f} {angle[2]:3f}]'
+
+ output += '\n\n\tDistance cache:'
+ for A_identifier, A_distance_cache in self.__translation_cache.items():
+ for B_identifier, distance in A_distance_cache.items():
+ output += f'\n\t\t- {A_identifier}/{B_identifier}: {distance:3f}'
+
+ return output
+
+ @property
+ def identifiers(self) -> list:
+ """List place marker identifiers belonging to the group."""
+
+ return list(self.places.keys())
+
+ def filter_markers(self, detected_markers: dict) -> Tuple[dict, dict]:
+ """Sort markers belonging to the group from given detected markers dict (cf ArUcoDetector.detect_markers()).
+
+ Returns:
+ dict of markers belonging to this group
+ dict of remaining markers not belonging to this group
+ """
+
+ group_markers = {}
+ remaining_markers = {}
+
+ for (marker_id, marker) in detected_markers.items():
+
+ if marker_id in self.places.keys():
+
+ group_markers[marker_id] = marker
+
+ else:
+
+ remaining_markers[marker_id] = marker
+
+ return group_markers, remaining_markers
+
+ def init_places_consistency(self):
+ """Initialize places consistency to speed up further markers consistency checking."""
+
+ # Process expected rotation between places combinations to speed up further calculations
+ self.__rotation_cache = {}
+ for (A_identifier, A_place), (B_identifier, B_place) in itertools.combinations(self.places.items(), 2):
+
+ A = self.places[A_identifier].rotation
+ B = self.places[B_identifier].rotation
+
+ if numpy.array_equal(A, B):
+
+ AB_rvec = [0., 0., 0.]
+ BA_rvec = [0., 0., 0.]
+
+ else:
+
+ # Calculate euler angle representation of AB and BA rotation matrix
+ AB_rvec = make_euler_rotation_vector(B.dot(A.T))
+ BA_rvec = make_euler_rotation_vector(A.dot(B.T))
+
+ try:
+ self.__rotation_cache[A_identifier][B_identifier] = AB_rvec
+ except:
+ self.__rotation_cache[A_identifier] = {B_identifier: AB_rvec}
+
+ try:
+ self.__rotation_cache[B_identifier][A_identifier] = BA_rvec
+ except:
+ self.__rotation_cache[B_identifier] = {A_identifier: BA_rvec}
+
+ # Process translation between each places combinations to speed up further calculations
+ self.__translation_cache = {}
+ for (A_identifier, A_place), (B_identifier, B_place) in itertools.combinations(self.places.items(), 2):
+
+ A = self.places[A_identifier].translation
+ B = self.places[B_identifier].translation
+
+ # Calculate translation between A and B position
+ AB_tvec = numpy.linalg.norm(B - A)
+
+ try:
+ self.__translation_cache[A_identifier][B_identifier] = AB_tvec
+ except:
+ self.__translation_cache[A_identifier] = {B_identifier: AB_tvec}
+
+ try:
+ self.__translation_cache[B_identifier][A_identifier] = AB_tvec
+ except:
+ self.__translation_cache[B_identifier] = {A_identifier: AB_tvec}
+
+ def check_markers_consistency(self, group_markers: dict, angle_tolerance: float, distance_tolerance: float) -> Tuple[dict, dict, dict]:
+ """Evaluate if given markers configuration match related places configuration.
+
+ Returns:
+ dict of consistent markers
+ dict of unconsistent markers
+ dict of identified distance or angle unconsistencies and out-of-bounds values
+ """
+
+ consistent_markers = {}
+ unconsistencies = {'rotation': {}, 'translation': {}}
+
+ for (A_identifier, A_marker), (B_identifier, B_marker) in itertools.combinations(group_markers.items(), 2):
+
+ try:
+
+ # Rotation matrix from A marker to B marker
+ AB = B_marker.rotation.dot(A_marker.rotation.T)
+
+ # Calculate euler angle representation of AB rotation matrix
+ AB_rvec = make_euler_rotation_vector(AB)
+ expected_rvec= self.__rotation_cache[A_identifier][B_identifier]
+
+ # Calculate distance between A marker center and B marker center
+ AB_tvec = numpy.linalg.norm(A_marker.translation - B_marker.translation)
+ expected_tvec = self.__translation_cache[A_identifier][B_identifier]
+
+ # Check angle and distance according given tolerance then normalise marker pose
+ consistent_rotation = numpy.allclose(AB_rvec, expected_rvec, atol=angle_tolerance)
+ consistent_translation = math.isclose(AB_tvec, expected_tvec, abs_tol=distance_tolerance)
+
+ if consistent_rotation and consistent_translation:
+
+ if A_identifier not in consistent_markers.keys():
+
+ # Remember this marker is already validated
+ consistent_markers[A_identifier] = A_marker
+
+ if B_identifier not in consistent_markers.keys():
+
+ # Remember this marker is already validated
+ consistent_markers[B_identifier] = B_marker
+
+ else:
+
+ if not consistent_rotation:
+ unconsistencies['rotation'][f'{A_identifier}/{B_identifier}'] = {'current': AB_rvec, 'expected': expected_rvec}
+
+ if not consistent_translation:
+ unconsistencies['translation'][f'{A_identifier}/{B_identifier}'] = {'current': AB_tvec, 'expected': expected_tvec}
+
+ except KeyError:
+
+ raise ValueError(f'Marker {A_identifier} or {B_identifier} don\'t belong to the group.')
+
+ # Gather unconsistent markers
+ unconsistent_markers = {}
+
+ for identifier, marker in group_markers.items():
+
+ if identifier not in consistent_markers.keys():
+
+ unconsistent_markers[identifier] = marker
+
+ return consistent_markers, unconsistent_markers, unconsistencies
+
+ def estimate_pose_from_single_marker(self, marker: ArUcoMarker.ArUcoMarker) -> Tuple[numpy.array, numpy.array]:
+ """Calculate rotation and translation that move a marker to its place."""
+
+ # Get the place related to the given marker
+ try:
+
+ place = self.places[marker.identifier]
+
+ # Rotation matrix that transform marker to related place
+ self._rotation = marker.rotation.dot(place.rotation.T)
+
+ # Translation vector that transform marker to related place
+ self._translation = marker.translation - place.translation.dot(place.rotation).dot(marker.rotation.T)
+
+ return self._translation, self._rotation
+
+ except KeyError:
+
+ raise ValueError(f'Marker {marker.identifier} doesn\'t belong to the group.')
+
+ def estimate_pose_from_markers(self, markers: dict) -> Tuple[numpy.array, numpy.array]:
+ """Calculate average rotation and translation that move markers to their related places."""
+
+ rotations = []
+ translations = []
+
+ for identifier, marker in markers.items():
+
+ try:
+
+ place = self.places[identifier]
+
+ # Rotation matrix that transform marker to related place
+ R = marker.rotation.dot(place.rotation.T)
+
+ # Translation vector that transform marker to related place
+ T = marker.translation - place.translation.dot(place.rotation).dot(marker.rotation.T)
+
+ rotations.append(R)
+ translations.append(T)
+
+ except KeyError:
+
+ raise ValueError(f'Marker {marker.identifier} doesn\'t belong to the group.')
+
+ # Consider ArUcoMarkersGroup rotation as the mean of all marker rotations
+ # !!! WARNING !!! This is a rough approximation: properly averaging rotations is a complex problem that requires a well-defined distance metric beforehand.
+ self._rotation = numpy.mean(numpy.array(rotations), axis=0)
+
+ # Consider ArUcoMarkersGroup translation as the mean of all marker translations
+ self._translation = numpy.mean(numpy.array(translations), axis=0)
+
+ return self._translation, self._rotation
+
+ def estimate_pose_from_axis_markers(self, origin_marker: ArUcoMarker.ArUcoMarker, horizontal_axis_marker: ArUcoMarker.ArUcoMarker, vertical_axis_marker: ArUcoMarker.ArUcoMarker) -> Tuple[numpy.array, numpy.array]:
+ """Calculate rotation and translation from 3 markers defining an orthogonal axis."""
+
+ O_marker = origin_marker
+ A_marker = horizontal_axis_marker
+ B_marker = vertical_axis_marker
+
+ O_place = self.places[O_marker.identifier]
+ A_place = self.places[A_marker.identifier]
+ B_place = self.places[B_marker.identifier]
+
+ # Place axis
+ OA = A_place.translation - O_place.translation
+ OA = OA / numpy.linalg.norm(OA)
+
+ OB = B_place.translation - O_place.translation
+ OB = OB / numpy.linalg.norm(OB)
+
+ # Detect and correct bad place axis orientation
+ X_sign = numpy.sign(OA)[0]
+ Y_sign = numpy.sign(OB)[1]
+
+ P = numpy.array([OA*X_sign, OB*Y_sign, numpy.cross(OA*X_sign, OB*Y_sign)])
+
+ # Marker axis
+ OA = A_marker.translation - O_marker.translation
+ OA = OA / numpy.linalg.norm(OA)
+
+ OB = B_marker.translation - O_marker.translation
+ OB = OB / numpy.linalg.norm(OB)
+
+ # Detect and correct bad marker axis orientation
+ X_sign = numpy.sign(OA)[0]
+ Y_sign = -numpy.sign(OB)[1]
+
+ M = numpy.array([OA*X_sign, OB*Y_sign, numpy.cross(OA*X_sign, OB*Y_sign)])
+
+ # Then estimate ArUcoMarkersGroup rotation
+ self._rotation = P.dot(M.T)
+
+ # Consider ArUcoMarkersGroup translation as the translation of the marker at axis origin
+ self._translation = O_marker.translation - O_place.translation.dot(O_place.rotation).dot(M.T)
+
+ return self._translation, self._rotation
+
+ @property
+ def translation(self) -> numpy.array:
+ """Access to group translation vector."""
+
+ return self._translation
+
+ @translation.setter
+ def translation(self, tvec):
+
+ self._translation = tvec
+
+ @property
+ def rotation(self) -> numpy.array:
+ """Access to group rotation matrix."""
+
+ return self._rotation
+
+ @rotation.setter
+ def rotation(self, rmat):
+
+ self._rotation = rmat
+
+ def draw_axis(self, image: numpy.array, K, D, consistency=2):
+ """Draw group axis according a consistency score."""
+
+ l = self.marker_size / 2
+ ll = self.marker_size
+
+ # Select color according to consistency score
+ n = 95 * consistency if consistency < 2 else 0
+ f = 159 * consistency if consistency < 2 else 255
+
+ try:
+
+ # Draw axis
+ axisPoints = numpy.float32([[ll, 0, 0], [0, ll, 0], [0, 0, ll], [0, 0, 0]]).reshape(-1, 3)
+ axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
+ axisPoints = axisPoints.astype(int)
+
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
+
+ # Ignore errors due to out of field axes: their coordinates are larger than int32 limits.
+ except cv.error:
+ pass
+
+ def draw_places(self, image: numpy.array, K, D, consistency=2):
+ """Draw group places and their axis according a consistency score."""
+
+ l = self.marker_size / 2
+ ll = self.marker_size
+
+ # Select color according to consistency score
+ n = 95 * consistency if consistency < 2 else 0
+ f = 159 * consistency if consistency < 2 else 255
+
+ for identifier, place in self.places.items():
+
+ try:
+
+ T = self.places[identifier].translation
+ R = self.places[identifier].rotation
+
+ # Draw place axis
+ axisPoints = (T + numpy.float32([R.dot([l/2, 0, 0]), R.dot([0, l/2, 0]), R.dot([0, 0, l/2]), R.dot([0, 0, 0])])).reshape(-1, 3)
+ axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
+ axisPoints = axisPoints.astype(int)
+
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
+ cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
+
+ # Draw place
+ placePoints = (T + numpy.float32([R.dot([-l, -l, 0]), R.dot([l, -l, 0]), R.dot([l, l, 0]), R.dot([-l, l, 0])])).reshape(-1, 3)
+ placePoints, _ = cv.projectPoints(placePoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
+ placePoints = placePoints.astype(int)
+
+ cv.line(image, tuple(placePoints[0].ravel()), tuple(placePoints[1].ravel()), (f,f,f), 3)
+ cv.line(image, tuple(placePoints[1].ravel()), tuple(placePoints[2].ravel()), (f,f,f), 3)
+ cv.line(image, tuple(placePoints[2].ravel()), tuple(placePoints[3].ravel()), (f,f,f), 3)
+ cv.line(image, tuple(placePoints[3].ravel()), tuple(placePoints[0].ravel()), (f,f,f), 3)
+
+ # Ignore errors due to out of field places: their coordinates are larger than int32 limits.
+ except cv.error:
+ pass
+
+ def to_obj(self, obj_filepath):
+ """Save group to .obj file."""
+
+ with open(obj_filepath, 'w', encoding='utf-8') as file:
+
+ file.write('# ArGaze OBJ File\n')
+ file.write('# http://achil.recherche.enac.fr/features/eye/argaze/\n')
+
+ v_count = 0
+
+ for identifier, place in self.places.items():
+
+ file.write(f'o {self.dictionary.name}#{identifier}_Marker\n')
+
+ vertices = ''
+
+ T = place.translation
+ R = place.rotation
+
+ points = (T + numpy.float32([R.dot(place.marker.points[0]), R.dot(place.marker.points[1]), R.dot(place.marker.points[2]), R.dot(place.marker.points[3])])).reshape(-1, 3)
+
+ # Write vertices in reverse order
+ for i in [3, 2, 1, 0]:
+
+ file.write(f'v {" ".join(map(str, points[i]))}\n')
+ v_count += 1
+
+ vertices += f' {v_count}'
+
+ file.write('s off\n')
+ file.write(f'f{vertices}\n')
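Since ArUcoMarkersGroup now carries the filter/consistency/pose workflow that ArUcoScene used to own, a small helper sketch may help to read it (not part of the patch). The .obj path and tolerance values are illustrative; detected_markers is expected to come from ArUcoDetector.detect_markers().

```python
from argaze.ArUcoMarkers import ArUcoMarkersGroup


def estimate_group_pose(detected_markers: dict, obj_filepath: str, angle_tolerance: float = 10., distance_tolerance: float = 1.):
    """Filter detected markers against a group description, check their consistency, then estimate the group pose."""

    # Load expected marker places from an .obj description (e.g. the demo aruco_markers_group.obj file)
    markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj(obj_filepath)

    # Keep only the detected markers that belong to this group
    group_markers, _ = markers_group.filter_markers(detected_markers)

    if not group_markers:
        return None

    # Compare relative marker poses with the cached expected rotations and translations
    consistent_markers, unconsistent_markers, unconsistencies = markers_group.check_markers_consistency(group_markers, angle_tolerance, distance_tolerance)

    if not consistent_markers:
        return None

    # Average the consistent marker poses into one group pose
    return markers_group.estimate_pose_from_markers(consistent_markers)
```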
diff --git a/src/argaze/ArUcoMarkers/ArUcoScene.py b/src/argaze/ArUcoMarkers/ArUcoScene.py
index 77ddb65..227d3c6 100644
--- a/src/argaze/ArUcoMarkers/ArUcoScene.py
+++ b/src/argaze/ArUcoMarkers/ArUcoScene.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-""" """
+"""ArScene based of ArUco markers technology."""
__author__ = "Théo de la Hogue"
__credits__ = []
@@ -10,708 +10,141 @@ __license__ = "BSD"
from typing import TypeVar, Tuple
from dataclasses import dataclass, field
import json
-import math
-import itertools
-import re
+import os
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoMarker, ArUcoOpticCalibrator
+from argaze import ArFeatures, DataStructures
+from argaze.ArUcoMarkers import ArUcoMarkersGroup
+from argaze.AreaOfInterest import AOI2DScene
+import cv2
import numpy
-import cv2 as cv
-import cv2.aruco as aruco
-
-T0 = numpy.array([0., 0., 0.])
-"""Define no translation vector."""
-
-R0 = numpy.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
-"""Define no rotation matrix."""
ArUcoSceneType = TypeVar('ArUcoScene', bound="ArUcoScene")
# Type definition for type annotation convenience
-def make_rotation_matrix(x, y, z):
-
- # Create rotation matrix around x axis
- c = numpy.cos(numpy.deg2rad(x))
- s = numpy.sin(numpy.deg2rad(x))
- Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
-
- # Create rotation matrix around y axis
- c = numpy.cos(numpy.deg2rad(y))
- s = numpy.sin(numpy.deg2rad(y))
- Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
-
- # Create rotation matrix around z axis
- c = numpy.cos(numpy.deg2rad(z))
- s = numpy.sin(numpy.deg2rad(z))
- Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
-
- # Return intrinsic rotation matrix
- return Rx.dot(Ry.dot(Rz))
-
-def is_rotation_matrix(R):
-
- Rt = numpy.transpose(R)
- shouldBeIdentity = numpy.dot(Rt, R)
- I = numpy.identity(3, dtype = R.dtype)
- n = numpy.linalg.norm(I - shouldBeIdentity)
-
- return n < 1e-3
-
-def make_euler_rotation_vector(R):
-
- assert(is_rotation_matrix(R))
-
- sy = math.sqrt(R[0,0] * R[0,0] + R[1,0] * R[1,0])
-
- singular = sy < 1e-6
-
- if not singular :
- x = math.atan2(R[2,1] , R[2,2])
- y = math.atan2(-R[2,0], sy)
- z = math.atan2(R[1,0], R[0,0])
- else :
- x = math.atan2(-R[1,2], R[1,1])
- y = math.atan2(-R[2,0], sy)
- z = 0
-
- return numpy.array([numpy.rad2deg(x), numpy.rad2deg(y), numpy.rad2deg(z)])
-
-@dataclass(frozen=True)
-class Place():
- """Define a place as a pose and a marker."""
-
- translation: numpy.array
- """Position in scene referential."""
-
- rotation: numpy.array
- """Rotation in scene referential."""
-
- marker: dict
- """ArUco marker linked to the place."""
-
@dataclass
-class ArUcoScene():
- """Handle group of ArUco markers as one unique spatial entity and estimate its pose."""
-
- marker_size: float = field(default=0.)
- """Expected size of all markers in the scene."""
-
- dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary = field(default_factory=ArUcoMarkersDictionary.ArUcoMarkersDictionary)
- """Expected dictionary of all markers in the scene."""
-
- places: dict = field(default_factory=dict)
- """Expected markers place"""
-
- def __post_init__(self):
- """Init scene pose and places pose."""
-
- # Init pose data
- self._translation = numpy.zeros(3)
- self._rotation = numpy.zeros(3)
-
- # Normalize places data
- new_places = {}
-
- for identifier, data in self.places.items():
-
- # Convert string identifier to int value
- if type(identifier) == str:
-
- identifier = int(identifier)
-
- # Get translation vector
- tvec = numpy.array(data.pop('translation')).astype(numpy.float32)
-
- # Check rotation value shape
- rvalue = numpy.array(data.pop('rotation')).astype(numpy.float32)
-
- # Rotation matrix
- if rvalue.shape == (3, 3):
-
- rmat = rvalue
-
- # Rotation vector (expected in degree)
- elif rvalue.shape == (3,):
-
- rmat = make_rotation_matrix(rvalue[0], rvalue[1], rvalue[2]).astype(numpy.float32)
-
- else:
-
- raise ValueError(f'Bad rotation value: {rvalue}')
-
- assert(is_rotation_matrix(rmat))
-
- new_marker = ArUcoMarker.ArUcoMarker(self.dictionary, identifier, self.marker_size)
-
- new_places[identifier] = Place(tvec, rmat, new_marker)
-
- # else places are configured using detected markers
- elif isinstance(data, ArUcoMarker.ArUcoMarker):
-
- new_places[identifier] = Place(data.translation, data.rotation, data)
-
- # else places are already at expected format
- elif (type(identifier) == int) and isinstance(data, Place):
-
- new_places[identifier] = data
-
- self.places = new_places
-
- # Init place consistency
- self.init_places_consistency()
-
- @classmethod
- def from_obj(self, obj_filepath: str) -> ArUcoSceneType:
- """Load ArUco scene from .obj file.
-
- !!! note
- Expected object (o) name format: <DICTIONARY>#<IDENTIFIER>_Marker
-
- !!! note
- All markers have to belong to the same dictionary.
-
- !!! note
- Marker normal vectors (vn) expected.
-
- """
-
- new_marker_size = 0
- new_dictionary = None
- new_places = {}
-
- # Regex rules for .obj file parsing
- OBJ_RX_DICT = {
- 'object': re.compile(r'o (.*)#([0-9]+)_(.*)\n'),
- 'vertice': re.compile(r'v ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
- 'normal': re.compile(r'vn ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
- 'face': re.compile(r'f ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+) ([0-9]+)//([0-9]+)\n'),
- 'comment': re.compile(r'#(.*)\n') # keep comment regex after object regex because the # is used in object string too
- }
-
- # Regex .obj line parser
- def __parse_obj_line(line):
-
- for key, rx in OBJ_RX_DICT.items():
- match = rx.search(line)
- if match:
- return key, match
-
- # If there are no matches
- return None, None
-
- # Start parsing
- try:
-
- identifier = None
- vertices = []
- normals = {}
- faces = {}
-
- # Open the file and read through it line by line
- with open(obj_filepath, 'r') as file:
-
- line = file.readline()
-
- while line:
-
- # At each line check for a match with a regex
- key, match = __parse_obj_line(line)
-
- # Extract comment
- if key == 'comment':
- pass
-
- # Extract marker dictionary and identifier
- elif key == 'object':
-
- dictionary = str(match.group(1))
- identifier = int(match.group(2))
- last = str(match.group(3))
-
- # Init new scene dictionary with first dictionary name
- if new_dictionary == None:
-
- new_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(dictionary)
-
- # Check all others marker dictionary are equal to new scene dictionary
- elif dictionary != new_dictionary.name:
-
- raise NameError(f'Marker {identifier} dictionary is not {new_dictionary.name}')
-
- # Fill vertices array
- elif key == 'vertice':
-
- vertices.append(tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))]))
-
- # Extract normal to calculate rotation matrix
- elif key == 'normal':
-
- normals[identifier] = tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))])
-
- # Extract vertice ids
- elif key == 'face':
-
- faces[identifier] = [int(match.group(1)), int(match.group(3)), int(match.group(5)), int(match.group(7))]
-
- # Go to next line
- line = file.readline()
-
- file.close()
-
- # Retreive marker vertices thanks to face vertice ids
- for identifier, face in faces.items():
-
- # Gather place corners from counter clockwise ordered face vertices
- corners = numpy.array([ vertices[i-1] for i in face ])
-
- # Edit translation (Tp) allowing to move world axis (W) at place axis (P)
- Tp = corners.mean(axis=0)
-
- # Edit place axis from corners positions
- place_x_axis = corners[1:3].mean(axis=0) - Tp
- place_x_axis_norm = numpy.linalg.norm(place_x_axis)
- place_x_axis = place_x_axis / place_x_axis_norm
-
- place_y_axis = corners[2:4].mean(axis=0) - Tp
- place_y_axis_norm = numpy.linalg.norm(place_y_axis)
- place_y_axis = place_y_axis / place_y_axis_norm
-
- place_z_axis = normals[identifier]
-
- # Edit rotation (Rp) allowing to transform world axis (W) into place axis (P)
- W = numpy.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
- P = numpy.array([place_x_axis, place_y_axis, place_z_axis])
- Rp = W.dot(P.T)
-
- # Check axis size: they should be almost equal
- if math.isclose(place_x_axis_norm, place_y_axis_norm, rel_tol=1e-3):
-
- current_marker_size = place_x_axis_norm*2
-
- # Check that all markers size are almost equal
- if new_marker_size > 0:
-
- if not math.isclose(current_marker_size, new_marker_size, rel_tol=1e-3):
-
- raise ValueError('Markers size should be almost equal.')
-
- new_marker_size = current_marker_size
-
- # Create a new place related to a new marker
- new_marker = ArUcoMarker.ArUcoMarker(new_dictionary, identifier, new_marker_size)
- new_places[identifier] = Place(Tp, Rp, new_marker)
-
- except IOError:
- raise IOError(f'File not found: {obj_filepath}')
-
- return ArUcoScene(new_marker_size, new_dictionary, new_places)
-
- @classmethod
- def from_json(self, json_filepath: str) -> ArUcoSceneType:
- """Load ArUco scene from .json file."""
-
- new_marker_size = 0
- new_dictionary = None
- new_places = {}
-
- with open(json_filepath) as configuration_file:
-
- data = json.load(configuration_file)
-
- new_marker_size = data.pop('marker_size')
- new_dictionary = data.pop('dictionary')
- new_places = data.pop('places')
-
- return ArUcoScene(new_marker_size, new_dictionary, new_places)
-
- def __str__(self) -> str:
- """String display"""
-
- output = f'\n\tDictionary: {self.dictionary}'
-
- output += f'\n\tMarker size: {self.marker_size} cm'
-
- output += '\n\n\tPlaces:'
- for identifier, place in self.places.items():
- output += f'\n\t\t- {identifier}:'
- output += f'\n{place.translation}'
- output += f'\n{place.rotation}'
-
- output += '\n\n\tAngle cache:'
- for A_identifier, A_angle_cache in self.__rotation_cache.items():
- for B_identifier, angle in A_angle_cache.items():
- output += f'\n\t\t- {A_identifier}/{B_identifier}: [{angle[0]:3f} {angle[1]:3f} {angle[2]:3f}]'
-
- output += '\n\n\tDistance cache:'
- for A_identifier, A_distance_cache in self.__translation_cache.items():
- for B_identifier, distance in A_distance_cache.items():
- output += f'\n\t\t- {A_identifier}/{B_identifier}: {distance:3f}'
-
- return output
-
- @property
- def identifiers(self) -> list:
- """List place marker identifiers belonging to the scene."""
-
- return list(self.places.keys())
-
- def filter_markers(self, detected_markers: dict) -> Tuple[dict, dict]:
- """Sort markers belonging to the scene from given detected markers dict (cf ArUcoDetector.detect_markers()).
-
- Returns:
- dict of markers belonging to this scene
- dict of remaining markers not belonging to this scene
- """
-
- scene_markers = {}
- remaining_markers = {}
-
- for (marker_id, marker) in detected_markers.items():
-
- if marker_id in self.places.keys():
-
- scene_markers[marker_id] = marker
-
- else:
-
- remaining_markers[marker_id] = marker
-
- return scene_markers, remaining_markers
-
- def init_places_consistency(self):
- """Initialize places consistency to speed up further markers consistency checking."""
-
- # Process expected rotation between places combinations to speed up further calculations
- self.__rotation_cache = {}
- for (A_identifier, A_place), (B_identifier, B_place) in itertools.combinations(self.places.items(), 2):
-
- A = self.places[A_identifier].rotation
- B = self.places[B_identifier].rotation
-
- if numpy.array_equal(A, B):
-
- AB_rvec = [0., 0., 0.]
- BA_rvec = [0., 0., 0.]
-
- else:
-
- # Calculate euler angle representation of AB and BA rotation matrix
- AB_rvec = make_euler_rotation_vector(B.dot(A.T))
- BA_rvec = make_euler_rotation_vector(A.dot(B.T))
-
- try:
- self.__rotation_cache[A_identifier][B_identifier] = AB_rvec
- except:
- self.__rotation_cache[A_identifier] = {B_identifier: AB_rvec}
-
- try:
- self.__rotation_cache[B_identifier][A_identifier] = BA_rvec
- except:
- self.__rotation_cache[B_identifier] = {A_identifier: BA_rvec}
-
- # Process translation between each places combinations to speed up further calculations
- self.__translation_cache = {}
- for (A_identifier, A_place), (B_identifier, B_place) in itertools.combinations(self.places.items(), 2):
-
- A = self.places[A_identifier].translation
- B = self.places[B_identifier].translation
-
- # Calculate translation between A and B position
- AB_tvec = numpy.linalg.norm(B - A)
-
- try:
- self.__translation_cache[A_identifier][B_identifier] = AB_tvec
- except:
- self.__translation_cache[A_identifier] = {B_identifier: AB_tvec}
-
- try:
- self.__translation_cache[B_identifier][A_identifier] = AB_tvec
- except:
- self.__translation_cache[B_identifier] = {A_identifier: AB_tvec}
-
- def check_markers_consistency(self, scene_markers: dict, angle_tolerance: float, distance_tolerance: float) -> Tuple[dict, dict, dict]:
- """Evaluate if given markers configuration match related places configuration.
-
- Returns:
- dict of consistent markers
- dict of unconsistent markers
- dict of identified distance or angle unconsistencies and out-of-bounds values
- """
-
- consistent_markers = {}
- unconsistencies = {'rotation': {}, 'translation': {}}
-
- for (A_identifier, A_marker), (B_identifier, B_marker) in itertools.combinations(scene_markers.items(), 2):
-
- try:
-
- # Rotation matrix from A marker to B marker
- AB = B_marker.rotation.dot(A_marker.rotation.T)
-
- # Calculate euler angle representation of AB rotation matrix
- AB_rvec = make_euler_rotation_vector(AB)
- expected_rvec= self.__rotation_cache[A_identifier][B_identifier]
-
- # Calculate distance between A marker center and B marker center
- AB_tvec = numpy.linalg.norm(A_marker.translation - B_marker.translation)
- expected_tvec = self.__translation_cache[A_identifier][B_identifier]
-
- # Check angle and distance according given tolerance then normalise marker pose
- consistent_rotation = numpy.allclose(AB_rvec, expected_rvec, atol=angle_tolerance)
- consistent_translation = math.isclose(AB_tvec, expected_tvec, abs_tol=distance_tolerance)
-
- if consistent_rotation and consistent_translation:
-
- if A_identifier not in consistent_markers.keys():
-
- # Remember this marker is already validated
- consistent_markers[A_identifier] = A_marker
-
- if B_identifier not in consistent_markers.keys():
-
- # Remember this marker is already validated
- consistent_markers[B_identifier] = B_marker
-
- else:
-
- if not consistent_rotation:
- unconsistencies['rotation'][f'{A_identifier}/{B_identifier}'] = {'current': AB_rvec, 'expected': expected_rvec}
-
- if not consistent_translation:
- unconsistencies['translation'][f'{A_identifier}/{B_identifier}'] = {'current': AB_tvec, 'expected': expected_tvec}
-
- except KeyError:
-
- raise ValueError(f'Marker {A_identifier} or {B_identifier} don\'t belong to the scene.')
-
- # Gather unconsistent markers
- unconsistent_markers = {}
-
- for identifier, marker in scene_markers.items():
-
- if identifier not in consistent_markers.keys():
-
- unconsistent_markers[identifier] = marker
-
- return consistent_markers, unconsistent_markers, unconsistencies
-
- def estimate_pose_from_single_marker(self, marker: ArUcoMarker.ArUcoMarker) -> Tuple[numpy.array, numpy.array]:
- """Calculate rotation and translation that move a marker to its place."""
-
- # Get the place related to the given marker
- try:
-
- place = self.places[marker.identifier]
-
- # Rotation matrix that transform marker to related place
- self._rotation = marker.rotation.dot(place.rotation.T)
-
- # Translation vector that transform marker to related place
- self._translation = marker.translation - place.translation.dot(place.rotation).dot(marker.rotation.T)
-
- return self._translation, self._rotation
-
- except KeyError:
-
- raise ValueError(f'Marker {marker.identifier} doesn\'t belong to the scene.')
-
- def estimate_pose_from_markers(self, markers: dict) -> Tuple[numpy.array, numpy.array]:
- """Calculate average rotation and translation that move markers to their related places."""
-
- rotations = []
- translations = []
-
- for identifier, marker in markers.items():
-
- try:
-
- place = self.places[identifier]
-
- # Rotation matrix that transform marker to related place
- R = marker.rotation.dot(place.rotation.T)
-
- # Translation vector that transform marker to related place
- T = marker.translation - place.translation.dot(place.rotation).dot(marker.rotation.T)
-
- rotations.append(R)
- translations.append(T)
-
- except KeyError:
-
- raise ValueError(f'Marker {marker.identifier} doesn\'t belong to the scene.')
-
- # Consider ArUcoScene rotation as the mean of all marker rotations
- # !!! WARNING !!! This is a bad hack : processing rotations average is a very complex problem that needs to well define the distance calculation method before.
- self._rotation = numpy.mean(numpy.array(rotations), axis=0)
-
- # Consider ArUcoScene translation as the mean of all marker translations
- self._translation = numpy.mean(numpy.array(translations), axis=0)
-
- return self._translation, self._rotation
-
- def estimate_pose_from_axis_markers(self, origin_marker: ArUcoMarker.ArUcoMarker, horizontal_axis_marker: ArUcoMarker.ArUcoMarker, vertical_axis_marker: ArUcoMarker.ArUcoMarker) -> Tuple[numpy.array, numpy.array]:
- """Calculate rotation and translation from 3 markers defining an orthogonal axis."""
-
- O_marker = origin_marker
- A_marker = horizontal_axis_marker
- B_marker = vertical_axis_marker
-
- O_place = self.places[O_marker.identifier]
- A_place = self.places[A_marker.identifier]
- B_place = self.places[B_marker.identifier]
-
- # Place axis
- OA = A_place.translation - O_place.translation
- OA = OA / numpy.linalg.norm(OA)
-
- OB = B_place.translation - O_place.translation
- OB = OB / numpy.linalg.norm(OB)
-
- # Detect and correct bad place axis orientation
- X_sign = numpy.sign(OA)[0]
- Y_sign = numpy.sign(OB)[1]
-
- P = numpy.array([OA*X_sign, OB*Y_sign, numpy.cross(OA*X_sign, OB*Y_sign)])
-
- # Marker axis
- OA = A_marker.translation - O_marker.translation
- OA = OA / numpy.linalg.norm(OA)
-
- OB = B_marker.translation - O_marker.translation
- OB = OB / numpy.linalg.norm(OB)
-
- # Detect and correct bad place axis orientation
- X_sign = numpy.sign(OA)[0]
- Y_sign = -numpy.sign(OB)[1]
-
- M = numpy.array([OA*X_sign, OB*Y_sign, numpy.cross(OA*X_sign, OB*Y_sign)])
-
- # Then estimate ArUcoScene rotation
- self._rotation = P.dot(M.T)
-
- # Consider ArUcoScene translation as the translation of the marker at axis origin
- self._translation = O_marker.translation - O_place.translation.dot(O_place.rotation).dot(M.T)
-
- return self._translation, self._rotation
-
- @property
- def translation(self) -> numpy.array:
- """Access to scene translation vector."""
-
- return self._translation
+class ArUcoScene(ArFeatures.ArScene):
+ """
+ Define an ArScene based on an ArUcoMarkersGroup description.
- @translation.setter
- def translation(self, tvec):
+ Parameters:
+
+ aruco_markers_group: ArUco markers 3D scene description used to estimate scene pose from detected markers: see [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function below.
+
+ """
+ aruco_markers_group: ArUcoMarkersGroup.ArUcoMarkersGroup = field(default_factory=ArUcoMarkersGroup.ArUcoMarkersGroup)
- self._translation = tvec
+ def __post_init__(self):
- @property
- def rotation(self) -> numpy.array:
- """Access to scene rotation matrix."""
+ super().__post_init__()
- return self._translation
+ def __str__(self) -> str:
+ """
+ Returns:
+ String representation
+ """
- @rotation.setter
- def rotation(self, rmat):
+ output = super().__str__()
+ output += f'ArUcoMarkersGroup:\n{self.aruco_markers_group}\n'
- self._rotation = rmat
+ return output
- def draw_axis(self, image: numpy.array, K, D, consistency=2):
- """Draw scene axis according a consistency score."""
+ @classmethod
+ def from_dict(self, aruco_scene_data: dict, working_directory: str = None) -> ArUcoSceneType:
+ """
+ Load ArUcoScene from dictionary.
- l = self.marker_size / 2
- ll = self.marker_size
+ Parameters:
+ aruco_scene_data: dictionary
+ working_directory: folder path where to load files when a dictionary value is a relative filepath.
+ """
- # Select color according consistency score
- n = 95 * consistency if consistency < 2 else 0
- f = 159 * consistency if consistency < 2 else 255
+ # Load aruco markers group
+ try:
- try:
+ # Check aruco_markers_group value type
+ aruco_markers_group_value = aruco_scene_data.pop('aruco_markers_group')
- # Draw axis
- axisPoints = numpy.float32([[ll, 0, 0], [0, ll, 0], [0, 0, ll], [0, 0, 0]]).reshape(-1, 3)
- axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
- axisPoints = axisPoints.astype(int)
+ # str: relative path to .obj file
+ if type(aruco_markers_group_value) == str:
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
+ aruco_markers_group_value = os.path.join(working_directory, aruco_markers_group_value)
+ new_aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj(aruco_markers_group_value)
- # Ignore errors due to out of field axis: their coordinate are larger than int32 limitations.
- except cv.error:
- pass
+ # dict:
+ else:
- def draw_places(self, image: numpy.array, K, D, consistency=2):
- """Draw scene places and their axis according a consistency score."""
+ new_aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup(**aruco_markers_group_value)
- l = self.marker_size / 2
- ll = self.marker_size
+ except KeyError:
- # Select color according consistency score
- n = 95 * consistency if consistency < 2 else 0
- f = 159 * consistency if consistency < 2 else 255
+ new_aruco_markers_group = None
- for identifier, place in self.places.items():
+ # Get values of temporary ar scene created from aruco_scene_data
+ temp_ar_scene_values = DataStructures.as_dict(ArFeatures.ArScene.from_dict(aruco_scene_data, working_directory))
- try:
+ # Create new aruco scene using temporary ar scene values
+ return ArUcoScene(aruco_markers_group=new_aruco_markers_group, **temp_ar_scene_values)
+
+ def estimate_pose(self, detected_markers) -> Tuple[numpy.array, numpy.array, str, dict]:
+ """Estimate scene pose from detected ArUco markers.
- T = self.places[identifier].translation
- R = self.places[identifier].rotation
+ Returns:
+ scene translation vector
+ scene rotation matrix
+ pose estimation strategy
+ dict of markers used to estimate the pose
+ """
- # Draw place axis
- axisPoints = (T + numpy.float32([R.dot([l/2, 0, 0]), R.dot([0, l/2, 0]), R.dot([0, 0, l/2]), R.dot([0, 0, 0])])).reshape(-1, 3)
- axisPoints, _ = cv.projectPoints(axisPoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
- axisPoints = axisPoints.astype(int)
+ # Pose estimation fails when no marker is detected
+ if len(detected_markers) == 0:
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[0].ravel()), (n,n,f), 6) # X (red)
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[1].ravel()), (n,f,n), 6) # Y (green)
- cv.line(image, tuple(axisPoints[3].ravel()), tuple(axisPoints[2].ravel()), (f,n,n), 6) # Z (blue)
-
- # Draw place
- placePoints = (T + numpy.float32([R.dot([-l, -l, 0]), R.dot([l, -l, 0]), R.dot([l, l, 0]), R.dot([-l, l, 0])])).reshape(-1, 3)
- placePoints, _ = cv.projectPoints(placePoints, self._rotation, self._translation, numpy.array(K), numpy.array(D))
- placePoints = placePoints.astype(int)
-
- cv.line(image, tuple(placePoints[0].ravel()), tuple(placePoints[1].ravel()), (f,f,f), 3)
- cv.line(image, tuple(placePoints[1].ravel()), tuple(placePoints[2].ravel()), (f,f,f), 3)
- cv.line(image, tuple(placePoints[2].ravel()), tuple(placePoints[3].ravel()), (f,f,f), 3)
- cv.line(image, tuple(placePoints[3].ravel()), tuple(placePoints[0].ravel()), (f,f,f), 3)
+ raise ArFeatures.PoseEstimationFailed('No marker detected')
- # Ignore errors due to out of field places: their coordinate are larger than int32 limitations.
- except cv.error:
- pass
+ scene_markers, _ = self.aruco_markers_group.filter_markers(detected_markers)
- def to_obj(self, obj_filepath):
- """Save ArUco scene to .obj file."""
+ # Pose estimation fails when no marker belongs to the scene
+ if len(scene_markers) == 0:
- with open(obj_filepath, 'w', encoding='utf-8') as file:
+ raise ArFeatures.PoseEstimationFailed('No marker belongs to the scene')
- file.write('# ArGaze OBJ File\n')
- file.write('# http://achil.recherche.enac.fr/features/eye/argaze/\n')
+ # Estimate scene pose from unique marker transformations
+ elif len(scene_markers) == 1:
- v_count = 0
+ marker_id, marker = scene_markers.popitem()
+ tvec, rmat = self.aruco_markers_group.estimate_pose_from_single_marker(marker)
+
+ return tvec, rmat, 'estimate_pose_from_single_marker', {marker_id: marker}
- for identifier, place in self.places.items():
+ # Otherwise, check markers consistency
+ consistent_markers, unconsistent_markers, unconsistencies = self.aruco_markers_group.check_markers_consistency(scene_markers, self.angle_tolerance, self.distance_tolerance)
- file.write(f'o {self.dictionary.name}#{identifier}_Marker\n')
+ # Pose estimation fails when no marker passes consistency checking
+ if len(consistent_markers) == 0:
- vertices = ''
+ raise ArFeatures.PoseEstimationFailed('Inconsistent marker poses', unconsistencies)
- T = place.translation
- R = place.rotation
+ # Otherwise, estimate scene pose from all consistent markers pose
+ tvec, rmat = self.aruco_markers_group.estimate_pose_from_markers(consistent_markers)
- points = (T + numpy.float32([R.dot(place.marker.points[0]), R.dot(place.marker.points[1]), R.dot(place.marker.points[2]), R.dot(place.marker.points[3])])).reshape(-1, 3)
+ return tvec, rmat, 'estimate_pose_from_markers', consistent_markers
- print(points)
+ def draw_axis(self, image: numpy.array):
+ """
+ Draw scene axes into image.
+
+ Parameters:
+ image: where to draw
+ """
- # Write vertices in reverse order
- for i in [3, 2, 1, 0]:
+ self.aruco_markers_group.draw_axis(image, self.parent.aruco_detector.optic_parameters.K, self.parent.aruco_detector.optic_parameters.D)
- file.write(f'v {" ".join(map(str, points[i]))}\n')
- v_count += 1
+ def draw_places(self, image: numpy.array):
+ """
+ Draw scene places into image.
- vertices += f' {v_count}'
+ Parameters:
+ image: where to draw
+ """
- file.write('s off\n')
- file.write(f'f{vertices}\n')
+ self.aruco_markers_group.draw_places(image, self.parent.aruco_detector.optic_parameters.K, self.parent.aruco_detector.optic_parameters.D)
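The hunk above shows the scene delegating marker filtering, consistency checking and pose computation to its aruco_markers_group, and raising PoseEstimationFailed on each failure path. A minimal usage sketch of that flow, assuming the method shown is ArScene.estimate_pose and that PoseEstimationFailed is importable from argaze.ArFeatures (both assumptions, not confirmed by this diff):

```python
from argaze import ArFeatures


def estimate_scene_pose(my_scene, detected_markers):
    """Sketch: estimate a scene pose and handle the failure cases raised above.

    my_scene is assumed to be an ArScene of a loaded camera and detected_markers
    the output of an ArUcoDetector.detect_markers() call.
    """

    try:
        # The rewritten method returns the translation vector, the rotation matrix,
        # the name of the strategy used and the markers supporting the estimation.
        tvec, rmat, strategy, markers = my_scene.estimate_pose(detected_markers)
        return tvec, rmat

    except ArFeatures.PoseEstimationFailed as exception:
        # Raised when no marker is detected, none belongs to the scene
        # or no marker passes the consistency check.
        print(f'Pose estimation failed: {exception}')
        return None, None
```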
diff --git a/src/argaze/ArUcoMarkers/__init__.py b/src/argaze/ArUcoMarkers/__init__.py
index 350c69e..0ca48cc 100644
--- a/src/argaze/ArUcoMarkers/__init__.py
+++ b/src/argaze/ArUcoMarkers/__init__.py
@@ -1,4 +1,4 @@
"""
Handle [OpenCV ArUco markers](https://docs.opencv.org/4.x/d5/dae/tutorial_aruco_detection.html): generate and detect markers, calibrate camera, describe scene, ...
"""
-__all__ = ['ArUcoMarkersDictionary', 'ArUcoMarker', 'ArUcoBoard', 'ArUcoOpticCalibrator', 'ArUcoDetector', 'ArUcoScene', 'utils'] \ No newline at end of file
+__all__ = ['ArUcoMarkersDictionary', 'ArUcoMarker', 'ArUcoBoard', 'ArUcoOpticCalibrator', 'ArUcoDetector', 'ArUcoMarkersGroup', 'ArUcoCamera', 'ArUcoScene', 'utils'] \ No newline at end of file
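With the updated __all__, the new ArUcoMarkersGroup and ArUcoCamera modules sit next to the existing ones. A purely illustrative sanity check of the import surface:

```python
# Illustrative import check against the updated __all__ list.
from argaze.ArUcoMarkers import ArUcoCamera, ArUcoMarkersGroup, ArUcoScene

for module in (ArUcoCamera, ArUcoMarkersGroup, ArUcoScene):
    print(module.__name__)
```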
diff --git a/src/argaze/utils/aruco_markers_scene_export.py b/src/argaze/utils/aruco_markers_scene_export.py
index 4518e48..c1a0991 100644
--- a/src/argaze/utils/aruco_markers_scene_export.py
+++ b/src/argaze/utils/aruco_markers_scene_export.py
@@ -11,7 +11,7 @@ import argparse
import time
import itertools
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoOpticCalibrator, ArUcoDetector, ArUcoScene
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoOpticCalibrator, ArUcoDetector, ArUcoMarkersGroup
from argaze.utils import MiscFeatures
import cv2
@@ -54,7 +54,7 @@ def main():
aruco_detector = ArUcoDetector.ArUcoDetector(dictionary=aruco_dictionary, marker_size=args.marker_size, optic_parameters=optic_parameters, parameters=detector_parameters)
# Create empty ArUco scene
- aruco_scene = None
+ aruco_markers_group = None
# Create a window to display AR environment
window_name = "Export ArUco scene"
@@ -96,7 +96,7 @@ def main():
aruco_detector.estimate_markers_pose()
# Build aruco scene from detected markers
- aruco_scene = ArUcoScene.ArUcoScene(args.marker_size, aruco_dictionary, aruco_detector.detected_markers)
+ aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup(args.marker_size, aruco_dictionary, aruco_detector.detected_markers)
# Write scene detected markers
cv2.putText(video_image, f'{list(aruco_detector.detected_markers.keys())}', (20, image_height-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
@@ -149,9 +149,9 @@ def main():
# Save selected marker edition using 'Ctrl + s'
if key_pressed == 19:
- if aruco_scene:
+ if aruco_markers_group:
- aruco_scene.to_obj(f'{args.output}/{int(current_image_time)}-aruco_scene.obj')
+ aruco_markers_group.to_obj(f'{args.output}/{int(current_image_time)}-aruco_markers_group.obj')
print(f'ArUco scene saved into {args.output}')
else:
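The renamed export utility builds an ArUcoMarkersGroup from the detector output and writes it with to_obj. A stripped-down sketch of that core path without the interactive loop; the marker size, dictionary constructor call and file paths are illustrative, and the optic and detector parameters that the real script loads (and that accurate pose estimation needs) are omitted:

```python
from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoDetector, ArUcoMarkersGroup

import cv2

# Illustrative values; the real script gets them from command line arguments.
marker_size = 5.0
aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_APRILTAG_16h5')
aruco_detector = ArUcoDetector.ArUcoDetector(dictionary=aruco_dictionary, marker_size=marker_size)

# Hypothetical input image
video_image = cv2.imread('snapshot.png')

# Detect markers and estimate their individual poses
aruco_detector.detect_markers(video_image)
aruco_detector.estimate_markers_pose()

# Group detected markers and export them as an OBJ description
aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup(marker_size, aruco_dictionary, aruco_detector.detected_markers)
aruco_markers_group.to_obj('aruco_markers_group.obj')
```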
diff --git a/src/argaze/utils/demo_augmented_reality_run.py b/src/argaze/utils/demo_augmented_reality_run.py
index 25d4083..14ddd36 100644
--- a/src/argaze/utils/demo_augmented_reality_run.py
+++ b/src/argaze/utils/demo_augmented_reality_run.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-""" """
+"""Augmented Reality pipeline demo script."""
__author__ = "Théo de la Hogue"
__credits__ = []
@@ -13,28 +13,29 @@ import os
import time
from argaze import ArFeatures, GazeFeatures
+from argaze.ArUcoMarkers import ArUcoCamera
import cv2
import numpy
def main():
"""
- Load AR environment from .json file, detect ArUco markers into camera device images and project it.
+ Load ArUcoCamera from .json file, detect ArUco markers into camera device images and project it.
"""
current_directory = os.path.dirname(os.path.abspath(__file__))
# Manage arguments
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
+ parser.add_argument('aruco_camera', metavar='ARUCO_CAMERA', type=str, help='ArUco camera filepath')
parser.add_argument('-s', '--source', metavar='SOURCE', type=str, default='0', help='video capture source (a number to select camera device or a filepath to load a movie)')
args = parser.parse_args()
- # Load AR enviroment
- ar_environment = ArFeatures.ArEnvironment.from_json(args.environment)
+ # Load ArUcoCamera
+ aruco_camera = ArUcoCamera.ArUcoCamera.from_json(args.aruco_camera)
- # Create a window to display AR environment
- cv2.namedWindow(ar_environment.name, cv2.WINDOW_AUTOSIZE)
+ # Create a window to display ArUcoCamera
+ cv2.namedWindow(aruco_camera.name, cv2.WINDOW_AUTOSIZE)
# Init timestamp
start_time = time.time()
@@ -45,17 +46,17 @@ def main():
# Edit millisecond timestamp
timestamp = int((time.time() - start_time) * 1e3)
- # Project gaze position into environment
- for frame, look_data in ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y))):
+ # Project gaze position into camera
+ for frame, look_data in aruco_camera.look(timestamp, GazeFeatures.GazePosition((x, y))):
# Unpack look data
- movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data
+ gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data
# Do something with look data
# ...
# Attach mouse callback to window
- cv2.setMouseCallback(ar_environment.name, on_mouse_event)
+ cv2.setMouseCallback(aruco_camera.name, on_mouse_event)
# Enable camera video capture into separate thread
video_capture = cv2.VideoCapture(int(args.source) if args.source.isdecimal() else args.source)
@@ -71,28 +72,28 @@ def main():
if success:
- # Detect and project environment
- detection_time, exceptions = ar_environment.detect_and_project(video_image)
+ # Detect and project AR features
+ detection_time, exceptions = aruco_camera.detect_and_project(video_image)
- # Get environment image
- environment_image = ar_environment.image()
+ # Get ArUcoCamera image
+ aruco_camera_image = aruco_camera.image()
# Write detection fps
- cv2.rectangle(environment_image, (0, 0), (420, 50), (63, 63, 63), -1)
- cv2.putText(environment_image, f'Detection fps: {1e3/detection_time:.1f}', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.rectangle(aruco_camera_image, (0, 0), (420, 50), (63, 63, 63), -1)
+ cv2.putText(aruco_camera_image, f'Detection fps: {1e3/detection_time:.1f}', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
# Handle exceptions
for i, (scene_name, e) in enumerate(exceptions.items()):
# Write errors
- cv2.rectangle(environment_image, (0, (i+1)*50), (720, (i+2)*50), (127, 127, 127), -1)
- cv2.putText(environment_image, f'{scene_name} error: {e}', (20, (i+1)*90), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ cv2.rectangle(aruco_camera_image, (0, (i+1)*50), (720, (i+2)*50), (127, 127, 127), -1)
+ cv2.putText(aruco_camera_image, f'{scene_name} error: {e}', (20, (i+1)*90), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- # Display environment
- cv2.imshow(ar_environment.name, environment_image)
+ # Display ArUcoCamera image
+ cv2.imshow(aruco_camera.name, aruco_camera_image)
# Draw and display each frames
- for frame in ar_environment.frames:
+ for frame in aruco_camera.frames:
# Display frame
cv2.imshow(f'{frame.parent.name}:{frame.name}', frame.image())
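The reworked demo script drives everything through an ArUcoCamera loaded from JSON. A condensed sketch of its main loop, limited to the calls visible in this diff; the setup path, capture device and the gaze position fed to look() are placeholders:

```python
import time

from argaze import GazeFeatures
from argaze.ArUcoMarkers import ArUcoCamera

import cv2

# Load the ArUcoCamera description (path is illustrative)
aruco_camera = ArUcoCamera.ArUcoCamera.from_json('demo_environment/demo_augmented_reality_setup.json')

video_capture = cv2.VideoCapture(0)
start_time = time.time()

while True:

    success, video_image = video_capture.read()
    if not success:
        break

    # Detect ArUco markers and project AR features into the camera frame
    detection_time, exceptions = aruco_camera.detect_and_project(video_image)

    # Feed a placeholder gaze position with a millisecond timestamp
    timestamp = int((time.time() - start_time) * 1e3)
    for frame, look_data in aruco_camera.look(timestamp, GazeFeatures.GazePosition((960, 540))):
        gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data

    # Display the resulting ArUcoCamera image
    cv2.imshow(aruco_camera.name, aruco_camera.image())

    if cv2.waitKey(10) == 27:  # Esc to quit
        break

video_capture.release()
cv2.destroyAllWindows()
```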
diff --git a/src/argaze/utils/demo_environment/aoi_3d_scene.obj b/src/argaze/utils/demo_environment/aoi_3d_scene.obj
index 8922e78..d32e235 100644
--- a/src/argaze/utils/demo_environment/aoi_3d_scene.obj
+++ b/src/argaze/utils/demo_environment/aoi_3d_scene.obj
@@ -1,4 +1,4 @@
-# Blender v3.0.1 OBJ File: 'ar_environment.blend'
+# Blender v3.0.1 OBJ File: 'ar_camera.blend'
# www.blender.org
o GrayRectangle
v 0.000000 0.000000 0.000000
diff --git a/src/argaze/utils/demo_environment/aruco_scene.obj b/src/argaze/utils/demo_environment/aruco_markers_group.obj
index 9ad43be..1030d01 100644
--- a/src/argaze/utils/demo_environment/aruco_scene.obj
+++ b/src/argaze/utils/demo_environment/aruco_markers_group.obj
@@ -1,4 +1,4 @@
-# Blender v3.0.1 OBJ File: 'ar_environment.blend'
+# Blender v3.0.1 OBJ File: 'ar_camera.blend'
# www.blender.org
o DICT_APRILTAG_16h5#0_Marker
v -5.000000 14.960000 0.000000
diff --git a/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json b/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json
index b1c0696..f157120 100644
--- a/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json
+++ b/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json
@@ -1,5 +1,6 @@
{
- "name": "ArEnvironment Demo",
+ "name": "ArUcoCamera Demo",
+ "size": [1280, 720],
"aruco_detector": {
"dictionary": {
"name": "DICT_APRILTAG_16h5"
@@ -12,33 +13,31 @@
"aprilTagDeglitch": 1
}
},
- "camera_frame": {
- "layers": {
- "Camera_layer": {}
- },
- "image_parameters": {
- "background_weight": 1,
- "draw_layers": {
- "Camera_layer": {
- "draw_aoi_scene": {
- "draw_aoi": {
- "color": [255, 255, 255],
- "border_size": 1
- }
+ "layers": {
+ "main_layer": {}
+ },
+ "image_parameters": {
+ "background_weight": 1,
+ "draw_layers": {
+ "main_layer": {
+ "draw_aoi_scene": {
+ "draw_aoi": {
+ "color": [255, 255, 255],
+ "border_size": 1
}
}
- },
- "draw_gaze_position": {
- "color": [0, 255, 255],
- "size": 4
}
+ },
+ "draw_gaze_position": {
+ "color": [0, 255, 255],
+ "size": 4
}
},
"scenes": {
"ArScene Demo" : {
- "aruco_scene": "aruco_scene.obj",
+ "aruco_markers_group": "aruco_markers_group.obj",
"layers": {
- "Camera_layer" : {
+ "main_layer" : {
"aoi_scene": "aoi_3d_scene.obj"
}
},
@@ -111,49 +110,6 @@
}
}
},
- "aruco_axis": {
- "lower_left_corner": {
- "origin_marker": 2,
- "horizontal_axis_marker": 3,
- "vertical_axis_marker": 0
- },
- "lower_right_corner": {
- "origin_marker": 3,
- "horizontal_axis_marker": 2,
- "vertical_axis_marker": 1
- },
- "upper_left_corner": {
- "origin_marker": 0,
- "horizontal_axis_marker": 1,
- "vertical_axis_marker": 2
- },
- "upper_right_corner": {
- "origin_marker": 1,
- "horizontal_axis_marker": 0,
- "vertical_axis_marker": 3
- }
- },
- "aruco_aoi": {
- "GrayRectangle": {
- "upper_left_corner": {
- "marker_identifier": 0,
- "marker_corner_index": 2
- },
- "upper_right_corner": {
- "marker_identifier": 1,
- "marker_corner_index": 3
- },
- "lower_left_corner": {
- "marker_identifier": 2,
- "marker_corner_index": 1
- },
- "lower_right_corner": {
- "marker_identifier": 3,
- "marker_corner_index": 0
- },
- "inner_aoi": "all"
- }
- },
"angle_tolerance": 15.0,
"distance_tolerance": 2.54
}
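The setup file now describes an ArUcoCamera directly: the image size, layers and image parameters move to the top level, each scene references an aruco_markers_group OBJ file instead of an aruco_scene, and the aruco_axis/aruco_aoi blocks are removed. A pared-down sketch of such a configuration written from Python, restricted to keys visible in this diff (values are illustrative and the full demo file carries more detector and image parameters):

```python
import json

# Minimal ArUcoCamera setup following the renamed keys.
aruco_camera_setup = {
    "name": "ArUcoCamera Demo",
    "size": [1280, 720],
    "aruco_detector": {
        "dictionary": {"name": "DICT_APRILTAG_16h5"},
        "parameters": {"aprilTagDeglitch": 1}
    },
    "layers": {
        "main_layer": {}
    },
    "image_parameters": {
        "background_weight": 1,
        "draw_gaze_position": {"color": [0, 255, 255], "size": 4}
    },
    "scenes": {
        "ArScene Demo": {
            "aruco_markers_group": "aruco_markers_group.obj",
            "layers": {
                "main_layer": {"aoi_scene": "aoi_3d_scene.obj"}
            },
            "angle_tolerance": 15.0,
            "distance_tolerance": 2.54
        }
    }
}

with open('my_aruco_camera_setup.json', 'w', encoding='utf-8') as setup_file:
    json.dump(aruco_camera_setup, setup_file, indent=4)
```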
diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py
index 92fa282..465c5db 100644
--- a/src/argaze/utils/demo_gaze_analysis_run.py
+++ b/src/argaze/utils/demo_gaze_analysis_run.py
@@ -34,7 +34,7 @@ def main():
# Load ArFrame
ar_frame = ArFeatures.ArFrame.from_json(args.frame)
- # Create a window to display ArEnvironment
+ # Create a window to display ArCamera
cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)
# Heatmap buffer display option