-rw-r--r--  docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md    |  4
-rw-r--r--  docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md                 | 10
-rw-r--r--  docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md  | 10
-rw-r--r--  src/argaze/ArFeatures.py                                               | 25
-rw-r--r--  src/argaze/ArUcoMarkers/ArUcoCamera.py                                 |  2
-rw-r--r--  src/argaze/utils/aruco_markers_group_export.py                         |  2
-rw-r--r--  src/argaze/utils/demo_aruco_markers_run.py                             |  2
7 files changed, 14 insertions(+), 41 deletions(-)
diff --git a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
index 892d6dd..c79c8b5 100644
--- a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
+++ b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
@@ -75,11 +75,11 @@ for name, aruco_scene in aruco_camera.scenes.items():
[ArUcoCamera.watch](../../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method returns data about pipeline execution.
```python
-# Assuming that images are available
+# Assuming that timestamped images are available
...:
# Watch image with ArUco camera
- detection_time, projection_time, exception = aruco_camera.watch(image)
+ detection_time, projection_time, exception = aruco_camera.watch(timestamp, image)
# Do something with pipeline times
...
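For context, here is a minimal sketch of how the updated call can be driven from an OpenCV video file, mirroring the pattern used in src/argaze/utils/aruco_markers_group_export.py further down; the video path is hypothetical and the loading of aruco_camera is elided.

```python
import cv2

# Hypothetical video path; aruco_camera is assumed to be an already configured ArUcoCamera
video_capture = cv2.VideoCapture('./video.mp4')

while video_capture.isOpened():

    # Read next video image
    success, image = video_capture.read()

    if not success:
        break

    # Use the capture position in milliseconds as the image timestamp
    timestamp = video_capture.get(cv2.CAP_PROP_POS_MSEC)

    # Watch timestamped image with ArUco camera
    detection_time, projection_time, exception = aruco_camera.watch(timestamp, image)

    # Do something with pipeline times
    ...

video_capture.release()
```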
diff --git a/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md b/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
index 7f60f86..86839c1 100644
--- a/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
+++ b/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
@@ -101,17 +101,11 @@ The names of 3D AOI **and** their related [ArFrames](../../argaze.md/#argaze.ArF
After the camera image is passed to the [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method, it is possible to apply a perspective transformation in order to project the watched image into each [ArUcoScenes](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) [frames background](../../argaze.md/#argaze.ArFeatures.ArFrame) image.
```python
-# Assuming that Full HD (1920x1080) video stream or file is opened
-...
-
-# Assuming that the video reading is handled in a looping code block
+# Assuming that Full HD (1920x1080) timestamped images are available
...:
- # Capture image from video stream of file
- image = video_capture.read()
-
 # Detect ArUco markers, estimate scene pose, then project 3D AOI into camera frame
- aruco_camera.watch(image)
+ aruco_camera.watch(timestamp, image)
# Map watched image into ArUcoScenes frames background
aruco_camera.map()
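Once map() has run, each scene frame holds the mapped background. Below is a minimal sketch of how those frames could be inspected, assuming ArCamera exposes a scene_frames generator (suggested by the yield scene_frame context line in the ArFeatures.py hunk below) and that scene frames provide the usual ArFrame image() method; both accessors and the OpenCV display are assumptions here.

```python
import cv2

# Assuming aruco_camera.watch(timestamp, image) and aruco_camera.map() have been called
for index, scene_frame in enumerate(aruco_camera.scene_frames()):

    # Get the scene frame image with its mapped background
    scene_frame_image = scene_frame.image()

    # Display it in one window per scene frame (window naming is illustrative)
    cv2.imshow(f'Scene frame {index}', scene_frame_image)

# Refresh OpenCV windows
cv2.waitKey(1)
```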
diff --git a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
index 329a137..43bb64e 100644
--- a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
+++ b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
@@ -98,17 +98,11 @@ The usual [ArFrame visualisation parameters](../gaze_analysis_pipeline/visualisa
Pass each camera image to [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method to execute the whole pipeline dedicated to ArUco markers detection, scene pose estimation and 3D AOI projection.
```python
-# Assuming that Full HD (1920x1080) video stream or file is opened
-...
-
-# Assuming that the video reading is handled in a looping code block
+# Assuming that Full HD (1920x1080) timestamped images are available
...:
- # Capture image from video stream of file
- image = video_capture.read()
-
 # Detect ArUco markers, estimate scene pose, then project 3D AOI into camera frame
- aruco_camera.watch(image)
+ aruco_camera.watch(timestamp, image)
 # Display ArUcoCamera frame image to show detected ArUco markers, scene pose, 2D AOI projection and ArFrame visualisation.
... aruco_camera.image()
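A minimal sketch of the display step referenced above, assuming OpenCV is used for rendering; the window name and the cv2 calls are illustrative, not part of the documented pipeline.

```python
import cv2

# Assuming aruco_camera.watch(timestamp, image) has just been called inside the capture loop

# Get the ArUcoCamera frame image with detected markers, scene pose and AOI drawn on it
aruco_camera_image = aruco_camera.image()

# Show it in an OpenCV window
cv2.imshow('ArUcoCamera frame', aruco_camera_image)

# A short wait keeps the window responsive between loop iterations
cv2.waitKey(1)
```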
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index dca2e3a..599b81f 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -1448,9 +1448,13 @@ class ArCamera(ArFrame):
yield scene_frame
- def watch(self, image: numpy.array) -> Tuple[float, dict]:
+ def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, dict]:
"""Detect AR features from image and project scenes into camera frame.
+ Parameters:
+ timestamp: image time stamp (unit doesn't matter)
+ image: image where to extract AR features
+
Returns:
detection time: AR features detection time in ms.
exception: dictionary with exception raised per scene.
@@ -1469,25 +1473,6 @@ class ArCamera(ArFrame):
watch method needs to be called first.
"""
- # Can't use camera frame while it is locked
- wait_start = time.perf_counter()
- waiting_time = 0
-
- while super().locked():
-
- time.sleep(1e-6)
- waiting_time = (time.perf_counter() - wait_start) * 1e3
-
- # TODO? return waiting time?
-
- # TODO? add timeout parameter?
- #if waiting_time > timeout:
- # return None, None
-
- # DEBUG
- #if waiting_time > 0:
- # print(f'ArCamera: waiting {waiting_time:.3f} ms before to process gaze position at {timestamp} time.')
-
# Project gaze position into camera frame
yield self, super().look(timestamp, gaze_position)
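The new docstring states that the timestamp unit doesn't matter. Here is a minimal sketch illustrating that point with a plain frame counter, assuming a live source without a media clock; image_source and the counter are illustrative only.

```python
# Assuming aruco_camera is an already configured ArUcoCamera
# and image_source is any iterable of camera images (hypothetical)
frame_counter = 0

for image in image_source:

    # Unit doesn't matter: a frame index is as valid a timestamp as a millisecond clock
    aruco_camera.watch(frame_counter, image)

    frame_counter += 1
```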
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index 04226c0..1cf0896 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -142,7 +142,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
return ArUcoCamera.from_dict(aruco_camera_data, working_directory)
- def watch(self, image: numpy.array) -> Tuple[float, float, dict]:
+ def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, float, dict]:
"""Detect environment aruco markers from image and project scenes into camera frame.
Returns:
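ArUcoCamera.watch keeps the three-value return shown in the documentation hunks above. Below is a minimal sketch of how the per-scene exception dictionary could be inspected, assuming it maps scene names to raised exceptions (as the ArFeatures docstring describes) and that entries may be None when nothing was raised.

```python
# Watch timestamped image with ArUco camera
detection_time, projection_time, exceptions = aruco_camera.watch(timestamp, image)

# Report any exception raised while projecting a scene
for scene_name, scene_exception in exceptions.items():

    if scene_exception is not None:

        print(f'Scene {scene_name}: {scene_exception}')
```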
diff --git a/src/argaze/utils/aruco_markers_group_export.py b/src/argaze/utils/aruco_markers_group_export.py
index 8740af2..92646ca 100644
--- a/src/argaze/utils/aruco_markers_group_export.py
+++ b/src/argaze/utils/aruco_markers_group_export.py
@@ -74,7 +74,7 @@ def main():
current_image_time = video_capture.get(cv2.CAP_PROP_POS_MSEC)
# Detect markers
- detection_time, projection_time, exceptions = aruco_camera.watch(video_image)
+ detection_time, projection_time, exceptions = aruco_camera.watch(current_image_time, video_image)
 # Estimate each marker's pose
aruco_camera.aruco_detector.estimate_markers_pose(aruco_camera.aruco_detector.detected_markers)
diff --git a/src/argaze/utils/demo_aruco_markers_run.py b/src/argaze/utils/demo_aruco_markers_run.py
index ce81da4..67e2845 100644
--- a/src/argaze/utils/demo_aruco_markers_run.py
+++ b/src/argaze/utils/demo_aruco_markers_run.py
@@ -118,7 +118,7 @@ def main():
video_chrono.restart()
# Detect and project AR features
- detection_time, projection_time, exceptions = aruco_camera.watch(video_image)
+ detection_time, projection_time, exceptions = aruco_camera.watch(capture_time, video_image)
# Assess visualisation time
visualisation_start = time.time()