-rw-r--r--   docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md     |   4
-rw-r--r--   docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md                  |  10
-rw-r--r--   docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md   |  10
-rw-r--r--   src/argaze/ArFeatures.py                                                | 106
-rw-r--r--   src/argaze/ArUcoMarkers/ArUcoCamera.py                                  |  31
-rw-r--r--   src/argaze/DataStructures.py                                            |  72
-rw-r--r--   src/argaze/utils/aruco_markers_group_export.py                          |   2
-rw-r--r--   src/argaze/utils/demo_aruco_markers_run.py                              |   2
8 files changed, 136 insertions, 101 deletions
diff --git a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
index 892d6dd..c79c8b5 100644
--- a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
+++ b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md
@@ -75,11 +75,11 @@ for name, aruco_scene in aruco_camera.scenes.items():
[ArUcoCamera.watch](../../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method returns data about pipeline execution.
```python
-# Assuming that images are available
+# Assuming that timestamped images are available
...:
# Watch image with ArUco camera
- detection_time, projection_time, exception = aruco_camera.watch(image)
+ detection_time, projection_time, exception = aruco_camera.watch(timestamp, image)
# Do something with pipeline times
...
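With this change the caller supplies its own timestamp alongside each image. A minimal sketch of consuming the new signature, assuming `aruco_camera` is an already loaded [ArUcoCamera](../../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) instance and `timestamped_images` is any iterable of (timestamp, image) pairs (both are assumptions, not part of the patch):

```python
# A minimal sketch: aruco_camera and timestamped_images are assumed to exist
for timestamp, image in timestamped_images:

    # Watch image with ArUco camera
    detection_time, projection_time, exception = aruco_camera.watch(timestamp, image)

    # Do something with pipeline times
    print(f'detection: {detection_time:.3f} ms, projection: {projection_time:.3f} ms')
```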
diff --git a/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md b/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
index 8c13bf2..4f9af7c 100644
--- a/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
+++ b/docs/user_guide/aruco_markers_pipeline/aoi_3d_frame.md
@@ -101,17 +101,11 @@ The names of 3D AOI **and** their related [ArFrames](../../argaze.md/#argaze.ArF
After the camera image is passed to [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method, it is possible to apply a perspective transformation in order to project the watched image into each [ArUcoScenes](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) [frames background](../../argaze.md/#argaze.ArFeatures.ArFrame) image.
```python
-# Assuming that Full HD (1920x1080) video stream or file is opened
-...
-
-# Assuming that the video reading is handled in a looping code block
+# Assuming that Full HD (1920x1080) timestamped images are available
...:
- # Capture image from video stream of file
- image = video_capture.read()
-
    # Detect ArUco markers, estimate scene pose, then project 3D AOI into camera frame
- aruco_camera.watch(image)
+ aruco_camera.watch(timestamp, image)
# Map watched image into ArUcoScenes frames background
aruco_camera.map()
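The capture lines removed above hint at one way to produce such timestamped images. A hedged sketch using OpenCV with the stream position as timestamp; the `video.mp4` path is hypothetical and `aruco_camera` is assumed to be loaded already:

```python
import cv2

# Hypothetical video file; aruco_camera is assumed to be loaded already
video_capture = cv2.VideoCapture('video.mp4')

while video_capture.isOpened():

    # Capture image from video stream or file
    success, image = video_capture.read()

    if not success:
        break

    # Use the current stream position (in ms) as timestamp
    timestamp = video_capture.get(cv2.CAP_PROP_POS_MSEC)

    # Detect ArUco markers, estimate scene pose, then project 3D AOI into camera frame
    aruco_camera.watch(timestamp, image)

    # Map watched image into ArUcoScenes frames background
    aruco_camera.map()

video_capture.release()
```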
diff --git a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
index 329a137..43bb64e 100644
--- a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
+++ b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md
@@ -98,17 +98,11 @@ The usual [ArFrame visualisation parameters](../gaze_analysis_pipeline/visualisa
Pass each camera image to [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method to execute the whole pipeline dedicated to ArUco markers detection, scene pose estimation and 3D AOI projection.
```python
-# Assuming that Full HD (1920x1080) video stream or file is opened
-...
-
-# Assuming that the video reading is handled in a looping code block
+# Assuming that Full HD (1920x1080) timestamped images are available
...:
- # Capture image from video stream of file
- image = video_capture.read()
-
    # Detect ArUco markers, estimate scene pose, then project 3D AOI into camera frame
- aruco_camera.watch(image)
+ aruco_camera.watch(timestamp, image)
    # Display ArUcoCamera frame image to show detected ArUco markers, scene pose, 2D AOI projection and ArFrame visualisation.
... aruco_camera.image()
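The same pattern applies here, with the frame image displayed after each watch call. A sketch assuming the same `aruco_camera` and `timestamped_images` as above:

```python
import cv2

# A minimal sketch; aruco_camera and timestamped_images are assumed as above
for timestamp, image in timestamped_images:

    # Run the ArUco markers detection pipeline
    aruco_camera.watch(timestamp, image)

    # Display ArUcoCamera frame image with detected markers and AOI projection
    cv2.imshow(aruco_camera.name, aruco_camera.image())

    # Quit on Escape key press
    if cv2.waitKey(1) & 0xFF == 27:
        break

cv2.destroyAllWindows()
```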
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 5ec6b7e..7f1d2d6 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -94,10 +94,13 @@ DEFAULT_ARLAYER_DRAW_PARAMETERS = {
}
@dataclass
-class ArLayer():
+class ArLayer(DataStructures.SharedObject):
"""
Defines a space where gaze movements are matched with AOI and inside which those matches need to be analyzed.
+ !!! note
+ Inherits from the DataStructures.SharedObject class so it can be shared by multiple threads
+
Parameters:
name: name of the layer
aoi_scene: AOI scene description
@@ -118,15 +121,15 @@ class ArLayer():
def __post_init__(self):
+ # Init SharedObject
+ super().__init__()
+
# Define parent attribute: it will be setup by parent later
self.__parent = None
# Init current gaze movement
self.__gaze_movement = GazeFeatures.UnvalidGazeMovement()
- # Init lock to share looking data with multiples threads
- self.__look_lock = threading.Lock()
-
# Cast aoi scene to its effective dimension
if self.aoi_scene.dimension == 2:
@@ -372,7 +375,7 @@ class ArLayer():
"""
# Lock layer exploitation
- self.__look_lock.acquire()
+ self.acquire()
# Store look execution start date
look_start = time.perf_counter()
@@ -460,7 +463,7 @@ class ArLayer():
execution_times['total'] = (time.perf_counter() - look_start) * 1e3
# Unlock layer exploitation
- self.__look_lock.release()
+ self.release()
# Return look data
return looked_aoi, aoi_scan_path_analysis, execution_times, exception
@@ -479,8 +482,8 @@ class ArLayer():
return self.draw(image, **self.draw_parameters)
- # Lock frame exploitation
- self.__look_lock.acquire()
+ # Lock layer exploitation
+ self.acquire()
# Draw aoi if required
if draw_aoi_scene is not None:
@@ -492,8 +495,8 @@ class ArLayer():
self.aoi_matcher.draw(image, self.aoi_scene, **draw_aoi_matching)
- # Unlock frame exploitation
- self.__look_lock.release()
+ # Unlock layer exploitation
+ self.release()
# Define default ArFrame image parameters
DEFAULT_ARFRAME_IMAGE_PARAMETERS = {
@@ -517,10 +520,13 @@ DEFAULT_ARFRAME_IMAGE_PARAMETERS = {
}
@dataclass
-class ArFrame():
+class ArFrame(DataStructures.SharedObject):
"""
Defines a rectangular area where timestamped gaze positions are projected and inside which they need to be analyzed.
+ !!! note
+ Inherits from the DataStructures.SharedObject class so it can be shared by multiple threads
+
Parameters:
name: name of the frame
size: defines the dimension of the rectangular area where gaze positions are projected
@@ -548,9 +554,12 @@ class ArFrame():
layers: dict = field(default_factory=dict)
log: bool = field(default=False)
image_parameters: dict = field(default_factory=DEFAULT_ARFRAME_IMAGE_PARAMETERS)
-
+
def __post_init__(self):
+ # Init SharedObject
+ super().__init__()
+
# Define parent attribute: it will be setup by parent later
self.__parent = None
@@ -562,9 +571,6 @@ class ArFrame():
# Init current gaze position
self.__gaze_position = GazeFeatures.UnvalidGazePosition()
- # Init lock to share looked data with multiples threads
- self.__look_lock = threading.Lock()
-
# Prepare logging if needed
self.__ts_logs = {}
@@ -812,14 +818,14 @@ class ArFrame():
return ArFrame.from_dict(frame_data, working_directory)
@property
- def parent(self):
+ def parent(self) -> object:
"""Get parent instance"""
return self.__parent
@parent.setter
- def parent(self, parent):
- """Get parent instance"""
+ def parent(self, parent: object):
+ """Set parent instance"""
self.__parent = parent
@@ -852,7 +858,7 @@ class ArFrame():
"""
# Lock frame exploitation
- self.__look_lock.acquire()
+ self.acquire()
# Store look execution start date
look_start = time.perf_counter()
@@ -988,7 +994,7 @@ class ArFrame():
execution_times['total'] = (time.perf_counter() - look_start) * 1e3
# Unlock frame exploitation
- self.__look_lock.release()
+ self.release()
# Return look data
return self.__gaze_position, identified_gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception
@@ -1009,7 +1015,7 @@ class ArFrame():
"""
# Lock frame exploitation
- self.__look_lock.acquire()
+ self.acquire()
# Draw background only
if background_weight is not None and (heatmap_weight is None or self.heatmap is None):
@@ -1066,7 +1072,7 @@ class ArFrame():
self.__gaze_position.draw(image, **draw_gaze_positions)
# Unlock frame exploitation
- self.__look_lock.release()
+ self.release()
return image
@@ -1269,7 +1275,7 @@ class ArScene():
return ArScene(new_scene_name, new_layers, new_frames, **scene_data)
- def estimate_pose(self, detected_features: Any) -> Tuple[numpy.array, numpy.array]:
+ def estimate_pose(self, detected_features: any) -> Tuple[numpy.array, numpy.array]:
"""Define abstract estimate scene pose method.
Parameters:
@@ -1393,9 +1399,6 @@ class ArCamera(ArFrame):
layer.aoi_scan_path.expected_aoi = expected_aoi_list
layer.aoi_matcher.exclude = exclude_aoi_list
-
- # Init a lock to share scene projections into camera frame between multiple threads
- self._frame_lock = threading.Lock()
def __str__(self) -> str:
"""
@@ -1445,9 +1448,13 @@ class ArCamera(ArFrame):
yield scene_frame
- def watch(self, image: numpy.array) -> Tuple[float, dict]:
+ def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, dict]:
"""Detect AR features from image and project scenes into camera frame.
+ Parameters:
+ timestamp: image time stamp (unit doesn't matter)
+ image: image where to extract AR features
+
Returns:
detection time: AR features detection time in ms.
exception: dictionary with exception raised per scene.
@@ -1466,31 +1473,12 @@ class ArCamera(ArFrame):
watch method needs to be called first.
"""
- # Can't use camera frame while it is locked
- wait_start = time.perf_counter()
- waiting_time = 0
-
- while self._frame_lock.locked():
-
- time.sleep(1e-6)
- waiting_time = (time.perf_counter() - wait_start) * 1e3
-
- # TODO? return waiting time?
-
- # TODO? add timeout parameter?
- #if waiting_time > timeout:
- # return None, None
-
- # DEBUG
- #if waiting_time > 0:
- # print(f'ArCamera: waiting {waiting_time:.3f} ms before to process gaze position at {timestamp} time.')
-
- # Lock camera frame exploitation
- self._frame_lock.acquire()
-
# Project gaze position into camera frame
yield self, super().look(timestamp, gaze_position)
+ # Lock camera frame exploitation
+ self.acquire()
+
# Project gaze position into each scene frames if possible
for scene_frame in self.scene_frames:
@@ -1517,7 +1505,7 @@ class ArCamera(ArFrame):
pass
# Unlock camera frame exploitation
- self._frame_lock.release()
+ self.release()
def map(self):
"""Project camera frame background into scene frames background.
@@ -1526,12 +1514,8 @@ class ArCamera(ArFrame):
watch method needs to be called first.
"""
- # Can't use camera frame when it is locked
- if self._frame_lock.locked():
- return
-
# Lock camera frame exploitation
- self._frame_lock.acquire()
+ self.acquire()
# Project camera frame background into each scene frame if possible
for frame in self.scene_frames:
@@ -1555,17 +1539,7 @@ class ArCamera(ArFrame):
pass
# Unlock camera frame exploitation
- self._frame_lock.release()
-
- def image(self, **kwargs: dict) -> numpy.array:
- """
- Get frame image.
-
- Parameters:
- kwargs: ArFrame.image parameters
- """
-
- return super().image(**kwargs)
+ self.release()
def to_json(self, json_filepath):
"""Save camera to .json file."""
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index ed6c619..6afce03 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -142,21 +142,24 @@ class ArUcoCamera(ArFeatures.ArCamera):
return ArUcoCamera.from_dict(aruco_camera_data, working_directory)
- def watch(self, image: numpy.array) -> Tuple[float, float, dict]:
+ def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, float, dict]:
"""Detect environment aruco markers from image and project scenes into camera frame.
+ !!! note
+ This method timestamps the camera frame and its layers.
+
Returns:
detection time: aruco marker detection time in ms.
projection time: scenes projection time in ms.
exception: dictionary with exception raised per scene.
"""
+ # Lock camera frame exploitation
+ self.acquire()
+
# Detect aruco markers
detection_time = self.aruco_detector.detect_markers(image)
- # Lock camera frame exploitation
- self._frame_lock.acquire()
-
# Store projection execution start date
projection_start = time.perf_counter()
@@ -197,8 +200,12 @@ class ArUcoCamera(ArFeatures.ArCamera):
try:
+ # Update camera layer aoi
self.layers[layer_name].aoi_scene |= layer_projection
+ # Timestamp camera layer
+ self.layers[layer_name].timestamp = timestamp
+
except KeyError:
pass
@@ -212,7 +219,10 @@ class ArUcoCamera(ArFeatures.ArCamera):
projection_time = (time.perf_counter() - projection_start) * 1e3
# Unlock camera frame exploitation
- self._frame_lock.release()
+ self.release()
+
+ # Timestamp camera frame
+ self.timestamp = timestamp
# Return detection time, projection time and exceptions
return detection_time, projection_time, exceptions
@@ -227,14 +237,8 @@ class ArUcoCamera(ArFeatures.ArCamera):
kwargs: ArCamera.image parameters
"""
- # Can't use camera frame when it is locked
- if self._frame_lock.locked():
- return
-
- # Lock camera frame exploitation
- self._frame_lock.acquire()
-
# Get camera frame image
+ # Note: don't lock/unlock camera frame here as super().image manages it.
image = super().image(**kwargs)
# Draw optic parameters grid if required
@@ -254,9 +258,6 @@ class ArUcoCamera(ArFeatures.ArCamera):
self.aruco_detector.draw_detected_markers(image, draw_detected_markers)
- # Unlock camera frame exploitation
- self._frame_lock.release()
-
return image
def image(self, **kwargs: dict) -> numpy.array:
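As the added docstring note says, watch now stamps the camera frame and its layers, with the frame stamped after the lock is released since the inherited timestamp setter takes the same non-reentrant lock. A short sketch of observing this, where `aruco_camera`, `timestamp` and `image` are assumed to exist:

```python
# A minimal sketch; aruco_camera, timestamp and image are assumed to exist
detection_time, projection_time, exceptions = aruco_camera.watch(timestamp, image)

# The camera frame now carries the image timestamp
assert aruco_camera.timestamp == timestamp

# Camera layers that received a projection are stamped too
for layer_name, layer in aruco_camera.layers.items():

    print(f'{layer_name}: {layer.timestamp}')
```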
diff --git a/src/argaze/DataStructures.py b/src/argaze/DataStructures.py
index fc5072b..6e058e8 100644
--- a/src/argaze/DataStructures.py
+++ b/src/argaze/DataStructures.py
@@ -13,6 +13,8 @@ import collections
import json
import ast
import bisect
+import threading
+import math
import pandas
import numpy
@@ -99,6 +101,76 @@ class JsonEncoder(json.JSONEncoder):
return public_dict
+class SharedObject():
+ """Enable multiple threads sharing."""
+
+ def __init__(self):
+ self._lock = threading.Lock()
+ self._timestamp = math.nan
+ self._token = None
+
+ def acquire(self):
+ self._lock.acquire()
+
+ def release(self):
+ self._lock.release()
+
+ def locked(self) -> bool:
+ return self._lock.locked()
+
+ @property
+ def timestamp(self) -> int|float:
+ """Get timestamp"""
+
+ self._lock.acquire()
+ timestamp = self._timestamp
+ self._lock.release()
+
+ return timestamp
+
+ @timestamp.setter
+ def timestamp(self, timestamp: int|float):
+ """Set timestamp"""
+
+ self._lock.acquire()
+ self._timestamp = timestamp
+ self._lock.release()
+
+ def untimestamp(self):
+ """Reset timestamp"""
+
+ self._lock.acquire()
+ self._timestamp = math.nan
+ self._lock.release()
+
+ @property
+ def timestamped(self) -> bool:
+ """Is the object timestamped?"""
+
+ self._lock.acquire()
+ timestamped = not math.isnan(self._timestamp)
+ self._lock.release()
+
+ return timestamped
+
+ @property
+ def token(self) -> any:
+ """Get token"""
+
+ self._lock.acquire()
+ token = self._token
+ self._lock.release()
+
+ return token
+
+ @token.setter
+ def token(self, token: any):
+ """Set token"""
+
+ self._lock.acquire()
+ self._token = token
+ self._lock.release()
+
class TimeStampedBuffer(collections.OrderedDict):
"""Ordered dictionary to handle timestamped data.
```
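A usage sketch of the new SharedObject class: the property accessors acquire and release the internal lock around each access, so they are safe to call from concurrent threads, but they must not be called while the object is explicitly held with acquire(), since threading.Lock is not reentrant.

```python
import threading

from argaze import DataStructures

# Share one object between a worker thread and the main thread
shared = DataStructures.SharedObject()

def worker():

    # The timestamp setter locks and unlocks internally
    shared.timestamp = 123.4

thread = threading.Thread(target=worker)
thread.start()
thread.join()

# The timestamped property reports whether a timestamp has been set
assert shared.timestamped
print(shared.timestamp)  # 123.4

# Reset timestamp to its initial not-a-number state
shared.untimestamp()
assert not shared.timestamped
```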
diff --git a/src/argaze/utils/aruco_markers_group_export.py b/src/argaze/utils/aruco_markers_group_export.py
index 8740af2..92646ca 100644
--- a/src/argaze/utils/aruco_markers_group_export.py
+++ b/src/argaze/utils/aruco_markers_group_export.py
@@ -74,7 +74,7 @@ def main():
current_image_time = video_capture.get(cv2.CAP_PROP_POS_MSEC)
# Detect markers
- detection_time, projection_time, exceptions = aruco_camera.watch(video_image)
+ detection_time, projection_time, exceptions = aruco_camera.watch(current_image_time, video_image)
# Estimate each markers pose
aruco_camera.aruco_detector.estimate_markers_pose(aruco_camera.aruco_detector.detected_markers)
diff --git a/src/argaze/utils/demo_aruco_markers_run.py b/src/argaze/utils/demo_aruco_markers_run.py
index ce81da4..67e2845 100644
--- a/src/argaze/utils/demo_aruco_markers_run.py
+++ b/src/argaze/utils/demo_aruco_markers_run.py
@@ -118,7 +118,7 @@ def main():
video_chrono.restart()
# Detect and project AR features
- detection_time, projection_time, exceptions = aruco_camera.watch(video_image)
+ detection_time, projection_time, exceptions = aruco_camera.watch(capture_time, video_image)
# Assess visualisation time
visualisation_start = time.time()