aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/argaze/ArFeatures.py189
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py72
-rw-r--r--src/argaze/DataFeatures.py2
3 files changed, 129 insertions, 134 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 262ebd0..f023d08 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -379,59 +379,64 @@ class ArLayer(DataFeatures.SharedObject):
gaze_movement: gaze movement to project
"""
- # Lock layer exploitation
- self.acquire()
+ # Use try block to always release the layer lock in finally block
+ try:
- # Update current gaze movement
- self.__gaze_movement = gaze_movement
+ # Lock layer exploitation
+ self.acquire()
- # No new analysis available by default
- self.__new_analysis_available = False
+ # Update current gaze movement
+ self.__gaze_movement = gaze_movement
- # No looked aoi by default
- self.__looked_aoi_name = None
+ # No new analysis available by default
+ self.__new_analysis_available = False
+
+ # No looked aoi by default
+ self.__looked_aoi_name = None
- if self.aoi_matcher is not None:
+ if self.aoi_matcher is not None:
- # Update looked aoi thanks to aoi matcher
- # Note: don't filter valid/unvalid and finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally
- self.__looked_aoi_name, _ , match_time, match_exception = self.aoi_matcher.match(self.aoi_scene, gaze_movement)
+ # Update looked aoi thanks to aoi matcher
+ # Note: don't filter valid/invalid and finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally
+ self.__looked_aoi_name, _ , match_time, match_exception = self.aoi_matcher.match(self.aoi_scene, gaze_movement)
- # Valid and finished gaze movement has been identified
- if gaze_movement.valid and gaze_movement.finished:
+ # Valid and finished gaze movement has been identified
+ if gaze_movement.valid and gaze_movement.finished:
- if GazeFeatures.is_fixation(gaze_movement):
+ if GazeFeatures.is_fixation(gaze_movement):
- # Append fixation to aoi scan path
- if self.aoi_scan_path is not None and self.__looked_aoi_name is not None:
+ # Append fixation to aoi scan path
+ if self.aoi_scan_path is not None and self.__looked_aoi_name is not None:
- aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, self.__looked_aoi_name)
+ aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, self.__looked_aoi_name)
- # Is there a new step?
- if aoi_scan_step is not None and len(self.aoi_scan_path) > 1:
+ # Is there a new step?
+ if aoi_scan_step is not None and len(self.aoi_scan_path) > 1:
- for aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
+ for aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
- # Analyze aoi scan path
- analyze_time, analyze_exception = aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
+ # Analyze aoi scan path
+ analyze_time, analyze_exception = aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
- # Update new analysis available state
- self.__new_analysis_available = True
+ # Update new analysis available state
+ self.__new_analysis_available = True
- elif GazeFeatures.is_saccade(gaze_movement):
+ elif GazeFeatures.is_saccade(gaze_movement):
- # Append saccade to aoi scan path
- if self.aoi_scan_path is not None:
+ # Append saccade to aoi scan path
+ if self.aoi_scan_path is not None:
- self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
+ self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
- # Log look data
- for logger_name, logger in self.loggers.items():
+ # Log look data
+ for logger_name, logger in self.loggers.items():
- logger.emit(locals())
+ logger.emit(locals())
- # Unlock layer exploitation
- self.release()
+ finally:
+
+ # Unlock layer exploitation
+ self.release()
def draw(self, image: numpy.array, draw_aoi_scene: dict = None, draw_aoi_matching: dict = None):
"""
@@ -830,86 +835,91 @@ class ArFrame(DataFeatures.SharedObject):
gaze_position: gaze position to project
"""
- # Lock frame exploitation
- self.acquire()
+ # Use try block to always release the frame lock in finally block
+ try:
- # No new analysis by default
- self.__new_analysis_available = False
+ # Lock frame exploitation
+ self.acquire()
- # No gaze movement identified by default
- self.__identified_gaze_movement = GazeFeatures.UnvalidGazeMovement()
+ # No new analysis by default
+ self.__new_analysis_available = False
- # Apply gaze position calibration
- if self.gaze_position_calibrator is not None:
+ # No gaze movement identified by default
+ self.__identified_gaze_movement = GazeFeatures.UnvalidGazeMovement()
- self.__calibrated_gaze_position = self.gaze_position_calibrator.apply(gaze_position)
+ # Apply gaze position calibration
+ if self.gaze_position_calibrator is not None:
- # Or update gaze position at least
- else:
+ self.__calibrated_gaze_position = self.gaze_position_calibrator.apply(gaze_position)
- self.__calibrated_gaze_position = gaze_position
+ # Or update gaze position at least
+ else:
- # Identify gaze movement
- if self.gaze_movement_identifier is not None:
+ self.__calibrated_gaze_position = gaze_position
- # Identify finished gaze movement
- self.__identified_gaze_movement, identify_time, identify_exception = self.gaze_movement_identifier.identify(timestamp, self.__calibrated_gaze_position)
+ # Identify gaze movement
+ if self.gaze_movement_identifier is not None:
- # Valid and finished gaze movement has been identified
- if self.__identified_gaze_movement.valid and self.__identified_gaze_movement.finished:
+ # Identify finished gaze movement
+ self.__identified_gaze_movement, identify_time, identify_exception = self.gaze_movement_identifier.identify(timestamp, self.__calibrated_gaze_position)
- if GazeFeatures.is_fixation(self.__identified_gaze_movement):
+ # Valid and finished gaze movement has been identified
+ if self.__identified_gaze_movement.valid and self.__identified_gaze_movement.finished:
- # Append fixation to scan path
- if self.scan_path is not None:
+ if GazeFeatures.is_fixation(self.__identified_gaze_movement):
- self.scan_path.append_fixation(timestamp, self.__identified_gaze_movement)
+ # Append fixation to scan path
+ if self.scan_path is not None:
- elif GazeFeatures.is_saccade(self.__identified_gaze_movement):
+ self.scan_path.append_fixation(timestamp, self.__identified_gaze_movement)
- # Append saccade to scan path
- if self.scan_path is not None:
-
- scan_step = self.scan_path.append_saccade(timestamp, self.__identified_gaze_movement)
+ elif GazeFeatures.is_saccade(self.__identified_gaze_movement):
- # Is there a new step?
- if scan_step and len(self.scan_path) > 1:
+ # Append saccade to scan path
+ if self.scan_path is not None:
+
+ scan_step = self.scan_path.append_saccade(timestamp, self.__identified_gaze_movement)
- for scan_path_analyzer_module_path, scan_path_analyzer in self.scan_path_analyzers.items():
+ # Is there a new step?
+ if scan_step and len(self.scan_path) > 1:
- # Analyze aoi scan path
- analyze_time, analyze_exception = scan_path_analyzer.analyze(self.scan_path)
+ for scan_path_analyzer_module_path, scan_path_analyzer in self.scan_path_analyzers.items():
- # Update new analysis available state
- self.__new_analysis_available = True
+ # Analyze aoi scan path
+ analyze_time, analyze_exception = scan_path_analyzer.analyze(self.scan_path)
- # No valid finished gaze movement: optionnaly stop in progress identification filtering
- elif self.gaze_movement_identifier is not None and not self.filter_in_progress_identification:
+ # Update new analysis available state
+ self.__new_analysis_available = True
- self.__identified_gaze_movement = self.gaze_movement_identifier.current_gaze_movement
+ # No valid finished gaze movement: optionally stop in progress identification filtering
+ elif self.gaze_movement_identifier is not None and not self.filter_in_progress_identification:
- # Update heatmap
- if self.heatmap is not None:
+ self.__identified_gaze_movement = self.gaze_movement_identifier.current_gaze_movement
- # Scale gaze position value
- scale = numpy.array([self.heatmap.size[0] / self.size[0], self.heatmap.size[1] / self.size[1]])
+ # Update heatmap
+ if self.heatmap is not None:
- # Update heatmap image
- update_time, update_exception = self.heatmap.update(self.__calibrated_gaze_position.value * scale)
+ # Scale gaze position value
+ scale = numpy.array([self.heatmap.size[0] / self.size[0], self.heatmap.size[1] / self.size[1]])
- # Look layers with valid identified gaze movement
- # Note: don't filter valid/unvalid finished/unfished gaze movement to allow layers to reset internally
- for layer_name, layer in self.layers.items():
+ # Update heatmap image
+ update_time, update_exception = self.heatmap.update(self.__calibrated_gaze_position.value * scale)
- look_time, look_exception = layer.look(timestamp, self.__identified_gaze_movement)
+ # Look layers with valid identified gaze movement
+ # Note: don't filter valid/invalid finished/unfinished gaze movement to allow layers to reset internally
+ for layer_name, layer in self.layers.items():
- # Log look data
- for logger_name, logger in self.loggers.items():
+ look_time, look_exception = layer.look(timestamp, self.__identified_gaze_movement)
- logger.emit(locals())
+ # Log look data
+ for logger_name, logger in self.loggers.items():
- # Unlock frame exploitation
- self.release()
+ logger.emit(locals())
+
+ finally:
+
+ # Unlock frame exploitation
+ self.release()
def __image(self, background_weight: float = None, heatmap_weight: float = None, draw_gaze_position_calibrator: dict = None, draw_scan_path: dict = None, draw_layers: dict = None, draw_gaze_positions: dict = None, draw_fixations: dict = None, draw_saccades: dict = None) -> numpy.array:
"""
@@ -1360,16 +1370,13 @@ class ArCamera(ArFrame):
yield scene_frame
- def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, dict]:
+ @DataFeatures.PipelineStep
+ def watch(self, timestamp: int|float, image: numpy.array):
"""Detect AR features from image and project scenes into camera frame.
Parameters:
timestamp: image time stamp (unit does'nt matter)
image: image where to extract AR features
-
- Returns:
- detection time: AR features detection time in ms.
- exception: dictionary with exception raised per scene.
"""
raise NotImplementedError('watch() method not implemented')
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index 3ef572b..86802a5 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -142,6 +142,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
return ArUcoCamera.from_dict(aruco_camera_data, working_directory)
+ @DataFeatures.PipelineStep
def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, float, dict]:
"""Detect environment aruco markers from image and project scenes into camera frame.
@@ -150,47 +151,40 @@ class ArUcoCamera(ArFeatures.ArCamera):
Returns:
detection time: aruco marker detection time in ms.
- projection time: scenes projection time in ms.
- exception: dictionary with exception raised per scene.
"""
- # Lock camera frame exploitation
- self.acquire()
-
- # Detect aruco markers
- detection_time = self.aruco_detector.detect_markers(image)
-
- # Store projection execution start date
- projection_start = time.perf_counter()
+ # Use try block to always release the camera frame lock in finally block
+ try:
- # Fill camera frame background with image
- self.background = image
+ # Lock camera frame exploitation
+ self.acquire()
- # Clear former layers projection into camera frame
- for layer_name, layer in self.layers.items():
-
- layer.aoi_scene = AOI2DScene.AOI2DScene()
+ # Detect aruco markers
+ detection_time = self.aruco_detector.detect_markers(image)
- # Store exceptions for each scene
- exceptions = {}
+ # Fill camera frame background with image
+ self.background = image
- # Project each aoi 3d scene into camera frame
- for scene_name, scene in self.scenes.items():
+ # Clear former layers projection into camera frame
+ for layer_name, layer in self.layers.items():
+
+ layer.aoi_scene = AOI2DScene.AOI2DScene()
- ''' TODO: Enable aruco_aoi processing
- if scene.aruco_aoi:
+ # Project each aoi 3d scene into camera frame
+ for scene_name, scene in self.scenes.items():
- try:
+ ''' TODO: Enable aruco_aoi processing
+ if scene.aruco_aoi:
- # Build AOI scene directly from detected ArUco marker corners
- self.layers[??].aoi_2d_scene |= scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers)
+ try:
- except ArFeatures.PoseEstimationFailed:
+ # Build AOI scene directly from detected ArUco marker corners
+ self.layers[??].aoi_2d_scene |= scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers)
- pass
- '''
+ except ArFeatures.PoseEstimationFailed:
- try:
+ pass
+ '''
# Estimate scene pose from detected scene markers
tvec, rmat, _ = scene.estimate_pose(self.aruco_detector.detected_markers)
@@ -210,22 +204,16 @@ class ArUcoCamera(ArFeatures.ArCamera):
pass
- # Store exceptions and continue
- except Exception as e:
-
- exceptions[scene_name] = e
-
- # Assess projection time in ms
- projection_time = (time.perf_counter() - projection_start) * 1e3
+ finally:
- # Unlock camera frame exploitation
- self.release()
+ # Unlock camera frame exploitation
+ self.release()
- # Timestamp camera frame
- self.timestamp = timestamp
+ # Timestamp camera frame
+ self.timestamp = timestamp
- # Return detection time, projection time and exceptions
- return detection_time, projection_time, exceptions
+ # Return detection time
+ return detection_time
def __image(self, draw_detected_markers: dict = None, draw_scenes: dict = None, draw_optic_parameters_grid: dict = None, **kwargs: dict) -> numpy.array:
"""Get frame image with ArUco detection visualisation.
diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py
index 12e7bff..2636aaa 100644
--- a/src/argaze/DataFeatures.py
+++ b/src/argaze/DataFeatures.py
@@ -430,7 +430,7 @@ class DataDictionary(dict):
__delattr__ = dict.__delitem__
def PipelineStep(method):
- """Define a decorator to define a method as a pipeline step."""
+ """Define a decorator to declare a method as a pipeline step."""
def handler(*args, **kw) -> Tuple[Any, float, Exception]:
"""Handle pipeline step