From d904b99cc969c977f911d36cfeb2279544c528e5 Mon Sep 17 00:00:00 2001 From: Théo de la Hogue Date: Tue, 23 Jan 2024 14:54:30 +0100 Subject: Defining PipelineStepObject and PipelineStepMethod to assess execution time. Removing exception management to let the user catch exceptions in the script. --- .../advanced_topics/scripting.md | 26 ++-- .../configuration_and_execution.md | 30 +++- .../advanced_topics/scripting.md | 67 ++++---- .../configuration_and_execution.md | 17 ++- src/argaze/ArFeatures.py | 32 ++-- src/argaze/ArUcoMarkers/ArUcoCamera.py | 10 +- src/argaze/ArUcoMarkers/ArUcoDetector.py | 4 +- src/argaze/AreaOfInterest/AOIFeatures.py | 4 +- src/argaze/DataFeatures.py | 168 ++++++++------------- src/argaze/GazeAnalysis/Basic.py | 4 +- src/argaze/GazeAnalysis/DeviationCircleCoverage.py | 2 +- .../DispersionThresholdIdentification.py | 2 +- src/argaze/GazeAnalysis/Entropy.py | 2 +- src/argaze/GazeAnalysis/ExploreExploitRatio.py | 2 +- src/argaze/GazeAnalysis/FocusPointInside.py | 2 +- src/argaze/GazeAnalysis/KCoefficient.py | 4 +- src/argaze/GazeAnalysis/LempelZivComplexity.py | 2 +- src/argaze/GazeAnalysis/NGram.py | 2 +- src/argaze/GazeAnalysis/NearestNeighborIndex.py | 2 +- src/argaze/GazeAnalysis/TransitionMatrix.py | 2 +- .../VelocityThresholdIdentification.py | 2 +- src/argaze/GazeFeatures.py | 14 +- src/argaze/PupillAnalysis/WorkloadIndex.py | 2 +- src/argaze/PupillFeatures.py | 4 +- src/argaze/utils/demo_aruco_markers_run.py | 15 +- src/argaze/utils/demo_gaze_analysis_run.py | 2 +- 26 files changed, 206 insertions(+), 217 deletions(-) diff --git a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md index c79c8b5..99f52ee 100644 --- a/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md +++ b/docs/user_guide/aruco_markers_pipeline/advanced_topics/scripting.md @@ -78,30 +78,26 @@ for name, aruco_scene in aruco_camera.scenes.items(): # Assuming that timestamped images are available ...: - # Watch image with ArUco camera - detection_time, projection_time, exception = aruco_camera.watch(timestamp, image) + try: - # Do something with pipeline times - ... + # Watch image with ArUco camera + aruco_camera.watch(timestamp, image) # Do something with pipeline exception - if exception: + except Exception as e: + ... + + # Do something with detected_markers + ... aruco_camera.aruco_detector.detected_markers + ``` Let's understand the meaning of each returned data. -### *detection_time* - -ArUco marker detection time in ms. - -### *projection_time* - -Scenes projection time in ms. - -### *exception* +### *aruco_camera.aruco_detector.detected_markers* -A [python Exception](https://docs.python.org/3/tutorial/errors.html#exceptions) object raised during pipeline execution. +A dictionary containing all detected markers provided by [ArUcoDetector](../../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector) class. 
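For instance, a minimal sketch of how this dictionary could be browsed (assuming it maps marker identifiers to marker objects):

```python
# Assuming aruco_camera.watch(timestamp, image) has already been called inside a try block

# Browse detected markers (assumption: the dictionary is keyed by marker identifier)
for marker_id, marker in aruco_camera.aruco_detector.detected_markers.items():

    # Do something with each detected marker
    print(marker_id, marker)
```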
## Setup ArUcoCamera image parameters diff --git a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md index 43bb64e..5b740dc 100644 --- a/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md +++ b/docs/user_guide/aruco_markers_pipeline/configuration_and_execution.md @@ -97,12 +97,23 @@ The usual [ArFrame visualisation parameters](../gaze_analysis_pipeline/visualisa Pass each camera image to [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method to execute the whole pipeline dedicated to ArUco markers detection, scene pose estimation and 3D AOI projection. +!!! warning "Mandatory" + + [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method must be called from a *try* block to catch pipeline exceptions. + ```python # Assuming that Full HD (1920x1080) timestamped images are available ...: - # Detect ArUco markers, estimate scene pose then, project 3D AOI into camera frame - aruco_camera.watch(timestamp, image) + try: + + # Detect ArUco markers, estimate scene pose then, project 3D AOI into camera frame + aruco_camera.watch(timestamp, image) + + # Do something with pipeline exception + except Exception as e: + + ... # Display ArUcoCamera frame image to display detected ArUco markers, scene pose, 2D AOI projection and ArFrame visualisation. ... aruco_camera.image() @@ -114,12 +125,23 @@ As mentioned above, [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCame Particularly, timestamped gaze positions can be passed one by one to [ArUcoCamera.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method to execute the whole pipeline dedicated to gaze analysis. +!!! warning "Mandatory" + + [ArUcoCamera.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method must be called from a *try* block to catch pipeline exceptions. + ```python # Assuming that timestamped gaze positions are available ... - # Look ArUcoCamera frame at a timestamped gaze position - aruco_camera.look(timestamp, gaze_position) + try: + + # Look ArUcoCamera frame at a timestamped gaze position + aruco_camera.look(timestamp, gaze_position) + + # Do something with pipeline exception + except Exception as e: + + ... ``` !!! note "" diff --git a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md index 837c8ff..9e65c08 100644 --- a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md +++ b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md @@ -71,54 +71,53 @@ Calling [ArFrame.look](../../../argaze.md/#argaze.ArFeatures.ArFrame.look) metho # Assuming that timestamped gaze positions are available ... - # Look ArFrame at a timestamped gaze position - execution_time, exception = ar_frame.look(timestamp, gaze_position).values() + try: - # Do something with calibrated gaze position - ... ar_frame.gaze_position + # Look ArFrame at a timestamped gaze position + ar_frame.look(timestamp, gaze_position) - # Check if a gaze movement has been identified - if ar_frame.gaze_movement.valid and ar_frame.gaze_movement.finished: + # Do something with calibrated gaze position + ... ar_frame.gaze_position - # Do something with identified fixation - if GazeFeatures.is_fixation(ar_frame.gaze_movement): - ... - - # Do something with identified saccade - elif GazeFeatures.is_saccade(ar_frame.gaze_movement): - ... 
+ # Check if a gaze movement has been identified + if ar_frame.gaze_movement.valid and ar_frame.gaze_movement.finished: - # Check if new scan path analysis are available - if ar_frame.new_analysis_available: + # Do something with identified fixation + if GazeFeatures.is_fixation(ar_frame.gaze_movement): + ... - # Access to each scan path analyzer - for analyzer_name, analyzer in ar_frame.scan_path_analyzers.items(): + # Do something with identified saccade + elif GazeFeatures.is_saccade(ar_frame.gaze_movement): + ... - # Do something with analysis results - ... analyzer.analysis + # Check if new scan path analysis are available + if ar_frame.new_analysis_available: - # Iterate over each ArFrame layers - for name, ar_layer in ar_frame.layers.items(): - - # Check if new aoi scan path analysis are available - if ar_layer.new_analysis_available: - - # Access to each aoi scan path analyzer - for analyzer_name, analyzer in ar_layer.aoi_scan_path_analyzers.items(): + # Access to each scan path analyzer + for analyzer_name, analyzer in ar_frame.scan_path_analyzers.items(): # Do something with analysis results ... analyzer.analysis -``` -Let's understand the meaning of each data. + # Iterate over each ArFrame layers + for name, ar_layer in ar_frame.layers.items(): + + # Check if new aoi scan path analysis are available + if ar_layer.new_analysis_available: -### *execution_times* + # Access to each aoi scan path analyzer + for analyzer_name, analyzer in ar_layer.aoi_scan_path_analyzers.items(): -A dictionary with each pipeline step execution time. + # Do something with analysis results + ... analyzer.analysis -### *exception* + # Do something with pipeline exception + except Exception as e: + + ... +``` -A [python Exception](https://docs.python.org/3/tutorial/errors.html#exceptions) object raised during pipeline execution. +Let's understand the meaning of each data. ### *ar_frame.gaze_position* @@ -139,7 +138,7 @@ This flag allows to now when new scan path and aoi scan path analysis are availa ### *analyzer.analysis* -A dict containing all data produced by an analyzer. +A dictionary containing all data produced by an analyzer. ## Setup ArFrame image parameters diff --git a/docs/user_guide/gaze_analysis_pipeline/configuration_and_execution.md b/docs/user_guide/gaze_analysis_pipeline/configuration_and_execution.md index 0edbef3..c53cfda 100644 --- a/docs/user_guide/gaze_analysis_pipeline/configuration_and_execution.md +++ b/docs/user_guide/gaze_analysis_pipeline/configuration_and_execution.md @@ -94,14 +94,25 @@ In the example file, the choosen analysis algorithms are the [Basic](../../argaz ## Pipeline execution -Timestamped gaze positions have to be passed one by one to [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method to execute the whole instantiated pipeline. +Timestamped gaze positions have to be passed one by one to [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method to execute the whole instantiated pipeline. + +!!! warning "Mandatory" + + [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method must be called from a *try* block to catch pipeline exceptions. ```python # Assuming that timestamped gaze positions are available ... - # Look ArFrame at a timestamped gaze position - ar_frame.look(timestamp, gaze_position) + try: + + # Look ArFrame at a timestamped gaze position + ar_frame.look(timestamp, gaze_position) + + # Do something with pipeline exception + except Exception as e: + + ... ``` !!! 
note "" diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py index f023d08..cb9658e 100644 --- a/src/argaze/ArFeatures.py +++ b/src/argaze/ArFeatures.py @@ -94,7 +94,7 @@ DEFAULT_ARLAYER_DRAW_PARAMETERS = { } @dataclass -class ArLayer(DataFeatures.SharedObject): +class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): """ Defines a space where to make matching of gaze movements and AOI and inside which those matchings need to be analyzed. @@ -367,7 +367,7 @@ class ArLayer(DataFeatures.SharedObject): return self.__new_analysis_available - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def look(self, timestamp: int|float, gaze_movement: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> dict: """ Project timestamped gaze movement into layer. @@ -398,7 +398,7 @@ class ArLayer(DataFeatures.SharedObject): # Update looked aoi thanks to aoi matcher # Note: don't filter valid/unvalid and finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally - self.__looked_aoi_name, _ , match_time, match_exception = self.aoi_matcher.match(self.aoi_scene, gaze_movement) + self.__looked_aoi_name, _ = self.aoi_matcher.match(self.aoi_scene, gaze_movement) # Valid and finished gaze movement has been identified if gaze_movement.valid and gaze_movement.finished: @@ -416,7 +416,7 @@ class ArLayer(DataFeatures.SharedObject): for aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items(): # Analyze aoi scan path - analyze_time, analyze_exception = aoi_scan_path_analyzer.analyze(self.aoi_scan_path) + aoi_scan_path_analyzer.analyze(self.aoi_scan_path) # Update new analysis available state self.__new_analysis_available = True @@ -490,7 +490,7 @@ DEFAULT_ARFRAME_IMAGE_PARAMETERS = { } @dataclass -class ArFrame(DataFeatures.SharedObject): +class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): """ Defines a rectangular area where to project in timestamped gaze positions and inside which they need to be analyzed. @@ -822,7 +822,7 @@ class ArFrame(DataFeatures.SharedObject): return self.__new_analysis_available - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()): """ Project gaze position into frame. 
@@ -861,7 +861,7 @@ class ArFrame(DataFeatures.SharedObject): if self.gaze_movement_identifier is not None: # Identify finished gaze movement - self.__identified_gaze_movement, identify_time, identify_exception = self.gaze_movement_identifier.identify(timestamp, self.__calibrated_gaze_position) + self.__identified_gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__calibrated_gaze_position) # Valid and finished gaze movement has been identified if self.__identified_gaze_movement.valid and self.__identified_gaze_movement.finished: @@ -886,7 +886,7 @@ class ArFrame(DataFeatures.SharedObject): for scan_path_analyzer_module_path, scan_path_analyzer in self.scan_path_analyzers.items(): # Analyze aoi scan path - analyze_time, analyze_exception = scan_path_analyzer.analyze(self.scan_path) + scan_path_analyzer.analyze(self.scan_path) # Update new analysis available state self.__new_analysis_available = True @@ -903,13 +903,13 @@ class ArFrame(DataFeatures.SharedObject): scale = numpy.array([self.heatmap.size[0] / self.size[0], self.heatmap.size[1] / self.size[1]]) # Update heatmap image - update_time, update_exception = self.heatmap.update(self.__calibrated_gaze_position.value * scale) + self.heatmap.update(self.__calibrated_gaze_position.value * scale) # Look layers with valid identified gaze movement # Note: don't filter valid/unvalid finished/unfished gaze movement to allow layers to reset internally for layer_name, layer in self.layers.items(): - look_time, look_exception = layer.look(timestamp, self.__identified_gaze_movement) + layer.look(timestamp, self.__identified_gaze_movement) # Log look data for logger_name, logger in self.loggers.items(): @@ -1370,7 +1370,7 @@ class ArCamera(ArFrame): yield scene_frame - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def watch(self, timestamp: int|float, image: numpy.array): """Detect AR features from image and project scenes into camera frame. @@ -1393,7 +1393,10 @@ class ArCamera(ArFrame): """ # Project gaze position into camera frame - yield self, super().look(timestamp, gaze_position) + super().look(timestamp, gaze_position) + + # yield camera frame to process its results + yield self # Lock camera frame exploitation self.acquire() @@ -1415,8 +1418,11 @@ class ArCamera(ArFrame): # QUESTION: How to project gaze precision? inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y)) + + scene_frame.look(timestamp, inner_gaze_position * scene_frame.size) - yield scene_frame, scene_frame.look(timestamp, inner_gaze_position * scene_frame.size) + # yield scene frame to process its results + yield scene_frame # Ignore missing aoi in camera frame layer projection except KeyError: diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py index 86802a5..52979cc 100644 --- a/src/argaze/ArUcoMarkers/ArUcoCamera.py +++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py @@ -142,15 +142,12 @@ class ArUcoCamera(ArFeatures.ArCamera): return ArUcoCamera.from_dict(aruco_camera_data, working_directory) - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, float, dict]: """Detect environment aruco markers from image and project scenes into camera frame. !!! note This method timestamps camera frame and its layers. - - Returns: - detection time: aruco marker detection time in ms. 
""" # Use try block to always release the camera frame lock in finally block @@ -160,7 +157,7 @@ class ArUcoCamera(ArFeatures.ArCamera): self.acquire() # Detect aruco markers - detection_time = self.aruco_detector.detect_markers(image) + self.aruco_detector.detect_markers(image) # Fill camera frame background with image self.background = image @@ -212,9 +209,6 @@ class ArUcoCamera(ArFeatures.ArCamera): # Timestamp camera frame self.timestamp = timestamp - # Return detection time - return detection_time - def __image(self, draw_detected_markers: dict = None, draw_scenes: dict = None, draw_optic_parameters_grid: dict = None, **kwargs: dict) -> numpy.array: """Get frame image with ArUco detection visualisation. diff --git a/src/argaze/ArUcoMarkers/ArUcoDetector.py b/src/argaze/ArUcoMarkers/ArUcoDetector.py index e6a305f..6d12f3d 100644 --- a/src/argaze/ArUcoMarkers/ArUcoDetector.py +++ b/src/argaze/ArUcoMarkers/ArUcoDetector.py @@ -14,6 +14,7 @@ import os from collections import Counter import time +from argaze import DataFeatures from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoMarker, ArUcoOpticCalibrator import numpy @@ -131,7 +132,7 @@ class DetectorParameters(): return self.__parameters @dataclass -class ArUcoDetector(): +class ArUcoDetector(DataFeatures.PipelineStepObject): """ArUco markers detector. Parameters: @@ -255,6 +256,7 @@ class ArUcoDetector(): return output + @DataFeatures.PipelineStepMethod def detect_markers(self, image: numpy.array) -> float: """Detect all ArUco markers into an image. diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py index e20717c..6457e8f 100644 --- a/src/argaze/AreaOfInterest/AOIFeatures.py +++ b/src/argaze/AreaOfInterest/AOIFeatures.py @@ -554,7 +554,7 @@ HeatmapType = TypeVar('Heatmap', bound="Heatmap") # Type definition for type annotation convenience @dataclass -class Heatmap(): +class Heatmap(DataFeatures.PipelineStepObject): """Define image to draw heatmap.""" size: tuple = field(default=(1, 1)) @@ -599,7 +599,7 @@ class Heatmap(): self.__point_spread_buffer = [] self.__point_spread_buffer_size = self.buffer - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def update(self, point: tuple): """Update heatmap image.""" diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py index 2636aaa..fec9f6d 100644 --- a/src/argaze/DataFeatures.py +++ b/src/argaze/DataFeatures.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -"""Timestamped data features.""" +"""Miscellaneous data features.""" __author__ = "Théo de la Hogue" __credits__ = [] @@ -104,76 +104,6 @@ class JsonEncoder(json.JSONEncoder): return public_dict -class SharedObject(): - """Enable multiple threads sharing.""" - - def __init__(self): - self._lock = threading.Lock() - self._timestamp = math.nan - self._token = None - - def acquire(self): - self._lock.acquire() - - def release(self): - self._lock.release() - - def locked(self) -> bool: - return self._lock.locked() - - @property - def timestamp(self) -> int|float: - """Get timestamp""" - - self._lock.acquire() - timestamp = self._timestamp - self._lock.release() - - return timestamp - - @timestamp.setter - def timestamp(self, timestamp: int|float): - """Set timestamp""" - - self._lock.acquire() - self._timestamp = timestamp - self._lock.release() - - def untimestamp(self): - """Reset timestamp""" - - self._lock.acquire() - self._timestamp = math.nan - self._lock.release() - - @property - def timestamped(self) -> bool: - """Is the object timestamped?""" - - 
self._lock.acquire() -        timestamped = not math.isnan(self._timestamp) -        self._lock.release() - -        return timestamped - -    @property -    def token(self) -> any: -        """Get token""" - -        self._lock.acquire() -        token = self._token -        self._lock.release() - -        return token - -    @token.setter -    def token(self, token: any): -        """Set token""" - -        self._lock.acquire() -        self._token = token -        self._lock.release() - class TimeStampedBuffer(collections.OrderedDict): """Ordered dictionary to handle timestamped data. ``` @@ -429,50 +359,86 @@ class DataDictionary(dict): __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ -def PipelineStep(method): -    """Define a decorator to declare a method as a pipeline step.""" +class SharedObject(): +    """Abstract class to enable sharing between multiple threads and timestamp management.""" -    def handler(*args, **kw) -> Tuple[Any, float, Exception]: -        """Handle pipeline step -         -        Returns: -            method_returns: what the handled method returns -            execution_time: measure of method time execution in millisecond. -            exception: any error catched during method execution. -        """ +    def __init__(self): +        self._lock = threading.Lock() +        self._timestamp = math.nan +        self._execution_times = {} +        self._exceptions = {} -        # Initialize execution time assessment -        start = time.perf_counter() -         -        try: +    def acquire(self): +        self._lock.acquire() -            result = method(*args, **kw) -            exception = None +    def release(self): +        self._lock.release() -        except Exception as e: +    def locked(self) -> bool: +        return self._lock.locked() -            result = None -            exception = e -         -        # Measure execution time -        execution_time = (time.perf_counter() - start) * 1e3 +    @property +    def timestamp(self) -> int|float: +        """Get timestamp""" -        # Edit result tuple -        if type(result) is tuple: +        self._lock.acquire() +        timestamp = self._timestamp +        self._lock.release() -            result = result + (execution_time, exception) +        return timestamp -        elif result is not None: +    @timestamp.setter +    def timestamp(self, timestamp: int|float): +        """Set timestamp""" -            result = result, execution_time, exception +        self._lock.acquire() +        self._timestamp = timestamp +        self._lock.release() -        else: +    def untimestamp(self): +        """Reset timestamp""" +         +        self._lock.acquire() +        self._timestamp = math.nan +        self._lock.release() +         +    @property +    def timestamped(self) -> bool: +        """Is the object timestamped?""" +         +        self._lock.acquire() +        timestamped = not math.isnan(self._timestamp) +        self._lock.release() +         +        return timestamped + +class PipelineStepObject(): +    """Abstract class to assess pipeline step method execution times.""" + +    execution_times: dict = {} +    """Execution time for each method in ms.""" + +def PipelineStepMethod(method): +    """Define a decorator to be used inside a PipelineStepObject class to declare a pipeline step method.""" + +    def wrapper(self, *args, **kw): +        """Wrap pipeline step method to measure execution time.""" + +        # Initialize execution time assessment +        start = time.perf_counter() + +        try: + +            result = method(self, *args, **kw) + +        finally: -            result = execution_time, exception +            # Measure execution time +            self.execution_times[method.__name__] = (time.perf_counter() - start) * 1e3          return result -    return handler +    return wrapper # Import libraries that can be used in selector or formatter codes from argaze import GazeFeatures diff --git a/src/argaze/GazeAnalysis/Basic.py b/src/argaze/GazeAnalysis/Basic.py index 455ca2e..b75932a 100644 --- a/src/argaze/GazeAnalysis/Basic.py +++ b/src/argaze/GazeAnalysis/Basic.py @@ -26,7 +26,7 @@ class
ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer): self.__steps_number = 0 self.__step_fixation_durations_average = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, scan_path: GazeFeatures.ScanPathType): self.__path_duration = scan_path.duration @@ -71,7 +71,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__steps_number = 0 self.__step_fixation_durations_average = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.ScanPathType): self.__path_duration = aoi_scan_path.duration diff --git a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py index c86ebed..f890701 100644 --- a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py +++ b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py @@ -37,7 +37,7 @@ class AOIMatcher(GazeFeatures.AOIMatcher): self.__matched_gaze_movement = None self.__matched_region = None - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def match(self, aoi_scene, gaze_movement) -> Tuple[str, AOIFeatures.AreaOfInterest]: """Returns AOI with the maximal fixation's deviation circle coverage if above coverage threshold.""" diff --git a/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py b/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py index f3ee608..011c272 100644 --- a/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py +++ b/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py @@ -142,7 +142,7 @@ class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier): self.__fixation_positions = GazeFeatures.TimeStampedGazePositions() self.__saccade_positions = GazeFeatures.TimeStampedGazePositions() - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def identify(self, ts, gaze_position, terminate=False) -> GazeMovementType: # Ignore non valid gaze position diff --git a/src/argaze/GazeAnalysis/Entropy.py b/src/argaze/GazeAnalysis/Entropy.py index a391092..a62dfe6 100644 --- a/src/argaze/GazeAnalysis/Entropy.py +++ b/src/argaze/GazeAnalysis/Entropy.py @@ -37,7 +37,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__stationary_entropy = -1 self.__transition_entropy = -1 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType): assert(len(aoi_scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/ExploreExploitRatio.py b/src/argaze/GazeAnalysis/ExploreExploitRatio.py index d4c0b6c..5516349 100644 --- a/src/argaze/GazeAnalysis/ExploreExploitRatio.py +++ b/src/argaze/GazeAnalysis/ExploreExploitRatio.py @@ -33,7 +33,7 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer): self.__explore_exploit_ratio = 0. 
- @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, scan_path: GazeFeatures.ScanPathType): assert(len(scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/FocusPointInside.py b/src/argaze/GazeAnalysis/FocusPointInside.py index 62ce054..d559ac2 100644 --- a/src/argaze/GazeAnalysis/FocusPointInside.py +++ b/src/argaze/GazeAnalysis/FocusPointInside.py @@ -30,7 +30,7 @@ class AOIMatcher(GazeFeatures.AOIMatcher): self.__looked_aoi_data = (None, None) self.__matched_gaze_movement = None - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def match(self, aoi_scene, gaze_movement) -> Tuple[str, AOIFeatures.AreaOfInterest]: """Returns AOI containing fixation focus point.""" diff --git a/src/argaze/GazeAnalysis/KCoefficient.py b/src/argaze/GazeAnalysis/KCoefficient.py index c6dfa15..41338a3 100644 --- a/src/argaze/GazeAnalysis/KCoefficient.py +++ b/src/argaze/GazeAnalysis/KCoefficient.py @@ -30,7 +30,7 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer): self.__K = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, scan_path: GazeFeatures.ScanPathType): assert(len(scan_path) > 1) @@ -87,7 +87,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__K = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType) -> float: assert(len(aoi_scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/LempelZivComplexity.py b/src/argaze/GazeAnalysis/LempelZivComplexity.py index 67e5001..f6a49ab 100644 --- a/src/argaze/GazeAnalysis/LempelZivComplexity.py +++ b/src/argaze/GazeAnalysis/LempelZivComplexity.py @@ -31,7 +31,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__lempel_ziv_complexity = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType): assert(len(aoi_scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/NGram.py b/src/argaze/GazeAnalysis/NGram.py index b1e5ab3..2526123 100644 --- a/src/argaze/GazeAnalysis/NGram.py +++ b/src/argaze/GazeAnalysis/NGram.py @@ -35,7 +35,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__ngrams_count = {} - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType): assert(len(aoi_scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/NearestNeighborIndex.py b/src/argaze/GazeAnalysis/NearestNeighborIndex.py index 1dc692e..72df516 100644 --- a/src/argaze/GazeAnalysis/NearestNeighborIndex.py +++ b/src/argaze/GazeAnalysis/NearestNeighborIndex.py @@ -35,7 +35,7 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer): self.__nearest_neighbor_index = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, scan_path: GazeFeatures.ScanPathType): assert(len(scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/TransitionMatrix.py b/src/argaze/GazeAnalysis/TransitionMatrix.py index 313c945..d001947 100644 --- a/src/argaze/GazeAnalysis/TransitionMatrix.py +++ b/src/argaze/GazeAnalysis/TransitionMatrix.py @@ -33,7 +33,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer): self.__transition_matrix_probabilities = pandas.DataFrame() self.__transition_matrix_density = 0. 
- @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType): assert(len(aoi_scan_path) > 1) diff --git a/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py b/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py index 1e486e1..2c3ecd1 100644 --- a/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py +++ b/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py @@ -142,7 +142,7 @@ class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier): self.__fixation_positions = GazeFeatures.TimeStampedGazePositions() self.__saccade_positions = GazeFeatures.TimeStampedGazePositions() - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def identify(self, ts, gaze_position, terminate=False) -> GazeMovementType: # Ignore non valid gaze position diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py index c9269e0..d70cdc6 100644 --- a/src/argaze/GazeFeatures.py +++ b/src/argaze/GazeFeatures.py @@ -520,10 +520,10 @@ class TimeStampedGazeStatus(DataFeatures.TimeStampedBuffer): super().__setitem__(key, value) -class GazeMovementIdentifier(): +class GazeMovementIdentifier(DataFeatures.PipelineStepObject): """Abstract class to define what should provide a gaze movement identifier.""" - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def identify(self, timestamp: int|float, gaze_position: GazePosition, terminate:bool=False) -> Tuple[GazeMovementType, GazeMovementType]: """Identify gaze movement from successive timestamped gaze positions. Each identified gaze movement should share its first/last gaze position with previous/next gaze movement. @@ -816,7 +816,7 @@ class ScanPath(list): step.last_saccade.draw(image, **draw_saccades) -class ScanPathAnalyzer(): +class ScanPathAnalyzer(DataFeatures.PipelineStepObject): """Abstract class to define what should provide a scan path analyzer.""" def __init__(self): @@ -836,14 +836,14 @@ class ScanPathAnalyzer(): return DataFeatures.DataDictionary(analysis) - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, scan_path: ScanPathType): """Analyze scan path.""" raise NotImplementedError('analyze() method not implemented') @dataclass -class AOIMatcher(): +class AOIMatcher(DataFeatures.PipelineStepObject): """Abstract class to define what should provide an AOI matcher algorithm.""" exclude: list[str] = field(default_factory = list) @@ -1154,7 +1154,7 @@ class AOIScanPath(list): return scan_fixations_count, aoi_fixations_count -class AOIScanPathAnalyzer(): +class AOIScanPathAnalyzer(DataFeatures.PipelineStepObject): """Abstract class to define what should provide a aoi scan path analyzer.""" def __init__(self): @@ -1174,7 +1174,7 @@ class AOIScanPathAnalyzer(): return DataFeatures.DataDictionary(analysis) - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: AOIScanPathType): """Analyze aoi scan path.""" diff --git a/src/argaze/PupillAnalysis/WorkloadIndex.py b/src/argaze/PupillAnalysis/WorkloadIndex.py index 1429eaf..99f143b 100644 --- a/src/argaze/PupillAnalysis/WorkloadIndex.py +++ b/src/argaze/PupillAnalysis/WorkloadIndex.py @@ -33,7 +33,7 @@ class PupillDiameterAnalyzer(PupillFeatures.PupillDiameterAnalyzer): self.__variations_number = 0 self.__last_ts = 0 - @DataFeatures.PipelineStep + @DataFeatures.PipelineStepMethod def analyze(self, ts, pupill_diameter) -> float: """Analyze workload index from successive timestamped pupill diameters.""" diff --git 
a/src/argaze/PupillFeatures.py b/src/argaze/PupillFeatures.py index 5eb70ce..8aa7827 100644 --- a/src/argaze/PupillFeatures.py +++ b/src/argaze/PupillFeatures.py @@ -79,10 +79,10 @@ class TimeStampedPupillDiameters(DataFeatures.TimeStampedBuffer): TimeStampedBufferType = TypeVar('TimeStampedBuffer', bound="TimeStampedBuffer") # Type definition for type annotation convenience -class PupillDiameterAnalyzer(): +class PupillDiameterAnalyzer(DataFeatures.PipelineStepObject): """Abstract class to define what should provide a pupill diameter analyser.""" -    @DataFeatures.PipelineStep +    @DataFeatures.PipelineStepMethod def analyze(self, ts, pupill_diameter) -> float: """Analyze pupill diameter from successive timestamped pupill diameters.""" diff --git a/src/argaze/utils/demo_aruco_markers_run.py b/src/argaze/utils/demo_aruco_markers_run.py index 67e2845..a0d044c 100644 --- a/src/argaze/utils/demo_aruco_markers_run.py +++ b/src/argaze/utils/demo_aruco_markers_run.py @@ -53,7 +53,7 @@ def main(): nonlocal gaze_positions_frequency nonlocal gaze_analysis_time - + # Assess gaze analysis lap_time, nb_laps, elapsed_time = call_chrono.lap() @@ -62,21 +62,14 @@ def main(): gaze_positions_frequency = nb_laps call_chrono.restart() - gaze_analysis_time = 0 - # Edit millisecond timestamp timestamp = int((time.time() - start_time) * 1e3) # Project gaze position into camera - for frame, look_data in aruco_camera.look(timestamp, GazeFeatures.GazePosition((x, y))): - - # Unpack look data - if look_data: + aruco_camera.look(timestamp, GazeFeatures.GazePosition((x, y))) - gaze_position, gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data - - # Assess gaze analysis - gaze_analysis_time += execution_times['total'] + # Assess gaze analysis + gaze_analysis_time = aruco_camera.execution_times['look'] # Attach mouse callback to window cv2.setMouseCallback(aruco_camera.name, on_mouse_event) diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py index 210e188..5f46596 100644 --- a/src/argaze/utils/demo_gaze_analysis_run.py +++ b/src/argaze/utils/demo_gaze_analysis_run.py @@ -50,7 +50,7 @@ def main(): timestamp = int((time.time() - start_time) * 1e3) # Project gaze position into frame - execution_time, exception = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y))) + ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y))) # Attach mouse callback to window cv2.setMouseCallback(ar_frame.name, on_mouse_event) -- cgit v1.1
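With exception management removed, per-step timing is read from the pipeline object itself rather than from return values. A minimal usage sketch (assuming an `ar_frame` set up as in the demo scripts; the `'look'` key simply mirrors the decorated method name):

```python
try:

    # Run the pipeline step; exceptions now reach the calling script directly
    ar_frame.look(timestamp, gaze_position)

# Do something with pipeline exception
except Exception as e:

    ...

# Read the execution time recorded by the PipelineStepMethod decorator (in ms)
look_time = ar_frame.execution_times['look']
```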