-rw-r--r-- | docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md | 9
-rw-r--r-- | src/argaze/ArFeatures.py | 64
-rw-r--r-- | src/argaze/GazeFeatures.py | 6
-rw-r--r-- | src/argaze/utils/demo_data/demo_gaze_analysis_setup.json | 14
-rw-r--r-- | src/argaze/utils/demo_gaze_analysis_run.py | 5
5 files changed, 53 insertions, 45 deletions
diff --git a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
index b888b9e..9c20c7a 100644
--- a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
+++ b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
@@ -72,7 +72,7 @@ for name, ar_layer in ar_frame.layers.items():
     ...

     # Look ArFrame at a timestamped gaze position
-    gaze_position, gaze_movement, scan_path_analysis, execution_times, exception, layers_look_data = ar_frame.look(timestamp, gaze_position)
+    gaze_position, gaze_movement, scan_path_analysis, execution_times, exception, layers = ar_frame.look(timestamp, gaze_position).values()

     # Check if a gaze movement has been identified
     if gaze_movement.valid and gaze_movement.finished:
@@ -98,9 +98,12 @@ for name, ar_layer in ar_frame.layers.items():
     ...

     # Do something with each ArLayer look data
-    for layer_name, layer_look_data in layers_look_data.items():
+    for layer_name, layer_look_data in layers.items():

-        looked_aoi_name, looked_aoi, aoi_scan_path_analysis, layer_execution_times, layer_exception = layer_look_data
+        gaze_movement, looked_aoi_name, looked_aoi, aoi_scan_path_analysis, layer_execution_times, layer_exception = layer_look_data.values()
+
+        # Do something with gaze movement
+        ...

         # Do something with looked AOI name
         ...
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index faa3d9a..5782512 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -107,7 +107,7 @@ class ArLayer(DataFeatures.SharedObject):
         aoi_matcher: AOI matcher object
         aoi_scan_path: AOI scan path object
         aoi_scan_path_analyzers: dictionary of AOI scan path analyzers
-        log: enable aoi scan path analysis logging
+        loggers: dictionary of timestamped data loggers
         draw_parameters: default parameters passed to draw method
     """
@@ -357,10 +357,14 @@ class ArLayer(DataFeatures.SharedObject):
            gaze_movement: gaze movement to project

        Returns:
-           looked_aoi_name: most likely looked aoi name
-           looked_aoi: most likely looked aoi shape
-           aoi_scan_path_analysis: aoi scan path analysis at each new scan step if aoi_scan_path is instanciated
-           exception: error catched during gaze movement processing
+           look_data: data dictionary
+
+           !!! note "look data dictionary"
+               - **gaze_movement**: incoming gaze movement
+               - **looked_aoi_name**: most likely looked aoi name
+               - **looked_aoi**: most likely looked aoi shape
+               - **aoi_scan_path_analysis**: aoi scan path analysis at each new scan step if aoi_scan_path is instanciated
+               - **exception**: error catched during gaze movement processing
        """

        # Lock layer exploitation
@@ -381,7 +385,7 @@
        # Assess pipeline execution times
        execution_times = {
            'aoi_matcher': None,
-           'aoi_scan_step_analyzers': {}
+           'aoi_scan_path_analyzers': {}
        }

        # Catch any error
@@ -422,8 +426,8 @@
                    # Analyze aoi scan path
                    aoi_scan_path_analyzer.analyze(self.aoi_scan_path)

-                   # Assess aoi scan step analysis time in ms
-                   execution_times['aoi_scan_step_analyzers'][aoi_scan_path_analyzer_module_path] = (time.perf_counter() - aoi_scan_path_analysis_start) * 1e3
+                   # Assess aoi scan path analysis time in ms
+                   execution_times['aoi_scan_path_analyzers'][aoi_scan_path_analyzer_module_path] = (time.perf_counter() - aoi_scan_path_analysis_start) * 1e3

                    # Store analysis
                    aoi_scan_path_analysis[aoi_scan_path_analyzer_module_path] = aoi_scan_path_analyzer.analysis
@@ -448,13 +452,14 @@
        execution_times['total'] = (time.perf_counter() - look_start) * 1e3

        # Edit look data dictionary
-       look_data = {
+       look_data = DataFeatures.DataDictionary({
+           "gaze_movement": gaze_movement,
            "looked_aoi_name": looked_aoi_name,
            "looked_aoi": looked_aoi,
            "aoi_scan_path_analysis": DataFeatures.DataDictionary(aoi_scan_path_analysis),
            "execution_times": DataFeatures.DataDictionary(execution_times),
            "exception": exception
-       }
+       })

        # Log look data
        for logger_module_path, logger in self.loggers.items():
@@ -464,8 +469,8 @@
        # Unlock layer exploitation
        self.release()

-       # Return look data values
-       return look_data.values()
+       # Return look data dictionary
+       return look_data

    def draw(self, image: numpy.array, draw_aoi_scene: dict = None, draw_aoi_matching: dict = None):
        """
@@ -824,7 +829,7 @@
        self.__parent = parent

-    def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> Tuple[GazeFeatures.GazePosition, GazeFeatures.GazeMovement, dict, dict, dict, Exception]:
+    def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> DataFeatures.DataDictionary:
        """
        Project gaze position into frame.
@@ -836,12 +841,15 @@
            gaze_position: gaze position to project

        Returns:
-           current_gaze_position: calibrated gaze position if gaze_position_calibrator is instanciated else, given gaze position.
-           identified_gaze_movement: identified gaze movement from incoming consecutive timestamped gaze positions if gaze_movement_identifier is instanciated. Current gaze movement if filter_in_progress_identification is False.
-           scan_path_analysis: scan path analysis at each new scan step if scan_path is instanciated.
-           execution_times: all pipeline steps execution times.
-           exception: error catched during gaze position processing.
-           layers_look_data: dictionary with each layer's look data.
+           look_data: data dictionary
+
+           !!! note "look data dictionary"
+               - **gaze_position**: calibrated gaze position if gaze_position_calibrator is instanciated else, given gaze position.
+               - **gaze_movement**: identified gaze movement from incoming consecutive timestamped gaze positions if gaze_movement_identifier is instanciated. Current gaze movement if filter_in_progress_identification is False.
+               - **scan_path_analysis**: scan path analysis at each new scan step if scan_path is instanciated.
+               - **execution_times**: all pipeline steps execution times.
+               - **exception**: error catched during gaze position processing.
+               - **layers**: data dictionary with each layer's look data.
        """

        # Lock frame exploitation
@@ -859,7 +867,7 @@
        # Assess pipeline execution times
        execution_times = {
            'gaze_movement_identifier': None,
-           'scan_step_analyzers':{},
+           'scan_path_analyzers':{},
            'heatmap': None
        }

@@ -921,8 +929,8 @@
                    # Analyze aoi scan path
                    scan_path_analyzer.analyze(self.scan_path)

-                   # Assess scan step analysis time in ms
-                   execution_times['scan_step_analyzers'][scan_path_analyzer_module_path] = (time.perf_counter() - scan_step_analysis_start) * 1e3
+                   # Assess scan path analysis time in ms
+                   execution_times['scan_path_analyzers'][scan_path_analyzer_module_path] = (time.perf_counter() - scan_step_analysis_start) * 1e3

                    # Store analysis
                    scan_step_analysis[scan_path_analyzer_module_path] = scan_path_analyzer.analysis
@@ -967,25 +975,25 @@
        execution_times['total'] = (time.perf_counter() - look_start) * 1e3

        # Edit look data dictionary
-       look_data = {
+       look_data = DataFeatures.DataDictionary({
            "gaze_position": self.__gaze_position,
-           "identified_gaze_movement": identified_gaze_movement,
+           "gaze_movement": identified_gaze_movement,
            "scan_step_analysis": DataFeatures.DataDictionary(scan_step_analysis),
            "execution_times": DataFeatures.DataDictionary(execution_times),
            "exception": exception,
-           "layers_look_data": DataFeatures.DataDictionary(layers_look_data)
-       }
+           "layers": DataFeatures.DataDictionary(layers_look_data)
+       })

        # Log look data
        for logger_module_path, logger in self.loggers.items():

-           logger(timestamp, DataFeatures.DataDictionary(look_data))
+           logger(timestamp, look_data)

        # Unlock frame exploitation
        self.release()

        # Return look data
-       return look_data.values()
+       return look_data

    def __image(self, background_weight: float = None, heatmap_weight: float = None, draw_gaze_position_calibrator: dict = None, draw_scan_path: dict = None, draw_layers: dict = None, draw_gaze_positions: dict = None, draw_fixations: dict = None, draw_saccades: dict = None) -> numpy.array:
        """
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index af9f943..a57a127 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -823,7 +823,7 @@ class ScanPathAnalyzer():
        self.__properties = [name for (name, value) in getmembers(type(self), lambda v: isinstance(v, property))]

    @property
-   def analysis(self) -> dict:
+   def analysis(self) -> DataFeatures.DataDictionary:

        analysis = {}
@@ -833,7 +833,7 @@ class ScanPathAnalyzer():

            analysis[p] = getattr(self, p)

-       return analysis
+       return DataFeatures.DataDictionary(analysis)

    def analyze(self, scan_path: ScanPathType):
        """Analyze scan path."""
@@ -1170,7 +1170,7 @@

            analysis[p] = getattr(self, p)

-       return analysis
+       return DataFeatures.DataDictionary(analysis)

    def analyze(self, aoi_scan_path: AOIScanPathType):
        """Analyze aoi scan path."""
diff --git a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
index d4447c0..a8ff8f2 100644
--- a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
+++ b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
@@ -49,10 +49,10 @@
                },
                "loggers": {
                    "File" : {
-                       "path": "_export/logs/aoi.csv",
-                       "header": "Timestamp, AOI",
-                       "selector": "looked_aoi_name is not None",
-                       "formatter": "timestamp, looked_aoi_name"
+                       "path": "_export/logs/KCoefficient.csv",
+                       "header": "Timestamp (ms), K",
+                       "selector": "GazeFeatures.is_fixation(gaze_movement) and gaze_movement.finished and 'argaze.GazeAnalysis.KCoefficient' in aoi_scan_path_analysis",
+                       "formatter": "timestamp, aoi_scan_path_analysis['argaze.GazeAnalysis.KCoefficient'].K"
                    }
                }
            }
@@ -60,9 +60,9 @@
        "loggers": {
            "File" : {
                "path": "_export/logs/fixations.csv",
-               "header": "Timestamp, Focus, Duration",
-               "selector": "GazeFeatures.is_fixation(identified_gaze_movement)",
-               "formatter": "timestamp, identified_gaze_movement.focus, identified_gaze_movement.duration"
+               "header": "Timestamp (ms), Focus (px), Duration (ms), AOI",
+               "selector": "GazeFeatures.is_fixation(gaze_movement) and gaze_movement.finished",
+               "formatter": "timestamp, gaze_movement.focus, gaze_movement.duration, layers.main_layer.looked_aoi_name"
            }
        },
        "image_parameters": {
diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py
index d1c796c..5f46596 100644
--- a/src/argaze/utils/demo_gaze_analysis_run.py
+++ b/src/argaze/utils/demo_gaze_analysis_run.py
@@ -50,10 +50,7 @@ def main():
            timestamp = int((time.time() - start_time) * 1e3)

            # Project gaze position into frame
-           look_data = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
-
-           # Do something with look data
-           gaze_position, gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data
+           ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))

        # Attach mouse callback to window
        cv2.setMouseCallback(ar_frame.name, on_mouse_event)
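
With this change, ArFrame.look() and ArLayer.look() return a DataFeatures.DataDictionary instead of a tuple. A minimal usage sketch of the new return value, based on the updated scripting.md excerpt and the logger expressions above (the ar_frame object, timestamp and gaze coordinates are illustrative, not part of this commit):

# Sketch only: assumes `ar_frame` is an ArFrame already loaded from a JSON setup,
# as in demo_gaze_analysis_run.py; timestamp and coordinates are examples.
from argaze import GazeFeatures

# Project one timestamped gaze position into the frame: look() now returns a
# DataFeatures.DataDictionary rather than a tuple of values
look_data = ar_frame.look(0, GazeFeatures.GazePosition((320, 240)))

# Entries can still be unpacked in insertion order with .values(), as scripting.md shows...
gaze_position, gaze_movement, scan_step_analysis, execution_times, exception, layers = look_data.values()

# ...or read by attribute, as the logger selector/formatter expressions do
# (e.g. "layers.main_layer.looked_aoi_name")
if GazeFeatures.is_fixation(gaze_movement) and gaze_movement.finished:

    for layer_name, layer_look_data in layers.items():

        # Each layer's look data is itself a DataDictionary with the ArLayer.look() keys
        print(layer_name, layer_look_data.looked_aoi_name)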