From 17421ad328d4addb3bac1a32fb53488025f8a3f4 Mon Sep 17 00:00:00 2001 From: Théo de la Hogue Date: Thu, 21 Mar 2024 23:44:43 +0100 Subject: Fixing gaze analysis demo. --- src/argaze/ArFeatures.py | 83 ++++++------- src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py | 9 -- src/argaze/ArUcoMarkers/ArUcoScene.py | 12 -- src/argaze/DataFeatures.py | 51 +------- src/argaze/GazeFeatures.py | 22 +--- .../utils/demo_data/demo_gaze_analysis_setup.json | 16 +-- src/argaze/utils/demo_gaze_analysis_run.py | 136 ++++++++++++++------- src/argaze/utils/worn_device_stream.py | 3 - 8 files changed, 138 insertions(+), 194 deletions(-) (limited to 'src') diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py index c5ee535..dc34992 100644 --- a/src/argaze/ArFeatures.py +++ b/src/argaze/ArFeatures.py @@ -237,35 +237,36 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): # Connect analyzers if required for analyzer in self.__aoi_scan_path_analyzers: - # DEBUG - print('ArLayer.aoi_scan_path_analyzers.setter type', type(analyzer)) - # Check scan path analyzer properties type for name, item in type(analyzer).__dict__.items(): - if isinstance(item, property): + if isinstance(item, property) and item.fset is not None: + + # Check setter annotations to get expected value type + try: - # DEBUG - print('ArLayer.aoi_scan_path_analyzers.setter', name, type(item)) + property_type = list(item.fset.__annotations__.values())[0] + + except KeyError: - ''' - for parameter_name, parameter_type in member[1].items(): + raise(ValueError(f'Missing annotations in {item.fset.__name__}: {item.fset.__annotations__}')) - # DEBUG - print('ArLayer.aoi_scan_path_analyzers.setter', parameter_name, parameter_type) + if issubclass(property_type, GazeFeatures.AOIScanPathAnalyzer): - # Check if parameter is part of a package - if len(parameter_type.__module__.split('.')) > 1: + # Search for analyzer instance to set property + found = False - # Try get existing analyzer instance to append as parameter - try: + for a in self.__aoi_scan_path_analyzers: - setattr(analyzer, parameter_name, self.__aoi_scan_path_analyzers[parameter_type.__module__]) + if type(a) == property_type: - except KeyError: + setattr(analyzer, name, a) + found = True + + if not found: + + raise LoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.') - raise LoadingFailed(f'{module_path} aoi scan path analyzer loading fails because {parameter_type.__module__} scan path analyzer is missing.') - ''' # Force scan path creation if len(self.__aoi_scan_path_analyzers) > 0 and self.aoi_scan_path == None: @@ -294,7 +295,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): """Are aoi scan path analysis ready?""" return self.__aoi_scan_path_analyzed - @property def analysis(self) -> dict: """Get all aoi scan path analysis into dictionary.""" analysis = {} @@ -436,9 +436,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): def __init__(self, **kwargs): """ Initialize ArFrame.""" - # DEBUG - print('ArFrame.__init__') - # Init parent classes DataFeatures.SharedObject.__init__(self) DataFeatures.PipelineStepObject.__init__(self, **kwargs) @@ -478,9 +475,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): @DataFeatures.PipelineStepAttributeSetter def provider(self, provider: DataFeatures.PipelineInputProvider): - # DEBUG - print('ArFrame.provider.setter', provider) - self.__provider = provider # Edit parent @@ -561,32 +555,35 @@ class 
ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): # Connect analyzers if required for analyzer in self.__scan_path_analyzers: - # DEBUG - print('ArFrame.scan_path_analyzers.setter type', type(analyzer)) + # Check scan path analyzer properties type + for name, item in type(analyzer).__dict__.items(): - # Check scan path analyzer parameters type - members = getmembers(analyzer) + if isinstance(item, property) and item.fset is not None: - for member in members: + # Check setter annotations to get expected value type + try: - if '__annotations__' in member: + property_type = list(item.fset.__annotations__.values())[0] - for parameter_name, parameter_type in member[1].items(): + except KeyError: - # DEBUG - print('ArFrame.scan_path_analyzers.setter', parameter_name, parameter_type) + raise(ValueError(f'Missing annotations in {item.fset.__name__}: {item.fset.__annotations__}')) - # Check if parameter is part of a package - if len(parameter_type.__module__.split('.')) > 1: + if issubclass(property_type, GazeFeatures.AOIScanPathAnalyzer): - # Try get existing analyzer instance to append as parameter - try: + # Search for analyzer instance to set property + found = False - setattr(analyzer, parameter_name, self.__scan_path_analyzers[parameter_type.__module__]) + for a in self.__scan_path_analyzers: - except KeyError: + if type(a) == property_type: - raise LoadingFailed(f'{module_path} scan path analyzer loading fails because {parameter_type.__module__} scan path analyzer is missing.') + setattr(analyzer, name, a) + found = True + + if not found: + + raise LoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.') # Force scan path creation if len(self.__scan_path_analyzers) > 0 and self.scan_path == None: @@ -607,9 +604,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): @DataFeatures.PipelineStepAttributeSetter def background(self, background: numpy.array): - # DEBUG - print('ArFrame.background.setter', background) - # Resize image to frame size self.__background = cv2.resize(background, dsize = self.size, interpolation = cv2.INTER_CUBIC) @@ -675,7 +669,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject): """Are scan path analysis ready?""" return self.__scan_path_analyzed - @property def analysis(self) -> dict: """Get all scan path analysis into dictionary.""" analysis = {} diff --git a/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py b/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py index c57497d..7231384 100644 --- a/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py +++ b/src/argaze/ArUcoMarkers/ArUcoMarkersGroup.py @@ -88,9 +88,6 @@ class ArUcoMarkersGroup(DataFeatures.PipelineStepObject): def __init__(self, **kwargs): """Initialize ArUcoMarkersGroup""" - # DEBUG - print('ArUcoMarkersGroup.__init__', kwargs.keys()) - # Init parent classes super().__init__() @@ -118,9 +115,6 @@ class ArUcoMarkersGroup(DataFeatures.PipelineStepObject): @places.setter def places(self, places: dict): - # DEBUG - print('ArUcoMarkersGroup.places.setter', places) - # Normalize places data new_places = {} @@ -176,9 +170,6 @@ class ArUcoMarkersGroup(DataFeatures.PipelineStepObject): self.__places = new_places - # DEBUG - print('ArUcoMarkersGroup.places.setter *********************') - @property def identifiers(self) -> list: """List place marker identifiers belonging to the group.""" diff --git a/src/argaze/ArUcoMarkers/ArUcoScene.py b/src/argaze/ArUcoMarkers/ArUcoScene.py index 146ffc7..5000833 100644 --- 
a/src/argaze/ArUcoMarkers/ArUcoScene.py +++ b/src/argaze/ArUcoMarkers/ArUcoScene.py @@ -53,9 +53,6 @@ class ArUcoScene(ArFeatures.ArScene): @aruco_markers_group.setter def aruco_markers_group(self, aruco_markers_group_value: ArUcoMarkersGroup.ArUcoMarkersGroup): - # DEBUG - print('ArUcoScene.aruco_markers_group.setter', aruco_markers_group_value) - if isinstance(aruco_markers_group_value, ArUcoMarkersGroup.ArUcoMarkersGroup): new_aruco_markers_group = aruco_markers_group_value @@ -69,16 +66,10 @@ class ArUcoScene(ArFeatures.ArScene): # OBJ file format for 3D dimension only if file_format == 'obj': - # DEBUG - print('ArUcoScene.aruco_markers_group.setter OBJ', filepath) - new_aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup.from_obj(filepath) elif file_format == 'json': - # DEBUG - print('ArUcoScene.aruco_markers_group.setter JSON', filepath) - with open(filepath) as file: new_aruco_markers_group = ArUcoMarkersGroup.ArUcoMarkersGroup(**json.load(file)) @@ -91,9 +82,6 @@ class ArUcoScene(ArFeatures.ArScene): self.__aruco_markers_group.parent = self - # DEBUG - print('ArUcoScene.aruco_markers_group.setter *********************') - @classmethod def from_dict(cls, aruco_scene_data: dict, working_directory: str = None) -> ArUcoSceneType: """ diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py index ef6ac27..415b5c2 100644 --- a/src/argaze/DataFeatures.py +++ b/src/argaze/DataFeatures.py @@ -485,9 +485,6 @@ def PipelineStepAttributeSetter(method): # Get new value type new_value_type = type(new_value) - # DEBUG - print('@PipelineStepAttributeSetter new_value_type', new_value_type) - # Check setter annotations to get expected value type try: @@ -497,9 +494,6 @@ def PipelineStepAttributeSetter(method): raise(ValueError(f'Missing annotations in {method.__name__}: {method.__annotations__}')) - # DEBUG - print('@PipelineStepAttributeSetter expected_value_type', expected_value_type) - # Define function to load dict values def load_dict(data: dict) -> any: @@ -523,9 +517,6 @@ def PipelineStepAttributeSetter(method): raise(e) - # DEBUG - print('@PipelineStepAttributeSetter new_class', new_class) - new_objects_list.append( new_class(**value) ) # Only one object have been loaded: pass the object if it is a subclass of expected type @@ -538,15 +529,9 @@ def PipelineStepAttributeSetter(method): return new_objects_list - # DEBUG - print('@PipelineStepAttributeSetter as expected', expected_value_type, data) - # Otherwise, data are parameters of the expected class return expected_value_type(**data) - # DEBUG - print('@PipelineStepAttributeSetter', method.__name__, new_value_type, expected_value_type, type(expected_value_type)) - # String not expected: load value from file if new_value_type == str and new_value_type != expected_value_type: @@ -556,35 +541,18 @@ def PipelineStepAttributeSetter(method): # Load image from JPG and PNG formats if file_format == 'jpg' or file_format == 'png': - # DEBUG - print('@PipelineStepAttributeSetter IMAGE', filepath) - return method(self, cv2.imread(filepath)) - # Load PipelineStepObject from JSON file + # Load object from JSON file elif file_format == 'json': - # DEBUG - print('@PipelineStepAttributeSetter issubclass', issubclass(expected_value_type, PipelineStepObject)) - - #if issubclass(expected_value_type, PipelineStepObject): - - # DEBUG - print('@PipelineStepAttributeSetter JSON', filepath) - with open(filepath) as file: return method(self, load_dict(json.load(file))) - # DEBUG - print('@PipelineStepAttributeSetter unknown file format', 
file_format) - # Always load value from dict if new_value_type == dict: - # DEBUG - print('@PipelineStepAttributeSetter dict', new_value) - return method(self, load_dict(new_value)) # Otherwise, pass new value to setter method @@ -601,9 +569,6 @@ class PipelineStepObject(): def __init__(self, **kwargs): """Initialize PipelineStepObject.""" - # DEBUG - print('PipelineStepObject.__init__') - # Init private attribute self.__name = None self.__working_directory = None @@ -621,9 +586,6 @@ class PipelineStepObject(): child.__enter__() - # DEBUG - print('PipelineStepObject.__enter__ observers', self.__observers) - # Start observers for observer in self.__observers: @@ -647,9 +609,6 @@ class PipelineStepObject(): def update_attributes(self, object_data: dict): """Update pipeline step object attributes with dictionary.""" - # DEBUG - print('PipelineStepObject.update_attributes', type(self), object_data.keys()) - for key, value in object_data.items(): setattr(self, key, value) @@ -729,9 +688,6 @@ class PipelineStepObject(): patch_filepath: path to json patch file to modify any configuration entries """ - # DEBUG - print('PipelineStepObject.from_json', cls) - # Load configuration from JSON file with open(configuration_filepath) as configuration_file: @@ -771,9 +727,6 @@ class PipelineStepObject(): object_data = update(object_data, patch_data) - # DEBUG - print('PipelineStepObject.from_json', object_data) - # Instanciate class return cls(**object_data) @@ -891,8 +844,6 @@ class PipelineStepObject(): if isinstance(attr, PipelineStepObject) and attr != self.parent: - print('-', name, type(attr)) - yield attr def PipelineStepMethod(method): diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py index a9f9c7f..e63ad46 100644 --- a/src/argaze/GazeFeatures.py +++ b/src/argaze/GazeFeatures.py @@ -847,18 +847,12 @@ class ScanPathAnalyzer(DataFeatures.PipelineStepObject): super().__init__() - self.__properties = [name for (name, value) in self.__class__.__dict__.items() if isinstance(value, property)] + self.__analysis = [name for (name, value) in self.__class__.__dict__.items() if isinstance(value, property) and value.fset is None] def analysis(self) -> DataFeatures.DataDictionary: """Get all scan path analyzer analysis as data dictionary.""" - analysis = {} - - for p in self.__properties: - - analysis[p] = getattr(self, p) - - return DataFeatures.DataDictionary(analysis) + return DataFeatures.DataDictionary( {a: getattr(self, a) for a in self.__analysis} ) @DataFeatures.PipelineStepMethod def analyze(self, scan_path: ScanPathType): @@ -1211,18 +1205,12 @@ class AOIScanPathAnalyzer(DataFeatures.PipelineStepObject): super().__init__() - self.__properties = [name for (name, value) in self.__class__.__dict__.items() if isinstance(value, property)] + self.__analysis = [name for (name, value) in self.__class__.__dict__.items() if isinstance(value, property) and value.fset is None] - def analysis(self) -> dict: + def analysis(self) -> DataFeatures.DataDictionary: """Get all aoi scan path analyzer analysis as data dictionary.""" - analysis = {} - - for p in self.__properties: - - analysis[p] = getattr(self, p) - - return DataFeatures.DataDictionary(analysis) + return DataFeatures.DataDictionary( {a: getattr(self, a) for a in self.__analysis} ) @DataFeatures.PipelineStepMethod def analyze(self, aoi_scan_path: AOIScanPathType): diff --git a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json index a83ecd3..c897fa0 100644 --- 
a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json +++ b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json @@ -48,19 +48,9 @@ "argaze.GazeAnalysis.Entropy.AOIScanPathAnalyzer":{} }, "observers": { - "demo_loggers.FixationLogger": { - "path": "_export/logs/fixations.csv", - "header": ["Timestamp (ms)", "Focus (px)", "Duration (ms)", "AOI"] - }, - "demo_loggers.ScanPathAnalysisLogger": { - "path": "_export/logs/scan_path_metrics.csv", - "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "NNI", "XXR"] - }, - "demo_loggers.VideoRecorder": { - "path": "_export/logs/video.mp4", - "width": 1920, - "height": 1080, - "fps": 15 + "demo_loggers.AOIScanPathAnalysisLogger": { + "path": "_export/logs/aoi_scan_path_metrics.csv", + "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "LZC"] } } } diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py index 24871f2..b5d9a20 100644 --- a/src/argaze/utils/demo_gaze_analysis_run.py +++ b/src/argaze/utils/demo_gaze_analysis_run.py @@ -95,16 +95,58 @@ def main(): cv2.putText(frame_image, path, (20, ar_frame.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA) + # Display frame analysis + analysis = ar_frame.analysis() + + # Display scan path K Coefficient analysis if loaded + try: + + kc_analysis = analysis[KCoefficient.ScanPathAnalyzer] + + # Write raw Kc analysis + if kc_analysis.K < 0.: + + cv2.putText(frame_image, f'K coefficient: Ambient attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + elif kc_analysis.K > 0.: + + cv2.putText(frame_image, f'K coefficient: Focal attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display Nearest Neighbor index analysis if loaded + try: + + nni_analysis = analysis[NearestNeighborIndex.ScanPathAnalyzer] + + cv2.putText(frame_image, f'Nearest neighbor index: {nni_analysis.nearest_neighbor_index:.3f}', (20, ar_frame.size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - if ar_frame.is_analysis_available() and ar_frame.layers["demo_layer"].is_analysis_available(): + except KeyError: + pass + + # Display Explore/Exploit ratio analysis if loaded + try: + + xxr_analyser = analysis[ExploreExploitRatio.ScanPathAnalyzer] + + cv2.putText(frame_image, f'Explore/Exploit ratio: {xxr_analyser.explore_exploit_ratio:.3f}', (20, ar_frame.size[1]-360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display demo_layer analysis + analysis = ar_frame.layers["demo_layer"].analysis() - # Display Transition matrix analysis if loaded - transition_matrix_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.TransitionMatrix"] + # Display Transition matrix analysis if loaded + try: - cv2.putText(frame_image, f'Transition matrix density: {transition_matrix_analyzer.transition_matrix_density:.2f}', (20, ar_frame.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + transition_matrix_analysis = analysis[TransitionMatrix.AOIScanPathAnalyzer] + + cv2.putText(frame_image, f'Transition matrix density: {transition_matrix_analysis.transition_matrix_density:.2f}', (20, ar_frame.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) # Iterate over indexes (departures) - for from_aoi, row in transition_matrix_analyzer.transition_matrix_probabilities.iterrows(): + for from_aoi, row in 
transition_matrix_analysis.transition_matrix_probabilities.iterrows(): # Iterate over columns (destinations) for to_aoi, probability in row.items(): @@ -122,51 +164,58 @@ def main(): cv2.line(frame_image, start_line, to_center, color, int(probability*10) + 2) cv2.line(frame_image, from_center, to_center, [55, 55, 55], 2) - # Display aoi scan path basic metrics analysis if loaded - basic_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.Basic"] + except KeyError: + pass + + # Display aoi scan path basic metrics analysis if loaded + try: + + basic_analysis = analysis[Basic.AOIScanPathAnalyzer] # Write basic analysis - cv2.putText(frame_image, f'Step number: {basic_analyzer.steps_number}', (20, ar_frame.size[1]-440), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - cv2.putText(frame_image, f'Step fixation duration average: {int(basic_analyzer.step_fixation_durations_average)} ms', (20, ar_frame.size[1]-400), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Step number: {basic_analysis.steps_number}', (20, ar_frame.size[1]-440), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Step fixation duration average: {int(basic_analysis.step_fixation_durations_average)} ms', (20, ar_frame.size[1]-400), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - # Display scan path K Coefficient analysis if loaded - kc_analyzer = ar_frame.analysis["argaze.GazeAnalysis.KCoefficient"] - - # Write raw Kc analysis - if kc_analyzer.K < 0.: + except KeyError: + pass - cv2.putText(frame_image, f'K coefficient: Ambient attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - - elif kc_analyzer.K > 0.: - - cv2.putText(frame_image, f'K coefficient: Focal attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA) + # Display aoi scan path K-modified coefficient analysis if loaded + try: - # Display aoi scan path K-modified coefficient analysis if loaded - aoi_kc_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.KCoefficient"] + aoi_kc_analysis = analysis[KCoefficient.AOIScanPathAnalyzer] # Write aoi Kc analysis - if aoi_kc_analyzer.K < 0.: + if aoi_kc_analysis.K < 0.: cv2.putText(frame_image, f'K-modified coefficient: Ambient attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - elif aoi_kc_analyzer.K > 0.: + elif aoi_kc_analysis.K > 0.: cv2.putText(frame_image, f'K-modified coefficient: Focal attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA) - - # Display Lempel-Ziv complexity analysis if loaded - lzc_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.LempelZivComplexity"] + + except KeyError: + pass + + # Display Lempel-Ziv complexity analysis if loaded + try: + + lzc_analysis = analysis[LempelZivComplexity.AOIScanPathAnalyzer] - cv2.putText(frame_image, f'Lempel-Ziv complexity: {lzc_analyzer.lempel_ziv_complexity}', (20, ar_frame.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Lempel-Ziv complexity: {lzc_analysis.lempel_ziv_complexity}', (20, ar_frame.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + except KeyError: + pass - # Display N-Gram analysis if loaded - ngram_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.NGram"] + # Display N-Gram analysis if loaded + try: + + 
ngram_analysis = analysis[NGram.AOIScanPathAnalyzer] # Display only 3-gram analysis - start = ar_frame.size[1] - ((len(ngram_analyzer.ngrams_count[3]) + 1) * 40) - cv2.putText(frame_image, f'{ngram_analyzer.n_max}-Gram:', (ar_frame.size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + start = ar_frame.size[1] - ((len(ngram_analysis.ngrams_count[3]) + 1) * 40) + cv2.putText(frame_image, f'{ngram_analysis.n_max}-Gram:', (ar_frame.size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - for i, (ngram, count) in enumerate(ngram_analyzer.ngrams_count[3].items()): + for i, (ngram, count) in enumerate(ngram_analysis.ngrams_count[3].items()): ngram_string = f'{ngram[0]}' for g in range(1, 3): @@ -174,22 +223,19 @@ def main(): cv2.putText(frame_image, f'{ngram_string}: {count}', (ar_frame.size[0]-700, start+(i*40)), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - # Display Entropy analysis if loaded - entropy_analyzer = ar_frame.layers["demo_layer"].analysis["argaze.GazeAnalysis.Entropy"] + except KeyError: + pass - cv2.putText(frame_image, f'Stationary entropy: {entropy_analyzer.stationary_entropy:.3f},', (20, ar_frame.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - cv2.putText(frame_image, f'Transition entropy: {entropy_analyzer.transition_entropy:.3f},', (20, ar_frame.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + # Display Entropy analysis if loaded + try: - # Display Nearest Neighbor index analysis if loaded - nni_analyzer = ar_frame.analysis["argaze.GazeAnalysis.NearestNeighborIndex"] - - cv2.putText(frame_image, f'Nearest neighbor index: {nni_analyzer.nearest_neighbor_index:.3f}', (20, ar_frame.size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) - - # Display Explore/Exploit ratio analysis if loaded - xxr_analyser = ar_frame.analysis["argaze.GazeAnalysis.ExploreExploitRatio"] - - cv2.putText(frame_image, f'Explore/Exploit ratio: {xxr_analyser.explore_exploit_ratio:.3f}', (20, ar_frame.size[1]-360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + entropy_analysis = analysis[Entropy.AOIScanPathAnalyzer] + cv2.putText(frame_image, f'Stationary entropy: {entropy_analysis.stationary_entropy:.3f},', (20, ar_frame.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Transition entropy: {entropy_analysis.transition_entropy:.3f},', (20, ar_frame.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + except KeyError: + pass # Display frame image cv2.imshow(ar_frame.name, frame_image) diff --git a/src/argaze/utils/worn_device_stream.py b/src/argaze/utils/worn_device_stream.py index faa2543..3925bbe 100644 --- a/src/argaze/utils/worn_device_stream.py +++ b/src/argaze/utils/worn_device_stream.py @@ -42,9 +42,6 @@ def main(): print(aruco_camera) - # DEBUG - print(dir(aruco_camera)) - # Gaze position processing def gaze_position_callback(timestamped_gaze_position: GazeFeatures.GazePosition): -- cgit v1.1
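
The core of this commit replaces the commented-out, module-path-based connection logic in ArLayer.aoi_scan_path_analyzers and ArFrame.scan_path_analyzers with inspection of property setter annotations: an analyzer declares which other analyzer it needs through the type annotation of a setter, and the loader injects a matching instance from the analyzers that are already constructed. The following is a minimal, self-contained Python sketch of that pattern only; Analyzer, AnalyzerA, AnalyzerB and connect_analyzers are hypothetical stand-ins rather than ArGaze classes, the hypothetical Analyzer base plays the role of GazeFeatures.AOIScanPathAnalyzer in the issubclass check, and a plain RuntimeError stands in for LoadingFailed.

# Minimal sketch of annotation-based analyzer wiring (hypothetical classes).

class Analyzer:
    """Hypothetical common base class used to recognize analyzer dependencies."""
    pass

class AnalyzerA(Analyzer):
    """Stand-in for an upstream analyzer."""
    pass

class AnalyzerB(Analyzer):
    """Stand-in for an analyzer that depends on an AnalyzerA instance."""

    def __init__(self):
        self.__upstream = None

    @property
    def upstream(self) -> AnalyzerA:
        """Upstream analyzer this analyzer reads from."""
        return self.__upstream

    @upstream.setter
    def upstream(self, upstream: AnalyzerA):
        self.__upstream = upstream

def connect_analyzers(analyzers: list):
    """Inject each analyzer's dependencies using its property setter annotations."""

    for analyzer in analyzers:

        # Inspect every writable property declared on the analyzer class
        for name, item in type(analyzer).__dict__.items():

            if isinstance(item, property) and item.fset is not None:

                annotations = list(item.fset.__annotations__.values())

                # Skip setters without annotations or whose expected type is not an analyzer
                if not annotations or not issubclass(annotations[0], Analyzer):
                    continue

                expected_type = annotations[0]

                # Search the already-loaded analyzers for an instance of the expected type
                for candidate in analyzers:

                    if type(candidate) is expected_type:

                        setattr(analyzer, name, candidate)
                        break

                else:

                    raise RuntimeError(f'{type(analyzer).__name__} requires a missing {expected_type.__name__} analyzer')

analyzers = [AnalyzerA(), AnalyzerB()]
connect_analyzers(analyzers)
assert isinstance(analyzers[1].upstream, AnalyzerA)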
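On the demo side, the script no longer indexes ar_frame.analysis by module path strings; it calls analysis() and looks each result up by analyzer class, wrapping every overlay in a KeyError guard so analyzers absent from the JSON setup are simply skipped. The sketch below condenses that pattern; it assumes ar_frame exposes the analysis() method introduced by this patch and that the result dictionary is keyed by analyzer class as the demo expects, while draw_text is a hypothetical helper standing in for the demo's cv2.putText calls.

# Condensed sketch of the class-keyed analysis lookup used by the reworked demo.

from argaze.GazeAnalysis import KCoefficient, NearestNeighborIndex

def draw_scan_path_metrics(ar_frame, frame_image, draw_text):
    """Overlay whichever scan path metrics are available for this frame."""

    # Dictionary keyed by analyzer class, e.g. {KCoefficient.ScanPathAnalyzer: <analysis>, ...}
    analysis = ar_frame.analysis()

    # K coefficient: drawn only if the analyzer is configured in the pipeline
    try:

        kc_analysis = analysis[KCoefficient.ScanPathAnalyzer]

        if kc_analysis.K < 0.:

            draw_text(frame_image, 'K coefficient: Ambient attention')

        elif kc_analysis.K > 0.:

            draw_text(frame_image, 'K coefficient: Focal attention')

    except KeyError:
        pass

    # Nearest neighbor index: same guard, so a missing analyzer never breaks the demo
    try:

        nni_analysis = analysis[NearestNeighborIndex.ScanPathAnalyzer]

        draw_text(frame_image, f'Nearest neighbor index: {nni_analysis.nearest_neighbor_index:.3f}')

    except KeyError:
        pass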