From f29d0ce4a84b8f13299fbcfadf09702e2f7a3152 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Tue, 4 Jul 2023 14:03:51 +0200
Subject: Refactoring look and draw methods into ArFeatures.

---
 src/argaze/ArFeatures.py | 166 +++++++++++++++++++----------------------------
 1 file changed, 67 insertions(+), 99 deletions(-)

diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 62ce4d8..f983a69 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -374,7 +374,7 @@ class ArEnvironment():
         # For each aoi scene projection
         for scene_name, scene in self.scenes.items():

-            scene.look(timestamp, gaze_position)
+            yield scene_name, scene.look(timestamp, gaze_position)

     def to_json(self, json_filepath):
         """Save environment to .json file."""
@@ -446,6 +446,12 @@ class ArFrame():
         # Init current gaze position
         self.__gaze_position = GazeFeatures.UnvalidGazePosition()

+        # Init current gaze movement
+        self.__gaze_movement = GazeFeatures.UnvalidGazeMovement()
+
+        # Init current look at aoi
+        self.__look_at = self.name
+
         # Init heatmap if required
         if self.heatmap:

@@ -478,78 +484,25 @@ class ArFrame():
             )

     @property
-    def current_gaze_position(self):
-        """Get current gaze position on frame."""
-
-        # Wait for frame to be unlocked
-        while self.__looking_lock.locked():
-            pass
-
-        return self.__gaze_position
-
-    @property
-    def current_gaze_movement(self):
-        """Get current gaze movement on frame."""
-
-        # Wait for frame to be unlocked
-        while self.__looking_lock.locked():
-            pass
-
-        # Check current frame fixation
-        current_fixation = self.gaze_movement_identifier.current_fixation
-
-        if current_fixation.valid:
-
-            return current_fixation
-
-        # Check current frame saccade
-        current_saccade = self.gaze_movement_identifier.current_saccade
-
-        if current_saccade.valid:
-
-            return current_saccade
-
-        return GazeFeatures.UnvalidGazeMovement()
-
-    @property
-    def current_scan_path_analysis(self) -> dict:
-        """Get current scan path analysis."""
-
-        # Edit dictionary with all analysis
-        scan_path_analysis = {}
-
-        if scan_path != None:
-
-            # Wait for frame to be unlocked
-            while self.__looking_lock.locked():
-                pass
-
-            # For each scan path analyzer
-            for scan_path_analyzer_type, scan_path_analyzer in self.scan_path_analyzers.items():
-
-                scan_path_analysis[scan_path_analyzer_type] = scan_path_analyzer.analysis
-
-        return scan_path_analysis
-
-    @property
-    def current_aoi_scan_path_analysis(self) -> dict:
-        """Get current aoi scan path analysis."""
+    def image(self):
+        """
+        Get background image + heatmap image
+        """

-        # Edit dictionary with all analysis
-        aoi_scan_path_analysis = {}
+        # Lock frame exploitation
+        self.__looking_lock.acquire()

-        if aoi_scan_path != None:
+        image = self.background.copy()

-            # Wait for frame to be unlocked
-            while self.__looking_lock.locked():
-                pass
+        # Draw heatmap
+        if self.heatmap:

-            # For each aoi scan path analyzer
-            for aoi_scan_path_analyzer_type, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
+            image = cv2.addWeighted(self.heatmap.image, 0.5, image, 1., 0)

-                aoi_scan_path_analysis[aoi_scan_path_analyzer_type] = aoi_scan_path_analyzer.analysis
+        # Unlock frame exploitation
+        self.__looking_lock.release()

-        return aoi_scan_path_analysis
+        return image

     def look(self, timestamp: int|float, inner_gaze_position: GazeFeatures.GazePosition) -> Tuple[GazeFeatures.GazeMovement, str, dict, dict]:
         """
@@ -565,12 +518,10 @@ class ArFrame():
         # Lock frame exploitation
         self.__looking_lock.acquire()

-        # Update internal gaze position
+        # Update current gaze position
         self.__gaze_position = inner_gaze_position

-        # Prepare looking data
-        gaze_movement = GazeFeatures.UnvalidGazeMovement()
-        look_at = self.name
+        # Init scan path analysis report
         scan_step_analysis = {}
         aoi_scan_step_analysis = {}

@@ -578,31 +529,35 @@ class ArFrame():
         if self.gaze_movement_identifier:

             # Identify gaze movement
-            gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__gaze_position)
+            new_gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__gaze_position)

-            if GazeFeatures.is_fixation(gaze_movement):
+            if GazeFeatures.is_fixation(new_gaze_movement):
+
+                # Update current gaze movement
+                self.__gaze_movement = new_gaze_movement

                 # Does the fixation match an AOI?
                 for name, aoi in self.aoi_2d_scene.items():

-                    _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, gaze_movement.deviation_max)
+                    _, _, circle_ratio = aoi.circle_intersection(self.__gaze_movement.focus, self.__gaze_movement.deviation_max)

                     if circle_ratio > 0.25:

                         if name != self.name:

-                            look_at = name
+                            # Update current look at
+                            self.__look_at = name
                             break

                 # Append fixation to scan path
                 if self.scan_path != None:

-                    self.scan_path.append_fixation(timestamp, gaze_movement)
+                    self.scan_path.append_fixation(timestamp, self.__gaze_movement)

                 # Append fixation to aoi scan path
                 if self.aoi_scan_path != None:

-                    aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, look_at)
+                    aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, self.__gaze_movement, self.__look_at)

                     # Analyze aoi scan path
                     if aoi_scan_step and len(self.aoi_scan_path) > 1:
@@ -613,12 +568,15 @@ class ArFrame():

                             aoi_scan_step_analysis[aoi_scan_path_analyzer_type] = aoi_scan_path_analyzer.analysis

-            elif GazeFeatures.is_saccade(gaze_movement):
+            elif GazeFeatures.is_saccade(new_gaze_movement):
+
+                # Update current gaze movement
+                self.__gaze_movement = new_gaze_movement

                 # Append saccade to scan path
                 if self.scan_path != None:

-                    scan_step = self.scan_path.append_saccade(timestamp, gaze_movement)
+                    scan_step = self.scan_path.append_saccade(timestamp, self.__gaze_movement)

                     # Analyze aoi scan path
                     if scan_step and len(self.scan_path) > 1:
@@ -632,7 +590,7 @@ class ArFrame():
                 # Append saccade to aoi scan path
                 if self.aoi_scan_path != None:

-                    self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
+                    self.aoi_scan_path.append_saccade(timestamp, self.__gaze_movement)

         # Update heatmap
         if self.heatmap:
@@ -643,29 +601,39 @@ class ArFrame():
         self.__looking_lock.release()

         # Return looking data
-        return gaze_movement, look_at, scan_step_analysis, aoi_scan_step_analysis
+        return self.__gaze_movement, self.__look_at, scan_step_analysis, aoi_scan_step_analysis

-    @property
-    def image(self,):
+    def draw(self, image: numpy.array):
         """
-        Get frame image
+        Draw frame into image.
+
+        Parameters:
+            image: where to draw
         """

-        image = self.background.copy()
-
-        self.aoi_2d_scene.draw(image, color=(255, 255, 255))
-        self.current_gaze_position.draw(image, color=(255, 255, 255))
+        # Lock frame exploitation
+        self.__looking_lock.acquire()

-        self.current_gaze_movement.draw(image, color=(0, 255, 255))
-        self.current_gaze_movement.draw_positions(image)
+        # Draw aoi
+        self.aoi_2d_scene.draw(image, color=(0, 0, 0))

-        # Check frame fixation
-        if GazeFeatures.is_fixation(self.current_gaze_movement):
+        # Draw gaze position
+        self.__gaze_position.draw(image, color=(255, 255, 255))

-            # Draw looked AOI
-            self.aoi_2d_scene.draw_circlecast(image, self.current_gaze_movement.focus, self.current_gaze_movement.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
+        # Draw gaze movement
+        if self.gaze_movement_identifier:

-        return image
+            self.__gaze_movement.draw(image, color=(0, 255, 255))
+            self.__gaze_movement.draw_positions(image)
+
+            # Check fixation case
+            if GazeFeatures.is_fixation(self.__gaze_movement):
+
+                # Draw looked AOI
+                self.aoi_2d_scene.draw_circlecast(image, self.__gaze_movement.focus, self.__gaze_movement.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
+
+        # Unlock frame exploitation
+        self.__looking_lock.release()

 @dataclass
 class ArScene():
@@ -901,7 +869,7 @@ class ArScene():
         self.__camera_frame_lock.acquire()

         # Project gaze position in camera frame
-        yield frame.look(timestamp, inner_gaze_position * frame.size)
+        yield None, self.camera_frame.look(timestamp, gaze_position)

         # Project gaze position into each aoi frames if possible
         for aoi_name, frame in self.aoi_frames.items():
@@ -919,7 +887,7 @@ class ArScene():
                 # QUESTION: How to project gaze precision?
                 inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y))

-                yield frame.look(timestamp, inner_gaze_position * frame.size)
+                yield aoi_name, frame.look(timestamp, inner_gaze_position * frame.size)

             # Ignore missing aoi frame projection
             except KeyError:
@@ -931,13 +899,13 @@ class ArScene():

     def draw(self, image: numpy.array):
         """
-        Draw camera frame
+        Draw camera frame into image.

         Parameters:
             image: where to draw
         """

-        self.camera_frame.aoi_2d_scene.draw(image)
+        self.camera_frame.draw(image)

     def draw_axis(self, image: numpy.array):
         """
-- 
cgit v1.1
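For reference, here is a minimal consumption sketch of the refactored API that this patch introduces. It is an illustration only: the environment instance, the gaze_data iterable and the process_gaze helper are assumptions, not part of the patch; only ArEnvironment.look yielding per-scene results, the (gaze_movement, look_at, scan_step_analysis, aoi_scan_step_analysis) tuple returned by ArFrame.look, the ArFrame.image property and ArFrame.draw come from the code above.

import cv2

from argaze import GazeFeatures


def process_gaze(environment, gaze_data):
    # environment: an already loaded ArFeatures.ArEnvironment (assumed)
    # gaze_data: an iterable of (timestamp, (x, y)) gaze samples (assumed)

    for timestamp, (x, y) in gaze_data:

        gaze_position = GazeFeatures.GazePosition((x, y))

        # ArEnvironment.look now yields one (scene_name, generator) pair per scene...
        for scene_name, frame_results in environment.look(timestamp, gaze_position):

            # ...and each scene yields (frame_name, ArFrame.look result) pairs, where
            # frame_name is None for the camera frame and the AOI name otherwise.
            for frame_name, look_result in frame_results:

                gaze_movement, look_at, scan_step_analysis, aoi_scan_step_analysis = look_result

                if GazeFeatures.is_fixation(gaze_movement):

                    print(f'{scene_name}/{frame_name}: fixation while looking at {look_at}')

        # Render each scene camera frame: the image property returns the background
        # (plus heatmap when enabled) and draw() overlays AOI, gaze position and gaze movement.
        for scene_name, scene in environment.scenes.items():

            frame_image = scene.camera_frame.image
            scene.camera_frame.draw(frame_image)
            cv2.imshow(scene_name, frame_image)

        cv2.waitKey(1)

Yielding (scene_name, ...) and (frame_name, ...) pairs, together with the new image and draw methods, lets callers report and render per frame without the removed current_* properties, which had to poll the looking lock before returning anything.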