author | Théo de la Hogue | 2023-08-30 13:49:34 +0200
---|---|---
committer | Théo de la Hogue | 2023-08-30 13:49:34 +0200
commit | d4e458df099fe75c0812c533f85cdc1f33c8b987 (patch)
tree | 5cf5d5405b399c1cd5ff34d18da548385f5182d2 /src
parent | afbb886e5b50896b3b826ee518bb00a4278624ad (diff)
download | argaze-d4e458df099fe75c0812c533f85cdc1f33c8b987.zip argaze-d4e458df099fe75c0812c533f85cdc1f33c8b987.tar.gz argaze-d4e458df099fe75c0812c533f85cdc1f33c8b987.tar.bz2 argaze-d4e458df099fe75c0812c533f85cdc1f33c8b987.tar.xz
Improving drawing features to allow fine-tuning through a JSON configuration file.
Diffstat (limited to 'src')
-rw-r--r-- | src/argaze.test/AreaOfInterest/AOI2DScene.py | 8
-rw-r--r-- | src/argaze/ArFeatures.py | 215
-rw-r--r-- | src/argaze/GazeAnalysis/DeviationCircleCoverage.py | 78
-rw-r--r-- | src/argaze/GazeAnalysis/DispersionThresholdIdentification.py | 31
-rw-r--r-- | src/argaze/GazeAnalysis/FocusPointInside.py | 21
-rw-r--r-- | src/argaze/GazeAnalysis/VelocityThresholdIdentification.py | 24
-rw-r--r-- | src/argaze/utils/demo_environment/demo_gaze_features_setup.json | 47
-rw-r--r-- | src/argaze/utils/demo_gaze_features_run.py | 7
8 files changed, 293 insertions, 138 deletions
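This commit replaces the old `draw()` methods, which returned exceptions and used hard-coded colors, with parameterized `image()`/`draw()` calls whose defaults can be overridden from a JSON setup file. A minimal usage sketch, assuming an `ArFrame.from_json` loader consistent with the loading code visible in the `ArFeatures.py` hunks below (the loader name and file path are assumptions, not shown in this diff):

```python
from argaze import ArFeatures

# Hypothetical loading step: loader name and path are assumptions.
ar_frame = ArFeatures.ArFrame.from_json('src/argaze/utils/demo_environment/demo_gaze_features_setup.json')

# With no arguments, image() falls back to the frame's image_parameters
# attribute, filled from the "image_parameters" JSON section (or defaults).
frame_image = ar_frame.image()

# Explicit keyword arguments bypass the JSON-configured defaults.
frame_image = ar_frame.image(background_weight=1., draw_gaze_position={"color": (0, 255, 255)})
```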
diff --git a/src/argaze.test/AreaOfInterest/AOI2DScene.py b/src/argaze.test/AreaOfInterest/AOI2DScene.py
index 55db353..4e96e98 100644
--- a/src/argaze.test/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze.test/AreaOfInterest/AOI2DScene.py
@@ -97,7 +97,7 @@ class TestAOI2DSceneClass(unittest.TestCase):
         gaze_position_D = GazeFeatures.GazePosition((0.5, 1.5), precision=0.25)
 
         # Check circlecast results for gaze postion A
-        for name, aoi, matching_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_A.value, gaze_position_A.precision):
+        for name, aoi, matched_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_A.value, gaze_position_A.precision):
 
             if name == "A":
@@ -112,7 +112,7 @@ class TestAOI2DSceneClass(unittest.TestCase):
             self.assertEqual(circle_ratio, 0.)
 
         # Check circlecast results for gaze postion B
-        for name, aoi, matching_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_B.value, gaze_position_B.precision):
+        for name, aoi, matched_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_B.value, gaze_position_B.precision):
 
             if name == "A":
@@ -127,7 +127,7 @@ class TestAOI2DSceneClass(unittest.TestCase):
             self.assertTrue(math.isclose(circle_ratio, 1., abs_tol=1e-3))
 
         # Check circlecast results for gaze postion C
-        for name, aoi, matching_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_C.value, gaze_position_C.precision):
+        for name, aoi, matched_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_C.value, gaze_position_C.precision):
 
             if name == "A":
@@ -142,7 +142,7 @@ class TestAOI2DSceneClass(unittest.TestCase):
             self.assertTrue(math.isclose(circle_ratio, 1 / 4, abs_tol=1e-3))
 
         # Check circlecast results for gaze postion D
-        for name, aoi, matching_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_D.value, gaze_position_D.precision):
+        for name, aoi, matched_region, aoi_ratio, circle_ratio in aoi_2d_scene.circlecast(gaze_position_D.value, gaze_position_D.precision):
 
             if name == "A":
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index b187869..1d6bf84 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -87,7 +87,7 @@ class ArLayer():
     aoi_scan_path: GazeFeatures.AOIScanPath = field(default_factory=GazeFeatures.AOIScanPath)
     aoi_scan_path_analyzers: dict = field(default_factory=dict)
     log: bool = field(default=False)
-    
+
     def __post_init__(self):
 
         # Define parent attribute: it will be setup by parent later
@@ -449,53 +449,79 @@ class ArLayer():
         # Return look data
         return looked_aoi, aoi_scan_path_analysis, execution_times, exception
 
-    def draw(self, image:numpy.array) -> Exception:
+    def draw(self, image: numpy.array, draw_aoi_scene: dict = None, draw_aoi_matching: dict = None):
         """
-        Draw layer into image.
+        Draw into image
 
         Parameters:
-            image: where to draw
+            draw_aoi_scene: AreaOfInterest.AOI2DScene.draw parameters (if None, no aoi scene is drawn)
+            draw_aoi_matching: AOIMatcher.draw parameters (which depends of the loaded aoi matcher module, if None, no aoi matching is drawn)
         """
 
         # Lock frame exploitation
         self.__look_lock.acquire()
 
-        # Catch any drawing error
-        exception = None
-
-        try:
-
-            # Draw aoi
-            self.aoi_scene.draw(image, color=self.aoi_color)
-
-            # Draw current gaze movement
-            if self.__gaze_movement.valid:
-
-                if GazeFeatures.is_fixation(self.__gaze_movement):
-
-                    self.__gaze_movement.draw(image, color=(0, 255, 255))
-                    self.__gaze_movement.draw_positions(image)
-
-                    # Draw looked aoi
-                    if self.aoi_matcher.looked_aoi != None:
-
-                        self.aoi_scene.draw_circlecast(image, self.__gaze_movement.focus, self.__gaze_movement.deviation_max, matching_aoi = [self.aoi_matcher.looked_aoi], base_color=(0, 0, 0), matching_color=(255, 255, 255))
-
-                elif GazeFeatures.is_saccade(self.__gaze_movement):
-
-                    self.__gaze_movement.draw(image, color=(0, 255, 255))
-                    self.__gaze_movement.draw_positions(image)
+        # Draw aoi if required
+        if draw_aoi_scene:
+
+            self.aoi_scene.draw(image, **draw_aoi_scene)
 
-        except Exception as e:
+        # Draw aoi matching if required
+        if draw_aoi_matching:
 
-            # Store error to return it
-            exception = e
+            self.aoi_matcher.draw(image, **draw_aoi_matching)
 
         # Unlock frame exploitation
         self.__look_lock.release()
 
-        # Return drawing error
-        return exception
+# Define default ArFrame image_paremeters values
+DEFAULT_ARFRAME_IMAGE_PARAMETERS = {
+    "background_weight": 1.,
+    "heatmap_weight": 0.5,
+    "draw_scan_path": {
+        "draw_fixations": {
+            "deviation_circle_color": (255, 255, 255),
+            "duration_border_color": (127, 127, 127),
+            "duration_factor": 1e-2
+        },
+        "draw_saccades": {
+            "line_color": (255, 255, 255)
+        },
+        "deepness": 0
+    },
+    "draw_layers": {
+        "GrayRectangle": {
+            "draw_aoi_scene": {
+                "draw_aoi": {
+                    "color": (255, 255, 255),
+                    "border_size": 1
+                }
+            },
+            "draw_aoi_matching": {
+                "draw_matched_fixation": {
+                    "deviation_circle_color": (255, 255, 255)
+                },
+                "draw_matched_fixation_positions": {
+                    "position_color": (0, 255, 255),
+                    "line_color": (0, 0, 0)
+                },
+                "draw_matched_region": {
+                    "color": (0, 255, 0),
+                    "border_size": 4
+                },
+                "draw_looked_aoi": {
+                    "color": (0, 255, 0),
+                    "border_size": 2
+                },
+                "looked_aoi_name_color": (255, 255, 255),
+                "looked_aoi_name_offset": (0, -10)
+            }
+        }
+    },
+    "draw_gaze_position": {
+        "color": (0, 255, 255)
+    }
+}
 
 @dataclass
 class ArFrame():
@@ -510,9 +536,10 @@ class ArFrame():
         scan_path: scan path object
         scan_path_analyzers: dictionary of scan path analyzers
         heatmap: heatmap object
-        background: image to draw behind
+        background: picture to draw behind
         layers: dictionary of AOI layers
         log: enable scan path analysis logging
+        image_parameters: default parameters passed to image method
     """
 
     name: str
@@ -525,6 +552,7 @@ class ArFrame():
     background: numpy.array = field(default_factory=numpy.array)
     layers: dict = field(default_factory=dict)
     log: bool = field(default=False)
+    image_parameters: dict = field(default_factory=DEFAULT_ARFRAME_IMAGE_PARAMETERS)
 
     def __post_init__(self):
@@ -688,7 +716,7 @@ class ArFrame():
                 new_frame_background_value = frame_data.pop('background')
                 new_frame_background = cv2.imread(os.path.join(working_directory, new_frame_background_value))
-                new_frame_background = cv2.resize(new_frame_background, dsize=(new_frame_size[0], new_frame_size[1]), interpolation=cv2.INTER_CUBIC)
+                new_frame_background = cv2.resize(new_frame_background, dsize=new_frame_size, interpolation=cv2.INTER_CUBIC)
 
             except KeyError:
@@ -728,6 +756,15 @@ class ArFrame():
 
             new_frame_log = False
 
+        # Load frame image parameters
+        try:
+
+            new_frame_image_parameters = frame_data.pop('image_parameters')
+
+        except KeyError:
+
+            new_frame_image_parameters = DEFAULT_ARFRAME_IMAGE_PARAMETERS
+
         # Create frame
         return ArFrame(new_frame_name, \
             new_frame_size, \
@@ -738,7 +775,8 @@ class ArFrame():
             new_heatmap, \
             new_frame_background, \
             new_layers, \
-            new_frame_log \
+            new_frame_log,
+            new_frame_image_parameters \
             )
 
     @classmethod
@@ -770,28 +808,6 @@ class ArFrame():
 
         self.__parent = parent
 
     @property
-    def image(self):
-        """
-        Get background image + heatmap image
-        """
-
-        # Lock frame exploitation
-        self.__look_lock.acquire()
-
-        image = self.background.copy()
-
-        # Draw heatmap
-        if self.heatmap:
-
-            heatmap_image = cv2.resize(self.heatmap.image, dsize=(image.shape[1], image.shape[0]), interpolation=cv2.INTER_LINEAR)
-            image = cv2.addWeighted(heatmap_image, 0.5, image, 1., 0)
-
-        # Unlock frame exploitation
-        self.__look_lock.release()
-
-        return image
-
-    @property
     def logs(self):
         """
         Get stored logs
@@ -969,40 +985,62 @@ class ArFrame():
         # Return look data
         return identified_gaze_movement, scan_step_analysis, layer_analysis, execution_times, exception
 
-    def draw(self, image:numpy.array) -> Exception:
+    def image(self, background_weight: float = None, heatmap_weight: float = None, draw_scan_path: dict = None, draw_layers: dict = None, draw_gaze_position: dict = None) -> numpy.array:
         """
-        Draw frame into image.
+        Get background image with overlaid visualisations.
 
         Parameters:
-            image: where to draw
+            background_weight: weight of background overlay
+            heatmap_weight: weight of heatmap overlay
+            draw_scan_path: GazeFeatures.ScanPath.draw parameters (if None, no scan path is drawn)
+            draw_layers: dictionary of ArLayer.draw parameters per layer (if None, no layer is drawn)
+            draw_gaze_position: GazeFeatures.GazePosition parameters (if None, no gaze position is drawn)
         """
 
+        # If use image_parameters attribute if no parameters
+        if not background_weight and not heatmap_weight and not draw_scan_path and not draw_layers and not draw_gaze_position:
+
+            return self.image(**self.image_parameters)
+
         # Lock frame exploitation
         self.__look_lock.acquire()
 
-        # Catch any drawing error
-        exception = None
+        # Draw background only
+        if background_weight and not heatmap_weight:
 
-        try:
+            image = self.background.copy()
 
-            # Draw layers
-            for layer_name, layer in self.layers.items():
+        # Draw mix background and heatmap if required
+        elif background_weight and heatmap_weight and self.heatmap:
 
-                exception = layer.draw(image)
+            background_image = self.background.copy()
+            heatmap_image = cv2.resize(self.heatmap.image, dsize=self.size, interpolation=cv2.INTER_LINEAR)
+            image = cv2.addWeighted(heatmap_image, heatmap_weight, background_image, background_weight, 0)
 
-            # Draw current gaze position
-            self.__gaze_position.draw(image, color=(255, 255, 255))
+        # Draw heatmap only
+        elif not background_weight and heatmap_weight and self.heatmap:
 
-        except Exception as e:
+            image = cv2.resize(self.heatmap.image, dsize=self.size, interpolation=cv2.INTER_LINEAR)
 
-            # Store error to return it
-            exception = e
+        # Draw scan path if required
+        if draw_scan_path and self.scan_path != None:
+
+            self.scan_path.draw(image, **draw_scan_path)
+
+        # Draw required layers
+        for layer_name, draw_layer in draw_layers.items():
+
+            self.layers[layer_name].draw(image, **draw_layer)
+
+        # Draw current gaze position if required
+        if draw_gaze_position:
+
+            self.__gaze_position.draw(image, **draw_gaze_position)
 
         # Unlock frame exploitation
         self.__look_lock.release()
 
-        # Return drawing error
-        return exception
+        return image
 
 @dataclass
 class ArScene():
@@ -1544,7 +1582,7 @@ class ArEnvironment():
 
     @property
     def image(self):
-        """Get camera frame image"""
+        """Get camera frame projections with ArUco detection visualisation."""
 
         # Can't use camera frame when it is locked
         if self.__camera_frame_lock.locked():
@@ -1556,6 +1594,9 @@ class ArEnvironment():
         # Get camera frame image
         image = self.camera_frame.image
 
+        # Draw detected markers
+        self.aruco_detector.draw_detected_markers(image)
+
         # Unlock camera frame exploitation
         self.__camera_frame_lock.release()
 
@@ -1736,25 +1777,3 @@ class ArEnvironment():
 
         with open(json_filepath, 'w', encoding='utf-8') as file:
 
             json.dump(self, file, ensure_ascii=False, indent=4, cls=DataStructures.JsonEncoder)
-
-    def draw(self, image: numpy.array) -> Exception:
-        """Draw ArUco detection visualisation and camera frame projections."""
-
-        # Draw detected markers
-        self.aruco_detector.draw_detected_markers(image)
-
-        # Can't use camera frame when it is locked
-        if self.__camera_frame_lock.locked():
-            return
-
-        # Lock camera frame exploitation
-        self.__camera_frame_lock.acquire()
-
-        # Draw camera frame
-        exception = self.camera_frame.draw(image)
-
-        # Unlock camera frame exploitation
-        self.__camera_frame_lock.release()
-
-        # Return camera frame drawing error
-        return exception
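The new `ArFrame.image()` selects between three compositing branches: background only, a weighted background/heatmap blend, or heatmap only. The blend is a single `cv2.addWeighted` call; here is a standalone sketch of that branch, with synthetic arrays standing in for the frame's real background and heatmap (shapes and colors are illustrative, not from the repository):

```python
import cv2
import numpy

# Synthetic 640x480 stand-ins for the frame's background and heatmap.
background = numpy.full((480, 640, 3), 64, dtype=numpy.uint8)
heatmap = numpy.zeros((480, 640, 3), dtype=numpy.uint8)
cv2.circle(heatmap, (320, 240), 60, (0, 0, 255), -1)  # fake gaze hotspot

# image() computes: heatmap * heatmap_weight + background * background_weight
background_weight, heatmap_weight = 1., 0.5
image = cv2.addWeighted(heatmap, heatmap_weight, background, background_weight, 0)
```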
diff --git a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
index bde486d..d4fae2a 100644
--- a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
+++ b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
@@ -13,6 +13,7 @@ from dataclasses import dataclass, field
 import math
 
 from argaze import GazeFeatures
+from argaze.AreaOfInterest import AOIFeatures
 
 import numpy
 import cv2
@@ -31,11 +32,12 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
         """Init looked aoi data."""
 
         self.__look_count = 0
-        self.__looked_aoi = None
+        self.__looked_aoi_data = (None, None)
         self.__looked_aoi_coverage_mean = 0
         self.__looked_aoi_coverage = {}
+        self.__matched_gaze_movement = None
 
-    def match(self, aoi_scene, gaze_movement, exclude=[]) -> str:
+    def match(self, aoi_scene, gaze_movement, exclude=[]) -> Tuple[str, AOIFeatures.AreaOfInterest]:
         """Returns AOI with the maximal fixation's deviation circle coverage if above coverage threshold."""
 
         if GazeFeatures.is_fixation(gaze_movement):
@@ -43,7 +45,7 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
             self.__look_count += 1
 
             max_coverage = 0.
-            most_likely_looked_aoi = None
+            most_likely_looked_aoi_data = (None, None)
 
             for name, aoi in aoi_scene.items():
@@ -51,6 +53,7 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
 
                 if name not in exclude and circle_ratio > 0:
 
+                    # Sum circle ratio to update aoi coverage
                     try:
@@ -60,32 +63,87 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
 
                         self.__looked_aoi_coverage[name] = circle_ratio
 
-                    # Update most likely looked aoi
+                    # Update most likely looked aoi
                     if self.__looked_aoi_coverage[name] > max_coverage:
 
-                        most_likely_looked_aoi = name
+                        most_likely_looked_aoi_data = (name, aoi)
                         max_coverage = self.__looked_aoi_coverage[name]
 
-            # Update looked aoi
-            self.__looked_aoi = most_likely_looked_aoi
+            # Update looked aoi data
+            self.__looked_aoi_data = most_likely_looked_aoi_data
 
             # Update looked aoi coverage mean
             self.__looked_aoi_coverage_mean = int(100 * max_coverage / self.__look_count) / 100
 
+            # Update matched gaze movement
+            self.__matched_gaze_movement = gaze_movement if max_coverage > 0. else None
+
             # Return
             if self.looked_aoi_coverage_mean > self.coverage_threshold:
 
-                return self.__looked_aoi
+                return self.__looked_aoi_data
 
         elif GazeFeatures.is_saccade(gaze_movement):
 
             self.__post_init__()
 
+    def draw(self, image: numpy.array, draw_matched_fixation: dict = None, draw_matched_fixation_positions: dict = None, draw_matched_region: dict = None, draw_looked_aoi: dict = None, looked_aoi_name_color: tuple = None, looked_aoi_name_offset: tuple = (0, 0)):
+        """Draw matching into image.
+
+        Parameters:
+            draw_matched_fixation: Fixation.draw parameters (which depends of the loaded gaze movement identifier module, if None, no fixation is drawn)
+            draw_matched_fixation_positions: GazeMovement.draw_positions parameters (if None, no fixation is drawn)
+            draw_matched_region: AOIFeatures.AOI.draw parameters (if None, no matched region is drawn)
+            draw_looked_aoi: AOIFeatures.AOI.draw parameters (if None, no looked aoi is drawn)
+        """
+
+        if self.__matched_gaze_movement:
+
+            if GazeFeatures.is_fixation(self.__matched_gaze_movement):
+
+                # Draw matched fixation if required
+                if draw_matched_fixation:
+
+                    self.__matched_gaze_movement.draw(image, **draw_matched_fixation)
+
+                # Draw matched fixation positions if required
+                if draw_matched_fixation_positions:
+
+                    self.__matched_gaze_movement.draw_positions(image, **draw_matched_fixation_positions)
+
+                # Draw matched aoi
+                if self.looked_aoi.all() != None:
+
+                    # BAD: we use deviation_max attribute which is an atttribute of DispersionThresholdIdentification.Fixation class
+                    matched_region, aoi_ratio, circle_ratio = self.looked_aoi.circle_intersection(self.__matched_gaze_movement.focus, self.__matched_gaze_movement.deviation_max)
+
+                    # Draw looked aoi if required
+                    if draw_looked_aoi:
+
+                        self.looked_aoi.draw(image, **draw_looked_aoi)
+
+                    # Draw matched region if required
+                    if draw_matched_region:
+
+                        matched_region.draw(image, **draw_matched_region)
+
+                    # Draw looked aoi name if required
+                    if looked_aoi_name_color:
+
+                        top_left_corner_pixel = numpy.rint(self.looked_aoi.bounding_box[0]).astype(int) + looked_aoi_name_offset
+                        cv2.putText(image, self.looked_aoi_name, top_left_corner_pixel, cv2.FONT_HERSHEY_SIMPLEX, 1, looked_aoi_name_color, 1, cv2.LINE_AA)
+
+    @property
+    def looked_aoi(self) -> AOIFeatures.AreaOfInterest:
+        """Get most likely looked aoi for current fixation (e.g. the aoi with the highest coverage mean value)"""
+
+        return self.__looked_aoi_data[1]
+
     @property
-    def looked_aoi(self) -> str:
+    def looked_aoi_name(self) -> str:
         """Get most likely looked aoi name for current fixation (e.g. the aoi with the highest coverage mean value)"""
 
-        return self.__looked_aoi
+        return self.__looked_aoi_data[0]
 
     @property
     def looked_aoi_coverage_mean(self) -> float:
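The matcher accumulates each AOI's circle-coverage ratio across successive fixations and reports a running mean truncated to two decimals. A self-contained sketch of that bookkeeping, with hypothetical per-AOI ratios standing in for `circle_intersection` results:

```python
# Sketch of the coverage bookkeeping performed by match(), with plain
# floats standing in for circle_intersection results.
look_count = 0
coverage = {}

def update_coverage(circle_ratios: dict) -> tuple:
    """Accumulate ratios and return (most covered AOI, truncated coverage mean)."""
    global look_count
    look_count += 1
    best_name, max_coverage = None, 0.
    for name, ratio in circle_ratios.items():
        coverage[name] = coverage.get(name, 0.) + ratio
        if coverage[name] > max_coverage:
            best_name, max_coverage = name, coverage[name]
    # Same two-decimal truncation as the committed code
    return best_name, int(100 * max_coverage / look_count) / 100

print(update_coverage({"A": 0.6, "B": 0.2}))  # ('A', 0.6)
print(update_coverage({"A": 0.5, "B": 0.4}))  # ('A', 0.55)
```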
diff --git a/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py b/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py
index 4a55911..bf7b862 100644
--- a/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py
+++ b/src/argaze/GazeAnalysis/DispersionThresholdIdentification.py
@@ -73,11 +73,24 @@ class Fixation(GazeFeatures.Fixation):
 
         return self
 
-    def draw(self, image: numpy.array, color=(127, 127, 127), border_color=(255, 255, 255)):
-        """Draw fixation into image."""
+    def draw(self, image: numpy.array, deviation_circle_color: tuple = None, duration_border_color: tuple = None, duration_factor: float = 1.):
+        """Draw fixation into image.
+
+        Parameters:
+            deviation_circle_color: color of circle representing fixation's deviation
+            duration_border_color: color of border representing fixation's duration
+            duration_factor: how many pixels per duration unit
+        """
+
+        # Draw deviation circle if required
+        if deviation_circle_color:
+
+            cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), deviation_circle_color, -1)
 
-        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), color, -1)
-        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), border_color, int(self.duration*1e-3))
+        # Draw duration border if required
+        if duration_border_color:
+
+            cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), duration_border_color, int(self.duration * duration_factor))
 
 @dataclass(frozen=True)
 class Saccade(GazeFeatures.Saccade):
@@ -86,13 +99,17 @@ class Saccade(GazeFeatures.Saccade):
     def __post_init__(self):
 
         super().__post_init__()
 
-    def draw(self, image: numpy.array, color=(255, 255, 255)):
-        """Draw saccade into image."""
+    def draw(self, image: numpy.array, line_color=(255, 255, 255)):
+        """Draw saccade into image.
+
+        Parameters:
+            color: color of line from first position to last position
+        """
 
         _, start_position = self.positions.first
         _, last_position = self.positions.last
 
-        cv2.line(image, (int(start_position[0]), int(start_position[1])), (int(last_position[0]), int(last_position[1])), color, 2)
+        cv2.line(image, (int(start_position[0]), int(start_position[1])), (int(last_position[0]), int(last_position[1])), line_color, 2)
 
 @dataclass
 class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier):
diff --git a/src/argaze/GazeAnalysis/FocusPointInside.py b/src/argaze/GazeAnalysis/FocusPointInside.py
index ac712a7..d3073b5 100644
--- a/src/argaze/GazeAnalysis/FocusPointInside.py
+++ b/src/argaze/GazeAnalysis/FocusPointInside.py
@@ -13,6 +13,7 @@ from dataclasses import dataclass, field
 import math
 
 from argaze import GazeFeatures
+from argaze.AreaOfInterest import AOIFeatures
 
 import numpy
 import cv2
@@ -27,9 +28,9 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
     def __post_init__(self):
         """Init looked aoi data."""
 
-        self.__looked_aoi = None
+        self.__looked_aoi_data = (None, None)
 
-    def match(self, aoi_scene, gaze_movement, exclude=[]) -> str:
+    def match(self, aoi_scene, gaze_movement, exclude=[]) -> Tuple[str, AOIFeatures.AreaOfInterest]:
         """Returns AOI containing fixation focus point."""
 
         if GazeFeatures.is_fixation(gaze_movement):
@@ -38,17 +39,23 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
 
             if name not in exclude and aoi.contains_point(gaze_movement.focus):
 
-                # Update looked aoi
-                self.__looked_aoi = name
+                # Update looked aoi data
+                self.__looked_aoi_data = (name, aoi)
 
-                return self.__looked_aoi
+                return self.__looked_aoi_data
 
         elif GazeFeatures.is_saccade(gaze_movement):
 
             self.__post_init__()
 
     @property
-    def looked_aoi(self) -> str:
+    def looked_aoi(self) -> AOIFeatures.AreaOfInterest:
+        """Get most likely looked aoi for current fixation (e.g. the aoi with the highest coverage mean value)"""
+
+        return self.__looked_aoi_data[1]
+
+    @property
+    def looked_aoi_name(self) -> str:
         """Get most likely looked aoi name for current fixation (e.g. the aoi with the highest coverage mean value)"""
 
-        return self.__looked_aoi
+        return self.__looked_aoi_data[0]
\ No newline at end of file
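Both fixation classes gain a `duration_factor` parameter that converts fixation duration into border thickness, replacing the previous hard-coded `1e-3` scaling. A sketch with illustrative values, assuming durations expressed in milliseconds: the demo's `duration_factor` of `1e-2` turns a 400 ms fixation into a 4 px border.

```python
import cv2
import numpy

# Illustrative values: a 400 ms fixation drawn with the demo's duration_factor.
image = numpy.zeros((200, 200, 3), dtype=numpy.uint8)
focus, deviation_max, duration = (100, 100), 40, 400
duration_factor = 1e-2  # pixels of border per duration unit

cv2.circle(image, focus, deviation_max, (255, 255, 255), -1)  # deviation circle
cv2.circle(image, focus, deviation_max, (127, 127, 127), int(duration * duration_factor))  # 4 px duration border
```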
diff --git a/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py b/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py
index c315b8c..1160a0d 100644
--- a/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py
+++ b/src/argaze/GazeAnalysis/VelocityThresholdIdentification.py
@@ -72,11 +72,17 @@ class Fixation(GazeFeatures.Fixation):
 
         return self
 
-    def draw(self, image: numpy.array, color=(127, 127, 127), border_color=(255, 255, 255)):
-        """Draw fixation into image."""
+    def draw(self, image: numpy.array, deviation_circle_color=(255, 255, 255), duration_border_color=(255, 255, 255), duration_factor: float = 1.):
+        """Draw fixation into image.
+
+        Parameters:
+            deviation_circle_color: color of circle representing fixation's deviation
+            duration_border_color: color of border representing fixation's duration
+            duration_factor: how many pixels per duration unit
+        """
 
-        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), color, -1)
-        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), border_color, int(self.duration*1e-3))
+        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), deviation_circle_color, -1)
+        cv2.circle(image, (int(self.focus[0]), int(self.focus[1])), int(self.deviation_max), duration_border_color, int(self.duration * duration_factor))
 
 @dataclass(frozen=True)
 class Saccade(GazeFeatures.Saccade):
@@ -85,13 +91,17 @@ class Saccade(GazeFeatures.Saccade):
     def __post_init__(self):
 
         super().__post_init__()
 
-    def draw(self, image: numpy.array, color=(255, 255, 255)):
-        """Draw saccade into image."""
+    def draw(self, image: numpy.array, line_color=(255, 255, 255)):
+        """Draw saccade into image.
+
+        Parameters:
+            color: color of line from first position to last position
+        """
 
         _, start_position = self.positions.first
         _, last_position = self.positions.last
 
-        cv2.line(image, (int(start_position[0]), int(start_position[1])), (int(last_position[0]), int(last_position[1])), color, 2)
+        cv2.line(image, (int(start_position[0]), int(start_position[1])), (int(last_position[0]), int(last_position[1])), line_color, 2)
 
 @dataclass
 class GazeMovementIdentifier(GazeFeatures.GazeMovementIdentifier):
diff --git a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
index 6e43895..18f5719 100644
--- a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
+++ b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
@@ -48,5 +48,52 @@
                 "Entropy":{}
             }
         }
+    },
+    "image_parameters": {
+        "background_weight": 1,
+        "heatmap_weight": 0.5,
+        "draw_scan_path": {
+            "draw_fixations": {
+                "deviation_circle_color": [255, 0, 255],
+                "duration_border_color": [127, 0, 127],
+                "duration_factor": 1e-2
+            },
+            "draw_saccades": {
+                "line_color": [255, 0, 255]
+            },
+            "deepness": 0
+        },
+        "draw_layers": {
+            "GrayRectangle": {
+                "draw_aoi_scene": {
+                    "draw_aoi": {
+                        "color": [255, 255, 255],
+                        "border_size": 1
+                    }
+                },
+                "draw_aoi_matching": {
+                    "draw_matched_fixation": {
+                        "deviation_circle_color": [255, 255, 255]
+                    },
+                    "draw_matched_fixation_positions": {
+                        "position_color": [0, 255, 255],
+                        "line_color": [0, 0, 0]
+                    },
+                    "draw_matched_region": {
+                        "color": [0, 255, 0],
+                        "border_size": 4
+                    },
+                    "draw_looked_aoi": {
+                        "color": [0, 255, 0],
+                        "border_size": 2
+                    },
+                    "looked_aoi_name_color": [255, 255, 255],
+                    "looked_aoi_name_offset": [0, -10]
+                }
+            }
+        },
+        "draw_gaze_position": {
+            "color": [0, 255, 255]
+        }
     }
 }
\ No newline at end of file
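The `"image_parameters"` section mirrors `DEFAULT_ARFRAME_IMAGE_PARAMETERS` key for key, so it can be unpacked straight into `ArFrame.image()`; JSON arrays arrive as Python lists where the defaults use tuples, which OpenCV color arguments accept either way. A sketch of that mapping (the `ar_frame` object is carried over from the loading sketch near the top of this page):

```python
import json

# Sketch: unpack the JSON section into the keyword arguments of image().
with open('src/argaze/utils/demo_environment/demo_gaze_features_setup.json') as setup_file:
    setup = json.load(setup_file)

frame_image = ar_frame.image(**setup['image_parameters'])  # ar_frame as loaded earlier
```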
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
index c19f2c3..92fa282 100644
--- a/src/argaze/utils/demo_gaze_features_run.py
+++ b/src/argaze/utils/demo_gaze_features_run.py
@@ -64,11 +64,8 @@ def main():
     # Draw frame and mouse position analysis
     while True:
 
-        # Create frame image
-        frame_image = ar_frame.image
-
-        # Draw frame info
-        ar_frame.draw(frame_image)
+        # Get frame image
+        frame_image = ar_frame.image()
 
         # Write heatmap buffer manual
         buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
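With drawing folded into image composition, the demo loop reduces to a single `image()` call. A sketch of the resulting loop (window name and exit key are illustrative, and `ar_frame` comes from the loading sketch above, not from this diff):

```python
import cv2

while True:

    # One call now composes background, heatmap, layers and gaze position
    frame_image = ar_frame.image()

    cv2.imshow('ArGaze demo frame', frame_image)

    # Esc key to quit (illustrative exit condition)
    if cv2.waitKey(10) == 27:
        break
```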