authorThéo de la Hogue2023-09-20 09:54:39 +0200
committerThéo de la Hogue2023-09-20 09:54:39 +0200
commit0fea09cdeee6367cde0454c92ea495d49dff5933 (patch)
treecdfcf5acfaa5c33cd9e24704038bd6ac9ed769d2
parentb2343f1cd1848e7e96f50088050a649aea38e6cf (diff)
parent5de2df9ac8ce070fb0b608a3f02ed9aadb076745 (diff)
Merge branch 'master' of ssh://git.recherche.enac.fr/interne-ihm-aero/eye-tracking/argaze
-rw-r--r--  docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md    2
-rw-r--r--  src/argaze/ArFeatures.py                                               92
-rw-r--r--  src/argaze/GazeAnalysis/DeviationCircleCoverage.py                     11
3 files changed, 53 insertions, 52 deletions
diff --git a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
index 2db69fc..81efa40 100644
--- a/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
+++ b/docs/user_guide/gaze_analysis_pipeline/advanced_topics/scripting.md
@@ -110,7 +110,7 @@ Let's understand the meaning of each returned data.
A [GazeMovement](../../../argaze.md/#argaze.GazeFeatures.GazeMovement) is returned once it has been identified by the [ArFrame.gaze_movement_identifier](../../../argaze.md/#argaze.ArFeatures.ArFrame) object from incoming consecutive timestamped gaze positions. If no gaze movement has been identified, an [UnvalidGazeMovement](../../../argaze.md/#argaze.GazeFeatures.UnvalidGazeMovement) is returned.
-This could also be the current gaze movement if [ArFrame.filter_in_progress_fixation](../../../argaze.md/#argaze.ArFeatures.ArFrame) attribute is false.
+This could also be the current gaze movement if [ArFrame.filter_in_progress_identification](../../../argaze.md/#argaze.ArFeatures.ArFrame) attribute is false.
In that case, the returned gaze movement *finished* flag is false.
Then, the returned gaze movement type can be tested thanks to [GazeFeatures.is_fixation](../../../argaze.md/#argaze.GazeFeatures.is_fixation) and [GazeFeatures.is_saccade](../../../argaze.md/#argaze.GazeFeatures.is_saccade) functions.
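As a minimal sketch of the behaviour documented above (the gaze_movement value is assumed to come from ArFrame.look, whose changes appear in the ArFeatures.py diff below):

```python
from argaze import GazeFeatures

# gaze_movement: value assumed to be returned by ArFrame.look()
if GazeFeatures.is_fixation(gaze_movement):

    # An unfinished fixation only shows up here when the
    # ArFrame.filter_in_progress_identification attribute is False
    if not gaze_movement.finished:
        print('fixation in progress')

elif GazeFeatures.is_saccade(gaze_movement):

    print('saccade')
```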
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 18fb430..b9a29de 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -387,59 +387,56 @@ class ArLayer():
try:
- # Check gaze movement validity
- if gaze_movement.valid:
+ if self.aoi_matcher is not None:
- if self.aoi_matcher is not None:
+ # Store aoi matching start date
+ matching_start = time.perf_counter()
- # Store aoi matching start date
- matching_start = time.perf_counter()
+ # Update looked aoi thanks to aoi matcher
+ # Note: don't filter valid/unvalid and finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally
+ looked_aoi_name, looked_aoi = self.aoi_matcher.match(self.aoi_scene, gaze_movement)
- # Update looked aoi thanks to aoi matcher
- # Note: don't filter finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally
- looked_aoi_name, looked_aoi = self.aoi_matcher.match(self.aoi_scene, gaze_movement)
+ # Assess aoi matching time in ms
+ execution_times['aoi_matcher'] = (time.perf_counter() - matching_start) * 1e3
- # Assess aoi matching time in ms
- execution_times['aoi_matcher'] = (time.perf_counter() - matching_start) * 1e3
-
- # Finished gaze movement has been identified
- if gaze_movement.finished:
+ # Valid and finished gaze movement has been identified
+ if gaze_movement.valid and gaze_movement.finished:
- if GazeFeatures.is_fixation(gaze_movement):
+ if GazeFeatures.is_fixation(gaze_movement):
- # Append fixation to aoi scan path
- if self.aoi_scan_path is not None and looked_aoi_name is not None:
+ # Append fixation to aoi scan path
+ if self.aoi_scan_path is not None and looked_aoi_name is not None:
- aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, looked_aoi_name)
+ aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, looked_aoi_name)
- # Is there a new step?
- if aoi_scan_step is not None and len(self.aoi_scan_path) > 1:
+ # Is there a new step?
+ if aoi_scan_step is not None and len(self.aoi_scan_path) > 1:
- for aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
+ for aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
- # Store aoi scan path analysis start date
- aoi_scan_path_analysis_start = time.perf_counter()
+ # Store aoi scan path analysis start date
+ aoi_scan_path_analysis_start = time.perf_counter()
- # Analyze aoi scan path
- aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
+ # Analyze aoi scan path
+ aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
- # Assess aoi scan step analysis time in ms
- execution_times['aoi_scan_step_analyzers'][aoi_scan_path_analyzer_module_path] = (time.perf_counter() - aoi_scan_path_analysis_start) * 1e3
+ # Assess aoi scan step analysis time in ms
+ execution_times['aoi_scan_step_analyzers'][aoi_scan_path_analyzer_module_path] = (time.perf_counter() - aoi_scan_path_analysis_start) * 1e3
- # Store analysis
- aoi_scan_path_analysis[aoi_scan_path_analyzer_module_path] = aoi_scan_path_analyzer.analysis
+ # Store analysis
+ aoi_scan_path_analysis[aoi_scan_path_analyzer_module_path] = aoi_scan_path_analyzer.analysis
- # Log analysis
- if self.log:
+ # Log analysis
+ if self.log:
- self.__ts_logs[aoi_scan_path_analyzer_module_path][timestamp] = aoi_scan_path_analyzer.analysis
+ self.__ts_logs[aoi_scan_path_analyzer_module_path][timestamp] = aoi_scan_path_analyzer.analysis
- elif GazeFeatures.is_saccade(gaze_movement):
+ elif GazeFeatures.is_saccade(gaze_movement):
- # Append saccade to aoi scan path
- if self.aoi_scan_path is not None:
+ # Append saccade to aoi scan path
+ if self.aoi_scan_path is not None:
- self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
+ self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
except Exception as e:
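The net change of this hunk is easier to read without the diff markers; the following is a condensed paraphrase of the reworked AOI matching flow in ArLayer.look (timing, logging and error handling omitted), not a verbatim extract:

```python
# Condensed paraphrase of the reworked ArLayer.look() AOI matching flow
if self.aoi_matcher is not None:

    # Match every movement, whatever its validity or completion state:
    # the AOI matcher decides internally how to handle it
    looked_aoi_name, looked_aoi = self.aoi_matcher.match(self.aoi_scene, gaze_movement)

    # Only valid and finished movements feed the AOI scan path
    if gaze_movement.valid and gaze_movement.finished:

        if GazeFeatures.is_fixation(gaze_movement):

            if self.aoi_scan_path is not None and looked_aoi_name is not None:
                aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, looked_aoi_name)

        elif GazeFeatures.is_saccade(gaze_movement):

            if self.aoi_scan_path is not None:
                self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
```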
@@ -528,7 +525,7 @@ class ArFrame():
name: name of the frame
size: defines the dimension of the rectangular area where gaze positions are projected.
gaze_movement_identifier: gaze movement identification algorithm
- filter_in_progress_fixation: ignore in progress fixation
+ filter_in_progress_identification: ignore in progress gaze movement identification
scan_path: scan path object
scan_path_analyzers: dictionary of scan path analyzers
heatmap: heatmap object
@@ -541,7 +538,7 @@ class ArFrame():
name: str
size: tuple[int] = field(default=(1, 1))
gaze_movement_identifier: GazeFeatures.GazeMovementIdentifier = field(default_factory=GazeFeatures.GazeMovementIdentifier)
- filter_in_progress_fixation: bool = field(default=True)
+ filter_in_progress_identification: bool = field(default=True)
scan_path: GazeFeatures.ScanPath = field(default_factory=GazeFeatures.ScanPath)
scan_path_analyzers: dict = field(default_factory=dict)
heatmap: AOIFeatures.Heatmap = field(default_factory=AOIFeatures.Heatmap)
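With the renamed field, a frame that keeps in-progress identification visible could be configured as in the following sketch (hypothetical values; every other field keeps the default declared above):

```python
from argaze import ArFeatures

# Hypothetical frame: only the renamed flag is set explicitly,
# the remaining fields keep the dataclass defaults shown above
ar_frame = ArFeatures.ArFrame(
    name='example frame',
    size=(1920, 1080),
    filter_in_progress_identification=False)
```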
@@ -624,11 +621,11 @@ class ArFrame():
# Current fixation matching
try:
- filter_in_progress_fixation = frame_data.pop('filter_in_progress_fixation')
+ filter_in_progress_identification = frame_data.pop('filter_in_progress_identification')
except KeyError:
- filter_in_progress_fixation = True
+ filter_in_progress_identification = True
# Load scan path
try:
@@ -765,7 +762,7 @@ class ArFrame():
return ArFrame(new_frame_name, \
new_frame_size, \
new_gaze_movement_identifier, \
- filter_in_progress_fixation, \
+ filter_in_progress_identification, \
new_scan_path, \
new_scan_path_analyzers, \
new_heatmap, \
@@ -823,7 +820,7 @@ class ArFrame():
gaze_position: gaze position to project
Returns:
- identified_gaze_movement: identified gaze movement from incoming consecutive timestamped gaze positions if gaze_movement_identifier is instantiated. Current gaze movement if filter_in_progress_fixation is False.
+ identified_gaze_movement: identified gaze movement from incoming consecutive timestamped gaze positions if gaze_movement_identifier is instantiated. Current gaze movement if filter_in_progress_identification is False.
scan_path_analysis: scan path analysis at each new scan step if scan_path is instantiated.
layers_analysis: aoi scan path analysis at each new aoi scan step for each instantiated layer's aoi scan path.
execution_times: all pipeline steps execution times.
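Consuming these return values could look like the sketch below, assuming look() is called with a timestamp and a gaze position and returns exactly the four values documented here:

```python
# Sketch: unpack the documented return values
# (adapt if the actual signature carries additional values)
identified_gaze_movement, scan_path_analysis, layers_analysis, execution_times = \
    ar_frame.look(timestamp, gaze_position)

# With filter_in_progress_identification set to False, this may be an
# in-progress movement whose finished flag is still False
if identified_gaze_movement.valid and not identified_gaze_movement.finished:
    print('gaze movement still being identified')

# Pipeline step timings are reported in milliseconds
print(execution_times)
```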
@@ -909,14 +906,10 @@ class ArFrame():
self.__ts_logs[scan_path_analyzer_module_path][timestamp] = scan_path_analyzer.analysis
- # No valid finished gaze movement: optionally stop in progress fixation filtering
- elif self.gaze_movement_identifier is not None and not self.filter_in_progress_fixation:
-
- current_fixation = self.gaze_movement_identifier.current_fixation
-
- if current_fixation.valid:
+ # No valid finished gaze movement: optionally stop in progress identification filtering
+ elif self.gaze_movement_identifier is not None and not self.filter_in_progress_identification:
- identified_gaze_movement = current_fixation
+ identified_gaze_movement = self.gaze_movement_identifier.current_gaze_movement
# Update heatmap
if self.heatmap is not None:
@@ -933,7 +926,8 @@ class ArFrame():
# Assess heatmap time in ms
execution_times['heatmap'] = (time.perf_counter() - heatmap_start) * 1e3
- # Look layers
+ # Look layers with valid identified gaze movement
+ # Note: don't filter valid/unvalid or finished/unfinished gaze movement to allow layers to reset internally
for layer_name, layer in self.layers.items():
looked_aoi, aoi_scan_path_analysis, layer_execution_times, layer_exception = layer.look(timestamp, identified_gaze_movement)
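One consequence of switching from current_fixation to current_gaze_movement: with filtering disabled, a caller of ArFrame.look can now also receive an in-progress saccade, not only an in-progress fixation, as in this hedged sketch:

```python
from argaze import GazeFeatures

# identified_gaze_movement: assumed to be returned by ArFrame.look() while
# filter_in_progress_identification is False
if identified_gaze_movement.valid and not identified_gaze_movement.finished:

    if GazeFeatures.is_fixation(identified_gaze_movement):
        print('fixation still growing')

    elif GazeFeatures.is_saccade(identified_gaze_movement):
        # Previously unreachable: only current_fixation was exposed
        print('saccade still in progress')
```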
diff --git a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
index 4cfab72..f0decfc 100644
--- a/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
+++ b/src/argaze/GazeAnalysis/DeviationCircleCoverage.py
@@ -51,7 +51,7 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
for name, aoi in aoi_scene.items():
- # BAD: we use deviation_max attribute which is an atttribute of DispersionThresholdIdentification.Fixation class
+ # BAD: we use deviation_max attribute which is an attribute of DispersionThresholdIdentification.Fixation class
region, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, gaze_movement.deviation_max)
if name not in self.exclude and circle_ratio > 0:
@@ -83,7 +83,10 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
for aoi_name, circle_ratio_sum in self.__circle_ratio_sum.items():
- self.__aois_coverages[aoi_name] = int(100 * circle_ratio_sum / self.__look_count) / 100
+ circle_ratio_mean = circle_ratio_sum / self.__look_count
+
+ # filter circle ratio mean greater than 1
+ self.__aois_coverages[aoi_name] = circle_ratio_mean if circle_ratio_mean < 1 else 1
# Update matched gaze movement
self.__matched_gaze_movement = gaze_movement
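A hypothetical numeric example of this change: the previous expression truncated the mean to two decimals but could still exceed 1, whereas the new expression caps the coverage at 1.

```python
# Hypothetical numbers illustrating the coverage computation change
look_count = 3
circle_ratio_sum = 3.6

circle_ratio_mean = circle_ratio_sum / look_count                    # 1.2

old_coverage = int(100 * circle_ratio_mean) / 100                    # 1.2, may exceed 1
new_coverage = circle_ratio_mean if circle_ratio_mean < 1 else 1     # capped at 1
```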
@@ -100,6 +103,10 @@ class AOIMatcher(GazeFeatures.AOIMatcher):
self.__post_init__()
+ elif not gaze_movement.valid:
+
+ self.__post_init__()
+
return (None, None)
def draw(self, image: numpy.array, aoi_scene: AOIFeatures.AOIScene, draw_matched_fixation: dict = None, draw_matched_fixation_positions: dict = None, draw_matched_region: dict = None, draw_looked_aoi: dict = None, update_looked_aoi: bool = False, looked_aoi_name_color: tuple = None, looked_aoi_name_offset: tuple = (0, 0)):