author     Théo de la Hogue  2023-07-10 19:24:53 +0200
committer  Théo de la Hogue  2023-07-10 19:24:53 +0200
commit     77396cff852c637e5c6e52a46bd4d2c1d84a52f7 (patch)
tree       19d6f38f48f217278a00528d5d0e4d29d6e8d1e1 /src
parent     95be300b3af83db71307ce3d00c51ad3889013bc (diff)
Catching and reporting exceptions in ArFrame look method.
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/ArFeatures.py                    154
-rw-r--r--  src/argaze/utils/demo_ar_features_run.py      2
-rw-r--r--  src/argaze/utils/demo_gaze_features_run.py    2
3 files changed, 90 insertions, 68 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 43496ef..1569873 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -288,7 +288,7 @@ class ArFrame():
return image
- def look(self, timestamp: int|float, inner_gaze_position: GazeFeatures.GazePosition) -> Tuple[GazeFeatures.GazeMovement, str, dict, dict]:
+ def look(self, timestamp: int|float, inner_gaze_position: GazeFeatures.GazePosition) -> Tuple[GazeFeatures.GazeMovement, str, dict, dict, dict]:
"""
GazeFeatures.AOIScanStepError
@@ -316,109 +316,122 @@ class ArFrame():
scan_step_analysis = {}
aoi_scan_step_analysis = {}
- # Identify gaze movement
- if self.gaze_movement_identifier:
+ # Catch any error
+ exception = None
- # Identify finished gaze movement
- finished_gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__gaze_position)
+ try:
- # Valid and finished gaze movement has been identified
- if finished_gaze_movement.valid:
+ # Identify gaze movement
+ if self.gaze_movement_identifier:
- if GazeFeatures.is_fixation(finished_gaze_movement):
+ # Identify finished gaze movement
+ finished_gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__gaze_position)
- # Update current fixation
- fixation = finished_gaze_movement
+ # Valid and finished gaze movement has been identified
+ if finished_gaze_movement.valid:
- # Does the fixation match an aoi?
- for name, aoi in self.aoi_2d_scene.items():
+ if GazeFeatures.is_fixation(finished_gaze_movement):
- _, _, circle_ratio = aoi.circle_intersection(finished_gaze_movement.focus, finished_gaze_movement.deviation_max)
+ # Update current fixation
+ fixation = finished_gaze_movement
- if circle_ratio > 0.25:
+ # Does the fixation match an aoi?
+ for name, aoi in self.aoi_2d_scene.items():
- if name != self.name:
+ _, _, circle_ratio = aoi.circle_intersection(finished_gaze_movement.focus, finished_gaze_movement.deviation_max)
- # Update current look at
- look_at = name
- break
+ if circle_ratio > 0.25:
- # Append fixation to scan path
- if self.scan_path != None:
+ if name != self.name:
- self.scan_path.append_fixation(timestamp, finished_gaze_movement)
+ # Update current look at
+ look_at = name
+ break
- # Append fixation to aoi scan path
- if self.aoi_scan_path != None and look_at != None:
+ # Append fixation to scan path
+ if self.scan_path != None:
- aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, finished_gaze_movement, look_at)
+ self.scan_path.append_fixation(timestamp, finished_gaze_movement)
- # Analyze aoi scan path
- if aoi_scan_step and len(self.aoi_scan_path) > 1:
+ # Append fixation to aoi scan path
+ if self.aoi_scan_path != None and look_at != None:
- for aoi_scan_path_analyzer_type, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
+ aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, finished_gaze_movement, look_at)
- aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
+ # Analyze aoi scan path
+ if aoi_scan_step and len(self.aoi_scan_path) > 1:
- aoi_scan_step_analysis[aoi_scan_path_analyzer_type] = aoi_scan_path_analyzer.analysis
+ for aoi_scan_path_analyzer_type, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
- elif GazeFeatures.is_saccade(finished_gaze_movement):
+ aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
- # Update current look at
- look_at = None
+ aoi_scan_step_analysis[aoi_scan_path_analyzer_type] = aoi_scan_path_analyzer.analysis
- # Append saccade to scan path
- if self.scan_path != None:
-
- scan_step = self.scan_path.append_saccade(timestamp, finished_gaze_movement)
+ elif GazeFeatures.is_saccade(finished_gaze_movement):
- # Analyze aoi scan path
- if scan_step and len(self.scan_path) > 1:
+ # Update current look at
+ look_at = None
- for scan_path_analyzer_type, scan_path_analyzer in self.scan_path_analyzers.items():
+ # Append saccade to scan path
+ if self.scan_path != None:
+
+ scan_step = self.scan_path.append_saccade(timestamp, finished_gaze_movement)
- scan_path_analyzer.analyze(self.scan_path)
+ # Analyze aoi scan path
+ if scan_step and len(self.scan_path) > 1:
- scan_step_analysis[scan_path_analyzer_type] = scan_path_analyzer.analysis
+ for scan_path_analyzer_type, scan_path_analyzer in self.scan_path_analyzers.items():
- # Append saccade to aoi scan path
- if self.aoi_scan_path != None:
+ scan_path_analyzer.analyze(self.scan_path)
- self.aoi_scan_path.append_saccade(timestamp, finished_gaze_movement)
+ scan_step_analysis[scan_path_analyzer_type] = scan_path_analyzer.analysis
- # No valid finished gaze movement: check current fixation
- else:
+ # Append saccade to aoi scan path
+ if self.aoi_scan_path != None:
- current_fixation = self.gaze_movement_identifier.current_fixation
+ self.aoi_scan_path.append_saccade(timestamp, finished_gaze_movement)
- if current_fixation.valid:
+ # No valid finished gaze movement: check current fixation
+ else:
- # Update current fixation
- fixation = current_fixation
+ current_fixation = self.gaze_movement_identifier.current_fixation
- # Does the fixation match an aoi?
- for name, aoi in self.aoi_2d_scene.items():
+ if current_fixation.valid:
- _, _, circle_ratio = aoi.circle_intersection(current_fixation.focus, current_fixation.deviation_max)
+ # Update current fixation
+ fixation = current_fixation
- if circle_ratio > 0.25:
+ # Does the fixation match an aoi?
+ for name, aoi in self.aoi_2d_scene.items():
- if name != self.name:
+ _, _, circle_ratio = aoi.circle_intersection(current_fixation.focus, current_fixation.deviation_max)
- # Update current look at
- look_at = name
- break
+ if circle_ratio > 0.25:
- # Update heatmap
- if self.heatmap:
+ if name != self.name:
+
+ # Update current look at
+ look_at = name
+ break
+
+ # Update heatmap
+ if self.heatmap:
+
+ self.heatmap.update(self.__gaze_position.value, sigma=0.05)
+
+ except Exception as e:
- self.heatmap.update(self.__gaze_position.value, sigma=0.05)
+ fixation = GazeFeatures.UnvalidGazeMovement()
+ look_at = None
+ scan_step_analysis = {}
+ aoi_scan_step_analysis = {}
+ exception = e
# Unlock frame exploitation
self.__look_lock.release()
# Return look data
- return fixation, look_at, scan_step_analysis, aoi_scan_step_analysis
+ return fixation, look_at, scan_step_analysis, aoi_scan_step_analysis, exception
def draw(self, image:numpy.array):
"""
@@ -551,6 +564,15 @@ class ArScene():
new_aruco_scene = None
+ # Load optional aoi filter
+ try:
+
+ aoi_exclude_list = scene_data.pop('aoi_exclude')
+
+ except KeyError:
+
+ aoi_exclude_list = []
+
# Load aoi 3d scene
try:
@@ -561,12 +583,12 @@ class ArScene():
if type(aoi_3d_scene_value) == str:
obj_filepath = os.path.join(working_directory, aoi_3d_scene_value)
- new_aoi_3d_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath)
+ new_aoi_3d_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath).copy(exclude=aoi_exclude_list)
# dict:
else:
- new_aoi_3d_scene = AOI3DScene.AOI3DScene(aoi_3d_scene_value)
+ new_aoi_3d_scene = AOI3DScene.AOI3DScene(aoi_3d_scene_value).copy(exclude=aoi_exclude_list)
except KeyError:
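
The two hunks above add an optional 'aoi_exclude' entry to the scene data: the key is popped (falling back to an empty list when absent) and handed to AOI3DScene.copy(exclude=...) so the listed AOI are left out of the loaded 3D scene. A minimal sketch of that loading pattern, assuming 'aoi_exclude' holds AOI names; the scene dictionary below is illustrative and only the pop()/copy(exclude=...) logic mirrors the patch:

    # Hypothetical scene data; only the 'aoi_exclude' handling mirrors the patch.
    scene_data = {
        'aoi_scene': 'aoi_3d_scene.obj',        # illustrative key, not from the patch
        'aoi_exclude': ['Screen_reflection'],   # AOI assumed to be excluded by name
    }

    # Pop the optional filter, defaulting to an empty list when the key is missing
    try:
        aoi_exclude_list = scene_data.pop('aoi_exclude')
    except KeyError:
        aoi_exclude_list = []

    # With argaze available, the scene would then be loaded and filtered like:
    #   AOI3DScene.AOI3DScene.from_obj(obj_filepath).copy(exclude=aoi_exclude_list)
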
@@ -1022,7 +1044,7 @@ class ArEnvironment():
return detection_time, exceptions
def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition):
- """Project timestamped gaze position into each scene."""
+ """Project timestamped gaze position into each frame."""
# Can't use camera frame when it is locked
if self.__camera_frame_lock.locked():
@@ -1054,7 +1076,7 @@ class ArEnvironment():
# QUESTION: How to project gaze precision?
inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y))
-
+
yield aoi_frame, aoi_frame.look(timestamp, inner_gaze_position * aoi_frame.size)
# Ignore missing aoi frame projection
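
Taken together, the ArFeatures.py hunks wrap the body of ArFrame.look in a try block, reset its outputs when anything raises, release the look lock as before, and return the caught exception (or None) as a fifth element of the tuple instead of letting the error propagate. A standalone sketch of that catch-and-report control flow; the helper process_gaze and the module-level lock are stand-ins for the real identification, scan path and heatmap code:

    import threading

    LOOK_LOCK = threading.Lock()

    def process_gaze(timestamp, gaze_position):
        # Stand-in for gaze movement identification, scan path analysis and heatmap update
        return 'fixation', 'aoi_name', {}, {}

    def look(timestamp, gaze_position):
        fixation, look_at = None, None
        scan_step_analysis, aoi_scan_step_analysis = {}, {}
        exception = None

        LOOK_LOCK.acquire()

        try:
            fixation, look_at, scan_step_analysis, aoi_scan_step_analysis = \
                process_gaze(timestamp, gaze_position)

        except Exception as e:

            # Reset outputs and keep the error instead of raising while the lock is held
            fixation, look_at = None, None
            scan_step_analysis, aoi_scan_step_analysis = {}, {}
            exception = e

        # Unlock frame exploitation in every case
        LOOK_LOCK.release()

        # The caught exception (or None) becomes part of the returned look data
        return fixation, look_at, scan_step_analysis, aoi_scan_step_analysis, exception

    if __name__ == '__main__':
        print(look(0, (0.5, 0.5)))
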
diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
index f8158d6..5e63a2d 100644
--- a/src/argaze/utils/demo_ar_features_run.py
+++ b/src/argaze/utils/demo_ar_features_run.py
@@ -48,7 +48,7 @@ def main():
for frame, look_data in ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y))):
# Unpack look data
- fixation, look_at, scan_step_analysis, aoi_scan_step_analysis = look_data
+ fixation, look_at, scan_step_analysis, aoi_scan_step_analysis, exception = look_data
# Do something with look data
# ...
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
index 8719e84..d15a3dc 100644
--- a/src/argaze/utils/demo_gaze_features_run.py
+++ b/src/argaze/utils/demo_gaze_features_run.py
@@ -56,7 +56,7 @@ def main():
timestamp = int((time.time() - start_time) * 1e3)
# Project gaze position into frame
- fixation, look_at, scan_step_analysis, aoi_scan_step_analysis = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
+ fixation, look_at, scan_step_analysis, aoi_scan_step_analysis, exception = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
# Do something with look data
# ...
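
With the fifth tuple element, both demo scripts now receive the exception caught inside look() alongside the other look data. A hedged sketch of how that value could be acted on, reusing the variables of the demo loop above; printing the error is an illustrative choice, not part of the patch:

    # Reuses ar_frame, GazeFeatures, timestamp, x and y from the demo loop above.
    fixation, look_at, scan_step_analysis, aoi_scan_step_analysis, exception = \
        ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))

    if exception is not None:

        # The error was caught inside look(); report it without breaking the loop
        print(f'ArFrame.look failed at {timestamp} ms: {exception}')

    else:

        # Safe to use fixation, look_at and the analysis dictionaries
        pass
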