Diffstat (limited to 'src')
-rw-r--r--  src/argaze/ArFeatures.py  106
1 file changed, 74 insertions, 32 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index e1edfdb..d20f359 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -343,6 +343,7 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
self.__aoi_scan_path_analyzed = False
if self.__aoi_matcher is not None and self.__aoi_scene is not None:
+
# Update looked aoi thanks to aoi matcher
# Note: don't filter valid/invalid and finished/unfinished fixation/saccade as we don't know how the aoi matcher works internally
self.__looked_aoi_name, _ = self.__aoi_matcher.match(gaze_movement, self.__aoi_scene)
@@ -369,6 +370,7 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Analyze aoi scan path
for aoi_scan_path_analyzer in self.__aoi_scan_path_analyzers:
+
aoi_scan_path_analyzer.analyze(self.__aoi_scan_path, timestamp=gaze_movement.timestamp)
# Update aoi scan path analyzed state
@@ -378,8 +380,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Append saccade to aoi scan path
if self.__aoi_scan_path is not None:
- logging.debug('\t> append saccade')
+ logging.debug('\t> append saccade')
self.__aoi_scan_path.append_saccade(gaze_movement)
@DataFeatures.PipelineStepDraw
@@ -576,12 +578,13 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
for a in self.__scan_path_analyzers:
if type(a) is property_type:
+
setattr(analyzer, name, a)
found = True
if not found:
- raise DataFeatures.PipelineStepLoadingFaile(
- f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
+
+ raise DataFeatures.PipelineStepLoadingFaile(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
# Force scan path creation
if len(self.__scan_path_analyzers) > 0 and self.__scan_path is None:
@@ -726,9 +729,9 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Identify gaze movement
if self.__gaze_movement_identifier is not None:
+
# Identify finished gaze movement
- self.__identified_gaze_movement = self.__gaze_movement_identifier.identify(
- self.__calibrated_gaze_position)
+ self.__identified_gaze_movement = self.__gaze_movement_identifier.identify(self.__calibrated_gaze_position)
# Valid and finished gaze movement has been identified
if self.__identified_gaze_movement and self.__identified_gaze_movement.is_finished():
@@ -737,6 +740,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Append fixation to scan path
if self.__scan_path is not None:
+
self.__scan_path.append_fixation(self.__identified_gaze_movement)
elif GazeFeatures.is_saccade(self.__identified_gaze_movement):
@@ -751,6 +755,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Analyze aoi scan path
for scan_path_analyzer in self.__scan_path_analyzers:
+
scan_path_analyzer.analyze(self.__scan_path, timestamp=self.__identified_gaze_movement.timestamp)
# Update scan path analyzed state
@@ -763,6 +768,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Update heatmap
if self.__heatmap is not None:
+
# Scale gaze position value
scale = numpy.array([self.__heatmap.size[0] / self.__size[0], self.__heatmap.size[1] / self.__size[1]])
@@ -772,6 +778,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Look layers with valid identified gaze movement
# Note: don't filter valid/invalid finished/unfinished gaze movement to allow layers to reset internally
for layer_name, layer in self._layers.items():
+
layer.look(self.__identified_gaze_movement)
@DataFeatures.PipelineStepImage
@@ -1543,31 +1550,54 @@ class ArContext(DataFeatures.PipelineStepObject):
lap_time, nb_laps, elapsed_time = self.__process_gaze_position_chrono.lap()
if elapsed_time > 1e3:
+
self.__process_gaze_position_frequency = nb_laps
self.__process_gaze_position_chrono.restart()
if issubclass(type(self.__pipeline), ArFrame):
- try:
+ #try:
- if x is None and y is None:
+ if x is None and y is None:
- # Edit empty gaze position
- self.__pipeline.look(GazeFeatures.GazePosition(timestamp=timestamp), catch_exceptions=self.__catch_exceptions)
+ # Edit empty gaze position
+ self.__pipeline.look(GazeFeatures.GazePosition(timestamp=timestamp), catch_exceptions=self.__catch_exceptions)
- else:
+ else:
- # Edit gaze position
- self.__pipeline.look(GazeFeatures.GazePosition((x, y), precision=precision, timestamp=timestamp), catch_exceptions=self.__catch_exceptions)
+ # Edit gaze position
+ self.__pipeline.look(GazeFeatures.GazePosition((x, y), precision=precision, timestamp=timestamp), catch_exceptions=self.__catch_exceptions)
- except DataFeatures.TimestampedException as e:
+ #except DataFeatures.TimestampedException as e:
- self.__exceptions.append(e)
+ #self.__exceptions.append(e)
else:
raise (TypeError('Pipeline is not ArFrame instance.'))
+ def process_gaze_position_performance(self) -> tuple[float, float]:
+ """Get process gaze position time and frequency.
+
+ Returns:
+ time: how long the execution of the pipeline look method took.
+ frequency: how many times per second the pipeline look method is called.
+ """
+
+ look_time = 0.
+
+ if issubclass(type(self.__pipeline), ArFrame):
+
+ try:
+
+ look_time = self.__pipeline.execution_times['look']
+
+ except KeyError:
+
+ look_time = 0.
+
+ return look_time, self.__process_gaze_position_frequency
+
def _process_camera_image(self, timestamp: int | float, image: numpy.array):
"""Request pipeline to process new camera image at a timestamp."""
@@ -1577,6 +1607,7 @@ class ArContext(DataFeatures.PipelineStepObject):
lap_time, nb_laps, elapsed_time = self.__process_camera_image_chrono.lap()
if elapsed_time > 1e3:
+
self.__process_camera_image_frequency = nb_laps
self.__process_camera_image_chrono.restart()
@@ -1586,8 +1617,8 @@ class ArContext(DataFeatures.PipelineStepObject):
# Compare image size with ArCamera frame size
if list(image.shape[0:2][::-1]) != self.__pipeline.size:
- logging.warning('%s._process_camera_image: image size (%i x %i) is different of ArCamera frame size (%i x %i)',
- DataFeatures.get_class_path(self), width, height, self.__pipeline.size[0], self.__pipeline.size[1])
+
+ logging.warning('%s._process_camera_image: image size (%i x %i) is different of ArCamera frame size (%i x %i)', DataFeatures.get_class_path(self), width, height, self.__pipeline.size[0], self.__pipeline.size[1])
return
try:
@@ -1609,6 +1640,28 @@ class ArContext(DataFeatures.PipelineStepObject):
raise (TypeError('Pipeline is not ArCamera instance.'))
+ def process_camera_image_performance(self) -> tuple[float, float]:
+ """Get process camera image time and frequency.
+
+ Returns:
+ time: how long the execution of the pipeline watch method took.
+ frequency: how many times per second the pipeline watch method is called.
+ """
+
+ watch_time = 0.
+
+ if issubclass(type(self.__pipeline), ArCamera):
+
+ try:
+
+ watch_time = self.__pipeline.execution_times['watch']
+
+ except KeyError:
+
+ watch_time = 0.
+
+ return watch_time, self.__process_camera_image_frequency
+
@DataFeatures.PipelineStepImage
def image(self, draw_times: bool = None, draw_exceptions: bool = None):
"""
@@ -1638,16 +1691,10 @@ class ArContext(DataFeatures.PipelineStepObject):
if issubclass(type(self.__pipeline), ArCamera):
- try:
-
- watch_time = int(self.__pipeline.execution_times['watch'])
-
- except KeyError:
-
- watch_time = math.nan
+ time, frequency = self.process_camera_image_performance()
info_stack += 1
- cv2.putText(image, f'Watch {watch_time}ms at {self.__process_camera_image_frequency}Hz', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Watch {int(time)}ms at {frequency}Hz', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
if last_position is not None:
@@ -1656,21 +1703,16 @@ class ArContext(DataFeatures.PipelineStepObject):
if issubclass(type(self.__pipeline), ArFrame):
- try:
-
- look_time = self.__pipeline.execution_times['look']
-
- except KeyError:
-
- look_time = math.nan
+ time, frequency = self.process_gaze_position_performance()
info_stack += 1
- cv2.putText(image, f'Look {look_time:.2f}ms at {self.__process_gaze_position_frequency}Hz', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Look {time:.2f}ms at {frequency}Hz', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
if draw_exceptions:
# Write exceptions
while self.__exceptions:
+
e = self.__exceptions.pop()
i = len(self.__exceptions)
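
For reference, a minimal usage sketch of the two performance getters introduced by this commit. This is not part of the patch: it assumes an already configured ArContext instance whose pipeline is an ArFrame (for look timing) and/or an ArCamera (for watch timing), and the create_demo_context() helper is a hypothetical placeholder for however that context is actually built or loaded.

# Minimal sketch, not part of this commit.
# Assumption: create_demo_context() is a hypothetical helper returning a
# configured ArContext instance; it stands in for the real setup code.
context = create_demo_context()

# Poll look performance (only meaningful when the pipeline is an ArFrame):
# returns (execution time of the last look call, gaze processing frequency).
look_time, look_frequency = context.process_gaze_position_performance()
print(f'Look {look_time:.2f}ms at {look_frequency}Hz')

# Poll watch performance (only meaningful when the pipeline is an ArCamera):
# returns (execution time of the last watch call, camera processing frequency).
watch_time, watch_frequency = context.process_camera_image_performance()
print(f'Watch {int(watch_time)}ms at {watch_frequency}Hz')

These calls mirror what the updated image() method now does internally when drawing the "Watch ... at ...Hz" and "Look ... at ...Hz" overlays; exposing them as public getters lets callers read the same timings without rendering an image.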