aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorThéo de la Hogue2024-01-24 00:19:05 +0100
committerThéo de la Hogue2024-01-24 00:19:05 +0100
commit75e2e040edee78139ea5e60e9dcf8962fa54cb7c (patch)
tree33928564a219396e79885481a032a7310248a1c2 /src
parentd904b99cc969c977f911d36cfeb2279544c528e5 (diff)
downloadargaze-75e2e040edee78139ea5e60e9dcf8962fa54cb7c.zip
argaze-75e2e040edee78139ea5e60e9dcf8962fa54cb7c.tar.gz
argaze-75e2e040edee78139ea5e60e9dcf8962fa54cb7c.tar.bz2
argaze-75e2e040edee78139ea5e60e9dcf8962fa54cb7c.tar.xz
Making look method as analysis iterator.
Diffstat (limited to 'src')
-rw-r--r--src/argaze/ArFeatures.py193
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py2
-rw-r--r--src/argaze/DataFeatures.py24
-rw-r--r--src/argaze/utils/demo_aruco_markers_run.py46
-rw-r--r--src/argaze/utils/demo_data/demo_gaze_analysis_setup.json35
-rw-r--r--src/argaze/utils/demo_gaze_analysis_run.py29
6 files changed, 147 insertions, 182 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index cb9658e..4989e65 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -7,7 +7,7 @@ __credits__ = []
__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
__license__ = "BSD"
-from typing import TypeVar, Tuple, Any
+from typing import TypeVar, Tuple, Any, Iterator, Union
from dataclasses import dataclass, field
import json
import os
@@ -107,7 +107,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
aoi_matcher: AOI matcher object
aoi_scan_path: AOI scan path object
aoi_scan_path_analyzers: dictionary of AOI scan path analyzers
- loggers: dictionary of timestamped data loggers
draw_parameters: default parameters passed to draw method
"""
@@ -116,7 +115,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
aoi_matcher: GazeFeatures.AOIMatcher = field(default_factory=GazeFeatures.AOIMatcher)
aoi_scan_path: GazeFeatures.AOIScanPath = field(default_factory=GazeFeatures.AOIScanPath)
aoi_scan_path_analyzers: dict = field(default_factory=dict)
- loggers: dict = field(default=dict)
draw_parameters: dict = field(default_factory=DEFAULT_ARLAYER_DRAW_PARAMETERS)
def __post_init__(self):
@@ -133,9 +131,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Init current looked aoi name
self.__looked_aoi_name = None
- # Init new analysis available state
- self.__new_analysis_available = False
-
# Cast aoi scene to its effective dimension
if self.aoi_scene.dimension == 2:
@@ -290,24 +285,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
pass
- # Load loggers
- new_loggers = {}
-
- try:
-
- new_loggers_value = layer_data.pop('loggers')
-
- for logger_name, logger_data in new_loggers_value.items():
-
- logger = DataFeatures.TimeStampedDataLogger.from_dict(logger_data)
- logger.name = logger_name
-
- new_loggers[logger_name] = logger
-
- except KeyError:
-
- pass
-
# Load image parameters
try:
@@ -323,7 +300,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
new_aoi_matcher, \
new_aoi_scan_path, \
new_aoi_scan_path_analyzers, \
- new_loggers, \
new_layer_draw_parameters \
)
@@ -361,14 +337,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
return self.__looked_aoi_name
- @property
- def new_analysis_available(self) -> bool:
- """Is there new aoi scan path analysis to check?"""
-
- return self.__new_analysis_available
-
@DataFeatures.PipelineStepMethod
- def look(self, timestamp: int|float, gaze_movement: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> dict:
+ def look(self, timestamp: int|float, gaze_movement: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> Iterator[Union[object, type, dict]]:
"""
Project timestamped gaze movement into layer.
@@ -377,6 +347,9 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
Parameters:
gaze_movement: gaze movement to project
+
+ Returns:
+ iterator: this layer, analyzer type and analysis dictionary.
"""
# Use try block to always release the layer lock in finally block
@@ -388,9 +361,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Update current gaze movement
self.__gaze_movement = gaze_movement
- # No new analysis available by default
- self.__new_analysis_available = False
-
# No looked aoi by default
self.__looked_aoi_name = None
@@ -418,8 +388,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Analyze aoi scan path
aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
- # Update new analysis available state
- self.__new_analysis_available = True
+ # Output analysis
+ yield self, aoi_scan_path_analyzer_module_path, aoi_scan_path_analyzer.analysis
elif GazeFeatures.is_saccade(gaze_movement):
@@ -427,11 +397,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
if self.aoi_scan_path is not None:
self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
-
- # Log look data
- for logger_name, logger in self.loggers.items():
-
- logger.emit(locals())
finally:
@@ -489,6 +454,11 @@ DEFAULT_ARFRAME_IMAGE_PARAMETERS = {
}
}
+def is_layer(obj):
+ """Is an object a layer?"""
+
+ return type(obj) == ArLayer
+
@dataclass
class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
"""
@@ -508,7 +478,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
heatmap: heatmap object
background: picture to draw behind
layers: dictionary of AOI layers
- loggers: dictionary of timestamped data loggers
image_parameters: default parameters passed to image method
"""
@@ -522,7 +491,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
heatmap: AOIFeatures.Heatmap = field(default_factory=AOIFeatures.Heatmap)
background: numpy.array = field(default_factory=lambda : numpy.array([]))
layers: dict = field(default_factory=dict)
- loggers: dict = field(default=dict)
image_parameters: dict = field(default_factory=DEFAULT_ARFRAME_IMAGE_PARAMETERS)
def __post_init__(self):
@@ -544,9 +512,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Init current gaze movement
self.__identified_gaze_movement = GazeFeatures.UnvalidGazeMovement()
- # Init new analysis available state
- self.__new_analysis_available = False
-
@classmethod
def from_dict(self, frame_data: dict, working_directory: str = None) -> ArFrameType:
"""Load attributes from dictionary.
@@ -734,24 +699,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
pass
- # Load loggers
- new_loggers = {}
-
- try:
-
- new_loggers_value = frame_data.pop('loggers')
-
- for logger_name, logger_data in new_loggers_value.items():
-
- logger = DataFeatures.TimeStampedDataLogger.from_dict(logger_data)
- logger.name = logger_name
-
- new_loggers[logger_name] = logger
-
- except KeyError:
-
- pass
-
# Load image parameters
try:
@@ -772,7 +719,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
new_heatmap, \
new_frame_background, \
new_layers, \
- new_loggers,
new_frame_image_parameters \
)
@@ -816,16 +762,10 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
return self.__identified_gaze_movement
- @property
- def new_analysis_available(self) -> bool:
- """Is there new scan path analysis to check?"""
-
- return self.__new_analysis_available
-
@DataFeatures.PipelineStepMethod
- def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()):
+ def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition = GazeFeatures.UnvalidGazePosition()) -> Iterator[Union[object, type, dict]]:
"""
- Project gaze position into frame.
+ Project timestamped gaze position into frame.
!!! warning
Be aware that gaze positions are in the same range of value than size attribute.
@@ -833,6 +773,9 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
Parameters:
timestamp: any number used to know when the given gaze position occurs
gaze_position: gaze position to project
+
+ Returns:
+ iterator: this frame or one of its layers, analyzer type and analysis dictionary.
"""
# Use try block to always release the frame lock in finally block
@@ -841,9 +784,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Lock frame exploitation
self.acquire()
- # No new analysis by default
- self.__new_analysis_available = False
-
# No gaze movement identified by default
self.__identified_gaze_movement = GazeFeatures.UnvalidGazeMovement()
@@ -888,8 +828,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Analyze aoi scan path
scan_path_analyzer.analyze(self.scan_path)
- # Update new analysis available state
- self.__new_analysis_available = True
+ # Output analysis
+ yield self, scan_path_analyzer_module_path, scan_path_analyzer.analysis
# No valid finished gaze movement: optionnaly stop in progress identification filtering
elif self.gaze_movement_identifier is not None and not self.filter_in_progress_identification:
@@ -909,12 +849,10 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Note: don't filter valid/unvalid finished/unfished gaze movement to allow layers to reset internally
for layer_name, layer in self.layers.items():
- layer.look(timestamp, self.__identified_gaze_movement)
+ for layer_output in layer.look(timestamp, self.__identified_gaze_movement):
- # Log look data
- for logger_name, logger in self.loggers.items():
-
- logger.emit(locals())
+ # Output layer analysis
+ yield layer_output
finally:
@@ -1012,6 +950,11 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
return self.__image(**self.image_parameters)
+def is_frame(obj):
+ """Is an object a frame?"""
+
+ return type(obj) == ArFrame or is_camera(obj)
+
@dataclass
class ArScene():
"""
@@ -1210,7 +1153,7 @@ class ArScene():
raise NotImplementedError('estimate_pose() method not implemented')
- def project(self, tvec: numpy.array, rvec: numpy.array, visual_hfov: float = 0., visual_vfov: float = 0.) -> Tuple[str, AOI2DScene.AOI2DScene]:
+ def project(self, tvec: numpy.array, rvec: numpy.array, visual_hfov: float = 0., visual_vfov: float = 0.) -> Iterator[Union[str, AOI2DScene.AOI2DScene]]:
"""Project layers according estimated pose and optional field of view clipping angles.
Parameters:
@@ -1220,8 +1163,7 @@ class ArScene():
visual_vfov: vertical field of view clipping angle
Returns:
- layer_name: name of projected layer
- layer_projection: AOI2DScene projection
+ iterator: name of projected layer and AOI2DScene projection
"""
for name, layer in self.layers.items():
@@ -1259,6 +1201,11 @@ class ArScene():
raise NotImplementedError('draw() method not implemented')
+def is_scene(obj):
+ """Is an object a scene?"""
+
+ return type(obj).__bases__[0] == ArScene
+
@dataclass
class ArCamera(ArFrame):
"""
@@ -1359,7 +1306,7 @@ class ArCamera(ArFrame):
raise NotImplementedError('from_json() method not implemented')
@property
- def scene_frames(self):
+ def scene_frames(self) -> Iterator[ArFrame]:
"""Iterate over all scenes frames"""
# For each scene
@@ -1381,56 +1328,65 @@ class ArCamera(ArFrame):
raise NotImplementedError('watch() method not implemented')
- def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition):
+ @DataFeatures.PipelineStepMethod
+ def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition) -> Iterator[Union[object, type, dict]]:
"""Project timestamped gaze position into each scene frames.
+ !!! warning
+ watch method needs to be called first.
+
Parameters:
timestamp: gaze position time stamp (unit does'nt matter)
gaze_position: GazePosition object
- !!! warning
- watch method needs to be called first.
+ Returns:
+ iterator: this camera frame or a scene frame or one of their layers, analyzer type and analysis dictionary.
"""
# Project gaze position into camera frame
- super().look(timestamp, gaze_position)
+ for camera_frame_output in super().look(timestamp, gaze_position):
- # yield camera frame to process its results
- yield self
+ # Output camera frame analysis
+ yield camera_frame_output
- # Lock camera frame exploitation
- self.acquire()
+ # Use try block to always release the camera frame lock in finally block
+ try:
- # Project gaze position into each scene frames if possible
- for scene_frame in self.scene_frames:
+ # Lock camera frame exploitation
+ self.acquire()
- # Is there an AOI inside camera frame layers projection which its name equals to a scene frame name?
- for camera_layer_name, camera_layer in self.layers.items():
+ # Project gaze position into each scene frames if possible
+ for scene_frame in self.scene_frames:
- try:
+ # Is there an AOI inside camera frame layers projection which its name equals to a scene frame name?
+ for camera_layer_name, camera_layer in self.layers.items():
- aoi_2d = camera_layer.aoi_scene[scene_frame.name]
+ try:
- # TODO?: Should we prefer to use camera frame AOIMatcher object?
- if aoi_2d.contains_point(gaze_position.value):
+ aoi_2d = camera_layer.aoi_scene[scene_frame.name]
- inner_x, inner_y = aoi_2d.clockwise().inner_axis(*gaze_position.value)
+ # TODO?: Should we prefer to use camera frame AOIMatcher object?
+ if aoi_2d.contains_point(gaze_position.value):
- # QUESTION: How to project gaze precision?
- inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y))
+ inner_x, inner_y = aoi_2d.clockwise().inner_axis(*gaze_position.value)
- scene_frame.look(timestamp, inner_gaze_position * scene_frame.size)
-
- # yield scene frame to process its results
- yield scene_frame
+ # QUESTION: How to project gaze precision?
+ inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y))
- # Ignore missing aoi in camera frame layer projection
- except KeyError:
+ for scene_frame_output in scene_frame.look(timestamp, inner_gaze_position * scene_frame.size):
+
+                                # Output scene frame analysis
+ yield scene_frame_output
- pass
+ # Ignore missing aoi in camera frame layer projection
+ except KeyError as e:
- # Unlock camera frame exploitation
- self.release()
+ pass
+
+ finally:
+
+ # Unlock camera frame exploitation
+ self.release()
def map(self):
"""Project camera frame background into scene frames background.
@@ -1472,3 +1428,8 @@ class ArCamera(ArFrame):
with open(json_filepath, 'w', encoding='utf-8') as file:
json.dump(self, file, ensure_ascii=False, indent=4, cls=DataFeatures.JsonEncoder)
+
+def is_camera(obj):
+ """Is an object a camera?"""
+
+ return type(obj).__bases__[0] == ArCamera \ No newline at end of file
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index 52979cc..dcabd81 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -143,7 +143,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
return ArUcoCamera.from_dict(aruco_camera_data, working_directory)
@DataFeatures.PipelineStepMethod
- def watch(self, timestamp: int|float, image: numpy.array) -> Tuple[float, float, dict]:
+ def watch(self, timestamp: int|float, image: numpy.array):
"""Detect environment aruco markers from image and project scenes into camera frame.
!!! note
diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py
index fec9f6d..a751fc5 100644
--- a/src/argaze/DataFeatures.py
+++ b/src/argaze/DataFeatures.py
@@ -450,25 +450,18 @@ TimeStampedDataLoggerType = TypeVar('TimeStampedDataLogger', bound="TimeStampedD
class TimeStampedDataLogger():
"""Abstract class to define what should provide a timestamped data logger."""
- name: str = field(default='')
- """Name of logger."""
-
- selector: str = field(default='True')
- """Code evaluated to handle log under a condition. Default 'True' string means that all incoming data will be accepted."""
-
formatter: str = field(default='')
"""Code evaluated to edit the log."""
@classmethod
- def from_dict(self, logger_data: dict) -> TimeStampedDataLoggerType:
+ def from_dict(self, logger_module_path: str, logger_parameters: dict) -> TimeStampedDataLoggerType:
"""Load timestamped data logger from dictionary.
Parameters:
- logger_data: dict to load
+ logger_module_path: module to load
+ logger_parameters: instance parameters
"""
- logger_module_path, logger_parameters = logger_data.popitem()
-
# Prepend argaze.DataLog path when a single name is provided
if len(logger_module_path.split('.')) == 1:
logger_module_path = f'argaze.DataLog.{logger_module_path}'
@@ -481,15 +474,18 @@ class TimeStampedDataLogger():
try:
- if eval(self.selector, globals(), context):
-
- self.handle(eval(self.formatter, globals(), context))
+ self.handle(eval(self.formatter, globals(), context))
except Exception as e:
print(f'Warning: the following error occurs in TimeStampedDataLogger.emit method ({self.name}):', e)
- def handle(self, log: any):
+ def setup(self, log: str|tuple):
+ """Prepare log emission to destination."""
+
+ raise NotImplementedError('setup() method not implemented')
+
+ def handle(self, log: str|tuple):
"""Handle log emission to destination."""
raise NotImplementedError('handle() method not implemented')
diff --git a/src/argaze/utils/demo_aruco_markers_run.py b/src/argaze/utils/demo_aruco_markers_run.py
index a0d044c..091b1e1 100644
--- a/src/argaze/utils/demo_aruco_markers_run.py
+++ b/src/argaze/utils/demo_aruco_markers_run.py
@@ -65,11 +65,19 @@ def main():
# Edit millisecond timestamp
timestamp = int((time.time() - start_time) * 1e3)
- # Project gaze position into camera
- aruco_camera.look(timestamp, GazeFeatures.GazePosition((x, y))):
+ try:
- # Assess gaze analysis
- gaze_analysis_time = aruco_camera.execution_times['look']
+ # Project gaze position into camera
+ for _ in aruco_camera.look(timestamp, GazeFeatures.GazePosition((x, y))):
+
+ pass
+
+ # Assess gaze analysis
+ gaze_analysis_time = aruco_camera.execution_times['look']
+
+ except Exception as e:
+
+ gaze_analysis_time = 0
# Attach mouse callback to window
cv2.setMouseCallback(aruco_camera.name, on_mouse_event)
@@ -88,12 +96,12 @@ def main():
# Waiting for 'ctrl+C' interruption
with contextlib.suppress(KeyboardInterrupt):
+ # Assess capture time
+ capture_start = time.time()
+
# Capture images
while video_capture.isOpened():
- # Assess capture time
- capture_start = time.time()
-
# Read video image
success, video_image = video_capture.read()
@@ -110,8 +118,17 @@ def main():
video_fps = nb_laps
video_chrono.restart()
- # Detect and project AR features
- detection_time, projection_time, exceptions = aruco_camera.watch(capture_time, video_image)
+ try:
+
+ # Detect and project AR features
+ aruco_camera.watch(capture_time, video_image)
+
+ exception = None
+
+ # Write errors
+ except Exception as e:
+
+ exception = e
# Assess visualisation time
visualisation_start = time.time()
@@ -119,17 +136,18 @@ def main():
# Get ArUcoCamera frame image
aruco_camera_image = aruco_camera.image()
+ # Get execution times
+ detection_time = aruco_camera.aruco_detector.execution_times['detect_markers']
+ projection_time = aruco_camera.execution_times['watch'] - detection_time
+
# Write time info
cv2.rectangle(aruco_camera_image, (0, 0), (aruco_camera.size[0], 100), (63, 63, 63), -1)
cv2.putText(aruco_camera_image, f'{video_fps} FPS | Capture {capture_time}ms | Detection {int(detection_time)}ms | Projection {int(projection_time)}ms | Visualisation {visualisation_time}ms', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
cv2.putText(aruco_camera_image, f'{gaze_positions_frequency} gaze positions/s | Gaze analysis {gaze_analysis_time:.2f}ms', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
# Handle exceptions
- for i, (scene_name, e) in enumerate(exceptions.items()):
-
- # Write errors
- cv2.rectangle(aruco_camera_image, (0, (i+1)*100), (aruco_camera.size[0], (i+2)*80), (127, 127, 127), -1)
- cv2.putText(aruco_camera_image, f'{scene_name} error: {e}', (20, (i+1)*140), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ cv2.rectangle(aruco_camera_image, (0, 100), (aruco_camera.size[0], 80), (127, 127, 127), -1)
+ cv2.putText(aruco_camera_image, f'error: {exception}', (20, 140), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
# Write hint
cv2.putText(aruco_camera_image, 'Mouve mouse pointer over gray rectangle area', (20, aruco_camera.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
diff --git a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
index 0600101..f921662 100644
--- a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
+++ b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
@@ -46,41 +46,6 @@
"n_max": 3
},
"Entropy":{}
- },
- "loggers": {
- "aoi_scan_path_metrics": {
- "FileWriter" : {
- "path": "_export/logs/aoi_scan_path_metrics.csv",
- "header": "Timestamp (ms), Duration (ms), Step, K, LZC",
- "selector": "self.new_analysis_available",
- "formatter": "timestamp, self.aoi_scan_path_analyzers['argaze.GazeAnalysis.Basic'].path_duration, self.aoi_scan_path_analyzers['argaze.GazeAnalysis.Basic'].steps_number, self.aoi_scan_path_analyzers['argaze.GazeAnalysis.KCoefficient'].K, self.aoi_scan_path_analyzers['argaze.GazeAnalysis.LempelZivComplexity'].lempel_ziv_complexity"
- }
- }
- }
- }
- },
- "loggers": {
- "fixations": {
- "FileWriter" : {
- "path": "_export/logs/Fixations.csv",
- "header": "Timestamp (ms), Focus (px), Duration (ms), AOI",
- "selector": "GazeFeatures.is_fixation(self.gaze_movement) and self.gaze_movement.finished",
- "formatter": "timestamp, self.gaze_movement.focus, self.gaze_movement.duration, self.layers['main_layer'].looked_aoi_name"
- }
- },
- "messages": {
- "FileWriter" : {
- "path": "_export/logs/Messages.csv",
- "selector": "GazeFeatures.is_fixation(self.gaze_movement) and not self.gaze_movement.finished",
- "formatter": "f'FixationInProgress Start={self.gaze_movement.positions.first[0]} Duration={self.gaze_movement.duration} AOI={self.layers[\"main_layer\"].looked_aoi_name} Probabilities={self.layers[\"main_layer\"].aoi_matcher.looked_probabilities}'"
- }
- },
- "scan_path_metrics": {
- "FileWriter" : {
- "path": "_export/logs/scan_path_metrics.csv",
- "header": "Timestamp (ms), Duration (ms), Step, K, NNI, XXR",
- "selector": "self.new_analysis_available",
- "formatter": "timestamp, self.scan_path_analyzers['argaze.GazeAnalysis.Basic'].path_duration, self.scan_path_analyzers['argaze.GazeAnalysis.Basic'].steps_number, self.scan_path_analyzers['argaze.GazeAnalysis.KCoefficient'].K, self.scan_path_analyzers['argaze.GazeAnalysis.NearestNeighborIndex'].nearest_neighbor_index, self.scan_path_analyzers['argaze.GazeAnalysis.ExploreExploitRatio'].explore_exploit_ratio"
}
}
},
diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py
index 5f46596..d36f1c8 100644
--- a/src/argaze/utils/demo_gaze_analysis_run.py
+++ b/src/argaze/utils/demo_gaze_analysis_run.py
@@ -14,6 +14,7 @@ import time
from argaze import ArFeatures, GazeFeatures
from argaze.AreaOfInterest import AOIFeatures
from argaze.GazeAnalysis import *
+from argaze.DataLog import FileWriter
import cv2
import numpy
@@ -34,6 +35,11 @@ def main():
# Load ArFrame
ar_frame = ArFeatures.ArFrame.from_json(args.frame)
+ # Create FileWriter loggers
+ fixation_logger = FileWriter.TimeStampedDataLogger(path="_export/logs/fixations.csv", header="Timestamp (ms), Focus (px), Duration (ms), AOI")
+ scan_path_logger = FileWriter.TimeStampedDataLogger(path="_export/logs/scan_path_metrics.csv", header="Timestamp (ms), Duration (ms), Step, K, NNI, XXR")
+ aoi_scan_path_logger = FileWriter.TimeStampedDataLogger(path="_export/logs/aoi_scan_path_metrics.csv", header="Timestamp (ms), Duration (ms), Step, K, LZC")
+
# Create a window to display ArCamera
cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)
@@ -49,8 +55,27 @@ def main():
# Edit millisecond timestamp
timestamp = int((time.time() - start_time) * 1e3)
- # Project gaze position into frame
- ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
+ # Project gaze position into frame and iterate over analysis
+ for element, module, analysis in ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y))):
+
+            # Check if analysis comes from frame
+ if ArFeatures.is_frame(element):
+
+ # Do something with scan path module analysis
+ ...
+
+            # Check if analysis comes from layer
+ elif ArFeatures.is_layer(element):
+
+ # Do something with aoi scan path module analysis
+ ...
+
+ # Log fixations
+ if GazeFeatures.is_fixation(ar_frame.gaze_movement) and ar_frame.gaze_movement.finished:
+
+ log = timestamp, ar_frame.gaze_movement.focus, ar_frame.gaze_movement.duration, ar_frame.layers['main_layer'].looked_aoi_name
+
+ fixation_logger.handle(log)
# Attach mouse callback to window
cv2.setMouseCallback(ar_frame.name, on_mouse_event)