aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--docs/user_guide/utils/demonstrations_scripts.md26
-rw-r--r--docs/user_guide/utils/ready-made_scripts.md8
-rw-r--r--src/argaze/ArFeatures.py265
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py21
-rw-r--r--src/argaze/DataFeatures.py322
-rw-r--r--src/argaze/utils/demo/A3_demo.pdf (renamed from src/argaze/utils/demo_data/A3_demo.pdf)bin125304 -> 125304 bytes
-rw-r--r--src/argaze/utils/demo/aoi_2d_scene.json (renamed from src/argaze/utils/demo_data/aoi_2d_scene.json)0
-rw-r--r--src/argaze/utils/demo/aoi_3d_scene.obj (renamed from src/argaze/utils/demo_data/aoi_3d_scene.obj)0
-rw-r--r--src/argaze/utils/demo/aruco_markers_group.json (renamed from src/argaze/utils/demo_data/aruco_markers_group.json)0
-rw-r--r--src/argaze/utils/demo/aruco_markers_group.obj (renamed from src/argaze/utils/demo_data/aruco_markers_group.obj)0
-rw-r--r--src/argaze/utils/demo/aruco_markers_pipeline.json64
-rw-r--r--src/argaze/utils/demo/demo.mov (renamed from src/argaze/utils/demo_data/demo.mov)bin13345258 -> 13345258 bytes
-rw-r--r--src/argaze/utils/demo/eyetracker_setup.json (renamed from src/argaze/utils/demo_data/provider_setup.json)9
-rw-r--r--src/argaze/utils/demo/frame_background.jpg (renamed from src/argaze/utils/demo_data/frame_background.jpg)bin19108 -> 19108 bytes
-rw-r--r--src/argaze/utils/demo/gaze_analysis_pipeline.json135
-rw-r--r--src/argaze/utils/demo/loggers.py (renamed from src/argaze/utils/demo_data/demo_loggers.py)0
-rw-r--r--src/argaze/utils/demo_data/demo_aruco_markers_setup.json167
-rw-r--r--src/argaze/utils/demo_data/demo_gaze_analysis_setup.json133
-rw-r--r--src/argaze/utils/eyetrackers/TobiiProGlasses2.py (renamed from src/argaze/utils/Providers/TobiiProGlasses2.py)71
-rw-r--r--src/argaze/utils/eyetrackers/__init__.py (renamed from src/argaze/utils/Providers/__init__.py)0
-rw-r--r--src/argaze/utils/pipeline_run.py (renamed from src/argaze/utils/worn_device_stream.py)68
21 files changed, 674 insertions, 615 deletions
diff --git a/docs/user_guide/utils/demonstrations_scripts.md b/docs/user_guide/utils/demonstrations_scripts.md
index a911173..c3a5c9b 100644
--- a/docs/user_guide/utils/demonstrations_scripts.md
+++ b/docs/user_guide/utils/demonstrations_scripts.md
@@ -14,7 +14,7 @@ Collection of command-line scripts for demonstration purpose.
Load ArFrame with a single ArLayer from **demo_gaze_analysis_setup.json** file then, simulate gaze position using mouse pointer to illustrate gaze features.
```shell
-python ./src/argaze/utils/demo_gaze_analysis_run.py ./src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
+python ./src/argaze/utils/demo_gaze_analysis_run.py ./src/argaze/utils/demo/gaze_analysis_pipeline.json
```
## ArUco markers pipeline demonstration
@@ -22,7 +22,7 @@ python ./src/argaze/utils/demo_gaze_analysis_run.py ./src/argaze/utils/demo_data
Load ArUcoCamera from **demo_aruco_markers_setup.json** file then, detect ArUco markers into a demo video source and estimate camera pose.
```shell
-python ./src/argaze/utils/demo_aruco_markers_run.py ./src/argaze/utils/demo_data/demo_aruco_markers_setup.json -s ./src/argaze/utils/demo_data/demo.mov
+python ./src/argaze/utils/demo_aruco_markers_run.py ./src/argaze/utils/demo/aruco_markers_pipeline.json -s ./src/argaze/utils/demo/demo.mov
```
!!! note
@@ -34,18 +34,28 @@ Load ArUcoCamera from a configuration file then, stream and process gaze positio
### With Tobii Pro Glasses 2 device
-To use a Tobii Pro Glasses 2 device, you need to edit **provider_setup.json** file as below with your own parameters values:
+To use a Tobii Pro Glasses 2 device, you need to edit **eyetracker_setup.json** file as below with your own parameters values:
-```shell
+```json
{
- "TobiiProGlasses2" : {
- "address": "10.34.0.17",
+ "argaze.utils.eyetrackers.TobiiProGlasses2.LiveStream" : {
+ "address": "10.34.0.12",
"project": "MyProject",
- "participant": "NewParticipant"
+ "participant": "NewParticipant",
+ "configuration": {
+ "sys_ec_preset": "Indoor",
+ "sys_sc_width": 1920,
+ "sys_sc_height": 1080,
+ "sys_sc_fps": 25,
+ "sys_sc_preset": "Auto",
+ "sys_et_freq": 50,
+ "sys_mems_freq": 100
+ },
+ "pipeline": "aruco_markers_pipeline.json"
}
}
```
```shell
-python ./src/argaze/utils/worn_device_stream.py ./src/argaze/utils/demo_data/demo_aruco_markers_setup.json
+python ./src/argaze/utils/pipeline_run.py ./src/argaze/utils/demo/eyetracker_setup.json
``` \ No newline at end of file
diff --git a/docs/user_guide/utils/ready-made_scripts.md b/docs/user_guide/utils/ready-made_scripts.md
index 9b8d455..262a0ef 100644
--- a/docs/user_guide/utils/ready-made_scripts.md
+++ b/docs/user_guide/utils/ready-made_scripts.md
@@ -9,6 +9,14 @@ Collection of command-line scripts to provide useful features.
!!! note
*Use -h option to get command arguments documentation.*
+## Eyetracker pipeline handler
+
+Load and execute eyetracker pipeline.
+
+```shell
+python ./src/argaze/utils/pipeline_run.py CONFIGURATION
+```
+
## ArUco markers group exporter
Detect DICTIONARY and SIZE ArUco markers inside a MOVIE frame then, export detected ArUco markers group as .obj file into an OUTPUT folder.
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 5e219ff..6b4b182 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -28,7 +28,6 @@ import time
from argaze import DataFeatures, GazeFeatures
from argaze.AreaOfInterest import *
from argaze.GazeAnalysis import *
-from argaze.utils import Providers
import numpy
import cv2
@@ -53,15 +52,6 @@ class SceneProjectionFailed(Exception):
super().__init__(message)
-class LoadingFailed(Exception):
- """
- Exception raised when attributes loading fails.
- """
-
- def __init__(self, message):
-
- super().__init__(message)
-
class DrawingFailed(Exception):
"""
Exception raised when drawing fails.
@@ -263,7 +253,7 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
if not found:
- raise LoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
+ raise DataFeatures.PipelineStepLoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
# Force scan path creation
if len(self.__aoi_scan_path_analyzers) > 0 and self.aoi_scan_path == None:
@@ -473,7 +463,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Init private attributes
self.__size = (1, 1)
- self.__provider = None
self.__gaze_position_calibrator = None
self.__gaze_movement_identifier = None
self.__filter_in_progress_identification = True
@@ -497,24 +486,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@size.setter
def size(self, size: tuple[int]):
self.__size = size
-
- @property
- def provider(self) -> DataFeatures.PipelineInputProvider:
- """Provider object related to this frame."""
- return self.__provider
-
- @provider.setter
- @DataFeatures.PipelineStepAttributeSetter
- def provider(self, provider: DataFeatures.PipelineInputProvider):
-
- assert(issubclass(type(provider), DataFeatures.PipelineInputProvider))
-
- self.__provider = provider
-
- # Edit parent
- if self.__provider is not None:
-
- self.__provider.parent = self
@property
def gaze_position_calibrator(self) -> GazeFeatures.GazePositionCalibrator:
@@ -625,7 +596,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
if not found:
- raise LoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
+ raise DataFeatures.PipelineStepLoadingFailed(f'{type(analyzer)} analyzer loading fails because {property_type} analyzer is missing.')
# Force scan path creation
if len(self.__scan_path_analyzers) > 0 and self.scan_path == None:
@@ -734,7 +705,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
d = {
**DataFeatures.PipelineStepObject.as_dict(self),
"size": self.__size,
- "provider": self.__provider,
"gaze_position_calibrator": self.__gaze_position_calibrator,
"gaze_movement_identifier": self.__gaze_movement_identifier,
"filter_in_progress_identification": self.__filter_in_progress_identification,
@@ -963,7 +933,17 @@ class ArScene(DataFeatures.PipelineStepObject):
for layer_name, layer_data in layers.items():
- self._layers[layer_name] = ArLayer(name = layer_name, **layer_data)
+ if type(layer_data) == dict:
+
+ self._layers[layer_name] = ArLayer(name = layer_name, **layer_data)
+
+ # str: relative path to JSON file
+ elif type(layer_data) == str:
+
+ self._layers[layer_name] = DataFeatures.from_json(os.path.join(DataFeatures.get_working_directory(), layer_data))
+
+ # Loaded layer name have to be equals to dictionary key
+ assert(self._layers[layer_name].name == layer_name)
# Edit parent
for name, layer in self._layers.items():
@@ -984,7 +964,17 @@ class ArScene(DataFeatures.PipelineStepObject):
for frame_name, frame_data in frames.items():
- new_frame = ArFrame(name = frame_name, **frame_data)
+ if type(frame_data) == dict:
+
+ new_frame = ArFrame(name = frame_name, **frame_data)
+
+ # str: relative path to JSON file
+ elif type(frame_data) == str:
+
+ new_frame = DataFeatures.from_json(os.path.join(DataFeatures.get_working_directory(), frame_data))
+
+ # Loaded frame name have to be equals to dictionary key
+ assert(new_frame.name == frame_name)
# Look for a scene layer with an AOI named like the frame
for scene_layer_name, scene_layer in self.layers.items():
@@ -1295,27 +1285,29 @@ class ArCamera(ArFrame):
# Is there an AOI inside camera frame layers projection which its name equals to a scene frame name?
for camera_layer_name, camera_layer in self.layers.items():
- try:
+ if camera_layer.aoi_scene:
- aoi_2d = camera_layer.aoi_scene[scene_frame.name]
+ try:
- if timestamped_gaze_position:
+ aoi_2d = camera_layer.aoi_scene[scene_frame.name]
- # TODO?: Should we prefer to use camera frame AOIMatcher object?
- if aoi_2d.contains_point(timestamped_gaze_position):
+ if timestamped_gaze_position:
- inner_x, inner_y = aoi_2d.clockwise().inner_axis(*timestamped_gaze_position)
+ # TODO?: Should we prefer to use camera frame AOIMatcher object?
+ if aoi_2d.contains_point(timestamped_gaze_position):
- # QUESTION: How to project gaze precision?
- inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y), timestamp=timestamped_gaze_position.timestamp)
+ inner_x, inner_y = aoi_2d.clockwise().inner_axis(*timestamped_gaze_position)
- # Project inner gaze position into scene frame
- scene_frame.look(inner_gaze_position * scene_frame.size)
+ # QUESTION: How to project gaze precision?
+ inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y), timestamp=timestamped_gaze_position.timestamp)
- # Ignore missing aoi in camera frame layer projection
- except KeyError as e:
+ # Project inner gaze position into scene frame
+ scene_frame.look(inner_gaze_position * scene_frame.size)
- pass
+ # Ignore missing aoi in camera frame layer projection
+ except KeyError as e:
+
+ pass
@DataFeatures.PipelineStepMethod
def map(self):
@@ -1348,3 +1340,182 @@ class ArCamera(ArFrame):
except KeyError:
pass
+
+
+# Define default ArContext image parameters
+DEFAULT_ARCONTEXT_IMAGE_PARAMETERS = {
+ "draw_exceptions": True
+}
+
+class ArContext(DataFeatures.PipelineStepObject):
+ """
+ Define class to ...
+ """
+
+ @DataFeatures.PipelineStepInit
+ def __init__(self, **kwargs):
+
+ logging.debug('ArContext.__init__')
+
+ DataFeatures.PipelineStepObject.__init__(self)
+
+ # Init private attributes
+ self.__pipeline = None
+ self.__exceptions = DataFeatures.TimeStampedExceptions()
+
+ # Init protected attributes
+ self._image_parameters = DEFAULT_ARCONTEXT_IMAGE_PARAMETERS
+
+ @property
+ def pipeline(self) -> DataFeatures.PipelineStepObject:
+ """ArFrame used to process gaze data or ArCamera used to process gaze data and video of environment."""
+ return self.__pipeline
+
+ @pipeline.setter
+ @DataFeatures.PipelineStepAttributeSetter
+ def pipeline(self, pipeline: DataFeatures.PipelineStepObject):
+
+ assert(issubclass(type(pipeline), DataFeatures.PipelineStepObject))
+
+ self.__pipeline = pipeline
+
+ @property
+ def image_parameters(self) -> dict:
+ """Default image method parameters dictionary."""
+ return self._image_parameters
+
+ @image_parameters.setter
+ @DataFeatures.PipelineStepAttributeSetter
+ def image_parameters(self, image_parameters: dict):
+
+ self._image_parameters = image_parameters
+
+ def exceptions(self) -> DataFeatures.TimeStampedExceptions:
+ """Get exceptions list"""
+ return self.__exceptions
+
+ def as_dict(self) -> dict:
+ """Export Arcontext properties as dictionary."""
+
+ return {
+ **DataFeatures.PipelineStepObject.as_dict(self),
+ "pipeline": self.__pipeline,
+ "image_parameters": self._image_parameters
+ }
+
+ def __enter__(self):
+ """
+ Define abstract __enter__ method to use device as a context.
+
+ !!! warning
+ This method is called provided that the ArContext is created as a context using a with statement.
+ """
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """
+ Define abstract __exit__ method to use device as a context.
+
+ !!! warning
+ This method is called provided that the ArContext is created as a context using a with statement.
+ """
+ pass
+
+ def _process_gaze_position(self, timestamp: int|float, x: int|float = None, y: int|float = None, precision: int|float = None):
+ """Request pipeline to process new gaze position at a timestamp."""
+
+ logging.debug('%s._process_gaze_position timestamp: %f', type(self).__name__, timestamp)
+
+ if issubclass(type(self.__pipeline), ArFrame):
+
+ try:
+
+ if x is None and y is None:
+
+ # Edit empty gaze position
+ self.__pipeline.look( GazeFeatures.GazePosition( timestamp = timestamp) )
+
+ else:
+
+ # Edit gaze position
+ self.__pipeline.look( GazeFeatures.GazePosition( (x, y), precision = precision, timestamp = timestamp) )
+
+ except DataFeatures.TimestampedException as e:
+
+ self.__exceptions.append(e)
+
+ else:
+
+ raise(TypeError('Pipeline is not ArFrame instance.'))
+
+ def _process_camera_image(self, timestamp: int|float, image: numpy.ndarray):
+ """Request pipeline to process new camera image at a timestamp."""
+
+ logging.debug('%s._process_camera_image timestamp: %f', type(self).__name__, timestamp)
+
+ if issubclass(type(self.__pipeline), ArCamera):
+
+ height, width, _ = image.shape
+
+ # Compare image size with ArCamera frame size
+ if width != self.__pipeline.size[0] or height != self.__pipeline.size[1]:
+
+ logging.warning('image size (%i x %i) is different of ArCamera frame size (%i x %i)', width, height, self.__pipeline.size[0], self.__pipeline.size[1])
+ return
+
+ try:
+
+ logging.debug('\t> watch image (%i x %i)', width, height)
+
+ self.__pipeline.watch( image, timestamp = timestamp)
+
+ except DataFeatures.TimestampedException as e:
+
+ logging.warning('%s', e)
+
+ self.__exceptions.append(e)
+
+ else:
+
+ raise(TypeError('Pipeline is not ArCamera instance.'))
+
+ def __image(self, draw_exceptions: bool):
+ """
+ Get pipeline image with execution informations.
+
+ Parameters:
+ draw_exceptions: ...
+ """
+ logging.debug('%s.__image', type(self).__name__)
+
+ image = self.__pipeline.image()
+ height, width, _ = image.shape
+
+ logging.debug('\t> get image (%i x %i)', width, height)
+
+ if draw_exceptions:
+
+ # Write exceptions
+ while self.__exceptions:
+
+ e = self.__exceptions.pop()
+ i = len(self.__exceptions)
+
+ cv2.rectangle(image, (0, height-(i+1)*50), (width, height-(i)*50), (0, 0, 127), -1)
+ cv2.putText(image, f'error: {e}', (20, height-(i+1)*50+25), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+ return image
+
+ def image(self, **kwargs: dict) -> numpy.ndarray:
+ """
+ Get pipeline image.
+
+ Parameters:
+ kwargs: ArContext.__image parameters
+ """
+ # Use image_parameters attribute if no kwargs
+ if kwargs:
+
+ return self.__image(**kwargs)
+
+ return self.__image(**self._image_parameters)
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index df1362a..dda55be 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -52,6 +52,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Init private attribute
self.__aruco_detector = None
+ self.__sides_mask = 0
self._image_parameters = {**ArFeatures.DEFAULT_ARFRAME_IMAGE_PARAMETERS, **DEFAULT_ARUCOCAMERA_IMAGE_PARAMETERS}
@property
@@ -71,7 +72,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Optic parameters dimensions should be equal to camera frame size
if self.__aruco_detector.optic_parameters.dimensions != self.size:
- raise ArFeatures.LoadingFailed('ArUcoCamera: aruco_detector.optic_parameters.dimensions have to be equal to size.')
+ raise DataFeatures.PipelineStepLoadingFailed('ArUcoCamera: aruco_detector.optic_parameters.dimensions have to be equal to size.')
# No optic parameters loaded
else:
@@ -85,6 +86,16 @@ class ArUcoCamera(ArFeatures.ArCamera):
self.__aruco_detector.parent = self
+ @property
+ def sides_mask(self) -> int:
+ """Size of mask (pixel) to hide video left and right sides."""
+ return self.__sides_mask
+
+ @sides_mask.setter
+ def sides_mask(self, size: int):
+
+ self.__sides_mask = size
+
@ArFeatures.ArCamera.scenes.setter
@DataFeatures.PipelineStepAttributeSetter
def scenes(self, scenes: dict):
@@ -124,6 +135,14 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Use camera frame locker feature
with self._lock:
+ # Draw black rectangles to mask sides
+ if self.__sides_mask > 0:
+
+ height, width, _ = image.shape
+
+ cv2.rectangle(image, (0, 0), (self.__sides_mask, height), (0, 0, 0), -1)
+ cv2.rectangle(image, (width - self.__sides_mask, 0), (width, height), (0, 0, 0), -1)
+
# Detect aruco markers
self.__aruco_detector.detect_markers(image, timestamp=self.timestamp)
diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py
index e24ecf1..f573f1c 100644
--- a/src/argaze/DataFeatures.py
+++ b/src/argaze/DataFeatures.py
@@ -41,12 +41,6 @@ WORKING_DIRECTORY = [None]
def get_working_directory() -> str:
"""Get global working directory."""
-
- # Check global working directory
- if WORKING_DIRECTORY[0] is None:
-
- raise(ValueError(f'No working directory.'))
-
return WORKING_DIRECTORY[0]
def set_working_directory(working_directory: str):
@@ -62,15 +56,6 @@ def set_working_directory(working_directory: str):
WORKING_DIRECTORY[0] = working_directory
-def module_path(obj) -> str:
- """
- Get object module path.
-
- Returns:
- module path
- """
- return obj.__class__.__module__
-
def get_class(class_path: str) -> object:
"""Get class object from 'path.to.class' string.
@@ -105,6 +90,113 @@ def properties(cls) -> list:
return properties
+def from_json(configuration_filepath: str, patch_filepath: str = None) -> object:
+ """
+ Load object instance from .json file.
+
+ !!! note
+ The directory where configuration file is will be the global working directory.
+
+ Parameters:
+ configuration_filepath: path to json configuration file
+ patch_filepath: path to json patch file to modify any configuration entries
+ """
+
+ logging.debug('DataFeatures.from_json')
+
+ # Edit working directory once
+ if get_working_directory() is None:
+
+ set_working_directory(os.path.dirname(os.path.abspath(configuration_filepath)))
+
+ logging.debug('\t> set global working directory as %s', get_working_directory())
+
+ # Load configuration from JSON file
+ with open(configuration_filepath) as configuration_file:
+
+ object_data = json.load(configuration_file)
+
+ # Apply patch to configuration if required
+ if patch_filepath is not None:
+
+ with open(patch_filepath) as patch_file:
+
+ patch_data = json.load(patch_file)
+
+ import collections.abc
+
+ def update(d, u):
+
+ for k, v in u.items():
+
+ if isinstance(v, collections.abc.Mapping):
+
+ d[k] = update(d.get(k, {}), v)
+
+ elif v is None:
+
+ del d[k]
+
+ else:
+
+ d[k] = v
+
+ return d
+
+ object_data = update(object_data, patch_data)
+
+ # Load unique object
+ object_class, object_data = object_data.popitem()
+
+ # Instanciate class
+ logging.debug('\t+ create %s object', object_class)
+
+ return get_class(object_class)(**object_data)
+
+def from_dict(expected_value_type: type, data: dict) -> any:
+ """Load expected type instance(s) from dict values."""
+
+ logging.debug('\t> load %s from dict', expected_value_type.__name__)
+
+ # Check if json keys are PipelineStepObject class and store them in a list
+ new_objects_list = []
+
+ for key, value in data.items():
+
+ try:
+
+ new_class = get_class(key)
+
+ except ValueError as e:
+
+ # Keys are not class name
+ if str(e) == 'Empty module name':
+
+ break
+
+ else:
+
+ raise(e)
+
+ logging.debug('\t+ create %s object from key using value as argument', key)
+
+ new_objects_list.append( new_class(**value) )
+
+ # Only one object have been loaded: pass the object if it is a subclass of expected type
+ if len(new_objects_list) == 1 and issubclass(type(new_objects_list[0]), expected_value_type):
+
+ return new_objects_list[0]
+
+ # Pass non empty objects list
+ elif len(new_objects_list) > 0:
+
+ return new_objects_list
+
+ # Otherwise, data are parameters of the expected class
+ logging.debug('\t+ create %s object using dict as argument', expected_value_type.__name__)
+
+ return expected_value_type(**data)
+
def as_dict(obj, filter: bool=True) -> dict:
"""Export object as dictionary.
@@ -229,7 +321,7 @@ class TimestampedObjectsList(list):
def append(self, ts_object: TimestampedObject|dict):
"""Append timestamped object."""
- # Convert dict into GazePosition
+ # Convert dict into object
if type(ts_object) == dict:
ts_object = self.__object_type.from_dict(ts_object)
@@ -472,6 +564,33 @@ class SharedObject(TimestampedObject):
self._execution_times = {}
self._exceptions = {}
+class TimestampedException(Exception, TimestampedObject):
+ """Wrap exception to keep track of raising timestamp."""
+
+ def __init__(self, exception = Exception, timestamp: int|float = math.nan):
+
+ Exception.__init__(self, exception)
+ TimestampedObject.__init__(self, timestamp)
+
+class TimeStampedExceptions(TimestampedObjectsList):
+ """Handle timestamped exceptions into a list."""
+
+ def __init__(self, exceptions: list = []):
+
+ TimestampedObjectsList.__init__(self, TimestampedException, exceptions)
+
+ def values(self) -> list[str]:
+ """Get all timestamped exception values as list of messages."""
+ return [ts_exception.message for ts_exception in self]
+
+class PipelineStepLoadingFailed(Exception):
+ """
+ Exception raised when pipeline step object loading fails.
+ """
+ def __init__(self, message):
+
+ super().__init__(message)
+
def PipelineStepInit(method):
"""Define a decorator use into PipelineStepObject class to declare pipeline step init method."""
@@ -512,53 +631,10 @@ def PipelineStepAttributeSetter(method):
except KeyError:
- raise(ValueError(f'Annotations are missing for {method.__name__}: {method.__annotations__}'))
-
- logging.debug('@PipelineStepAttributeSetter %s.%s.setter(%s) with %s', type(self).__name__, method.__name__, expected_value_type.__name__, new_value_type.__name__)
+ raise(PipelineStepLoadingFailed(f'Annotations are missing for {method.__name__}: {method.__annotations__}'))
- # Define function to load dict values
- def load_dict(data: dict) -> any:
-
- logging.debug('\t> load %s from %s', expected_value_type.__name__, new_value_type.__name__)
-
- # Check if json keys are PipelineStepObject class and store them in a list
- new_objects_list = []
-
- for key, value in data.items():
-
- try:
-
- new_class = get_class(key)
-
- except ValueError as e:
-
- # Keys are not class name
- if str(e) == 'Empty module name':
-
- break
-
- else:
-
- raise(e)
-
- logging.debug('\t+ create %s object from key using value as argument', key)
-
- new_objects_list.append( new_class(**value) )
-
- # Only one object have been loaded: pass the object if it is a subclass of expected type
- if len(new_objects_list) == 1 and issubclass(type(new_objects_list[0]), expected_value_type):
-
- return new_objects_list[0]
-
- # Pass non empty objects list
- elif len(new_objects_list) > 0:
-
- return new_objects_list
-
- # Otherwise, data are parameters of the expected class
- logging.debug('\t+ create %s object using %s as argument', expected_value_type.__name__, new_value_type.__name__)
-
- return expected_value_type(**data)
+ logging.debug('%s@%s.setter', type(self).__name__, method.__name__)
+ logging.debug('\t> set %s with %s', expected_value_type.__name__, new_value_type.__name__)
# String not expected: load value from file
if new_value_type == str and new_value_type != expected_value_type:
@@ -568,28 +644,28 @@ def PipelineStepAttributeSetter(method):
# String have a dot inside: file path with format
if len(split_point) > 1:
- file_format = split_point[-1]
+ file_format = split_point[-1].upper()
- logging.debug('\t> %s is a path to a %s file', new_value, file_format.upper())
+ logging.debug('\t> %s is a path to a %s file', new_value, file_format)
filepath = os.path.join(get_working_directory(), new_value)
# Load image from JPG and PNG formats
- if file_format == 'jpg' or file_format == 'png':
+ if file_format == 'JPG' or file_format == 'PNG':
return method(self, cv2.imread(filepath))
# Load image from OBJ formats
- elif file_format == 'obj':
+ elif file_format == 'OBJ':
return method(self, expected_value_type.from_obj(filepath))
# Load object from JSON file
- elif file_format == 'json':
+ elif file_format == 'JSON':
with open(filepath) as file:
- return method(self, load_dict(json.load(file)))
+ return method(self, from_dict(expected_value_type, json.load(file)))
# No point inside string: identifier name
else:
@@ -602,7 +678,7 @@ def PipelineStepAttributeSetter(method):
# Dict not expected: load value from dict
if new_value_type == dict and expected_value_type != dict:
- return method(self, load_dict(new_value))
+ return method(self, from_dict(expected_value_type, new_value))
# Otherwise, pass new value to setter method
logging.debug('\t> use %s value as passed', new_value_type.__name__)
@@ -620,7 +696,7 @@ class PipelineStepObject():
def __init__(self, **kwargs):
"""Initialize PipelineStepObject."""
- logging.debug('PipelineStepObject.__init__ %s', type(self).__name__)
+ logging.debug('%s.__init__', type(self).__name__)
# Init private attribute
self.__name = None
@@ -633,6 +709,8 @@ class PipelineStepObject():
def __enter__(self):
"""At with statement start."""
+ logging.debug('%s.__enter__', type(self).__name__)
+
# Start children pipeline step objects
for child in self.children:
@@ -648,6 +726,8 @@ class PipelineStepObject():
def __exit__(self, exception_type, exception_value, exception_traceback):
"""At with statement end."""
+ logging.debug('%s.__exit__', type(self).__name__)
+
# End observers
for observer in self.__observers:
@@ -665,7 +745,7 @@ class PipelineStepObject():
if hasattr(self, key):
- logging.debug('PipelineStepObject.update_attributes %s.%s with %s value', type(self).__name__, key, type(value).__name__)
+ logging.debug('%s.update_attributes > update %s with %s value', type(self).__name__, key, type(value).__name__)
setattr(self, key, value)
@@ -721,65 +801,6 @@ class PipelineStepObject():
"observers": self.__observers
}
- @classmethod
- def from_json(cls, configuration_filepath: str, patch_filepath: str = None) -> object:
- """
- Load instance from .json file.
-
- !!! note
- The directory where configuration file is will be the global working directory.
-
- Parameters:
- configuration_filepath: path to json configuration file
- patch_filepath: path to json patch file to modify any configuration entries
- """
-
- logging.debug('%s.from_json', cls.__name__)
-
- # Edit working directory
- set_working_directory(os.path.dirname(os.path.abspath(configuration_filepath)))
-
- logging.debug('\t> set global working directory as %s', get_working_directory())
-
- # Load configuration from JSON file
- with open(configuration_filepath) as configuration_file:
-
- object_data = json.load(configuration_file)
-
- # Apply patch to configuration if required
- if patch_filepath is not None:
-
- with open(patch_filepath) as patch_file:
-
- patch_data = json.load(patch_file)
-
- import collections.abc
-
- def update(d, u):
-
- for k, v in u.items():
-
- if isinstance(v, collections.abc.Mapping):
-
- d[k] = update(d.get(k, {}), v)
-
- elif v is None:
-
- del d[k]
-
- else:
-
- d[k] = v
-
- return d
-
- object_data = update(object_data, patch_data)
-
- # Instanciate class
- logging.debug('\t+ create %s object from configuration updated by patch', cls.__name__)
-
- return cls(**object_data)
-
def to_json(self, json_filepath: str = None):
"""Save pipeline step object into .json file."""
@@ -791,10 +812,10 @@ class PipelineStepObject():
# Open file
with open(self.__json_filepath, 'w', encoding='utf-8') as object_file:
- json.dump({module_path(self):as_dict(self)}, object_file, ensure_ascii=False, indent=4)
+ json.dump({self.__class__.__module__:as_dict(self)}, object_file, ensure_ascii=False, indent=4)
# QUESTION: maybe we need two saving mode?
- #json.dump(self, object_file, ensure_ascii=False, indent=4, cls=DataFeatures.JsonEncoder)
+ #json.dump(self, object_file, ensure_ascii=False, indent=4, cls=JsonEncoder)
def __str__(self) -> str:
"""
@@ -973,10 +994,10 @@ def PipelineStepMethod(method):
# Call subscription
subscription(timestamp, self, exception)
- # Raise exception
+ # Raise timestamped exception
if exception is not None:
- raise exception
+ raise TimestampedException(exception, timestamp)
return result
@@ -1006,36 +1027,3 @@ class PipelineStepObserver():
This method is called provided that the observed PipelineStepObject is created as a context using a with statement.
"""
pass
-
-class PipelineInputProvider(PipelineStepObject):
- """
- Define class to ...
- """
- @PipelineStepInit
- def __init__(self, **kwargs):
-
- logging.debug('PipelineInputProvider.__init__')
-
- PipelineStepObject.__init__(self)
-
- def attach(self, method):
-
- logging.debug('PipelineInputProvider.attach', method)
-
- def __enter__(self):
- """
- Define abstract __enter__ method to use device as a context.
-
- !!! warning
- This method is called provided that the PipelineInputProvider is created as a context using a with statement.
- """
- return self
-
- def __exit__(self, type, value, traceback):
- """
- Define abstract __exit__ method to use device as a context.
-
- !!! warning
- This method is called provided that the PipelineInputProvider is created as a context using a with statement.
- """
- pass \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/A3_demo.pdf b/src/argaze/utils/demo/A3_demo.pdf
index cc51bc2..cc51bc2 100644
--- a/src/argaze/utils/demo_data/A3_demo.pdf
+++ b/src/argaze/utils/demo/A3_demo.pdf
Binary files differ
diff --git a/src/argaze/utils/demo_data/aoi_2d_scene.json b/src/argaze/utils/demo/aoi_2d_scene.json
index ac58b63..ac58b63 100644
--- a/src/argaze/utils/demo_data/aoi_2d_scene.json
+++ b/src/argaze/utils/demo/aoi_2d_scene.json
diff --git a/src/argaze/utils/demo_data/aoi_3d_scene.obj b/src/argaze/utils/demo/aoi_3d_scene.obj
index 0ce97de..0ce97de 100644
--- a/src/argaze/utils/demo_data/aoi_3d_scene.obj
+++ b/src/argaze/utils/demo/aoi_3d_scene.obj
diff --git a/src/argaze/utils/demo_data/aruco_markers_group.json b/src/argaze/utils/demo/aruco_markers_group.json
index e103d14..e103d14 100644
--- a/src/argaze/utils/demo_data/aruco_markers_group.json
+++ b/src/argaze/utils/demo/aruco_markers_group.json
diff --git a/src/argaze/utils/demo_data/aruco_markers_group.obj b/src/argaze/utils/demo/aruco_markers_group.obj
index 83935ef..83935ef 100644
--- a/src/argaze/utils/demo_data/aruco_markers_group.obj
+++ b/src/argaze/utils/demo/aruco_markers_group.obj
diff --git a/src/argaze/utils/demo/aruco_markers_pipeline.json b/src/argaze/utils/demo/aruco_markers_pipeline.json
new file mode 100644
index 0000000..a4fe400
--- /dev/null
+++ b/src/argaze/utils/demo/aruco_markers_pipeline.json
@@ -0,0 +1,64 @@
+{
+ "argaze.ArUcoMarkers.ArUcoCamera.ArUcoCamera": {
+ "name": "demo_camera",
+ "size": [1920, 1080],
+ "aruco_detector": {
+ "dictionary": "DICT_APRILTAG_16h5",
+ "parameters": {
+ "useAruco3Detection": 1
+ }
+ },
+ "sides_mask": 420,
+ "layers": {
+ "demo_layer": {}
+ },
+ "image_parameters": {
+ "background_weight": 1,
+ "draw_layers": {
+ "demo_layer": {
+ "draw_aoi_scene": {
+ "draw_aoi": {
+ "color": [255, 255, 255],
+ "border_size": 1
+ }
+ }
+ }
+ },
+ "draw_gaze_positions": {
+ "color": [0, 255, 255],
+ "size": 4
+ },
+ "draw_detected_markers": {
+ "color": [0, 255, 0],
+ "draw_axes": {
+ "thickness": 3
+ }
+ },
+ "draw_scenes": {
+ "demo_scene": {
+ "draw_aruco_markers_group": {
+ "draw_axes": {
+ "thickness": 3,
+ "length": 10
+ }
+ }
+ }
+ }
+ },
+ "scenes": {
+ "demo_scene" : {
+ "aruco_markers_group": "aruco_markers_group.json",
+ "layers": {
+ "demo_layer" : {
+ "aoi_scene": "aoi_3d_scene.obj"
+ }
+ },
+ "frames": {
+ "GrayRectangle": "gaze_analysis_pipeline.json"
+ },
+ "angle_tolerance": 15.0,
+ "distance_tolerance": 2.54
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/demo.mov b/src/argaze/utils/demo/demo.mov
index bba7999..bba7999 100644
--- a/src/argaze/utils/demo_data/demo.mov
+++ b/src/argaze/utils/demo/demo.mov
Binary files differ
diff --git a/src/argaze/utils/demo_data/provider_setup.json b/src/argaze/utils/demo/eyetracker_setup.json
index eac909c..70f85e4 100644
--- a/src/argaze/utils/demo_data/provider_setup.json
+++ b/src/argaze/utils/demo/eyetracker_setup.json
@@ -1,6 +1,7 @@
{
- "argaze.utils.Providers.TobiiProGlasses2.Provider" : {
- "address": "10.34.0.12",
+ "argaze.utils.eyetrackers.TobiiProGlasses2.LiveStream" : {
+ "name": "Tobii Pro Glasses 2 live stream",
+ "address": "10.34.0.17",
"project": "MyProject",
"participant": "NewParticipant",
"configuration": {
@@ -11,6 +12,10 @@
"sys_sc_preset": "Auto",
"sys_et_freq": 50,
"sys_mems_freq": 100
+ },
+ "pipeline": "aruco_markers_pipeline.json",
+ "image_parameters": {
+ "draw_exceptions": true
}
}
} \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/frame_background.jpg b/src/argaze/utils/demo/frame_background.jpg
index 7aabe63..7aabe63 100644
--- a/src/argaze/utils/demo_data/frame_background.jpg
+++ b/src/argaze/utils/demo/frame_background.jpg
Binary files differ
diff --git a/src/argaze/utils/demo/gaze_analysis_pipeline.json b/src/argaze/utils/demo/gaze_analysis_pipeline.json
new file mode 100644
index 0000000..07b7e78
--- /dev/null
+++ b/src/argaze/utils/demo/gaze_analysis_pipeline.json
@@ -0,0 +1,135 @@
+{
+ "argaze.ArFeatures.ArFrame": {
+ "name": "GrayRectangle",
+ "size": [1920, 1149],
+ "background": "frame_background.jpg",
+ "gaze_movement_identifier": {
+ "argaze.GazeAnalysis.DispersionThresholdIdentification.GazeMovementIdentifier": {
+ "deviation_max_threshold": 50,
+ "duration_min_threshold": 200
+ }
+ },
+ "filter_in_progress_identification": false,
+ "scan_path": {
+ "duration_max": 10000
+ },
+ "scan_path_analyzers": {
+ "argaze.GazeAnalysis.Basic.ScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.KCoefficient.ScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.NearestNeighborIndex.ScanPathAnalyzer": {
+ "size": [1920, 1149]
+ },
+ "argaze.GazeAnalysis.ExploreExploitRatio.ScanPathAnalyzer": {
+ "short_fixation_duration_threshold": 0
+ }
+ },
+ "heatmap": {
+ "size": [320, 240]
+ },
+ "layers": {
+ "demo_layer": {
+ "aoi_scene": "aoi_2d_scene.json",
+ "aoi_matcher": {
+ "argaze.GazeAnalysis.DeviationCircleCoverage.AOIMatcher": {
+ "coverage_threshold": 0.5
+ }
+ },
+ "aoi_scan_path": {
+ "duration_max": 30000
+ },
+ "aoi_scan_path_analyzers": {
+ "argaze.GazeAnalysis.Basic.AOIScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.TransitionMatrix.AOIScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.KCoefficient.AOIScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.LempelZivComplexity.AOIScanPathAnalyzer": {},
+ "argaze.GazeAnalysis.NGram.AOIScanPathAnalyzer": {
+ "n_min": 3,
+ "n_max": 3
+ },
+ "argaze.GazeAnalysis.Entropy.AOIScanPathAnalyzer":{}
+ },
+ "observers": {
+ "loggers.AOIScanPathAnalysisLogger": {
+ "path": "_export/logs/aoi_scan_path_metrics.csv",
+ "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "LZC"]
+ }
+ }
+ }
+ },
+ "image_parameters": {
+ "background_weight": 1,
+ "heatmap_weight": 0.5,
+ "draw_scan_path": {
+ "draw_fixations": {
+ "deviation_circle_color": [255, 0, 255],
+ "duration_border_color": [127, 0, 127],
+ "duration_factor": 1e-2
+ },
+ "draw_saccades": {
+ "line_color": [255, 0, 255]
+ }
+ },
+ "draw_layers": {
+ "demo_layer": {
+ "draw_aoi_scene": {
+ "draw_aoi": {
+ "color": [255, 255, 255],
+ "border_size": 1
+ }
+ },
+ "draw_aoi_matching": {
+ "draw_matched_fixation": {
+ "deviation_circle_color": [255, 255, 255],
+ "draw_positions": {
+ "position_color": [0, 255, 0],
+ "line_color": [0, 0, 0]
+ }
+ },
+ "draw_matched_region": {
+ "color": [0, 255, 0],
+ "border_size": 4
+ },
+ "draw_looked_aoi": {
+ "color": [0, 255, 0],
+ "border_size": 2
+ },
+ "looked_aoi_name_color": [255, 255, 255],
+ "looked_aoi_name_offset": [0, -10]
+ }
+ }
+ },
+ "draw_fixations": {
+ "deviation_circle_color": [255, 255, 255],
+ "duration_border_color": [127, 0, 127],
+ "duration_factor": 1e-2,
+ "draw_positions": {
+ "position_color": [0, 255, 255],
+ "line_color": [0, 0, 0]
+ }
+ },
+ "draw_saccades": {
+ "line_color": [255, 0, 255]
+ },
+ "draw_gaze_positions": {
+ "color": [0, 255, 255],
+ "size": 2
+ }
+ },
+ "observers": {
+ "loggers.FixationLogger": {
+ "path": "_export/logs/fixations.csv",
+ "header": ["Timestamp (ms)", "Focus (px)", "Duration (ms)", "AOI"]
+ },
+ "loggers.ScanPathAnalysisLogger": {
+ "path": "_export/logs/scan_path_metrics.csv",
+ "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "NNI", "XXR"]
+ },
+ "loggers.VideoRecorder": {
+ "path": "_export/logs/video.mp4",
+ "width": 1920,
+ "height": 1080,
+ "fps": 15
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/demo_loggers.py b/src/argaze/utils/demo/loggers.py
index 5f1986e..5f1986e 100644
--- a/src/argaze/utils/demo_data/demo_loggers.py
+++ b/src/argaze/utils/demo/loggers.py
diff --git a/src/argaze/utils/demo_data/demo_aruco_markers_setup.json b/src/argaze/utils/demo_data/demo_aruco_markers_setup.json
deleted file mode 100644
index 9a95524..0000000
--- a/src/argaze/utils/demo_data/demo_aruco_markers_setup.json
+++ /dev/null
@@ -1,167 +0,0 @@
-{
- "name": "demo_camera",
- "size": [1280, 720],
- "aruco_detector": {
- "dictionary": "DICT_APRILTAG_16h5",
- "parameters": {
- "useAruco3Detection": 1
- }
- },
- "layers": {
- "demo_layer": {}
- },
- "image_parameters": {
- "background_weight": 1,
- "draw_layers": {
- "demo_layer": {
- "draw_aoi_scene": {
- "draw_aoi": {
- "color": [255, 255, 255],
- "border_size": 1
- }
- }
- }
- },
- "draw_gaze_positions": {
- "color": [0, 255, 255],
- "size": 4
- },
- "draw_detected_markers": {
- "color": [0, 255, 0],
- "draw_axes": {
- "thickness": 3
- }
- },
- "draw_scenes": {
- "demo_scene": {
- "draw_aruco_markers_group": {
- "draw_axes": {
- "thickness": 3,
- "length": 10
- }
- }
- }
- }
- },
- "scenes": {
- "demo_scene" : {
- "aruco_markers_group": "aruco_markers_group.json",
- "layers": {
- "demo_layer" : {
- "aoi_scene": "aoi_3d_scene.obj"
- }
- },
- "frames": {
- "GrayRectangle": {
- "size": [1920, 1149],
- "background": "frame_background.jpg",
- "gaze_movement_identifier": {
- "argaze.GazeAnalysis.DispersionThresholdIdentification.GazeMovementIdentifier": {
- "deviation_max_threshold": 50,
- "duration_min_threshold": 200
- }
- },
- "scan_path": {
- "duration_max": 10000
- },
- "scan_path_analyzers": {
- "argaze.GazeAnalysis.Basic.ScanPathAnalyzer": {},
- "argaze.GazeAnalysis.KCoefficient.ScanPathAnalyzer": {},
- "argaze.GazeAnalysis.NearestNeighborIndex.ScanPathAnalyzer": {
- "size": [1920, 1149]
- },
- "argaze.GazeAnalysis.ExploreExploitRatio.ScanPathAnalyzer": {
- "short_fixation_duration_threshold": 0
- }
- },
- "layers": {
- "demo_layer": {
- "aoi_scene": "aoi_2d_scene.json",
- "aoi_matcher": {
- "argaze.GazeAnalysis.FocusPointInside.AOIMatcher": {}
- },
- "aoi_scan_path": {
- "duration_max": 30000
- },
- "aoi_scan_path_analyzers": {
- "argaze.GazeAnalysis.Basic.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.TransitionMatrix.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.KCoefficient.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.LempelZivComplexity.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.NGram.AOIScanPathAnalyzer": {
- "n_min": 3,
- "n_max": 3
- },
- "argaze.GazeAnalysis.Entropy.AOIScanPathAnalyzer":{}
- },
- "observers": {
- "demo_loggers.AOIScanPathAnalysisLogger": {
- "path": "_export/logs/aoi_scan_path_metrics.csv",
- "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "LZC"]
- }
- }
- }
- },
- "image_parameters": {
- "background_weight": 1,
- "draw_scan_path": {
- "draw_fixations": {
- "deviation_circle_color": [255, 0, 255],
- "duration_border_color": [127, 0, 127],
- "duration_factor": 1e-2
- },
- "draw_saccades": {
- "line_color": [255, 0, 255]
- }
- },
- "draw_layers": {
- "demo_layer": {
- "draw_aoi_scene": {
- "draw_aoi": {
- "color": [255, 255, 255],
- "border_size": 1
- }
- },
- "draw_aoi_matching": {
- "draw_looked_aoi": {
- "color": [0, 255, 255],
- "border_size": 10
- },
- "looked_aoi_name_color": [255, 255, 255],
- "looked_aoi_name_offset": [10, 10]
- }
- }
- },
- "draw_fixations": {
- "deviation_circle_color": [255, 255, 255],
- "duration_border_color": [127, 0, 127],
- "duration_factor": 1e-2
- },
- "draw_gaze_positions": {
- "color": [0, 255, 255],
- "size": 2
- }
- },
- "observers": {
- "demo_loggers.FixationLogger": {
- "path": "_export/logs/fixations.csv",
- "header": ["Timestamp (ms)", "Focus (px)", "Duration (ms)", "AOI"]
- },
- "demo_loggers.ScanPathAnalysisLogger": {
- "path": "_export/logs/scan_path_metrics.csv",
- "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "NNI", "XXR"]
- },
- "demo_loggers.VideoRecorder": {
- "path": "_export/logs/video.mp4",
- "width": 1920,
- "height": 1080,
- "fps": 15
- }
- }
- }
- },
- "angle_tolerance": 15.0,
- "distance_tolerance": 2.54
- }
- }
-} \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
deleted file mode 100644
index c897fa0..0000000
--- a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
+++ /dev/null
@@ -1,133 +0,0 @@
-{
- "name": "demo_frame",
- "size": [1920, 1149],
- "background": "frame_background.jpg",
- "gaze_movement_identifier": {
- "argaze.GazeAnalysis.DispersionThresholdIdentification.GazeMovementIdentifier": {
- "deviation_max_threshold": 50,
- "duration_min_threshold": 200
- }
- },
- "filter_in_progress_identification": false,
- "scan_path": {
- "duration_max": 10000
- },
- "scan_path_analyzers": {
- "argaze.GazeAnalysis.Basic.ScanPathAnalyzer": {},
- "argaze.GazeAnalysis.KCoefficient.ScanPathAnalyzer": {},
- "argaze.GazeAnalysis.NearestNeighborIndex.ScanPathAnalyzer": {
- "size": [1920, 1149]
- },
- "argaze.GazeAnalysis.ExploreExploitRatio.ScanPathAnalyzer": {
- "short_fixation_duration_threshold": 0
- }
- },
- "heatmap": {
- "size": [320, 240]
- },
- "layers": {
- "demo_layer": {
- "aoi_scene": "aoi_2d_scene.json",
- "aoi_matcher": {
- "argaze.GazeAnalysis.DeviationCircleCoverage.AOIMatcher": {
- "coverage_threshold": 0.5
- }
- },
- "aoi_scan_path": {
- "duration_max": 30000
- },
- "aoi_scan_path_analyzers": {
- "argaze.GazeAnalysis.Basic.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.TransitionMatrix.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.KCoefficient.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.LempelZivComplexity.AOIScanPathAnalyzer": {},
- "argaze.GazeAnalysis.NGram.AOIScanPathAnalyzer": {
- "n_min": 3,
- "n_max": 3
- },
- "argaze.GazeAnalysis.Entropy.AOIScanPathAnalyzer":{}
- },
- "observers": {
- "demo_loggers.AOIScanPathAnalysisLogger": {
- "path": "_export/logs/aoi_scan_path_metrics.csv",
- "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "LZC"]
- }
- }
- }
- },
- "image_parameters": {
- "background_weight": 1,
- "heatmap_weight": 0.5,
- "draw_scan_path": {
- "draw_fixations": {
- "deviation_circle_color": [255, 0, 255],
- "duration_border_color": [127, 0, 127],
- "duration_factor": 1e-2
- },
- "draw_saccades": {
- "line_color": [255, 0, 255]
- }
- },
- "draw_layers": {
- "demo_layer": {
- "draw_aoi_scene": {
- "draw_aoi": {
- "color": [255, 255, 255],
- "border_size": 1
- }
- },
- "draw_aoi_matching": {
- "draw_matched_fixation": {
- "deviation_circle_color": [255, 255, 255],
- "draw_positions": {
- "position_color": [0, 255, 0],
- "line_color": [0, 0, 0]
- }
- },
- "draw_matched_region": {
- "color": [0, 255, 0],
- "border_size": 4
- },
- "draw_looked_aoi": {
- "color": [0, 255, 0],
- "border_size": 2
- },
- "looked_aoi_name_color": [255, 255, 255],
- "looked_aoi_name_offset": [0, -10]
- }
- }
- },
- "draw_fixations": {
- "deviation_circle_color": [255, 255, 255],
- "duration_border_color": [127, 0, 127],
- "duration_factor": 1e-2,
- "draw_positions": {
- "position_color": [0, 255, 255],
- "line_color": [0, 0, 0]
- }
- },
- "draw_saccades": {
- "line_color": [255, 0, 255]
- },
- "draw_gaze_positions": {
- "color": [0, 255, 255],
- "size": 2
- }
- },
- "observers": {
- "demo_loggers.FixationLogger": {
- "path": "_export/logs/fixations.csv",
- "header": ["Timestamp (ms)", "Focus (px)", "Duration (ms)", "AOI"]
- },
- "demo_loggers.ScanPathAnalysisLogger": {
- "path": "_export/logs/scan_path_metrics.csv",
- "header": ["Timestamp (ms)", "Duration (ms)", "Step", "K", "NNI", "XXR"]
- },
- "demo_loggers.VideoRecorder": {
- "path": "_export/logs/video.mp4",
- "width": 1920,
- "height": 1080,
- "fps": 15
- }
- }
-} \ No newline at end of file
diff --git a/src/argaze/utils/Providers/TobiiProGlasses2.py b/src/argaze/utils/eyetrackers/TobiiProGlasses2.py
index 8ab7417..94f31a7 100644
--- a/src/argaze/utils/Providers/TobiiProGlasses2.py
+++ b/src/argaze/utils/eyetrackers/TobiiProGlasses2.py
@@ -21,6 +21,7 @@ import sys
import logging
import socket
import threading
+import collections
import json
import time
import datetime
@@ -37,10 +38,11 @@ except ImportError:
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError, URLError
-from argaze import DataFeatures, GazeFeatures
+from argaze import ArFeatures, DataFeatures, GazeFeatures
from argaze.utils import UtilsFeatures
import numpy
+import cv2
import av
socket.IPPROTO_IPV6 = 41
@@ -278,7 +280,7 @@ class TobiiJsonDataParser():
return MarkerPosition(json_data['marker3d'], json_data['marker2d'])
-class Provider(DataFeatures.PipelineInputProvider):
+class LiveStream(ArFeatures.ArContext):
@DataFeatures.PipelineStepInit
def __init__(self, **kwargs):
@@ -298,6 +300,8 @@ class Provider(DataFeatures.PipelineInputProvider):
self.__configuration = {}
+ self.__video_borders_size = 0
+
self.__parser = TobiiJsonDataParser()
@property
@@ -332,7 +336,8 @@ class Provider(DataFeatures.PipelineInputProvider):
return self.__configuration
@configuration.setter
- def configuration(self, configuration:dict):
+ @DataFeatures.PipelineStepAttributeSetter
+ def configuration(self, configuration: dict):
self.__configuration = configuration
@@ -469,8 +474,8 @@ class Provider(DataFeatures.PipelineInputProvider):
if self.__configuration:
- configuration.update(self.__configuration)
- configuration = self.__post_request('/api/system/conf', configuration)
+ #configuration.update(self.__configuration)
+ configuration = self.__post_request('/api/system/conf', self.__configuration)
# Log current configuration
logging.info('Tobii Pro Glasses 2 configuration:')
@@ -588,6 +593,7 @@ class Provider(DataFeatures.PipelineInputProvider):
except TimeoutError:
logging.error('> timeout occurred while receiving data')
+ continue
if data is not None:
@@ -604,21 +610,16 @@ class Provider(DataFeatures.PipelineInputProvider):
# When gaze position is valid
if data_object.validity == 0:
- # Edit timestamped gaze position
- timestamped_gaze_position = GazeFeatures.GazePosition((int(data_object.value[0] * self.__video_width), int(data_object.value[1] * self.__video_height)), timestamp=timestamp)
-
- # DEBUG
- print('TobiiProGlasses2.__stream_data', timestamped_gaze_position)
- #self.gaze_position_callback(timestamped_gaze_position)
+ # Process timestamped gaze position
+ self._process_gaze_position(
+ timestamp = timestamp,
+ x = int(data_object.value[0] * self.__video_width),
+ y = int(data_object.value[1] * self.__video_height) )
else:
- # Edit empty gaze position
- empty_gaze_position = GazeFeatures.GazePosition(timestamp=timestamp)
-
- # DEBUG
- print('TobiiProGlasses2.__stream_data', empty_gaze_position)
- #self.gaze_position_callback(empty_gaze_position)
+ # Process empty gaze position
+ self._process_gaze_position(timestamp = timestamp)
def __stream_video(self):
"""Stream video from dedicated socket."""
@@ -627,7 +628,7 @@ class Provider(DataFeatures.PipelineInputProvider):
container = av.open(f'rtsp://{self.__address}:8554/live/scene', options={'rtsp_transport': 'tcp'})
self.__stream = container.streams.video[0]
- #self.__buffer = collections.OrderedDict()
+ self.__buffer = collections.OrderedDict()
for image in container.decode(self.__stream):
@@ -641,36 +642,18 @@ class Provider(DataFeatures.PipelineInputProvider):
if image is not None:
- timestamp = int(image.time * 1e6)
-
- logging.debug('> image timestamp: %d', image.time)
- '''
- # Select callback reading mode
- if len(self.reading_callbacks) > 0:
-
- # Lock data subcription
- self.__subcription_lock.acquire()
-
- # Share incoming data to all subscribers
- for callback in self.reading_callbacks:
-
- callback(timestamp, image.to_ndarray(format='bgr24'))
-
- # Unlock data subscription
- self.__subcription_lock.release()
+ if image.time is not None:
- # Select buffer reading mode
- else:
+ timestamp = int(image.time * 1e6)
+ image = image.to_ndarray(format='bgr24')
- # Lock buffer access
- self.__buffer_lock.acquire()
+ logging.debug('> image timestamp: %f', timestamp)
- # Decoding image and storing at time index
- self.__buffer[timestamp] = image.to_ndarray(format='bgr24')
+ # Process camera image
+ self._process_camera_image(
+ timestamp = timestamp,
+ image = image)
- # Unlock buffer access
- self.__buffer_lock.release()
- '''
def __keep_alive(self):
"""Maintain network connection."""
diff --git a/src/argaze/utils/Providers/__init__.py b/src/argaze/utils/eyetrackers/__init__.py
index b76cd8b..b76cd8b 100644
--- a/src/argaze/utils/Providers/__init__.py
+++ b/src/argaze/utils/eyetrackers/__init__.py
diff --git a/src/argaze/utils/worn_device_stream.py b/src/argaze/utils/pipeline_run.py
index 3925bbe..dc9ef53 100644
--- a/src/argaze/utils/worn_device_stream.py
+++ b/src/argaze/utils/pipeline_run.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-"""Load ArUcoCamera from a configuration file then, stream and process gaze positions and image from any worn eye-tracker device.
+"""Load and execute eyetracker pipeline.
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
@@ -19,61 +19,34 @@ __copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
__license__ = "GPLv3"
import argparse
+import logging
import contextlib
-from argaze import GazeFeatures, DataFeatures
-from argaze.ArUcoMarkers import ArUcoCamera
+from argaze import DataFeatures, ArFeatures
import cv2
# Manage arguments
parser = argparse.ArgumentParser(description=__doc__.split('-')[0])
-parser.add_argument('configuration', metavar='CONFIGURATION', type=str, help='configuration filepath')
-parser.add_argument('-p', '--patch', metavar='PATCH', type=str, help='configuration patch filepath')
+parser.add_argument('configuration', metavar='CONFIGURATION', type=str, help='JSON configuration filepath')
+parser.add_argument('-p', '--patch', metavar='PATCH', type=str, help='JSON configuration patch filepath')
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='enable verbose mode to print information in console')
args = parser.parse_args()
+# Manage logging
+logging.basicConfig(format = '%(levelname)s: %(message)s', level = logging.DEBUG if args.verbose else logging.INFO)
+
def main():
- # Load ArUcoCamera configuration
- with ArUcoCamera.ArUcoCamera.from_json(args.configuration, args.patch) as aruco_camera:
+ # Load ArGaze context
+ with DataFeatures.from_json(args.configuration, args.patch) as context:
if args.verbose:
- print(aruco_camera)
-
- # Gaze position processing
- def gaze_position_callback(timestamped_gaze_position: GazeFeatures.GazePosition):
-
- # Project gaze position into environment
- try:
-
- aruco_camera.look(timestamped_gaze_position)
-
- # Handle exceptions
- except Exception as e:
-
- print(e)
-
- # Attach gaze position callback to provider
- aruco_camera.provider.attach(gaze_position_callback)
-
- # Image processing
- def image_callback(timestamp: int|float, image):
-
- # Detect ArUco code and project ArScenes
- try:
-
- # Watch ArUco markers into image and estimate camera pose
- aruco_camera.watch(image, timestamp=timestamp)
-
- # Handle exceptions
- except Exception as e:
-
- print(e)
+ print(context)
- # Attach image callback to provider
- aruco_camera.provider.attach(image_callback)
+ # Create a window to display context
+ cv2.namedWindow(context.name, cv2.WINDOW_AUTOSIZE)
# Waiting for 'ctrl+C' interruption
with contextlib.suppress(KeyboardInterrupt):
@@ -81,16 +54,19 @@ def main():
# Visualisation loop
while True:
- # Display camera frame image
- image = aruco_camera.image()
+ # DEBUG
+ print("DISPLAY", context.name)
- cv2.imshow(aruco_camera.name, image)
+ # Display context
+ cv2.imshow(context.name, context.image())
- # Display each scene frames image
- for scene_frame in aruco_camera.scene_frames():
+ # Head-monted eye tracker case: display environment frames image
+ if issubclass(type(context.pipeline), ArFeatures.ArCamera):
- cv2.imshow(scene_frame.name, scene_frame.image())
+ for scene_frame in context.pipeline.scene_frames():
+ cv2.imshow(scene_frame.name, scene_frame.image())
+
# Key interaction
key_pressed = cv2.waitKey(10)