author     Théo de la Hogue    2024-03-22 20:53:38 +0100
committer  Théo de la Hogue    2024-03-22 20:53:38 +0100
commit     a0e75293bba1dad08aabbb3b17beaa849fb35c5c (patch)
tree       958926ff77b47aba696965492bfcbecc00dabc55 /src
parent     95d7da25d4bbf3c50876bd2b2180319b7bc6ed80 (diff)
Fixing expected and excluded aoi in ArCamera.
Diffstat (limited to 'src')
-rw-r--r--    src/argaze/ArFeatures.py    143
1 file changed, 93 insertions(+), 50 deletions(-)
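
In short: the expected/excluded AOI setup that previously ran once in ArCamera.__init__ now lives in a private __update_expected_and_excluded_aoi() helper, and ArCamera overrides the layers setter and calls the helper from the scenes setter as well, so the lists are recomputed whichever attribute is assigned last. The sketch below only illustrates this pattern; it is not the argaze API: Layer, Scene and Camera are simplified stand-ins for ArLayer, ArScene and ArCamera, and the attribute names are placeholders.

# Minimal sketch, not the argaze API: Layer, Scene and Camera are simplified
# stand-ins for ArLayer, ArScene and ArCamera; attribute names are placeholders.

class Layer:
    """Stand-in for ArLayer: holds the expected AOI (scan path) and excluded AOI (matcher)."""
    def __init__(self, aoi=()):
        self.aoi_scene = dict.fromkeys(aoi)  # AOI names belonging to this layer
        self.expected_aoi = []               # would feed the AOI scan path
        self.excluded_aoi = []               # would feed the AOI matcher exclude list

class Scene:
    """Stand-in for ArScene: layers plus the frames rendered inside the scene."""
    def __init__(self, layers, frames=()):
        self.layers = layers
        self.frames = list(frames)

class Camera:
    """Stand-in for ArCamera: recomputes AOI lists whenever layers or scenes change."""
    def __init__(self):
        self._layers = {}
        self._scenes = {}

    @property
    def layers(self):
        return self._layers

    @layers.setter
    def layers(self, layers):
        self._layers = layers
        self._update_expected_and_excluded_aoi()  # keep AOI lists in sync

    @property
    def scenes(self):
        return self._scenes

    @scenes.setter
    def scenes(self, scenes):
        self._scenes = scenes
        self._update_expected_and_excluded_aoi()  # keep AOI lists in sync

    def _update_expected_and_excluded_aoi(self):
        # Nothing to do until both layers and scenes are known.
        if not (self._layers and self._scenes):
            return
        for layer_name, layer in self._layers.items():
            expected, excluded = [], []
            for scene in self._scenes.values():
                scene_layer = scene.layers.get(layer_name)
                if scene_layer is None:
                    continue
                expected.extend(scene_layer.aoi_scene.keys())
                # Scene frames are matched as frames, not as plain AOI:
                # drop them from the expected list and exclude them from matching.
                for frame_name in scene.frames:
                    if frame_name in expected:
                        expected.remove(frame_name)
                        excluded.append(frame_name)
            layer.expected_aoi = expected
            layer.excluded_aoi = excluded

# Whichever attribute is assigned last, both AOI lists end up consistent.
camera = Camera()
camera.layers = {"main": Layer()}
camera.scenes = {"demo": Scene({"main": Layer(["screen", "keyboard"])}, frames=["screen"])}
assert camera.layers["main"].expected_aoi == ["keyboard"]
assert camera.layers["main"].excluded_aoi == ["screen"]
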
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 4027bb8..8d0da7d 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -191,6 +191,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def aoi_matcher(self, aoi_matcher: GazeFeatures.AOIMatcher):
+ assert(issubclass(type(aoi_matcher), GazeFeatures.AOIMatcher))
+
self.__aoi_matcher = aoi_matcher
# Edit parent
@@ -207,6 +209,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def aoi_scan_path(self, aoi_scan_path: GazeFeatures.AOIScanPath):
+ assert(isinstance(aoi_scan_path, GazeFeatures.AOIScanPath))
+
self.__aoi_scan_path = aoi_scan_path
# Update expected AOI of AOI scan path
@@ -231,6 +235,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Connect analyzers if required
for analyzer in self.__aoi_scan_path_analyzers:
+ assert(issubclass(type(analyzer), GazeFeatures.AOIScanPathAnalyzer))
+
# Check scan path analyzer properties type
for name, item in type(analyzer).__dict__.items():
@@ -458,12 +464,12 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
self.__scan_path_analyzers = []
self.__background = numpy.full((1, 1, 3), 127).astype(numpy.uint8)
self.__heatmap = None
- self.__layers = {}
self.__calibrated_gaze_position = GazeFeatures.GazePosition()
self.__identified_gaze_movement = GazeFeatures.GazeMovement()
self.__scan_path_analyzed = False
# Init protected attributes
+ self._layers = {}
self._image_parameters = DEFAULT_ARFRAME_IMAGE_PARAMETERS
@property
@@ -484,6 +490,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def provider(self, provider: DataFeatures.PipelineInputProvider):
+ assert(issubclass(type(provider), DataFeatures.PipelineInputProvider))
+
self.__provider = provider
# Edit parent
@@ -498,7 +506,9 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@gaze_position_calibrator.setter
@DataFeatures.PipelineStepAttributeSetter
- def gaze_position_calibrator(self, gaze_position_calibrator:GazeFeatures.GazePositionCalibrator):
+ def gaze_position_calibrator(self, gaze_position_calibrator: GazeFeatures.GazePositionCalibrator):
+
+ assert(issubclass(type(gaze_position_calibrator), GazeFeatures.GazePositionCalibrator))
self.__gaze_position_calibrator = gaze_position_calibrator
@@ -516,6 +526,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def gaze_movement_identifier(self, gaze_movement_identifier: GazeFeatures.GazeMovementIdentifier):
+ assert(issubclass(type(gaze_movement_identifier), GazeFeatures.GazeMovementIdentifier))
+
self.__gaze_movement_identifier = gaze_movement_identifier
# Edit parent
@@ -543,6 +555,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def scan_path(self, scan_path: GazeFeatures.ScanPath) -> GazeFeatures.ScanPath:
+ assert(isinstance(scan_path, GazeFeatures.ScanPath))
+
self.__scan_path = scan_path
# Edit parent
@@ -564,6 +578,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Connect analyzers if required
for analyzer in self.__scan_path_analyzers:
+ assert(issubclass(type(analyzer), GazeFeatures.ScanPathAnalyzer))
+
# Check scan path analyzer properties type
for name, item in type(analyzer).__dict__.items():
@@ -625,6 +641,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepAttributeSetter
def heatmap(self, heatmap: AOIFeatures.Heatmap):
+ assert(isinstance(heatmap, AOIFeatures.Heatmap))
+
self.__heatmap = heatmap
# Default heatmap size equals frame size
@@ -640,20 +658,20 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
@property
def layers(self) -> dict:
"""Layers dictionary."""
- return self.__layers
+ return self._layers
@layers.setter
@DataFeatures.PipelineStepAttributeSetter
def layers(self, layers: dict):
- self.__layers = {}
+ self._layers = {}
for layer_name, layer_data in layers.items():
- self.__layers[layer_name] = ArLayer(name = layer_name, **layer_data)
+ self._layers[layer_name] = ArLayer(name = layer_name, **layer_data)
# Edit parent
- for name, layer in self.__layers.items():
+ for name, layer in self._layers.items():
layer.parent = self
@@ -707,7 +725,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
"scan_path_analyzers": self.__scan_path_analyzers,
"background": self.__background,
"heatmap": self.__heatmap,
- "layers": self.__layers,
+ "layers": self._layers,
"image_parameters": self._image_parameters
}
@@ -794,7 +812,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
# Look layers with valid identified gaze movement
# Note: don't filter valid/invalid or finished/unfinished gaze movements to allow layers to reset internally
- for layer_name, layer in self.__layers.items():
+ for layer_name, layer in self._layers.items():
layer.look(self.__identified_gaze_movement)
@@ -869,7 +887,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
try :
- self.__layers[layer_name].draw(image, **draw_layer)
+ self._layers[layer_name].draw(image, **draw_layer)
except KeyError:
@@ -909,7 +927,7 @@ class ArScene(DataFeatures.PipelineStepObject):
super().__init__()
# Init private attributes
- self.__layers = {}
+ self._layers = {}
self.__frames = {}
self.__angle_tolerance = 0
self.__distance_tolerance = 0
@@ -918,20 +936,20 @@ class ArScene(DataFeatures.PipelineStepObject):
def layers(self) -> dict:
"""Dictionary of ArLayers to project once the pose is estimated.
See [project][argaze.ArFeatures.ArScene.project] function below."""
- return self.__layers
+ return self._layers
@layers.setter
@DataFeatures.PipelineStepAttributeSetter
def layers(self, layers:dict):
- self.__layers = {}
+ self._layers = {}
for layer_name, layer_data in layers.items():
- self.__layers[layer_name] = ArLayer(name = layer_name, **layer_data)
+ self._layers[layer_name] = ArLayer(name = layer_name, **layer_data)
# Edit parent
- for name, layer in self.__layers.items():
+ for name, layer in self._layers.items():
layer.parent = self
@@ -1002,7 +1020,7 @@ class ArScene(DataFeatures.PipelineStepObject):
return {
**DataFeatures.PipelineStepObject.as_dict(self),
- "layers": self.__layers,
+ "layers": self._layers,
"frames": self.__frames,
"angle_tolerance": self.__angle_tolerance,
"distance_tolerance": self.__distance_tolerance
@@ -1037,7 +1055,7 @@ class ArScene(DataFeatures.PipelineStepObject):
iterator: name of projected layer and AOI2DScene projection
"""
- for name, layer in self.__layers.items():
+ for name, layer in self._layers.items():
# Clip AOI out of the visual horizontal field of view (optional)
# TODO: use HFOV and VFOV and don't use vision_cone method
@@ -1091,46 +1109,23 @@ class ArCamera(ArFrame):
# Init protected attributes
self._scenes = {}
- # Setup expected aoi of each layer aoi scan path with the aoi of corresponding scene layer
- # Edit aoi matcher exclude attribute to ignore frame aoi
- for layer_name, layer in self.layers.items():
-
- expected_aoi_list = []
- exclude_aoi_list = []
-
- for scene_name, scene in self._scenes.items():
-
- # Append scene layer aoi to corresponding expected camera layer aoi
- try:
-
- scene_layer = scene.layers[layer_name]
-
- expected_aoi_list.extend(list(scene_layer.aoi_scene.keys()))
-
- except KeyError:
-
- continue
-
- # Remove scene frame from expected camera layer aoi
- # Exclude scene frame from camera layer aoi matching
- for frame_name, frame in scene.frames.items():
-
- try:
-
- expected_aoi_list.remove(frame_name)
- exclude_aoi_list.append(frame_name)
-
- except ValueError:
-
- continue
-
- if layer.aoi_scan_path is not None:
-
- layer.aoi_scan_path.expected_aoi = expected_aoi_list
-
- if layer.aoi_matcher is not None:
-
- layer.aoi_matcher.exclude = exclude_aoi_list
+ @ArFrame.layers.setter
+ @DataFeatures.PipelineStepAttributeSetter
+ def layers(self, layers: dict):
+
+ self._layers = {}
+
+ for layer_name, layer_data in layers.items():
+
+ self._layers[layer_name] = ArLayer(name = layer_name, **layer_data)
+
+ # Edit parent
+ for name, layer in self._layers.items():
+
+ layer.parent = self
+
+ # Update expected and excluded aoi
+ self.__update_expected_and_excluded_aoi()
@property
def scenes(self) -> dict:
@@ -1152,6 +1147,9 @@ class ArCamera(ArFrame):
scene.parent = self
+ # Update expected and excluded aoi
+ self.__update_expected_and_excluded_aoi()
+
@property
def visual_hfov(self) -> float:
"""Angle in degree to clip scenes projection according visual horizontal field of view (HFOV)."""
@@ -1193,6 +1191,51 @@ class ArCamera(ArFrame):
"visual_vfov": self.__visual_vfov
}
+ def __update_expected_and_excluded_aoi(self):
+ """Edit expected aoi of each layer aoi scan path with the aoi of corresponding scene layer.
+ Edit excluded aoi to ignore frame aoi from aoi matching.
+ """
+ if self._layers and self._scenes:
+
+ for layer_name, layer in self._layers.items():
+
+ expected_aoi_list = []
+ excluded_aoi_list = []
+
+ for scene_name, scene in self._scenes.items():
+
+ # Append scene layer aoi to corresponding expected camera layer aoi
+ try:
+
+ scene_layer = scene.layers[layer_name]
+
+ expected_aoi_list.extend(list(scene_layer.aoi_scene.keys()))
+
+ except KeyError:
+
+ continue
+
+ # Remove scene frame from expected camera layer aoi
+ # Exclude scene frame from camera layer aoi matching
+ for frame_name, frame in scene.frames.items():
+
+ try:
+
+ expected_aoi_list.remove(frame_name)
+ excluded_aoi_list.append(frame_name)
+
+ except ValueError:
+
+ continue
+
+ if layer.aoi_scan_path is not None:
+
+ layer.aoi_scan_path.expected_aoi = expected_aoi_list
+
+ if layer.aoi_matcher is not None:
+
+ layer.aoi_matcher.exclude = excluded_aoi_list
+
@DataFeatures.PipelineStepMethod
def watch(self, image: numpy.array):
"""Detect AR features from image and project scenes into camera frame.