Diffstat (limited to 'src')
-rw-r--r--  src/argaze/ArFeatures.py  71
1 file changed, 51 insertions(+), 20 deletions(-)
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index f983a69..ceca69f 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -126,7 +126,7 @@ class ArEnvironment():
# Build scenes
new_scenes = {}
- for scene_name, scene_data in data.pop('scenes').items():
+ for new_scene_name, scene_data in data.pop('scenes').items():
new_aruco_scene = None
new_aoi_scene = None
@@ -291,28 +291,35 @@ class ArEnvironment():
return new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers
- # Build camera frame as large as aruco dectector optic parameters
+ # Load camera frame as large as aruco detector optic parameters
try:
camera_frame_data = scene_data.pop('camera_frame')
new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers = frame_data_processor(camera_frame_data, force_frame_size=new_optic_parameters.dimensions)
- new_camera_frame = ArFrame.from_scene(new_aoi_3d_scene, None, new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers, **camera_frame_data)
+ new_camera_frame = ArFrame.from_scene(new_aoi_3d_scene, new_scene_name, new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers, **camera_frame_data)
except KeyError:
- new_camera_frame = None #ArFrame.from_scene(new_aoi_3d_scene, None, new_optic_parameters.dimensions)
+ new_camera_frame = None
- # Build AOI frames
+ # Load AOI frames
new_aoi_frames = {}
- for aoi_name, aoi_frame_data in scene_data.pop('aoi_frames').items():
- new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers = frame_data_processor(aoi_frame_data)
+ try:
+
+ for aoi_name, aoi_frame_data in scene_data.pop('aoi_frames').items():
+
+ new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers = frame_data_processor(aoi_frame_data)
+
+ # Append new AOI frame
+ new_aoi_frames[aoi_name] = ArFrame.from_scene(new_aoi_3d_scene, aoi_name, new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers, **aoi_frame_data)
- # Append new AOI frame
- new_aoi_frames[aoi_name] = ArFrame.from_scene(new_aoi_3d_scene, aoi_name, new_frame_size, new_frame_background, new_gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers, **aoi_frame_data)
+ except KeyError:
+
+ pass
# Append new scene
- new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_3d_scene, new_camera_frame, new_aoi_frames, **scene_data)
+ new_scenes[new_scene_name] = ArScene(new_scene_name, new_aruco_scene, new_aoi_3d_scene, new_camera_frame, new_aoi_frames, **scene_data)
return ArEnvironment(new_name, new_aruco_detector, new_scenes)
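
Note: the hunk above makes whole configuration sections optional by popping them inside try/except KeyError, so a scene without 'camera_frame' or 'aoi_frames' still loads. A minimal sketch of that pattern with toy keys and values, not the actual argaze configuration schema:

    # Toy illustration of loading optional scene sections (hypothetical schema).
    def load_scene_sections(scene_data: dict) -> dict:

        loaded = {}

        # Optional single section: absence is not an error
        try:
            loaded['camera_frame'] = scene_data.pop('camera_frame')
        except KeyError:
            loaded['camera_frame'] = None

        # Optional mapping section: iterate only when present
        loaded['aoi_frames'] = {}
        try:
            for aoi_name, aoi_frame_data in scene_data.pop('aoi_frames').items():
                loaded['aoi_frames'][aoi_name] = aoi_frame_data
        except KeyError:
            pass

        return loaded

    print(load_scene_sections({'camera_frame': {'size': (1920, 1080)}}))
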
@@ -362,10 +369,13 @@ class ArEnvironment():
# Estimate scene markers poses
self.aruco_detector.estimate_markers_pose(scene.aruco_scene.identifiers)
+ # Clear scene projection
+ scene.clear()
+
# Estimate scene pose from detected scene markers
tvec, rmat, _, _ = scene.estimate_pose(self.aruco_detector.detected_markers)
- # Project AOI scene into camera frame according estimated pose
+ # Project scene into camera frame according to estimated pose
scene.project(tvec, rmat)
def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition):
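
Note: the scene.clear() call added above drops the previous projection before each new pose estimate, so a scene whose markers are no longer detected does not keep stale AOI. A self-contained sketch of that clear-before-project ordering, using a toy class rather than the argaze ArScene API:

    from typing import Optional, Tuple

    class ToyScene:
        """Toy stand-in illustrating clear-before-project."""

        def __init__(self):
            self.aoi_2d = {}

        def clear(self):
            # Forget the projection from the previous image
            self.aoi_2d = {}

        def update(self, pose: Optional[Tuple[float, float]]):
            # Always clear first: a failed estimation leaves nothing stale behind
            self.clear()
            if pose is None:
                return
            tx, ty = pose
            self.aoi_2d['screen'] = (tx, ty)

    scene = ToyScene()
    scene.update((10.0, 5.0))   # projection filled
    scene.update(None)          # markers lost: projection stays empty
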
@@ -450,7 +460,7 @@ class ArFrame():
self.__gaze_movement = GazeFeatures.UnvalidGazeMovement()
# Init current look at aoi
- self.__look_at = self.name
+ self.__look_at = None
# Init heatmap if required
if self.heatmap:
@@ -463,13 +473,14 @@ class ArFrame():
@classmethod
def from_scene(self, aoi_3d_scene, aoi_name, size, background: numpy.array = numpy.empty((0, 0)), gaze_movement_identifier: GazeFeatures.GazeMovementIdentifier = None, scan_path_analyzers: list = [], aoi_scan_path_analyzers: list = [], heatmap: bool = False) -> ArFrameType:
- if aoi_name:
+ # If aoi_name is part of the scene
+ try:
aoi_2d_scene = aoi_3d_scene.orthogonal_projection.reframe(aoi_name, size)
- else:
+ except KeyError:
- aoi_2d_scene = AOI2DScene.AOI2DScene()
+ aoi_2d_scene = aoi_3d_scene.orthogonal_projection
return ArFrame(aoi_name, \
size, \
@@ -537,6 +548,8 @@ class ArFrame():
self.__gaze_movement = new_gaze_movement
# Does the fixation match an AOI?
+ self.__look_at = None
+
for name, aoi in self.aoi_2d_scene.items():
_, _, circle_ratio = aoi.circle_intersection(self.__gaze_movement.focus, self.__gaze_movement.deviation_max)
@@ -555,7 +568,7 @@ class ArFrame():
self.scan_path.append_fixation(timestamp, self.__gaze_movement)
# Append fixation to aoi scan path
- if self.aoi_scan_path != None:
+ if self.aoi_scan_path != None and self.__look_at != None:
aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, self.__gaze_movement, self.__look_at)
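
Note: since __look_at is now reset to None for every new movement, the guard added above only records an AOI scan step when the fixation actually matched an AOI. A toy version of that matching and guard, where circle_intersection and the argaze types are replaced by a simple point-in-box test and `is not None` stands in for the `!= None` comparisons:

    from typing import Dict, Optional, Tuple

    Box = Tuple[float, float, float, float]  # x, y, width, height

    def match_fixation(fixation: Tuple[float, float], aoi_2d: Dict[str, Box]) -> Optional[str]:
        # Reset: no AOI matched until proven otherwise
        look_at = None
        fx, fy = fixation
        for name, (x, y, w, h) in aoi_2d.items():
            if x <= fx <= x + w and y <= fy <= y + h:
                look_at = name
                break
        return look_at

    aoi_scan_path = []
    look_at = match_fixation((120.0, 80.0), {'screen': (0, 0, 200, 150)})

    # Append to the AOI scan path only when a match exists
    if aoi_scan_path is not None and look_at is not None:
        aoi_scan_path.append(look_at)
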
@@ -573,6 +586,9 @@ class ArFrame():
# Update current gaze movement
self.__gaze_movement = new_gaze_movement
+ # Update current look at
+ self.__look_at = None
+
# Append saccade to scan path
if self.scan_path != None:
@@ -601,7 +617,7 @@ class ArFrame():
self.__looking_lock.release()
# Return looking data
- return self.__gaze_movement, self.__look_at, scan_step_analysis, aoi_scan_step_analysis
+ return new_gaze_movement, self.__look_at, scan_step_analysis, aoi_scan_step_analysis
def draw(self, image:numpy.array):
"""
@@ -621,7 +637,7 @@ class ArFrame():
self.__gaze_position.draw(image, color=(255, 255, 255))
# Draw gaze movement
- if gaze_movement_identifier:
+ if self.gaze_movement_identifier:
self.__gaze_movement.draw(image, color=(0, 255, 255))
self.__gaze_movement.draw_positions(image)
@@ -641,6 +657,9 @@ class ArScene():
Define an Augmented Reality scene with ArUco markers and AOI scenes.
Parameters:
+
+ name: name of the scene
+
aruco_scene: ArUco markers 3D scene description used to estimate scene pose from detected markers: see [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function below.
aoi_3d_scene: AOI 3D scene description that will be projected onto estimated scene once its pose will be estimated : see [project][argaze.ArFeatures.ArScene.project] function below.
@@ -658,7 +677,7 @@ class ArScene():
distance_tolerance: Optional distance error tolerance to validate marker pose in centimeter used into [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function.
"""
-
+ name: str
aruco_scene: ArUcoScene.ArUcoScene = field(default_factory=ArUcoScene.ArUcoScene)
aoi_3d_scene: AOI3DScene.AOI3DScene = field(default_factory=AOI3DScene.AOI3DScene)
camera_frame: ArFrame = field(default_factory=ArFrame)
@@ -695,6 +714,18 @@ class ArScene():
return output
+ def clear(self):
+ """Clear scene projection."""
+
+ # Lock camera frame exploitation
+ self.__camera_frame_lock.acquire()
+
+ # Update camera frame
+ self.camera_frame.aoi_2d_scene = AOI2DScene.AOI2DScene()
+
+ # Unlock camera frame exploitation
+ self.__camera_frame_lock.release()
+
def estimate_pose(self, detected_markers) -> Tuple[numpy.array, numpy.array, str, dict]:
"""Estimate scene pose from detected ArUco markers.
@@ -869,7 +900,7 @@ class ArScene():
self.__camera_frame_lock.acquire()
# Project gaze position in camera frame
- yield None, self.camera_frame.look(timestamp, gaze_position)
+ yield self.name, self.camera_frame.look(timestamp, gaze_position)
# Project gaze position into each aoi frames if possible
for aoi_name, frame in self.aoi_frames.items():
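
Note: with the change above, the scene's look generator yields the scene name alongside the camera frame result, then one entry per AOI frame, so callers can tell which frame produced each result. A toy consumer of such a generator; the names and yielded values are illustrative, not the argaze look() return type:

    from typing import Iterator, Tuple

    def toy_look(timestamp: float, gaze: Tuple[float, float]) -> Iterator[Tuple[str, Tuple[float, float]]]:
        # First the whole-scene (camera frame) result, keyed by the scene name
        yield 'my_scene', gaze
        # Then one result per AOI frame, keyed by the AOI name
        for aoi_name in ('screen', 'keyboard'):
            yield aoi_name, gaze

    for frame_name, look_result in toy_look(0.0, (120.0, 80.0)):
        print(frame_name, look_result)
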