Diffstat (limited to 'src')
-rw-r--r--  src/argaze/AreaOfInterest/AOI3DScene.py                      | 63
-rw-r--r--  src/argaze/utils/export_tobii_segment_aruco_visual_scan.py   | 11
-rw-r--r--  src/argaze/utils/live_tobii_aruco_aoi_ivy_application.py     |  6
-rw-r--r--  src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py      |  5
4 files changed, 65 insertions(+), 20 deletions(-)
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index aa7e27f..44b8e3c 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -10,6 +10,15 @@ from argaze.AreaOfInterest import AOIFeatures, AOI2DScene
import numpy
import cv2 as cv
+# Define default translation vector
+T0 = numpy.array([0., 0., 0.])
+
+# Define default rotation vector
+R0 = numpy.array([0., 0., 0.])
+
+# Define default camera matrix (optical parameters)
+K0 = numpy.array([[1., 0., 1.], [0., 1., 1.], [0., 0., 1.]])
+
# Define a zero distortion matrix
D0 = numpy.array([0.0, 0.0, 0.0, 0.0, 0.0])
@@ -17,9 +26,6 @@ D0 = numpy.array([0.0, 0.0, 0.0, 0.0, 0.0])
class AOI3DScene(AOIFeatures.AOIScene):
"""Define AOI 3D scene."""
- rotation: list = field(init=False, default=numpy.array([0., 0., 0.]))
- translation: list = field(init=False, default=numpy.array([0., 0., 0.]))
-
def __post_init__(self, **aois):
# set dimension member
@@ -96,6 +102,37 @@ class AOI3DScene(AOIFeatures.AOIScene):
except IOError:
raise IOError(f'File not found: {obj_filepath}')
+ def save(self, obj_filepath: str):
+ """Save AOI3D scene into .obj file."""
+
+ with open(obj_filepath, 'w', encoding='utf-8') as file:
+
+ file.write('# ArGaze OBJ file\n')
+
+ vertices_count = 0
+
+ for name, aoi3D in self.items():
+
+ file.write(f'o {name}\n')
+
+ vertices_ids = 'f'
+
+ for vertices in aoi3D:
+
+ vertices_coords = 'v'
+
+ for coord in vertices:
+
+ vertices_coords += f' {coord:.6f}'
+
+ file.write(vertices_coords + '\n')
+
+ vertices_count += 1
+ vertices_ids += f' {vertices_count}'
+
+ file.write('s off\n')
+ file.write(vertices_ids + '\n')
+
def clip(self, cone_radius, cone_height, cone_tip=[0., 0., 0.], cone_direction=[0., 0., 1.]):
"""Select AOI which are inside a given cone field.
By default, the cone has its tip at the origin and its base oriented toward the positive Z axis.
@@ -135,18 +172,32 @@ class AOI3DScene(AOIFeatures.AOIScene):
return aoi3D_scene_clipped
- def project(self, K, D=D0):
- """Project 3D scene onto 2D scene according optical parameters.
+ def project(self, T=T0, R=R0, K=K0, D=D0):
+ """Project 3D scene onto 2D scene according translation, rotation and optical parameters.
**Returns:** AOI2DScene"""
aoi2D_scene = AOI2DScene.AOI2DScene()
for name, aoi3D in self.items():
- vertices_2D, J = cv.projectPoints(aoi3D, self.rotation, self.translation, K, D)
+ vertices_2D, J = cv.projectPoints(aoi3D, R, T, K, D)
aoi2D = vertices_2D.reshape((len(vertices_2D), 2)).astype(numpy.float32).view(AOIFeatures.AreaOfInterest)
aoi2D_scene[name] = aoi2D
return aoi2D_scene
+
+ def transform(self, T=T0, R=R0):
+ """Translate and/or rotate 3D scene.
+ **Returns:** AOI3DScene"""
+
+ aoi3D_scene = AOI3DScene()
+
+ R, _ = cv.Rodrigues(R)
+
+ for name, aoi3D in self.items():
+
+ aoi3D_scene[name] = aoi3D.dot(R.T) + T
+
+ return aoi3D_scene
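Taken together, the pose that used to live on the scene as rotation/translation attributes is now passed per call. A minimal usage sketch of the reworked API, assuming a scene loaded from an .obj file (the loader call, construction, file paths and pose values below are illustrative, inferred from the surrounding code rather than taken from this commit):

    import numpy
    from argaze.AreaOfInterest import AOI3DScene

    # Illustrative pose and camera matrix (hypothetical values)
    T = numpy.array([0., 0., 25.])                # translation vector
    R = numpy.array([-numpy.pi, 0., 0.])          # rotation vector (Rodrigues form)
    K = numpy.array([[800., 0., 320.], [0., 800., 240.], [0., 0., 1.]])

    aoi3D_scene = AOI3DScene.AOI3DScene()
    aoi3D_scene.load('scene.obj')                 # existing .obj loader, name assumed from the load/save symmetry

    # The pose is now an argument of project(); D defaults to the zero distortion matrix D0
    aoi2D_scene = aoi3D_scene.project(T, R, K)

    # transform() applies the same pose in 3D space and returns a new AOI3DScene
    aoi3D_moved = aoi3D_scene.transform(T, R)

    # save() writes one 'o <name>' block per AOI, its 'v' vertex lines and a single 'f' face
    aoi3D_moved.save('scene_moved.obj')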
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 51fee22..5e487c1 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -149,13 +149,13 @@ def main():
cv.putText(aoi2D_visu_frames[marker_id], f'Segment time range: {int(args.time_range[0] * 1000)} - {int(args.time_range[1] * 1000)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 1, cv.LINE_AA)
# Project 3D scene onto the visualisation plane
- aoi3D_scenes[marker_id].rotation = numpy.array([[-numpy.pi, 0.0, 0.0]])
- aoi3D_scenes[marker_id].translation = aoi3D_scenes[marker_id].center()*[-1, 1, 0] + [0, 0, scene_height]
+ aoi3D_scene_rotation = numpy.array([[-numpy.pi, 0.0, 0.0]])
+ aoi3D_scene_translation = aoi3D_scenes[marker_id].center()*[-1, 1, 0] + [0, 0, scene_height]
# Edit a projection matrix for the reference frame
K0 = numpy.array([[visu_height, 0.0, visu_width/2], [0.0, visu_height, visu_height/2], [0.0, 0.0, 1.0]])
- aoi2D_visu_scenes[marker_id] = aoi3D_scenes[marker_id].project(K0)
+ aoi2D_visu_scenes[marker_id] = aoi3D_scenes[marker_id].project(aoi3D_scene_translation, aoi3D_scene_rotation, K0)
for name, aoi in aoi2D_visu_scenes[marker_id].items():
if name != 'Visualisation_Plan':
@@ -267,9 +267,6 @@ def main():
if distance_to_center > int(video_frame.width/3):
continue
-
- aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
- aoi3D_scene.translation = aruco_tracker.get_marker_translation(i)
# Remove aoi outside vision field
# The vision cone tip is positioned behind the head
@@ -277,7 +274,7 @@ def main():
# DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
# This hack isn't realistic, but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_video_scene = aoi3D_scene.project(aruco_camera.get_K())
+ aoi2D_video_scene = aoi3D_scene.project(aruco_tracker.get_marker_translation(i), aruco_tracker.get_marker_rotation(i), aruco_camera.get_K())
# Store each 2D aoi for further scene merging
for name, aoi in aoi2D_video_scene.items():
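A note on this file's reference-frame projection (the K0 matrix edited above): K0 is a synthetic camera matrix whose focal length is visu_height and whose principal point is the centre of the visualisation frame, and since project() is called without D the zero distortion D0 applies, so cv.projectPoints reduces to the plain pinhole model. A minimal sketch with illustrative frame dimensions (640x480 here; the script computes its own values):

    import numpy
    import cv2 as cv

    visu_width, visu_height = 640, 480            # illustrative only
    K0 = numpy.array([[visu_height, 0.0, visu_width/2],
                      [0.0, visu_height, visu_height/2],
                      [0.0, 0.0, 1.0]])

    D0 = numpy.array([0., 0., 0., 0., 0.])        # zero distortion
    rvec0 = numpy.array([0., 0., 0.])             # default rotation vector
    tvec0 = numpy.array([0., 0., 0.])             # default translation vector

    point = numpy.array([[0.1, -0.2, 1.0]])       # one 3D point in front of the camera

    projected, _ = cv.projectPoints(point, rvec0, tvec0, K0, D0)

    # Pinhole model by hand: u = fx*X/Z + cx, v = fy*Y/Z + cy
    x, y, z = point[0]
    u = visu_height * x / z + visu_width / 2
    v = visu_height * y / z + visu_height / 2
    assert numpy.allclose(projected.reshape(2), (u, v))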
diff --git a/src/argaze/utils/live_tobii_aruco_aoi_ivy_application.py b/src/argaze/utils/live_tobii_aruco_aoi_ivy_application.py
index 6ae7782..70190e2 100644
--- a/src/argaze/utils/live_tobii_aruco_aoi_ivy_application.py
+++ b/src/argaze/utils/live_tobii_aruco_aoi_ivy_application.py
@@ -58,13 +58,13 @@ def main():
ref_aoi = 'Scene_Plan'
# TODO: pass the reference AOI as an argument
- aoi3D_scene.rotation = numpy.array([[-numpy.pi, 0.0, 0.0]])
- aoi3D_scene.translation = numpy.array([[19.0, 8.0, 25.0]])
+ aoi3D_scene_rotation = numpy.array([[-numpy.pi, 0.0, 0.0]])
+ aoi3D_scene_translation = numpy.array([[19.0, 8.0, 25.0]])
# Edit a projection matrix for the reference frame
K0 = numpy.array([[visu_ratio, 0.0, visu_width/2], [0.0, visu_ratio, visu_height/2], [0.0, 0.0, 1.0]])
- aoi2D_visu_scene = aoi3D_scene.project(K0)
+ aoi2D_visu_scene = aoi3D_scene.project(aoi3D_scene_translation, aoi3D_scene_rotation, K0)
# Create aruco markers dictionary
aruco_markers_dict = ArUcoMarkersDictionary.ArUcoMarkersDictionary(args.dictionary)
diff --git a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
index 6db1de4..e1a6e87 100644
--- a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
+++ b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
@@ -163,9 +163,6 @@ def main():
if distance_to_center > int(video_frame.width/3):
continue
-
- aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
- aoi3D_scene.translation = aruco_tracker.get_marker_translation(i)
# Remove aoi outside vision field
# The vision cone tip is positioned behind the head
@@ -176,7 +173,7 @@ def main():
# DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
# This hack isn't realistic, but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_video_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
+ aoi2D_video_scene = aoi3D_scene.project(aruco_tracker.get_marker_translation(i), aruco_tracker.get_marker_rotation(i), aruco_camera.get_K())
# Store each 2D aoi for further scene merging
for name, aoi in aoi2D_video_scene.items():
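Here, as in the export script, the marker pose returned by the tracker is handed straight to project(), and the explicit D0 argument is dropped since it is now the default. With zero distortion this is equivalent to moving the scene with transform() first and then projecting with the default identity pose. A self-contained check at the cv level, using a hypothetical pose and AOI (not values from the tracker):

    import numpy
    import cv2 as cv

    # Hypothetical marker pose and camera matrix
    T = numpy.array([0.05, -0.02, 0.3])
    R = numpy.array([0.1, 0.2, 0.0])
    K = numpy.array([[600., 0., 320.], [0., 600., 240.], [0., 0., 1.]])
    D0 = numpy.array([0., 0., 0., 0., 0.])
    zero = numpy.array([0., 0., 0.])

    # Four vertices of one AOI, as stored in an AOI3DScene
    aoi3D = numpy.array([[0., 0., 1.], [0.1, 0., 1.], [0.1, 0.1, 1.], [0., 0.1, 1.]])

    # project(T, R, K): the pose goes to cv.projectPoints as extrinsic parameters
    direct, _ = cv.projectPoints(aoi3D, R, T, K, D0)

    # transform(T, R) applies R then T in 3D, then projection with the identity pose
    R_mat, _ = cv.Rodrigues(R)
    moved = aoi3D.dot(R_mat.T) + T
    via_transform, _ = cv.projectPoints(moved, zero, zero, K, D0)

    assert numpy.allclose(direct, via_transform)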