aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/argaze/AreaOfInterest/AOI2DScene.py7
-rw-r--r--src/argaze/AreaOfInterest/AOI3DScene.py17
-rw-r--r--src/argaze/AreaOfInterest/AOIFeatures.py66
-rw-r--r--src/argaze/DataStructures.py4
-rw-r--r--src/argaze/GazeFeatures.py2
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_visual_scan.py100
-rw-r--r--src/argaze/utils/live_tobii_aruco_aois.py2
7 files changed, 133 insertions, 65 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index c48210a..a120a12 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -23,7 +23,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
looked = {}
ignored = {}
- for name, aoi in self.areas.items():
+ for name, aoi in self.items():
if aoi.looked():
@@ -38,14 +38,15 @@ class AOI2DScene(AOIFeatures.AOIScene):
def draw(self, frame, gaze_position: GazeFeatures.GazePosition):
"""Draw AOI polygons on frame."""
- for name, aoi2D in self.areas.items():
+ for name, aoi2D in self.items():
looked = aoi2D.looked(gaze_position)
color = (0, 255, 0) if looked else (0, 0, 255)
if looked:
- cv.putText(frame, name, aoi2D[3], cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ top_left_corner_pixel = numpy.rint(aoi2D.clockwise()[0]).astype(int)
+ cv.putText(frame, name, top_left_corner_pixel, cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
looked_pixel = aoi2D.looked_pixel(aoi2D.look_at(gaze_position))
cv.circle(frame, looked_pixel, 8, color, 2)
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index 28150c6..658fa64 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -10,6 +10,9 @@ from argaze.AreaOfInterest import AOIFeatures, AOI2DScene
import numpy
import cv2 as cv
+# Define a zero distortion matrix
+D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
@dataclass
class AOI3DScene(AOIFeatures.AOIScene):
"""Define AOI 3D scene."""
@@ -87,25 +90,23 @@ class AOI3DScene(AOIFeatures.AOIScene):
# retrieve all aoi3D vertices
for name, face in faces.items():
- aoi3D = numpy.array([ vertices[i-1] for i in face ]).astype('float32').view(AOIFeatures.AreaOfInterest)
- self.append(name, aoi3D)
+ aoi3D = numpy.array([ vertices[i-1] for i in face ]).astype(numpy.float32).view(AOIFeatures.AreaOfInterest)
+ self[name] = aoi3D
except IOError:
raise IOError(f'File not found: {obj_filepath}')
- def project(self, K, D):
+ def project(self, K, D=D0):
"""Project 3D scene onto 2D scene according optical parameters.
**Returns:** AOI2DScene"""
aoi2D_scene = AOI2DScene.AOI2DScene()
- for name, aoi3D in self.areas.items():
+ for name, aoi3D in self.items():
vertices_2D, J = cv.projectPoints(aoi3D, self.rotation, self.translation, K, D)
- vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2)).tolist()
-
- aoi2D = numpy.array(vertices_2D).view(AOIFeatures.AreaOfInterest)
+ aoi2D = vertices_2D.reshape((len(vertices_2D), 2)).astype(numpy.float32).view(AOIFeatures.AreaOfInterest)
- aoi2D_scene.append(name, aoi2D)
+ aoi2D_scene[name] = aoi2D
return aoi2D_scene
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 001affa..4495df5 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -40,21 +40,21 @@ class AreaOfInterest(numpy.ndarray):
return mpath.Path(self).contains_points([gaze_position])[0]
- def look_at(self, gaze_position):
- """Get where the area is looked using non orthogonal projection."""
+ def look_at(self, gaze_pixel):
+ """Get where the area is looked using perspective transformation."""
if self.dimension() != 2:
raise RuntimeError(f'Bad area dimension ({self.dimension()})')
- clockwise_area = self.clockwise()
+ Src = self.clockwise()
+ Src_origin = Src[0]
+ Src = (Src - Src_origin).reshape((len(Src)), 2)
- O = clockwise_area[0] # Origin
- G = numpy.array(gaze_position) - O # Gaze point
+ Dst = numpy.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]]).astype(numpy.float32)
- M = numpy.array([clockwise_area[1] - O, clockwise_area[-1] - O]) # Basis projection matrix M = {U | V}
- Mt = numpy.transpose(M)
-
- Gp = numpy.dot(numpy.dot(numpy.linalg.inv(numpy.dot(Mt, M)), Mt), G)# Projected gaze point
+ P = cv.getPerspectiveTransform(Src, Dst)
+ G = gaze_pixel - Src_origin
+ Gp = numpy.dot(P, numpy.array([G[0], G[1], 1]))[:-1]
return numpy.around(Gp, 4).tolist()
@@ -64,40 +64,54 @@ class AreaOfInterest(numpy.ndarray):
if self.dimension() != 2:
raise RuntimeError(f'Bad area dimension ({self.dimension()})')
- clockwise_area = self.clockwise()
+ Src = numpy.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]]).astype(numpy.float32)
- O = clockwise_area[0] # Origin
- Gp = numpy.array(look_at) # Projected gaze point
- M = numpy.array([clockwise_area[1] - O, clockwise_area[-1] - O]) # Basis projection matrix M = {U | V}
- Mt = numpy.transpose(M)
+ Dst = self.clockwise()
+ Dst_origin = Dst[0]
+ Dst = (Dst - Dst_origin).reshape((len(Dst)), 2)
- Lp = O + numpy.dot(M, Gp) # Projected gaze pixel
+ P = cv.getPerspectiveTransform(Src, Dst)
+ L = look_at
+ Lp = Dst_origin + numpy.dot(P, numpy.array([L[0], L[1], 1]))[:-1]
return numpy.rint(Lp).astype(int).tolist()
def draw(self, frame, color):
# Draw form
- cv.line(frame, self[-1], self[0], color, 1)
- for A, B in zip(self, self[1:]):
+ pixels = numpy.rint(self).astype(int)
+ cv.line(frame, pixels[-1], pixels[0], color, 1)
+ for A, B in zip(pixels, pixels[1:]):
cv.line(frame, A, B, color, 1)
# Draw center
- cv.circle(frame, self.center().astype(int), 1, color, -1)
+ center_pixel = numpy.rint(self.center()).astype(int)
+ cv.circle(frame, center_pixel, 1, color, -1)
@dataclass
class AOIScene():
- """Define 2D/3D AOI scene."""
+ """Define 2D/3D AOI scene."""
- dimension: int = field(init=False, default=None)
- """Dimension of the AOIs in scene."""
+ dimension: int = field(init=False, repr=False, default=None)
+ """Dimension of the AOIs in scene."""
- areas: dict = field(init=False, default_factory=dict)
- """All aois in the scene."""
+ areas: dict = field(init=False, default_factory=dict)
+ """All aois in the scene."""
- def append(self, name, aoi: AreaOfInterest):
- """Add an aoi to the scene."""
- self.areas[name] = aoi
+ def __getitem__(self, key):
+ """Get an aoi from the scene."""
+ return numpy.array(self.areas[key]).astype(numpy.float32).view(AreaOfInterest)
+
+ def __setitem__(self, name, aoi: AreaOfInterest):
+ """Add an aoi to the scene."""
+ self.areas[name] = aoi.tolist()
+
+ def items(self):
+ for name, area in self.areas.items():
+ yield name, numpy.array(area).astype(numpy.float32).view(AreaOfInterest)
+
+ def keys(self):
+ return self.areas.keys()
class TimeStampedAOIScenes(DataStructures.TimeStampedBuffer):
"""Define timestamped buffer to store AOI scenes in time."""
diff --git a/src/argaze/DataStructures.py b/src/argaze/DataStructures.py
index abd75a8..8517378 100644
--- a/src/argaze/DataStructures.py
+++ b/src/argaze/DataStructures.py
@@ -29,7 +29,7 @@ class DictObject():
__type_value = self.__dict__.pop(__type_key)
self.__dict__.update({key:value})
self.__dict__[__type_key] = __type_value
-
+
class TimeStampedBuffer(collections.OrderedDict):
"""Ordered dictionary to handle timestamped data.
```
@@ -52,7 +52,7 @@ class TimeStampedBuffer(collections.OrderedDict):
super().__setitem__(key, value)
def __str__(self):
- return json.dumps(self)
+ return json.dumps(self, default=vars)
def append(self, timestamped_buffer):
"""Append a timestamped buffer."""
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index ebe6fe5..32ec571 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -303,7 +303,7 @@ class PointerBasedVisualScan(VisualScanGenerator):
gaze_position = self.__ts_gaze_positions[ts_current]
- for name, aoi in aoi_scene_current.areas.items():
+ for name, aoi in aoi_scene_current.items():
looked = aoi.looked(gaze_position)
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 4622b00..4f84943 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -14,6 +14,17 @@ import numpy
import cv2 as cv
+aoi_color = {
+ 'Scene_Plan': (127, 127, 127),
+ 'PFD_Plan': (63, 127, 63),
+ 'Attitude_Plan': (0, 255, 0),
+ 'Air_Speed_Plan': (255, 0, 255),
+ 'Vertical_Speed_Plan': (255, 255, 0),
+ 'Localiser_Plan': (0, 0, 255),
+ 'ND_Plan': (127, 63, 63),
+ 'Marker_Plan': (0, 0, 0)
+}
+
def main():
"""
Track any ArUco marker into Tobii Glasses Pro 2 segment video file.
@@ -50,13 +61,15 @@ def main():
os.makedirs(os.path.dirname(args.output))
print(f'{os.path.dirname(args.output)} folder created')
- visual_scan_filepath = f'{args.output}/visual_scan.csv'
- video_filepath = f'{args.output}/fullstream+visu.mp4'
+ vs_data_filepath = f'{args.output}/visual_scan.csv'
+ vs_visu_filepath = f'{args.output}/visual_scan.jpg'
+ vs_video_filepath = f'{args.output}/visual_scan.mp4'
else:
- visual_scan_filepath = f'{args.segment_path}/visual_scan.csv'
- video_filepath = f'{args.segment_path}/fullstream+visu.mp4'
+ vs_data_filepath = f'{args.segment_path}/visual_scan.csv'
+ vs_visu_filepath = f'{args.segment_path}/visual_scan.jpg'
+ vs_video_filepath = f'{args.segment_path}/visual_scan.mp4'
# Load a tobii segment
tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1000000), int(args.time_range[1] * 1000000) if args.time_range[1] != None else None)
@@ -69,12 +82,16 @@ def main():
tobii_segment_data = tobii_segment.load_data()
print(f'Data keys: {tobii_segment_data.keys()}')
- # Access to timestamped gaze position data buffer
+ # Access to timestamped gaze positions data buffer
tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp
print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded')
+ # Access to timestamped gaze 3D positions data buffer
+ #tobii_ts_gaze_3d_positions = tobii_segment_data.gidx_gp3
+ #print(f'{len(tobii_ts_gaze_3d_positions)} gaze 3D positions loaded')
+
# Prepare video exportation at the same format than segment video
- output_video = TobiiVideo.TobiiVideoOutput(video_filepath, tobii_segment_video.get_stream())
+ output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.get_stream())
# Create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
@@ -86,7 +103,7 @@ def main():
# Create AOIs 3D scene
aoi3D_scene = AOI3DScene.AOI3DScene()
aoi3D_scene.load(args.aoi_scene)
- print(f'AOIs names: {aoi3D_scene.areas.keys()}')
+ print(f'AOIs names: {aoi3D_scene.keys()}')
# Create timestamped buffer to store AOIs scene in time
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
@@ -94,6 +111,26 @@ def main():
# Create timestamped buffer to store gaze positions in time
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
+ # Create a visual scan visualisation frame
+ visu_width = 1920
+ visu_height = 1080
+ visu_ratio = visu_height
+ visu_frame = numpy.full((visu_height, visu_width, 3), 255, dtype=numpy.uint8)
+
+ # Project 3D scene on the reference frame
+ # TODO : center projection on a reference AOI
+ # TODO: pass the reference AOI in argument
+ aoi3D_scene.rotation = numpy.asarray([[-numpy.pi, 0.0, 0.0]])
+ aoi3D_scene.translation = numpy.asarray([[25.0, -32.0, 20.0]])
+
+ # Edit a projection matrix for the reference frame
+ K0 = numpy.asarray([[visu_ratio, 0.0, visu_width/2], [0.0, visu_ratio, visu_height/2], [0.0, 0.0, 1.0]])
+
+ aoi2D_visu_scene = aoi3D_scene.project(K0)
+
+ for name, aoi in aoi2D_visu_scene.items():
+ aoi.draw(visu_frame, aoi_color[name])
+
# Video and data replay loop
try:
@@ -109,11 +146,11 @@ def main():
closest_gaze_ts, closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
# Draw gaze position
- gaze_position = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
- cv.circle(video_frame.matrix, gaze_position, 4, (0, 255, 255), -1)
+ video_gaze_pixel = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
+ cv.circle(video_frame.matrix, video_gaze_pixel, 4, (0, 255, 255), -1)
# Store gaze position at this time in millisecond
- ts_gaze_positions[video_ts/1000] = gaze_position
+ ts_gaze_positions[round(video_ts/1000)] = video_gaze_pixel
# Wait for gaze position
except ValueError:
@@ -123,7 +160,7 @@ def main():
aruco_tracker.track(video_frame.matrix)
aruco_tracker.draw(video_frame.matrix)
- # Project 3D scene related to each aruco marker
+ # Project 3D scene on each video frame and the visualisation frame
if aruco_tracker.get_markers_number():
for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
@@ -137,19 +174,30 @@ def main():
aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
aoi3D_scene.translation = aruco_tracker.get_marker_translation(i)
-
- # Edit Zero distorsion matrix
- D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
# DON'T APPLY CAMERA DISTORTION : it projects points which are far from the frame into it
# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
- aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
+ aoi2D_video_scene = aoi3D_scene.project(aruco_camera.get_K())
- # Draw 2D scene
- aoi2D_scene.draw(video_frame.matrix, gaze_position)
+ # Draw 2D scene on video frame
+ aoi2D_video_scene.draw(video_frame.matrix, video_gaze_pixel)
# Store 2D scene at this time in millisecond
- ts_aois_scenes[video_ts/1000] = aoi2D_scene
+ ts_aois_scenes[round(video_ts/1000)] = aoi2D_video_scene
+
+ # Draw gaze path
+ for name, aoi in aoi2D_video_scene.items():
+
+ if not aoi.looked(video_gaze_pixel):
+ continue
+
+ ref_aoi = name #'Scene_Plan'
+
+ look_at = aoi2D_video_scene[ref_aoi].look_at(video_gaze_pixel)
+
+ visu_gaze_pixel = aoi2D_visu_scene[ref_aoi].looked_pixel(look_at)
+
+ cv.circle(visu_frame, visu_gaze_pixel, 4, aoi_color[ref_aoi], -1)
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
@@ -158,6 +206,9 @@ def main():
# Display video
cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)
+ # Display visual scan frame
+ cv.imshow(f'Segment {tobii_segment.get_id()} visual scan', visu_frame)
+
# Write video
output_video.write(video_frame.matrix)
@@ -174,18 +225,19 @@ def main():
# End output video file
output_video.close()
-
- print(f'\nAOIs video saved into {video_filepath}')
+ print(f'\nVisual scan video saved into {vs_video_filepath}')
# Build visual scan based on a pointer position
visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes, ts_gaze_positions)
-
print(f'{len(visual_scan.steps())} visual scan steps found')
- # Export visual scan
- visual_scan.export_as_csv(visual_scan_filepath)
+ # Export visual scan data
+ visual_scan.export_as_csv(vs_data_filepath)
+ print(f'Visual scan data saved into {vs_data_filepath}')
- print(f'Visual scan saved into {visual_scan_filepath}')
+ # Export visual scan image
+ cv.imwrite(vs_visu_filepath, visu_frame)
+ print(f'Visual scan image saved into {vs_visu_filepath}')
if __name__ == '__main__':
diff --git a/src/argaze/utils/live_tobii_aruco_aois.py b/src/argaze/utils/live_tobii_aruco_aois.py
index 4f00dea..bf91eec 100644
--- a/src/argaze/utils/live_tobii_aruco_aois.py
+++ b/src/argaze/utils/live_tobii_aruco_aois.py
@@ -57,7 +57,7 @@ def main():
# Create AOIs 3D scene
aoi3D_scene = AOI3DScene.AOI3DScene()
aoi3D_scene.load(args.aoi_scene)
- print(f'AOIs names: {aoi3D_scene.areas.keys()}')
+ print(f'AOIs names: {aoi3D_scene.keys()}')
# Start streaming
tobii_controller.start_streaming()