author     Théo de la Hogue  2022-06-22 15:08:01 +0200
committer  Théo de la Hogue  2022-06-22 15:08:01 +0200
commit     e8facd4e2da835efaa7e3aa187ed14384daad255 (patch)
tree       078a8b85d9ea6cd8af9170099b613a5471457a38
parent     1fc8e55cac3d9b6d7504beb870f46ac3bbb7710e (diff)
Exporting one visual scan picture per marker if a Visualisation_Plan AOI is in the AOI scene.
-rw-r--r--   src/argaze/AreaOfInterest/AOI2DScene.py                        5
-rw-r--r--   src/argaze/AreaOfInterest/AOIFeatures.py                      30
-rw-r--r--   src/argaze/utils/export_tobii_segment_aruco_visual_scan.py   107
3 files changed, 98 insertions, 44 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index 6106005..b009f64 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -35,11 +35,14 @@ class AOI2DScene(AOIFeatures.AOIScene):
return looked, ignored
- def draw(self, frame, gaze_position: GazeFeatures.GazePosition):
+ def draw(self, frame, gaze_position: GazeFeatures.GazePosition, exclude=[]):
"""Draw AOI polygons on frame."""
for name, aoi2D in self.items():
+ if name in exclude:
+ continue
+
looked = aoi2D.looked(gaze_position)
color = (0, 255, 0) if looked else (0, 0, 255)
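A minimal usage sketch of the new exclude parameter (the frame and gaze position variables are illustrative); note that the mutable default exclude=[] is harmless here since the list is only read, never modified:

    # Hypothetical call: draw every AOI except 'Visualisation_Plan',
    # which only serves as a projection target, not as a visible overlay.
    aoi2D_scene.draw(video_frame.matrix, video_gaze_pixel, exclude=['Visualisation_Plan'])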
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 0d9fce0..8e4965c 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -125,6 +125,36 @@ class AOIScene():
def keys(self):
return self.areas.keys()
+ def bounds(self):
+ """Get scene's bounds."""
+
+ all_vertices = []
+
+ for area in self.areas.values():
+ for vertice in area:
+ all_vertices.append(vertice)
+
+ all_vertices = numpy.array(all_vertices).astype(numpy.float32)
+
+ min_bounds = numpy.min(all_vertices, axis=0)
+ max_bounds = numpy.max(all_vertices, axis=0)
+
+ return numpy.array([min_bounds, max_bounds])
+
+ def center(self):
+ """Get scene's center point."""
+
+ min_bounds, max_bounds = self.bounds()
+
+ return (min_bounds + max_bounds) / 2
+
+ def size(self):
+ """Get scene size."""
+
+ min_bounds, max_bounds = self.bounds()
+
+ return max_bounds - min_bounds
+
class TimeStampedAOIScenes(DataStructures.TimeStampedBuffer):
"""Define timestamped buffer to store AOI scenes in time."""
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index b507f0e..1978b44 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -59,7 +59,7 @@ def main():
destination_path = args.segment_path
vs_data_filepath = f'{destination_path}/visual_scan.csv'
- vs_visu_filepath = f'{destination_path}/visual_scan.jpg'
+ vs_visu_filepath = f'{destination_path}/visual_scan_marker_%d.jpg'
vs_video_filepath = f'{destination_path}/visual_scan.mp4'
# Load a tobii segment
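The f-string above deliberately keeps a literal %d placeholder, so a distinct file name per marker can be produced later with the % operator, as the export loop at the end of the script does:

    vs_visu_filepath = f'{destination_path}/visual_scan_marker_%d.jpg'
    # ... later, once per tracked marker:
    cv.imwrite(vs_visu_filepath % marker_id, visu_frame)    # e.g. .../visual_scan_marker_4.jpg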
@@ -109,8 +109,10 @@ def main():
print(f'ArUcoTracker configuration for {aruco_tracker.get_markers_dictionay().get_markers_format()} markers detection:')
aruco_tracker.print_configuration()
- # Load AOI 3D scene for each marker
+ # Load AOI 3D scene for each marker and create an AOI 2D scene and frame when a 'Visualisation_Plan' AOI exists
aoi3D_scenes = {}
+ aoi2D_visu_scenes = {}
+ aoi2D_visu_frames = {}
for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
@@ -119,43 +121,56 @@ def main():
aoi3D_scenes[marker_id] = AOI3DScene.AOI3DScene()
aoi3D_scenes[marker_id].load(aoi_scene_filepath)
- print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}')
+ print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}:')
for aoi in aoi3D_scenes[marker_id].keys():
- print(f'\t{aoi}')
+
+ # If a 'Visualisation_Plan' AOI exists
+ # TODO: document this deep feature !!!
+ if aoi == 'Visualisation_Plan':
+
+ print(f'\tVisualisation_Plan detected: a visual scan picture will be output for this marker.')
+
+ # Create a visual scan visualisation frame
+ visu_width, visu_height = 1920, 1080
+ scene_width, scene_height, __ = aoi3D_scenes[marker_id].size()
+
+ aoi2D_visu_frames[marker_id] = numpy.full((visu_height, visu_width, 3), 255, dtype=numpy.uint8)
+
+ if args.time_range != (0., None):
+ cv.putText(aoi2D_visu_frames[marker_id], f'Segment time range: {int(args.time_range[0] * 1000)} - {int(args.time_range[1] * 1000)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 1, cv.LINE_AA)
+
+ # Project 3D scene onto the visualisation plan
+ aoi3D_scenes[marker_id].rotation = numpy.asarray([[-numpy.pi, 0.0, 0.0]])
+ aoi3D_scenes[marker_id].translation = aoi3D_scenes[marker_id].center()*[-1, 1, 0] + [0, 0, scene_height]
+
+ # Edit a projection matrix for the reference frame
+ K0 = numpy.asarray([[visu_height, 0.0, visu_width/2], [0.0, visu_height, visu_height/2], [0.0, 0.0, 1.0]])
+
+ aoi2D_visu_scenes[marker_id] = aoi3D_scenes[marker_id].project(K0)
+
+ for name, aoi in aoi2D_visu_scenes[marker_id].items():
+ if name != 'Visualisation_Plan':
+ aoi.draw(aoi2D_visu_frames[marker_id], (0, 0, 0))
+
+ else:
+
+ print(f'\t{aoi}')
def aoi3D_scene_selector(marker_id):
return aoi3D_scenes.get(marker_id, None)
+ def aoi2D_visu_scene_selector(marker_id):
+ return aoi2D_visu_scenes.get(marker_id, None)
+
+ def aoi2D_visu_frame_selector(marker_id):
+ return aoi2D_visu_frames.get(marker_id, None)
+
# Create timestamped buffer to store AOIs scene in time
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
# Create timestamped buffer to store gaze positions in time
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
- '''
- # Create a visual scan visualisation frame
- visu_width = 1920
- visu_height = 1080
- visu_ratio = visu_height
- visu_frame = numpy.full((visu_height, visu_width, 3), 255, dtype=numpy.uint8)
-
- cv.putText(visu_frame, f'Segment time range: {int(args.time_range[0] * 1000)} - {int(args.time_range[1] * 1000)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 1, cv.LINE_AA)
-
- # Project 3D scene on the reference frame
- # TODO : center projection on a reference AOI
- ref_aoi = 'Scene_Plan'
-
- # TODO: pass the reference AOI in argument
- aoi3D_scene.rotation = numpy.asarray([[-numpy.pi, 0.0, 0.0]])
- aoi3D_scene.translation = numpy.asarray([[25.0, -32.0, 20.0]])
-
- # Edit a projection matrix for the reference frame
- K0 = numpy.asarray([[visu_ratio, 0.0, visu_width/2], [0.0, visu_ratio, visu_height/2], [0.0, 0.0, 1.0]])
- aoi2D_visu_scene = aoi3D_scene.project(K0)
-
- for name, aoi in aoi2D_visu_scene.items():
- aoi.draw(visu_frame, (0, 0, 0))
- '''
# Video and data replay loop
try:
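K0 above is built like a pinhole camera intrinsic matrix, with the frame height as focal length and the frame centre as principal point. A minimal numpy sketch (the 3D point is illustrative) of how such a matrix maps a camera-frame point onto the 1920x1080 visualisation frame:

    import numpy

    visu_width, visu_height = 1920, 1080

    # Same structure as the K0 edited above:
    # focal length = frame height, principal point = frame centre.
    K0 = numpy.asarray([[visu_height, 0.0, visu_width / 2],
                        [0.0, visu_height, visu_height / 2],
                        [0.0, 0.0, 1.0]])

    # Illustrative 3D point already expressed in the virtual camera frame.
    point = numpy.asarray([0.1, -0.2, 1.0])

    u, v, w = K0 @ point
    pixel = (u / w, v / w)    # homogeneous division yields (1068.0, 324.0)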
@@ -208,20 +223,24 @@ def main():
# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distortion is low, it is acceptable.
aoi2D_video_scene = aoi3D_scene.project(aruco_camera.get_K())
- # Draw 2D scene on video frame
- aoi2D_video_scene.draw(video_frame.matrix, video_gaze_pixel)
+ # Draw 2D scene on video frame excluding 'Visualisation_Plan' AOI
+ aoi2D_video_scene.draw(video_frame.matrix, video_gaze_pixel, ['Visualisation_Plan'])
# Store 2D scene at this time in millisecond
ts_aois_scenes[round(video_ts_ms)] = aoi2D_video_scene
- '''
- # Draw gaze path for 4 corners aoi
- if len(aoi) == 4:
- look_at = aoi2D_video_scene[ref_aoi].look_at(video_gaze_pixel)
+ # Select 2D visu scene if there is one for the detected marker
+ aoi2D_visu_scene = aoi2D_visu_scene_selector(marker_id)
+ aoi2D_visu_frame = aoi2D_visu_frame_selector(marker_id)
+
+ if aoi2D_visu_scene == None:
+ continue
+
+ look_at = aoi2D_video_scene['Visualisation_Plan'].look_at(video_gaze_pixel)
- visu_gaze_pixel = aoi2D_visu_scene[ref_aoi].looked_pixel(look_at)
- cv.circle(visu_frame, visu_gaze_pixel, 4, (0, 0, 255), -1)
- '''
+ visu_gaze_pixel = aoi2D_visu_scene['Visualisation_Plan'].looked_pixel(look_at)
+ cv.circle(aoi2D_visu_frame, visu_gaze_pixel, 4, (0, 0, 255), -1)
+
if args.window:
# Close window using 'Esc' key
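The gaze remapping above can be read as a two-step chain (a condensed sketch using the same names as the script; the semantics of the two calls are assumed from the surrounding code): look_at() presumably expresses the gaze pixel relative to the 'Visualisation_Plan' AOI as seen in the video frame, and looked_pixel() maps that relative position back onto the same AOI in the static visualisation frame.

    # Sketch of the remapping chain used in the hunk above.
    look_at = aoi2D_video_scene['Visualisation_Plan'].look_at(video_gaze_pixel)
    visu_gaze_pixel = aoi2D_visu_scene['Visualisation_Plan'].looked_pixel(look_at)
    cv.circle(aoi2D_visu_frame, visu_gaze_pixel, 4, (0, 0, 255), -1)    # red gaze dot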
@@ -231,8 +250,9 @@ def main():
# Display video
cv.imshow(f'Segment {tobii_segment.get_id()} ArUco AOI', video_frame.matrix)
- # Display visual scan frame
- #cv.imshow(f'Segment {tobii_segment.get_id()} visual scan', visu_frame)
+ # Display each visual scan frame
+ for marker_id, visu_frame in aoi2D_visu_frames.items():
+ cv.imshow(f'Segment {tobii_segment.get_id()} visual scan for marker {marker_id}', visu_frame)
# Write video
output_video.write(video_frame.matrix)
@@ -266,11 +286,12 @@ def main():
visual_scan.export_as_csv(vs_data_filepath)
print(f'Visual scan data saved into {vs_data_filepath}')
- # Export visual scan image
- #cv.imwrite(vs_visu_filepath, visu_frame)
- #print(f'Visual scan image saved into {vs_visu_filepath}')
+ # Export each visual scan picture
+ for marker_id, visu_frame in aoi2D_visu_frames.items():
+ cv.imwrite(vs_visu_filepath % marker_id, visu_frame)
+ print(f'Visual scan picture for marker {marker_id} saved into {vs_visu_filepath % marker_id}')
- # Notify where the visual scan video has been exported
+ # Notify when the visual scan video has been exported
print(f'Visual scan video saved into {vs_video_filepath}')