Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/tobii_segment_arscene_export.py  58
1 file changed, 21 insertions, 37 deletions
diff --git a/src/argaze/utils/tobii_segment_arscene_export.py b/src/argaze/utils/tobii_segment_arscene_export.py
index a1acc55..9d92e56 100644
--- a/src/argaze/utils/tobii_segment_arscene_export.py
+++ b/src/argaze/utils/tobii_segment_arscene_export.py
@@ -14,26 +14,6 @@ from argaze.utils import MiscFeatures
import cv2 as cv
import numpy
-def make_rotation_matrix(x, y, z):
-
- # Create rotation matrix around x axis
- c = numpy.cos(numpy.deg2rad(x))
- s = numpy.sin(numpy.deg2rad(x))
- Rx = numpy.array([[1, 0, 0], [0, c, -s], [0, s, c]])
-
- # Create rotation matrix around y axis
- c = numpy.cos(numpy.deg2rad(y))
- s = numpy.sin(numpy.deg2rad(y))
- Ry = numpy.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
-
- # Create rotation matrix around z axis
- c = numpy.cos(numpy.deg2rad(z))
- s = numpy.sin(numpy.deg2rad(z))
- Rz = numpy.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])
-
- # Return intrinsic rotation matrix
- return Rx.dot(Ry.dot(Rz))
-
def main():
"""
Track ArUcoPlan into Tobii Glasses Pro 2 camera video stream.
@@ -78,8 +58,9 @@ def main():
os.makedirs(destination_path)
print(f'{destination_path} folder created')
- vs_data_filepath = f'{destination_path}/aoi.csv'
- vs_video_filepath = f'{destination_path}/aoi.mp4'
+ aoi_json_filepath = f'{destination_path}/aoi.json'
+ aoi_csv_filepath = f'{destination_path}/aoi.csv'
+ aoi_mp4_filepath = f'{destination_path}/aoi.mp4'
# Load a tobii segment
tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1e6), int(args.time_range[1] * 1e6) if args.time_range[1] != None else None)
@@ -102,7 +83,7 @@ def main():
tobii_ts_gaze_positions = tobii_segment_data['GazePosition']
# Prepare video exportation at the same format than segment video
- output_video = TobiiVideo.TobiiVideoOutput(vs_video_filepath, tobii_segment_video.stream)
+ output_video = TobiiVideo.TobiiVideoOutput(aoi_mp4_filepath, tobii_segment_video.stream)
# Load ar scene
ar_scene = ArScene.ArScene.from_json(args.project_path)
@@ -136,6 +117,7 @@ def main():
_, nearest_vts = tobii_ts_vts.get_last_before(video_ts)
projected_aois['offset'] = nearest_vts.offset
+ projected_aois['comment'] = ''
# Hide frame left and right borders before tracking to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
@@ -155,21 +137,22 @@ def main():
# Draw scene projection
scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 255, 255))
- # Catch warnings raised by project_scene method
- except UserWarning as w:
-
- projected_aois['comment'] = w
+ # Catch exceptions raised by project_scene method
+ except (ArScene.PoseEstimationFailed, ArScene.SceneProjectionFailed) as e:
# Draw tracked markers
ar_scene.aruco_tracker.draw_tracked_markers(visu_frame.matrix)
- if w == 'Pose estimation fails':
+ if str(e) == 'Unconsistent marker poses':
+
+ projected_aois['comment'] = str(e) + ': ' + str(e.unconsistencies)
- # Draw black AOI scene
- scene_projection.draw(visu_frame.matrix, (0, 0), color=(0, 0, 0))
+ else:
+
+ projected_aois['comment'] = str(e)
cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(w), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+ cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
# Raised when timestamped buffer is empty
except KeyError as e:
@@ -184,7 +167,7 @@ def main():
# Store projected AOI
ts_offset_aois[video_ts] = projected_aois
-
+
# Draw focus area
cv.rectangle(visu_frame.matrix, (int(video_frame.width/6), 0), (int(visu_frame.width*(1-1/6)), int(visu_frame.height)), (255, 150, 150), 1)
@@ -223,18 +206,19 @@ def main():
output_video.close()
# Print aruco tracking metrics
- print('\nAruco marker tracking metrics')
+ print('\n\nAruco marker tracking metrics')
try_count, tracked_counts = ar_scene.aruco_tracker.track_metrics
for marker_id, tracked_count in tracked_counts.items():
- print(f'Markers {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
+ print(f'\tMarkers {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
# Export aruco aoi data
- ts_offset_aois.as_dataframe().to_csv(vs_data_filepath, index=True)
- print(f'Aruco AOI data saved into {vs_data_filepath}')
+ ts_offset_aois.to_json(aoi_json_filepath)
+ ts_offset_aois.as_dataframe().to_csv(aoi_csv_filepath)
+ print(f'Aruco AOI data saved into {aoi_json_filepath} and {aoi_csv_filepath}')
# Notify when the aruco aoi video has been exported
- print(f'Aruco AOI video saved into {vs_video_filepath}')
+ print(f'Aruco AOI video saved into {aoi_mp4_filepath}')
if __name__ == '__main__':