author    Théo de la Hogue    2022-12-13 10:55:20 +0100
committer Théo de la Hogue    2022-12-13 10:55:20 +0100
commit    76273c33348e1e84cbffb8bd0a7cea6d1aee1e74 (patch)
tree      1bd7417a16c076e7baae2ea8080d6f9f398fc436 /src
parent    097980e2c67ad81b1acbbc00707b141dd55397bf (diff)
download  argaze-76273c33348e1e84cbffb8bd0a7cea6d1aee1e74.zip
argaze-76273c33348e1e84cbffb8bd0a7cea6d1aee1e74.tar.gz
argaze-76273c33348e1e84cbffb8bd0a7cea6d1aee1e74.tar.bz2
argaze-76273c33348e1e84cbffb8bd0a7cea6d1aee1e74.tar.xz
Displaying gaze positions related to each frame.
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/tobii_segment_arscene_export.py  |  70
1 file changed, 63 insertions(+), 7 deletions(-)
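
The change below converts each valid Tobii gaze position from normalized coordinates to pixels and draws it over the video frame it belongs to, treating every frame as the reference until the next one (hardcoded to +40 ms, i.e. 25 fps). A minimal standalone sketch of that conversion and frame pacing follows; every name and value in it is assumed for illustration and is not the ArGaze or Tobii API:

# Sketch only: normalized gaze -> pixel coordinates, paced per video frame.
# Names and values below are illustrative assumptions, not the ArGaze/Tobii API.

def gaze_to_pixel(norm_x: float, norm_y: float, width: int, height: int) -> tuple:
    # Same conversion as gaze_position_px in the first hunk
    return int(norm_x * width), int(norm_y * height)

def frame_period_us(fps: float) -> int:
    # 25 fps -> 40000 us, the value hardcoded as next_video_ts below
    return int(1e6 / fps)

if __name__ == '__main__':
    width, height = 1920, 1080            # hypothetical segment video size
    video_ts = 1_000_000                  # current frame timestamp (us)
    next_video_ts = video_ts + frame_period_us(25)

    # Hypothetical timestamped, normalized gaze samples: (us, x, y)
    samples = [(1_010_000, 0.50, 0.50), (1_030_000, 0.52, 0.48), (1_050_000, 0.60, 0.40)]

    for ts, nx, ny in samples:
        # Keep only the samples that belong to the current frame interval
        if video_ts <= ts < next_video_ts:
            print(ts, gaze_to_pixel(nx, ny, width, height))
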
diff --git a/src/argaze/utils/tobii_segment_arscene_export.py b/src/argaze/utils/tobii_segment_arscene_export.py
index aba33bd..92527be 100644
--- a/src/argaze/utils/tobii_segment_arscene_export.py
+++ b/src/argaze/utils/tobii_segment_arscene_export.py
@@ -82,6 +82,25 @@ def main():
# Access to timestamped gaze position data buffer
tobii_ts_gaze_positions = tobii_segment_data['GazePosition']
+ # Format Tobii gaze positions in pixels
+ ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
+
+ # Initialise progress bar
+ MiscFeatures.printProgressBar(0, tobii_segment_video.duration, prefix = '\nGazePositions projection:', suffix = 'Complete', length = 100)
+
+ for ts, tobii_gaze_position in tobii_ts_gaze_positions.items():
+
+ # Update Progress Bar
+ progress = ts - int(args.time_range[0] * 1e6)
+ MiscFeatures.printProgressBar(progress, tobii_segment_video.duration, prefix = 'GazePositions projection:', suffix = 'Complete', length = 100)
+
+
+ # Test gaze position validity
+ if tobii_gaze_position.validity == 0:
+
+ gaze_position_px = (int(tobii_gaze_position.value[0] * tobii_segment_video.width), int(tobii_gaze_position.value[1] * tobii_segment_video.height))
+ ts_gaze_positions[ts] = GazeFeatures.GazePosition(gaze_position_px)
+
# Prepare video export in the same format as the segment video
output_video = TobiiVideo.TobiiVideoOutput(aoi_mp4_filepath, tobii_segment_video.stream)
@@ -102,7 +121,10 @@ def main():
# Iterate on video frames
for video_ts, video_frame in tobii_segment_video.frames():
- video_ts_ms = video_ts / 1e3
+ # This video frame is the reference until the next frame
+ # Here the next frame is assumed at +40 ms (25 fps)
+ # TODO: get the video fps to adapt this value
+ next_video_ts = video_ts + 40000
# Copy video frame to edit visualisation on it without disrupting aruco tracking
visu_frame = video_frame.copy()
@@ -150,8 +172,8 @@ def main():
projected_aois['error'] = str(e)
- cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+ cv.rectangle(visu_frame.matrix, (0, 100), (550, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 130), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
# Raised when timestamped buffer is empty
except KeyError as e:
@@ -161,12 +183,46 @@ def main():
projected_aois['offset'] = 0
projected_aois['error'] = e
- cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 1, cv.LINE_AA)
+ cv.rectangle(visu_frame.matrix, (0, 100), (550, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(e), (20, 130), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 1, cv.LINE_AA)
# Store projected AOI
ts_offset_aois[video_ts] = projected_aois
+ # Draw gaze positions until next frame
+ try:
+
+ # Initialise start and next gaze positions with the first buffered one
+ ts_start, start_gaze_position = ts_gaze_positions.first
+ ts_next, next_gaze_position = ts_gaze_positions.first
+
+ # Consume gaze positions as long as the next one comes before the next frame time
+ while ts_next < next_video_ts:
+
+ ts_start, start_gaze_position = ts_gaze_positions.pop_first()
+ ts_next, next_gaze_position = ts_gaze_positions.first
+
+ # Draw start gaze
+ start_gaze_position.draw(visu_frame.matrix)
+
+ if start_gaze_position.valid and next_gaze_position.valid:
+
+ # Draw movement from start to next
+ cv.line(visu_frame.matrix, start_gaze_position, next_gaze_position, (0, 255, 255), 1)
+
+ if start_gaze_position.valid:
+
+ # Write last start gaze position
+ cv.putText(visu_frame.matrix, str(start_gaze_position.value), start_gaze_position.value, cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ # Write last start gaze position timing
+ cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (31, 31, 31), -1)
+ cv.putText(visu_frame.matrix, f'Gaze time: {ts_start*1e-3:.3f} ms', (20, 85), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ # Raised when the gaze position buffer is empty
+ except IndexError:
+ pass
+
# Draw focus area
cv.rectangle(visu_frame.matrix, (int(video_frame.width/6), 0), (int(visu_frame.width*(1-1/6)), int(visu_frame.height)), (255, 150, 150), 1)
@@ -191,8 +247,8 @@ def main():
output_video.write(visu_frame.matrix)
# Update Progress Bar
- progress = video_ts_ms - int(args.time_range[0] * 1e3)
- MiscFeatures.printProgressBar(progress, tobii_segment_video.duration/1e3, prefix = 'Progress:', suffix = 'Complete', length = 100)
+ progress = video_ts*1e-3 - int(args.time_range[0] * 1e3)
+ MiscFeatures.printProgressBar(progress, tobii_segment_video.duration*1e-3, prefix = 'Progress:', suffix = 'Complete', length = 100)
# Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
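
The gaze-drawing hunk above drains the timestamped buffer pairwise (pop_first / first) until the next sample would land after the next frame timestamp, so each (start, next) pair can be drawn as a movement segment on the current frame. A reduced sketch of that draining pattern, using a plain deque as a stand-in for GazeFeatures.TimeStampedGazePositions (an assumption, not the library API):

from collections import deque

def drain_until(samples: deque, next_video_ts: int):
    # Yield (start, next) timestamped pairs belonging to the current frame interval
    while len(samples) >= 2 and samples[1][0] < next_video_ts:
        start = samples.popleft()   # like ts_gaze_positions.pop_first()
        nxt = samples[0]            # like ts_gaze_positions.first
        yield start, nxt

if __name__ == '__main__':
    # Hypothetical (timestamp_us, (x_px, y_px)) samples
    gaze = deque([(10_000, (960, 540)), (30_000, (970, 530)), (50_000, (990, 500))])
    for (ts_start, p_start), (ts_next, p_next) in drain_until(gaze, next_video_ts=40_000):
        print(f'draw segment {p_start} -> {p_next} (start gaze at {ts_start} us)')
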