path: root/src
author      Théo de la Hogue    2022-09-21 18:33:41 +0200
committer   Théo de la Hogue    2022-09-21 18:33:41 +0200
commit      ab33be7af59513b1d19a52472303ad52e720e978 (patch)
tree        16c106519c74088648d17823f4d35ee0925cb9b0 /src
parent      fb1a05aa8c309263d22cbb79e47c8c6f38366790 (diff)
Updating script.
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/tobii_segment_gaze_movements_export.py  57
1 file changed, 35 insertions, 22 deletions
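
The main change in this diff merges the two gaze-position loops: the 3D gaze position is now popped at the same timestamp as the 2D one, so the accuracy in pixels can be computed and attached before the position is stored. A minimal, hypothetical sketch of that angular-accuracy-to-pixel conversion, reusing the constants from the diff (the function name and example values below are illustrative, not part of the commit):

import numpy

def accuracy_to_pixels(gaze_depth_mm, frame_width_px, accuracy_deg=1.42, camera_hfov_deg=82):
    # Project the angular accuracy and the camera horizontal FOV onto a plane
    # at the 3D gaze depth (mm), then express their ratio as a pixel radius.
    accuracy_mm = numpy.sin(numpy.deg2rad(accuracy_deg)) * gaze_depth_mm
    hfov_mm = numpy.sin(numpy.deg2rad(camera_hfov_deg)) * gaze_depth_mm
    return round(frame_width_px * accuracy_mm / hfov_mm)

print(accuracy_to_pixels(600, 1920))  # ~48 px at 600 mm depth on a 1920 px wide frame

Note that the gaze depth cancels out in the ratio; it is kept in the sketch only to mirror the computation done in the script.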
diff --git a/src/argaze/utils/tobii_segment_gaze_movements_export.py b/src/argaze/utils/tobii_segment_gaze_movements_export.py
index b0c273a..b8806fe 100644
--- a/src/argaze/utils/tobii_segment_gaze_movements_export.py
+++ b/src/argaze/utils/tobii_segment_gaze_movements_export.py
@@ -81,34 +81,46 @@ def main():
# Access to timestamped gaze 3D positions data buffer
tobii_ts_gaze_positions_3d = tobii_segment_data['GazePosition3D']
- # Format tobii gaze position in pixel and store them using millisecond unit timestamp
- ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
-
# !!! the parameters below are specific to the TobiiGlassesPro2 !!!
# Reference : https://www.biorxiv.org/content/10.1101/299925v1
tobii_accuracy = 1.42 # degree
tobii_precision = 0.34 # degree
tobii_camera_hfov = 82 # degree
+ # Format tobii gaze position and accuracy in pixel
+ ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
+
for ts, tobii_gaze_position in tobii_ts_gaze_positions.items():
+ # Test gaze position validity
if tobii_gaze_position.validity == 0:
gaze_position_pixel = GazeFeatures.GazePosition( (int(tobii_gaze_position.value[0] * tobii_segment_video.get_width()), int(tobii_gaze_position.value[1] * tobii_segment_video.get_height())) )
- ts_gaze_positions[ts/1000] = gaze_position_pixel
+ # Get gaze position 3D at same gaze position timestamp
+ tobii_gaze_position_3d = tobii_ts_gaze_positions_3d.pop(ts)
- for ts, tobii_ts_gaze_position_3d in tobii_ts_gaze_positions_3d.items():
+ # Test gaze position 3d validity
+ if tobii_gaze_position_3d.validity == 0:
+
+ gaze_accuracy_mm = numpy.sin(numpy.deg2rad(tobii_accuracy)) * tobii_gaze_position_3d.value[2]
+ tobii_camera_hfov_mm = numpy.sin(numpy.deg2rad(tobii_camera_hfov)) * tobii_gaze_position_3d.value[2]
+
+ gaze_position_pixel.accuracy = round(tobii_segment_video.get_width() * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
- if tobii_ts_gaze_position_3d.validity == 0:
+ # Store gaze position using millisecond unit timestamp
+ ts_gaze_positions[ts/1000] = gaze_position_pixel
- gaze_accuracy_mm = numpy.sin(numpy.deg2rad(tobii_accuracy)) * tobii_ts_gaze_position_3d.value[2]
- tobii_camera_hfov_mm = numpy.sin(numpy.deg2rad(tobii_camera_hfov)) * tobii_ts_gaze_position_3d.value[2]
-
- ts_gaze_positions[ts/1000].accuracy = round(tobii_segment_video.get_width() * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
+ continue
- print(f'Dispersion threshold: {args.dispersion_threshold}')
- print(f'Duration threshold: {args.duration_threshold}')
+ # Store invalid gaze position for further movement processing
+ ts_gaze_positions[ts/1000] = GazeFeatures.UnvalidGazePosition()
+
+ print(f'Invalid gaze position stored at {ts/1000} ms')
+
+ print(f'Movement identifier parameters:')
+ print(f'\tDispersion threshold = {args.dispersion_threshold}')
+ print(f'\tDuration threshold = {args.duration_threshold}')
# Start movement identification
movement_identifier = GazeFeatures.DispersionBasedMovementIdentifier(ts_gaze_positions, args.dispersion_threshold, args.duration_threshold)
@@ -181,13 +193,6 @@ def main():
video_ts_ms = video_ts / 1000
- # write segment timing
- cv.putText(video_frame.matrix, f'Segment time: {int(video_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
- # write movement identification parameters
- cv.putText(video_frame.matrix, f'Dispersion threshold: {args.dispersion_threshold} px', (20, 100), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(video_frame.matrix, f'Duration threshold: {args.duration_threshold} ms', (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
# Draw current fixation
if len(fixations) > 0:
@@ -218,14 +223,22 @@ def main():
# Get closest gaze position before video timestamp and remove all gaze positions before
_, nearest_gaze_position = ts_gaze_positions.pop_first_until(video_ts_ms)
- # Draw gaze position and precision
- cv.circle(video_frame.matrix, nearest_gaze_position, 2, (0, 255, 255), -1)
- cv.circle(video_frame.matrix, nearest_gaze_position, nearest_gaze_position.accuracy, (0, 255, 255), 1)
+ # Draw gaze
+ nearest_gaze_position.draw(video_frame.matrix)
# Wait for gaze position
except ValueError:
pass
+ # Write segment timing
+ cv.rectangle(video_frame.matrix, (0, 0), (550, 50), (63, 63, 63), -1)
+ cv.putText(video_frame.matrix, f'Segment time: {int(video_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+ # Write movement identification parameters
+ cv.rectangle(video_frame.matrix, (0, 90), (550, 150), (63, 63, 63), -1)
+ cv.putText(video_frame.matrix, f'Dispersion threshold: {args.dispersion_threshold} px', (20, 100), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(video_frame.matrix, f'Duration threshold: {args.duration_threshold} ms', (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
if args.window:
# Close window using 'Esc' key
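
The overlay text is also redrawn later in the frame pipeline, on top of dark background rectangles, so the segment timing and identification parameters stay readable over any video content. A minimal sketch of that pattern, assuming a plain OpenCV frame (the frame and text values below are placeholders, not the script's own data):

import cv2 as cv
import numpy

frame = numpy.zeros((480, 640, 3), dtype=numpy.uint8)  # stand-in for video_frame.matrix

# Dark banner first, then white text on top of it
cv.rectangle(frame, (0, 0), (550, 50), (63, 63, 63), -1)
cv.putText(frame, 'Segment time: 1234 ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)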