author     Théo de la Hogue  2022-10-11 15:24:46 +0200
committer  Théo de la Hogue  2022-10-11 15:24:46 +0200
commit     088e90de7cab01300a744670af6d8f0f8392deef (patch)
tree       be9c8fb0ad5cb0ed1b6ba6af5222d67fee369942 /src
parent     be8ec51b2d91b6156beeb77b528cb47ea039dfb3 (diff)
Using sync data streaming feature.
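
The change replaces per-frame buffering and popping of timestamped data with a reading callback registered on the data stream, so gaze and gyroscope values are updated as they arrive while the main loop only reads and draws video frames. Below is a minimal sketch of that pattern, assuming the TobiiController/GazeFeatures API as used in this script and the import layout of the other argaze Tobii utilities; the IP address and project/participant names are placeholders:

    from argaze import GazeFeatures
    from argaze.TobiiGlassesPro2 import TobiiController

    # Placeholder address and session names, for illustration only
    tobii_controller = TobiiController.TobiiController('192.168.1.10', 'myProject', 'mySelf')

    # Enable the data stream once; no timestamped buffers are needed anymore
    tobii_data_stream = tobii_controller.enable_data_stream()

    gaze_position_px = GazeFeatures.GazePosition((0, 0))

    def data_stream_callback(data_ts, data_object, data_object_type):
        # Called for each incoming data object; validity == 0 means the value is valid
        global gaze_position_px

        if data_object_type == 'GazePosition' and data_object.validity == 0:
            # Values are normalised; scaling to pixels requires the current frame size
            gaze_position_px = GazeFeatures.GazePosition((data_object.value[0], data_object.value[1]))

    # Register the callback, then start streaming: it runs as data arrives,
    # while the capture loop only has to read and draw video frames
    tobii_data_stream.reading_callback = data_stream_callback
    tobii_controller.start_streaming()
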
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/tobii_stream_display.py  162
1 files changed, 83 insertions, 79 deletions
diff --git a/src/argaze/utils/tobii_stream_display.py b/src/argaze/utils/tobii_stream_display.py
index 8dd2341..8c67cee 100644
--- a/src/argaze/utils/tobii_stream_display.py
+++ b/src/argaze/utils/tobii_stream_display.py
@@ -23,109 +23,109 @@ def main():
# Create tobii controller
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')
- # Enable tobii data stream
+ # Enable tobii data stream
tobii_data_stream = tobii_controller.enable_data_stream()
# Enable tobii video stream
tobii_video_stream = tobii_controller.enable_video_stream()
- # Prepare to timestamped gaze position data stream bufferring
- tobii_ts_gaze_positions = DataStructures.TimeStampedBuffer()
+ # Init head movement
+ head_movement_px = numpy.array((0, 0))
- # Prepare to timestamped gaze position 3d data stream bufferring
- tobii_ts_gaze_positions_3d = DataStructures.TimeStampedBuffer()
+ # Init gaze position
+ gaze_position_px = GazeFeatures.GazePosition((0, 0))
- # Prepare to timestamped head rotations data stream bufferring
- tobii_ts_head_rotations = DataStructures.TimeStampedBuffer()
-
- # Start streaming
- tobii_controller.start_streaming()
-
- # Live video and data stream capture loop
- try:
-
- # Assess loop performance
- loop_chrono = MiscFeatures.TimeProbe()
- fps = 0
-
- while tobii_video_stream.is_alive():
-
- # Read video stream
- video_ts, video_frame = tobii_video_stream.read()
- video_ts_ms = video_ts / 1e3
-
- # Read data stream
- data_ts, data_stream = tobii_data_stream.read()
- data_ts_ms = data_ts / 1e3
-
- try:
-
- # Buffer last received gaze positions
- tobii_ts_gaze_positions.append(data_stream['GazePosition'])
-
- # Buffer last received gaze positions 3d
- tobii_ts_gaze_positions_3d.append(data_stream['GazePosition3D'])
+ # Init data timestamped in millisecond
+ data_ts_ms = 0
+
+ # Assess temporal performance
+ loop_chrono = MiscFeatures.TimeProbe()
+ gyroscope_chrono = MiscFeatures.TimeProbe()
+ gaze_chrono = MiscFeatures.TimeProbe()
+
+ loop_ps = 0
+ gyroscope_ps = 0
+ gaze_ps = 0
+
+ def data_stream_callback(data_ts, data_object, data_object_type):
- # Buffer last received gaze positions 3d
- tobii_ts_head_rotations.append(data_stream['Gyroscope'])
+ nonlocal head_movement_px
+ nonlocal gaze_position_px
+ nonlocal data_ts_ms
+ nonlocal gyroscope_chrono
+ nonlocal gaze_chrono
- # Ignore missing data stream
- except KeyError as e:
- pass
+ data_ts_ms = data_ts / 1e3
- try:
+ match data_object_type:
- # Get nearest head rotation before video timestamp and remove all head rotations before
- earliest_ts, earliest_head_rotation = tobii_ts_head_rotations.pop_last()
+ case 'Gyroscope':
+
+ # Assess gyroscope stream performance
+ gyroscope_chrono.lap()
# Calculate head movement considering only head yaw and pitch
- head_movement = numpy.array(earliest_head_rotation.value)
+ head_movement = numpy.array(data_object.value)
head_movement_px = head_movement.astype(int)
- head_movement_norm = numpy.linalg.norm(head_movement[0:2])
-
- # Draw movement vector
- cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2)), (int(video_frame.width/2) + head_movement_px[1], int(video_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
-
- # Wait for head rotation
- except KeyError:
- pass
- try:
-
- # Get nearest gaze position before video timestamp and remove all gaze positions before
- _, earliest_gaze_position = tobii_ts_gaze_positions.pop_last()
+ case 'GazePosition':
+
+ # Assess gaze position stream performance
+ gaze_chrono.lap()
# Ignore frame when gaze position is not valid
- if earliest_gaze_position.validity == 0:
+ if data_object.validity == 0:
- gaze_position_pixel = GazeFeatures.GazePosition( (int(earliest_gaze_position.value[0] * video_frame.width), int(earliest_gaze_position.value[1] * video_frame.height)) )
+ gaze_position_px = GazeFeatures.GazePosition( (int(data_object.value[0] * video_frame.width), int(data_object.value[1] * video_frame.height)) )
+
+ case 'GazePosition3D':
+
+ # Ignore frame when gaze position 3D is not valid
+ if data_object.validity == 0:
- # Get nearest gaze position 3D before video timestamp and remove all gaze positions before
- _, earliest_gaze_position_3d = tobii_ts_gaze_positions_3d.pop_last()
+ gaze_accuracy_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.ACCURACY)) * data_object.value[2]
+ tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.CAMERA_HFOV / 2)) * data_object.value[2]
- # Ignore frame when gaze position 3D is not valid
- if earliest_gaze_position_3d.validity == 0:
-
- gaze_accuracy_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.ACCURACY)) * earliest_gaze_position_3d.value[2]
- tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.CAMERA_HFOV / 2)) * earliest_gaze_position_3d.value[2]
+ gaze_position_px.accuracy = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
- gaze_position_pixel.accuracy = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
-
- # Draw gaze
- gaze_position_pixel.draw(video_frame.matrix)
+ tobii_data_stream.reading_callback = data_stream_callback
+
+ # Start streaming
+ tobii_controller.start_streaming()
+
+ # Live video stream capture loop
+ try:
+
+ while tobii_video_stream.is_alive():
+
+ # Read video stream
+ video_ts, video_frame = tobii_video_stream.read()
+ video_ts_ms = video_ts / 1e3
- # Wait for gaze position
- except KeyError:
- pass
-
# Assess loop performance
- loop_time, loop_counter, elapsed_time = loop_chrono.lap()
+ lap_time, lap_counter, elapsed_time = loop_chrono.lap()
# Update fps each 10 loops
- if loop_counter >= 10:
+ if lap_counter >= 10:
- fps = loop_counter / elapsed_time
+ loop_ps = 1e3 * lap_counter / elapsed_time
loop_chrono.restart()
+
+ # Assess gyroscope streaming performance
+ elapsed_time, lap_counter = gyroscope_chrono.end()
+ gyroscope_ps = 1e3 * lap_counter / elapsed_time
+ gyroscope_chrono.restart()
+
+ # Assess gaze streaming performance
+ elapsed_time, lap_counter = gaze_chrono.end()
+ gaze_ps = 1e3 * lap_counter / elapsed_time
+ gaze_chrono.restart()
+
+ # Draw head movement
+ cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2)), (int(video_frame.width/2) + head_movement_px[1], int(video_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
+
+ # Draw gaze
+ gaze_position_px.draw(video_frame.matrix)
# Draw center
cv.line(video_frame.matrix, (int(video_frame.width/2) - 50, int(video_frame.height/2)), (int(video_frame.width/2) + 50, int(video_frame.height/2)), (255, 150, 150), 1)
@@ -135,14 +135,18 @@ def main():
cv.rectangle(video_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
cv.putText(video_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
cv.putText(video_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(video_frame.matrix, f'Fps: {int(fps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(video_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.rectangle(video_frame.matrix, (0, 50), (580, 100), (127, 127, 127), -1)
+ cv.putText(video_frame.matrix, f'Gyroscope fps: {int(gyroscope_ps)}', (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+ cv.putText(video_frame.matrix, f'Gaze fps: {int(gaze_ps)}', (350, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ cv.imshow(f'Video and data stream', video_frame.matrix)
+
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
break
- cv.imshow(f'Video and data stream', video_frame.matrix)
-
# Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
pass
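
Note on the new performance counters: the per-stream rates drawn on the frame are derived from TimeProbe lap counts, and the 1e3 factor implies elapsed_time is reported in milliseconds. A minimal sketch of that arithmetic, with made-up values for illustration:

    # elapsed_time in milliseconds, lap_counter = laps recorded since the last restart
    elapsed_time, lap_counter = 500.0, 25            # e.g. 25 gyroscope callbacks over 500 ms
    gyroscope_ps = 1e3 * lap_counter / elapsed_time  # -> 50 samples per second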