-rw-r--r--	src/argaze/utils/tobii_stream_aruco_aoi_display.py	125
1 file changed, 69 insertions(+), 56 deletions(-)
diff --git a/src/argaze/utils/tobii_stream_aruco_aoi_display.py b/src/argaze/utils/tobii_stream_aruco_aoi_display.py
index 5529052..50c5cd7 100644
--- a/src/argaze/utils/tobii_stream_aruco_aoi_display.py
+++ b/src/argaze/utils/tobii_stream_aruco_aoi_display.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python
import argparse
-import os, time
-import json
+import os, json
from argaze import DataStructures
from argaze import GazeFeatures
@@ -11,8 +10,8 @@ from argaze.ArUcoMarkers import *
from argaze.AreaOfInterest import *
from argaze.utils import MiscFeatures
-import numpy
import cv2 as cv
+import numpy
def main():
"""
@@ -22,7 +21,7 @@ def main():
# Manage arguments
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
+ parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='10.34.0.12', help='tobii glasses ip')
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
parser.add_argument('-md', '--marker_dictionary', metavar='MARKER_DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL, DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
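Note: the --marker_dictionary value names one of OpenCV's predefined ArUco dictionary constants. For reference, such a name can be resolved with cv2.aruco as sketched below (an illustration only, not the ArUcoTracker implementation; the helper name get_aruco_dictionary is hypothetical):

    import cv2 as cv

    def get_aruco_dictionary(name):
        # Map a name such as 'DICT_ARUCO_ORIGINAL' to the matching cv.aruco constant
        try:
            dictionary_id = getattr(cv.aruco, name)
        except AttributeError:
            raise ValueError(f'unknown ArUco dictionary: {name}')
        return cv.aruco.getPredefinedDictionary(dictionary_id)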
@@ -40,13 +39,16 @@ def main():
# Create tobii controller
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')
- # Enable tobii data stream
+ # Calibrate tobii glasses
+ tobii_controller.calibrate()
+
+ # Enable tobii data stream
tobii_data_stream = tobii_controller.enable_data_stream()
# Enable tobii video stream
tobii_video_stream = tobii_controller.enable_video_stream()
- # create aruco camera
+ # Create aruco camera
aruco_camera = ArUcoCamera.ArUcoCamera()
# Load calibration file
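The calibration file referenced here typically stores the intrinsic camera matrix and the lens distortion coefficients produced by an OpenCV calibration. A minimal loading sketch, assuming a simple JSON layout (the keys camera_matrix and distortion_coefficients are illustrative, not necessarily ArUcoCamera's actual schema):

    import json
    import numpy

    def load_calibration(filepath):
        # Read intrinsic parameters from a JSON calibration file (hypothetical layout)
        with open(filepath) as calibration_file:
            data = json.load(calibration_file)
        camera_matrix = numpy.array(data['camera_matrix']).reshape(3, 3)  # 3x3 intrinsic matrix
        dist_coeffs = numpy.array(data['distortion_coefficients'])        # k1, k2, p1, p2, k3, ...
        return camera_matrix, dist_coeffs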
@@ -91,12 +93,46 @@ def main():
# Create timestamped buffer to store AOIs scene in time
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
- # Prepare to timestamped head rotations data stream bufferring
- tobii_ts_head_rotations = DataStructures.TimeStampedBuffer()
+ # Init head movement
+ head_movement_px = numpy.array((0, 0))
+ head_movement_norm = 0
+
+ # Init data timestamped in millisecond
+ data_ts_ms = 0
+
+ # Assess temporal performance
+ loop_chrono = MiscFeatures.TimeProbe()
+ gyroscope_chrono = MiscFeatures.TimeProbe()
+
+ loop_ps = 0
+ gyroscope_ps = 0
+
+ def data_stream_callback(data_ts, data_object, data_object_type):
+
+ nonlocal head_movement_px
+ nonlocal head_movement_norm
+ nonlocal data_ts_ms
+ nonlocal gyroscope_chrono
+
+ data_ts_ms = data_ts / 1e3
+
+ match data_object_type:
+
+ case 'Gyroscope':
+
+ # Assess gyroscope stream performance
+ gyroscope_chrono.lap()
+
+ # Calculate head movement considering only head yaw and pitch
+ head_movement = numpy.array(data_object.value)
+ head_movement_px = head_movement.astype(int)
+ head_movement_norm = numpy.linalg.norm(head_movement[0:2])
+
+ tobii_data_stream.reading_callback = data_stream_callback
# Start streaming
tobii_controller.start_streaming()
-
+
# Live video stream capture loop
try:
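The rewritten callback keeps only the latest gyroscope sample and reduces it to a yaw/pitch magnitude, instead of buffering timestamped head rotations as before. A small worked example of that reduction, using an illustrative gyroscope value:

    import numpy

    # Illustrative gyroscope sample (yaw, pitch, roll rates)
    gyroscope_value = (12.5, -30.0, 4.0)

    head_movement = numpy.array(gyroscope_value)
    head_movement_px = head_movement.astype(int)                 # integer offsets used to draw the vector
    head_movement_norm = numpy.linalg.norm(head_movement[0:2])   # yaw/pitch magnitude, roll ignored

    print(head_movement_px, head_movement_norm)                  # [ 12 -30   4] 32.5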
@@ -117,33 +153,9 @@ def main():
# Copy video frame to edit visualisation on it without disrupting aruco tracking
visu_frame = video_frame.copy()
- # Read data stream
- data_ts, data_stream = tobii_data_stream.read()
- data_ts_ms = data_ts / 1e3
-
- try:
-
- # Buffer last received gaze positions 3d
- tobii_ts_head_rotations.append(data_stream['Gyroscope'])
-
- # Ignore missing data stream
- except KeyError as e:
- pass
-
# Process video and data frame
try:
- # Get nearest head rotation before video timestamp and remove all head rotations before
- _, nearest_head_rotation = tobii_ts_head_rotations.pop_first_until(video_ts)
-
- # Calculate head movement considering only head yaw and pitch
- head_movement = numpy.array(nearest_head_rotation.value)
- head_movement_px = head_movement.astype(int)
- head_movement_norm = numpy.linalg.norm(head_movement[0:2])
-
- # Draw movement vector
- cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2)), (int(visu_frame.width/2) + head_movement_px[1], int(visu_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
-
# Head movement detection hysteresis
# TODO : pass the threshold value as argument
if not head_moving and head_movement_norm > 50:
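This hysteresis keeps the head_moving flag from flickering around a single cutoff: the state only switches to moving above an upper threshold and back to still below a lower one. A minimal sketch, assuming an illustrative lower threshold of 10 (only the upper threshold, hard-coded to 50, appears in this hunk):

    MOVING_THRESHOLD = 50   # switch to 'moving' above this norm
    STILL_THRESHOLD = 10    # switch back to 'still' below this norm (illustrative value)

    head_moving = False

    def update_head_moving(head_movement_norm):
        global head_moving
        if not head_moving and head_movement_norm > MOVING_THRESHOLD:
            head_moving = True
        elif head_moving and head_movement_norm < STILL_THRESHOLD:
            head_moving = False
        return head_moving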
@@ -215,7 +227,7 @@ def main():
for name, aoi_array in aoi2D_dict.items():
aoi2D_merged_scene[name] = numpy.sum(aoi_array, axis=0) / len(aoi_array)
- aoi2D_merged_scene.draw(visu_frame.matrix, (0, 0))
+ aoi2D_merged_scene.draw(visu_frame.matrix)
# Store 2D merged scene at this time in millisecond
ts_aois_scenes[round(video_ts_ms)] = aoi2D_merged_scene
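When the same AOI is projected from several tracked markers, the 2D projections are merged by averaging their corner coordinates across detections. A small worked example of that averaging, with illustrative coordinates:

    import numpy

    # Two detections of the same rectangular AOI (4 corners each), slightly offset
    aoi_array = [
        numpy.array([[10, 10], [110, 10], [110, 60], [10, 60]]),
        numpy.array([[12, 12], [112, 12], [112, 62], [12, 62]]),
    ]

    # Corner-wise mean, as done above for aoi2D_merged_scene
    merged_aoi = numpy.sum(aoi_array, axis=0) / len(aoi_array)
    print(merged_aoi)  # corners averaged to [[11, 11], [111, 11], [111, 61], [11, 61]]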
@@ -228,43 +240,44 @@ def main():
# Write warning
except UserWarning as w:
- cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
- cv.putText(visu_frame.matrix, str(w), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
-
- # Raised when timestamped buffer is empty
- except KeyError:
- pass
+ cv.rectangle(visu_frame.matrix, (0, 100), (500, 150), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, str(w), (20, 140), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
# Assess loop performance
- loop_time, loop_counter, elapsed_time = loop_chrono.lap()
+ lap_time, lap_counter, elapsed_time = loop_chrono.lap()
# Update fps each 10 loops
- if loop_counter >= 10:
+ if lap_counter >= 10:
- fps = 1e3 * loop_counter / elapsed_time
+ loop_ps = 1e3 * lap_counter / elapsed_time
loop_chrono.restart()
+
+ # Assess gyroscope streaming performance
+ elapsed_time, lap_counter = gyroscope_chrono.end()
+ gyroscope_ps = 1e3 * lap_counter / elapsed_time
+ gyroscope_chrono.restart()
- # Draw focus area
- cv.rectangle(visu_frame.matrix, (int(video_frame.width/6), 0), (int(visu_frame.width*(1-1/6)), int(visu_frame.height)), (255, 150, 150), 1)
-
+ # Draw head movement vector
+ cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2)), (int(visu_frame.width/2) + head_movement_px[1], int(visu_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
+
# Draw center
cv.line(visu_frame.matrix, (int(visu_frame.width/2) - 50, int(visu_frame.height/2)), (int(visu_frame.width/2) + 50, int(visu_frame.height/2)), (255, 150, 150), 1)
cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
-
+
# Write stream timing
cv.rectangle(visu_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
cv.putText(visu_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
cv.putText(visu_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(visu_frame.matrix, f'Fps: {int(fps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
-
- if args.window:
+ cv.putText(visu_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- # Close window using 'Esc' key
- if cv.waitKey(1) == 27:
- break
+ cv.rectangle(visu_frame.matrix, (0, 50), (500, 100), (127, 127, 127), -1)
+ cv.putText(visu_frame.matrix, f'Gyroscope fps: {int(gyroscope_ps)}', (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
- # Display visualisation
- cv.imshow(f'Stream ArUco AOI', visu_frame.matrix)
+ cv.imshow(f'Stream ArUco AOI', visu_frame.matrix)
+
+ # Close window using 'Esc' key
+ if cv.waitKey(1) == 27:
+ break
# Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
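Both rates shown in the overlay follow the same pattern: count laps with a probe, then divide the lap count by the elapsed time in milliseconds and scale by 1e3 to get a per-second figure. A minimal sketch of a probe with the interface used above, assuming MiscFeatures.TimeProbe behaves roughly like this (the real implementation may differ):

    import time

    class TimeProbe:
        # Hypothetical stand-in for MiscFeatures.TimeProbe, inferred from its usage above

        def __init__(self):
            self.restart()

        def restart(self):
            self.start = time.perf_counter()
            self.last = self.start
            self.counter = 0

        def lap(self):
            now = time.perf_counter()
            lap_time = (now - self.last) * 1e3        # duration of this lap in ms
            elapsed_time = (now - self.start) * 1e3   # time since (re)start in ms
            self.last = now
            self.counter += 1
            return lap_time, self.counter, elapsed_time

        def end(self):
            elapsed_time = (time.perf_counter() - self.start) * 1e3
            return elapsed_time, self.counter

    # loop_ps = 1e3 * lap_counter / elapsed_time then yields laps per second,
    # since elapsed_time is expressed in milliseconds.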
@@ -275,7 +288,7 @@ def main():
# Stop streaming
tobii_controller.stop_streaming()
-
+
if __name__ == '__main__':
main()
\ No newline at end of file