-rw-r--r--  src/argaze/TobiiGlassesPro2/TobiiData.py             114
-rw-r--r--  src/argaze/utils/MiscFeatures.py                      13
-rw-r--r--  src/argaze/utils/tobii_stream_aruco_aoi_display.py     2
-rw-r--r--  src/argaze/utils/tobii_stream_display.py              162
4 files changed, 173 insertions, 118 deletions
diff --git a/src/argaze/TobiiGlassesPro2/TobiiData.py b/src/argaze/TobiiGlassesPro2/TobiiData.py
index 975afce..903dcc7 100644
--- a/src/argaze/TobiiGlassesPro2/TobiiData.py
+++ b/src/argaze/TobiiGlassesPro2/TobiiData.py
@@ -11,6 +11,8 @@ import queue
from argaze import DataStructures
from argaze.TobiiGlassesPro2 import TobiiNetworkInterface
+from argaze.utils import MiscFeatures
+
@dataclass
class DirSig():
"""Define dir sig data (dir sig)."""
@@ -287,6 +289,8 @@ class TobiiDataSegment():
class TobiiDataStream(threading.Thread):
"""Capture Tobii Glasses Pro 2 data stream in separate thread."""
+ reading_callback = None
+
def __init__(self, network_interface: TobiiNetworkInterface.TobiiNetworkInterface):
"""Initialise thread super class as a deamon dedicated to data reception."""
@@ -296,7 +300,7 @@ class TobiiDataStream(threading.Thread):
self.__network = network_interface
self.__data_socket = self.__network.make_socket()
- self.__data_queue = queue.Queue()
+ self.__data_queue = queue.Queue(50) # TODO: set queue size according to a documented technical rationale
self.__stop_event = threading.Event()
self.__read_lock = threading.Lock()
@@ -306,6 +310,8 @@ class TobiiDataStream(threading.Thread):
self.__keep_alive_thread = threading.Thread(target = self.__keep_alive)
self.__keep_alive_thread.daemon = True
+ self.__json_data_parser = TobiiJsonDataParser()
+
def __del__(self):
"""Stop data reception before destruction."""
@@ -338,24 +344,56 @@ class TobiiDataStream(threading.Thread):
self.__data_socket.close()
def run(self):
- """Store received data into a queue for further reading."""
+ """Managed received data for sync and async reading case.
+ - Sync: send data to callback function.
+ - Async: store data into a locked queue for further reading."""
while not self.__stop_event.isSet():
- # lock data queue access
- self.__read_lock.acquire()
+ # Sync reading case
+ if self.reading_callback is not None:
+
+ # grab data
+ data = self.__network.grab_data(self.__data_socket)
+
+ # decode data
+ json_data = json.loads(data.decode('utf-8'))
+
+ # parse json into timestamped data object
+ data_ts, data_object, data_object_type = self.__parse_json_data(json_data)
+
+ self.reading_callback(data_ts, data_object, data_object_type)
+
+ # Async reading case
+ else:
- # write in data queue
- data = self.__network.grab_data(self.__data_socket)
- json_data = json.loads(data.decode('utf-8'))
- self.__data_queue.put(json_data)
+ # wait for queue reading
+ if self.__data_queue.full():
- # unlock data queue access
- self.__read_lock.release()
+ # sleep 100 microseconds while the queue is full
+ time.sleep(0.0001)
+ continue
+
+ # lock data queue access
+ self.__read_lock.acquire()
+
+ # grab data
+ data = self.__network.grab_data(self.__data_socket)
+
+ # decode data
+ json_data = json.loads(data.decode('utf-8'))
+
+ # write data in queue
+ self.__data_queue.put(json_data)
+
+ # unlock data queue access
+ self.__read_lock.release()
def read(self):
- json_data_parser = TobiiJsonDataParser()
+ # no data to read
+ if self.__data_queue.empty():
+ raise ValueError
# create a dictionary of timestamped data buffers
ts_data_buffer_dict = {
@@ -377,39 +415,51 @@ class TobiiDataStream(threading.Thread):
# if the data acquisition thread is not running
if self.__stop_event.isSet():
return ts_data_buffer_dict
-
+
# lock data queue access
self.__read_lock.acquire()
+ # track the latest timestamp found in the queue
+ latest_ts = 0
+
# read data queue
while not self.__data_queue.empty():
- json_data = self.__data_queue.get()
-
- # parse data status
- status = json_data.pop('s', -1)
-
- # convert timestamp
- ts = json_data.pop('ts')
-
- # keep first timestamp to offset all timestamps
- if self.__first_ts == 0:
- self.__first_ts = ts
-
- ts -= self.__first_ts
+ # parse json into timestamped data object
+ data_ts, data_object, data_object_type = self.__parse_json_data(self.__data_queue.get())
# ignore negative timestamp
- if ts < 0:
+ if data_ts < 0:
break
- # convert json data into data object
- data_object = json_data_parser.parse_data( status, json_data)
- data_object_type = type(data_object).__name__
+ # keep the latest timestamp
+ if data_ts > latest_ts:
+ latest_ts = data_ts
# store data object into dedicated timestamped buffer
- ts_data_buffer_dict[data_object_type][ts] = data_object
-
+ ts_data_buffer_dict[data_object_type][data_ts] = data_object
+
# unlock data queue access
self.__read_lock.release()
- return ts, ts_data_buffer_dict
+ return latest_ts, ts_data_buffer_dict
+
+ def __parse_json_data(self, json_data):
+
+ # parse data status
+ status = json_data.pop('s', -1)
+
+ # convert timestamp
+ data_ts = json_data.pop('ts')
+
+ # keep first timestamp to offset all timestamps
+ if self.__first_ts == 0:
+ self.__first_ts = data_ts
+
+ data_ts -= self.__first_ts
+
+ # convert json data into data object
+ data_object = self.__json_data_parser.parse_data(status, json_data)
+ data_object_type = type(data_object).__name__
+
+ return data_ts, data_object, data_object_type
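
For reference, a minimal sketch of the async reading path introduced above, assuming the controller setup used by the utils scripts further down and a hypothetical IP address: with reading_callback left at None, run() buffers parsed JSON into the bounded queue (50 entries) and read() now raises ValueError while that queue is empty.

import time

from argaze.TobiiGlassesPro2 import TobiiController

# Hypothetical address; project and participant names follow the utils scripts
tobii_controller = TobiiController.TobiiController('192.168.1.10', 'myProject', 'mySelf')
tobii_data_stream = tobii_controller.enable_data_stream()
tobii_controller.start_streaming()

while True:

    try:
        # latest timestamp and a dictionary of timestamped buffers keyed by
        # data object type (e.g. 'GazePosition', 'GazePosition3D', 'Gyroscope')
        data_ts, data_buffers = tobii_data_stream.read()

    except ValueError:
        # nothing buffered yet
        time.sleep(0.0001)
        continue

    if 'GazePosition' in data_buffers:
        print(data_ts, data_buffers['GazePosition'])
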
diff --git a/src/argaze/utils/MiscFeatures.py b/src/argaze/utils/MiscFeatures.py
index 6fe51d5..9f68a27 100644
--- a/src/argaze/utils/MiscFeatures.py
+++ b/src/argaze/utils/MiscFeatures.py
@@ -2,7 +2,6 @@
import time
-# Print iterations progress
def printProgressBar (iteration:int, total:int, prefix:str = '', suffix:str = '', decimals:int = 1, length:int = 100, fill:str = '█', printEnd:str = "\r"):
"""Print iterations progress.
Call in a loop to create terminal progress bar.
@@ -24,9 +23,8 @@ def printProgressBar (iteration:int, total:int, prefix:str = '', suffix:str = ''
if iteration == total:
print()
-# Handle exit event
class ExitSignalHandler():
-
+ """Handle exit event"""
def __init__(self):
import signal
@@ -46,20 +44,22 @@ class ExitSignalHandler():
def status(self):
return __exit_event.is_set()
-# Assess temporal performance
class TimeProbe():
+ """Assess temporal performance"""
def __init__(self):
self.start()
def start(self):
+ """Start chronometer."""
self.__last_time = time.perf_counter()
self.__lap_counter = 0
self.__elapsed_time = 0
def lap(self):
+ """Get the last lap time, number of laps and total elapsed time in millisecond."""
lap_time = time.perf_counter() - self.__last_time
@@ -67,13 +67,14 @@ class TimeProbe():
self.__lap_counter += 1
self.__elapsed_time += lap_time
- return lap_time, self.__lap_counter, self.__elapsed_time
+ return lap_time * 1e3, self.__lap_counter, self.__elapsed_time * 1e3
def end(self):
+ """Stop chronometer and get elapsed time in millisecond."""
self.__elapsed_time += time.perf_counter() - self.__last_time
- return self.__elapsed_time
+ return self.__elapsed_time * 1e3, self.__lap_counter
def restart(self):
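
A quick sanity check on the new millisecond convention (a sketch, not part of the patch; the time.sleep() call only stands in for real work): lap() and end() now return durations in milliseconds, which is why the utils scripts multiply by 1e3 when turning a lap count into a per-second rate.

import time

from argaze.utils import MiscFeatures

probe = MiscFeatures.TimeProbe()

for _ in range(10):
    time.sleep(0.025)  # stand-in for one loop iteration (~25 ms)
    lap_time_ms, lap_counter, elapsed_ms = probe.lap()

# 10 laps over ~250 ms -> 1e3 * 10 / 250 = 40 iterations per second,
# the same formula as the fps update in tobii_stream_aruco_aoi_display.py
rate_per_second = 1e3 * lap_counter / elapsed_ms
print(f'{rate_per_second:.0f} laps per second')

probe.restart()
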
diff --git a/src/argaze/utils/tobii_stream_aruco_aoi_display.py b/src/argaze/utils/tobii_stream_aruco_aoi_display.py
index d619faa..5529052 100644
--- a/src/argaze/utils/tobii_stream_aruco_aoi_display.py
+++ b/src/argaze/utils/tobii_stream_aruco_aoi_display.py
@@ -241,7 +241,7 @@ def main():
# Update fps each 10 loops
if loop_counter >= 10:
- fps = loop_counter / elapsed_time
+ fps = 1e3 * loop_counter / elapsed_time
loop_chrono.restart()
# Draw focus area
diff --git a/src/argaze/utils/tobii_stream_display.py b/src/argaze/utils/tobii_stream_display.py
index 8dd2341..8c67cee 100644
--- a/src/argaze/utils/tobii_stream_display.py
+++ b/src/argaze/utils/tobii_stream_display.py
@@ -23,109 +23,109 @@ def main():
# Create tobii controller
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')
- # Enable tobii data stream
+ # Enable tobii data stream
tobii_data_stream = tobii_controller.enable_data_stream()
# Enable tobii video stream
tobii_video_stream = tobii_controller.enable_video_stream()
- # Prepare to timestamped gaze position data stream bufferring
- tobii_ts_gaze_positions = DataStructures.TimeStampedBuffer()
+ # Init head movement
+ head_movement_px = numpy.array((0, 0))
- # Prepare to timestamped gaze position 3d data stream bufferring
- tobii_ts_gaze_positions_3d = DataStructures.TimeStampedBuffer()
+ # Init gaze position
+ gaze_position_px = GazeFeatures.GazePosition((0, 0))
- # Prepare to timestamped head rotations data stream bufferring
- tobii_ts_head_rotations = DataStructures.TimeStampedBuffer()
-
- # Start streaming
- tobii_controller.start_streaming()
-
- # Live video and data stream capture loop
- try:
-
- # Assess loop performance
- loop_chrono = MiscFeatures.TimeProbe()
- fps = 0
-
- while tobii_video_stream.is_alive():
-
- # Read video stream
- video_ts, video_frame = tobii_video_stream.read()
- video_ts_ms = video_ts / 1e3
-
- # Read data stream
- data_ts, data_stream = tobii_data_stream.read()
- data_ts_ms = data_ts / 1e3
-
- try:
-
- # Buffer last received gaze positions
- tobii_ts_gaze_positions.append(data_stream['GazePosition'])
-
- # Buffer last received gaze positions 3d
- tobii_ts_gaze_positions_3d.append(data_stream['GazePosition3D'])
+ # Init data timestamp in milliseconds
+ data_ts_ms = 0
+
+ # Assess temporal performance
+ loop_chrono = MiscFeatures.TimeProbe()
+ gyroscope_chrono = MiscFeatures.TimeProbe()
+ gaze_chrono = MiscFeatures.TimeProbe()
+
+ loop_ps = 0
+ gyroscope_ps = 0
+ gaze_ps = 0
+
+ def data_stream_callback(data_ts, data_object, data_object_type):
- # Buffer last received gaze positions 3d
- tobii_ts_head_rotations.append(data_stream['Gyroscope'])
+ nonlocal head_movement_px
+ nonlocal gaze_position_px
+ nonlocal data_ts_ms
+ nonlocal gyroscope_chrono
+ nonlocal gaze_chrono
- # Ignore missing data stream
- except KeyError as e:
- pass
+ data_ts_ms = data_ts / 1e3
- try:
+ match data_object_type:
- # Get nearest head rotation before video timestamp and remove all head rotations before
- earliest_ts, earliest_head_rotation = tobii_ts_head_rotations.pop_last()
+ case 'Gyroscope':
+
+ # Assess gyroscope stream performance
+ gyroscope_chrono.lap()
# Calculate head movement considering only head yaw and pitch
- head_movement = numpy.array(earliest_head_rotation.value)
+ head_movement = numpy.array(data_object.value)
head_movement_px = head_movement.astype(int)
- head_movement_norm = numpy.linalg.norm(head_movement[0:2])
-
- # Draw movement vector
- cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2)), (int(video_frame.width/2) + head_movement_px[1], int(video_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
-
- # Wait for head rotation
- except KeyError:
- pass
- try:
-
- # Get nearest gaze position before video timestamp and remove all gaze positions before
- _, earliest_gaze_position = tobii_ts_gaze_positions.pop_last()
+ case 'GazePosition':
+
+ # Assess gaze position stream performance
+ gaze_chrono.lap()
# Ignore frame when gaze position is not valid
- if earliest_gaze_position.validity == 0:
+ if data_object.validity == 0:
- gaze_position_pixel = GazeFeatures.GazePosition( (int(earliest_gaze_position.value[0] * video_frame.width), int(earliest_gaze_position.value[1] * video_frame.height)) )
+ gaze_position_px = GazeFeatures.GazePosition( (int(data_object.value[0] * video_frame.width), int(data_object.value[1] * video_frame.height)) )
+
+ case 'GazePosition3D':
+
+ # Ignore frame when gaze position 3D is not valid
+ if data_object.validity == 0:
- # Get nearest gaze position 3D before video timestamp and remove all gaze positions before
- _, earliest_gaze_position_3d = tobii_ts_gaze_positions_3d.pop_last()
+ gaze_accuracy_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.ACCURACY)) * data_object.value[2]
+ tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.CAMERA_HFOV / 2)) * data_object.value[2]
- # Ignore frame when gaze position 3D is not valid
- if earliest_gaze_position_3d.validity == 0:
-
- gaze_accuracy_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.ACCURACY)) * earliest_gaze_position_3d.value[2]
- tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.CAMERA_HFOV / 2)) * earliest_gaze_position_3d.value[2]
+ gaze_position_px.accuracy = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
- gaze_position_pixel.accuracy = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
-
- # Draw gaze
- gaze_position_pixel.draw(video_frame.matrix)
+ tobii_data_stream.reading_callback = data_stream_callback
+
+ # Start streaming
+ tobii_controller.start_streaming()
+
+ # Live video stream capture loop
+ try:
+
+ while tobii_video_stream.is_alive():
+
+ # Read video stream
+ video_ts, video_frame = tobii_video_stream.read()
+ video_ts_ms = video_ts / 1e3
- # Wait for gaze position
- except KeyError:
- pass
-
# Assess loop performance
- loop_time, loop_counter, elapsed_time = loop_chrono.lap()
+ lap_time, lap_counter, elapsed_time = loop_chrono.lap()
# Update fps each 10 loops
- if loop_counter >= 10:
+ if lap_counter >= 10:
- fps = loop_counter / elapsed_time
+ loop_ps = 1e3 * lap_counter / elapsed_time
loop_chrono.restart()
+
+ # Assess gyroscope streaming performance
+ elapsed_time, lap_counter = gyroscope_chrono.end()
+ gyroscope_ps = 1e3 * lap_counter / elapsed_time
+ gyroscope_chrono.restart()
+
+ # Assess gaze streaming performance
+ elapsed_time, lap_counter = gaze_chrono.end()
+ gaze_ps = 1e3 * lap_counter / elapsed_time
+ gaze_chrono.restart()
+
+ # Draw head movement
+ cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2)), (int(video_frame.width/2) + head_movement_px[1], int(video_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
+
+ # Draw gaze
+ gaze_position_px.draw(video_frame.matrix)
# Draw center
cv.line(video_frame.matrix, (int(video_frame.width/2) - 50, int(video_frame.height/2)), (int(video_frame.width/2) + 50, int(video_frame.height/2)), (255, 150, 150), 1)
@@ -135,14 +135,18 @@ def main():
cv.rectangle(video_frame.matrix, (0, 0), (1100, 50), (63, 63, 63), -1)
cv.putText(video_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
cv.putText(video_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- cv.putText(video_frame.matrix, f'Fps: {int(fps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.putText(video_frame.matrix, f'Fps: {int(loop_ps)}', (950, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.rectangle(video_frame.matrix, (0, 50), (580, 100), (127, 127, 127), -1)
+ cv.putText(video_frame.matrix, f'Gyroscope fps: {int(gyroscope_ps)}', (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+ cv.putText(video_frame.matrix, f'Gaze fps: {int(gaze_ps)}', (350, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
+
+ cv.imshow(f'Video and data stream', video_frame.matrix)
+
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
break
- cv.imshow(f'Video and data stream', video_frame.matrix)
-
# Exit on 'ctrl+C' interruption
except KeyboardInterrupt:
pass
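
To summarise the callback-driven pattern used by the rewritten script, a condensed and hypothetical sketch (video capture and drawing omitted): the callback runs on the data stream thread, publishes shared state through nonlocal variables, and dispatches on the data object type with a match statement, which requires Python 3.10 or later.

from argaze.TobiiGlassesPro2 import TobiiController

def main():

    # Hypothetical address; project and participant names as in the script above
    tobii_controller = TobiiController.TobiiController('192.168.1.10', 'myProject', 'mySelf')
    tobii_data_stream = tobii_controller.enable_data_stream()

    # Shared state: written by the data stream thread, read by the display loop
    gaze_position = (0, 0)

    def data_stream_callback(data_ts, data_object, data_object_type):

        nonlocal gaze_position

        match data_object_type:

            case 'GazePosition':
                # keep the latest normalised gaze position for the display loop
                gaze_position = tuple(data_object.value)

            case _:
                # other data object types are ignored in this sketch
                pass

    tobii_data_stream.reading_callback = data_stream_callback
    tobii_controller.start_streaming()

if __name__ == '__main__':
    main()
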