aboutsummaryrefslogtreecommitdiff
path: root/src/argaze/utils/contexts/TobiiProGlasses2.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/argaze/utils/contexts/TobiiProGlasses2.py')
-rw-r--r--src/argaze/utils/contexts/TobiiProGlasses2.py142
1 file changed, 132 insertions, 10 deletions
diff --git a/src/argaze/utils/contexts/TobiiProGlasses2.py b/src/argaze/utils/contexts/TobiiProGlasses2.py
index 2f43bc5..92aba0f 100644
--- a/src/argaze/utils/contexts/TobiiProGlasses2.py
+++ b/src/argaze/utils/contexts/TobiiProGlasses2.py
@@ -631,7 +631,7 @@ class LiveStream(ArFeatures.ArContext):
iptype = socket.AF_INET6
res = socket.getaddrinfo(self.__address, self.__udpport, socket.AF_UNSPEC, socket.SOCK_DGRAM, 0,
- socket.AI_PASSIVE)
+ socket.AI_PASSIVE)
family, socktype, proto, canonname, sockaddr = res[0]
new_socket = socket.socket(family, socktype, proto)
@@ -947,8 +947,7 @@ class LiveStream(ArFeatures.ArContext):
if self.__calibration_id is not None:
- status = self.__wait_for_status('/api/calibrations/' + self.__calibration_id + '/status', 'ca_state',
- ['calibrating', 'calibrated', 'stale', 'uncalibrated', 'failed'])
+ status = self.__wait_for_status('/api/calibrations/' + self.__calibration_id + '/status', 'ca_state', ['calibrating', 'calibrated', 'stale', 'uncalibrated', 'failed'])
# Forget calibration id
if status != 'calibrating':
@@ -963,9 +962,8 @@ class LiveStream(ArFeatures.ArContext):
# RECORDING FEATURES
- def __wait_for_recording_status(self, recording_id,
- status_array=['init', 'starting', 'recording', 'pausing', 'paused', 'stopping',
- 'stopped', 'done', 'stale', 'failed']):
+ def __wait_for_recording_status(self, recording_id, status_array=['init', 'starting', 'recording', 'pausing', 'paused', 'stopping', 'stopped', 'done', 'stale', 'failed']):
+
return self.__wait_for_status('/api/recordings/' + recording_id + '/status', 'rec_state', status_array)
def create_recording(self, participant_name, recording_name='', recording_notes='') -> str:
@@ -1160,6 +1158,24 @@ class PostProcessing(ArFeatures.ArContext):
self.__data_list = []
+ # Initialize synchronisation
+ self.__sync_event = None
+ self.__sync_event_unit = None
+ self.__sync_event_factor = None
+ self.__sync_data_ts = None
+ self.__sync_ts = None
+ self.__last_sync_data_ts = None
+ self.__last_sync_ts = None
+
+ self.__time_unit_factor = {
+ "µs": 1e-3,
+ "ms": 1,
+ "s": 1e3
+ }
+
+ # Initialize inconsistent timestamp monitoring
+ self.__last_data_ts = None
+
# Init protected attributes
self._image_parameters = {**ArFeatures.DEFAULT_ARCONTEXT_IMAGE_PARAMETERS, **DEFAULT_TOBII_IMAGE_PARAMETERS}
@@ -1193,6 +1209,27 @@ class PostProcessing(ArFeatures.ArContext):
self.__end = end
+ @property
+ def sync_event(self) -> str:
+ """Optional event type dedicated to synchronize Tobii timestamps with external time source."""
+ return self.__sync_event
+
+ @sync_event.setter
+ def sync_event(self, sync_event: str):
+
+ self.__sync_event = sync_event
+
+ @property
+ def sync_event_unit(self) -> str:
+ """Define sync event unit for conversion purposes ('µs', 'ms' or 's')"""
+ return self.__sync_event_unit
+
+ @sync_event_unit.setter
+ def sync_event_unit(self, sync_event_unit: str):
+
+ self.__sync_event_unit = sync_event_unit
+ self.__sync_event_factor = self.__time_unit_factor.get(sync_event_unit)
+
@DataFeatures.PipelineStepEnter
def __enter__(self):
@@ -1222,6 +1259,9 @@ class PostProcessing(ArFeatures.ArContext):
# Create stop event
self.__stop_event = threading.Event()
+ # Create pause event
+ self.__pause_event = threading.Event()
+
# Open reading thread
self.__reading_thread = threading.Thread(target=self.__read)
@@ -1244,15 +1284,24 @@ class PostProcessing(ArFeatures.ArContext):
for video_ts, video_image, data_list in self:
+ # Check pause event (and stop event)
+ while self.__pause_event.is_set() and not self.__stop_event.is_set():
+
+ logging.debug('> reading is paused at %i', video_ts)
+
+ self._process_camera_image(timestamp=video_ts, image=video_image)
+
+ time.sleep(1)
+
+ # Check stop event
if self.__stop_event.is_set():
+
break
logging.debug('> read image at %i timestamp', video_ts)
# Process camera image
- self._process_camera_image(
- timestamp=video_ts,
- image=video_image)
+ self._process_camera_image(timestamp=video_ts, image=video_image)
height, width, _ = video_image.shape
@@ -1261,6 +1310,62 @@ class PostProcessing(ArFeatures.ArContext):
# Process data
for data_ts, data_object, data_object_type in data_list:
+ # Check sync event first if required
+ if self.__sync_event is not None:
+
+ if data_object_type == 'Event':
+
+ logging.debug('> reading %s event (%s) at %f ms', data_object.type, data_object.tag, data_ts)
+
+ if data_object.type == self.__sync_event:
+
+ # Store old sync data ts
+ if self.__last_sync_data_ts is None and self.__sync_data_ts is not None:
+
+ self.__last_sync_data_ts = self.__sync_data_ts
+
+ # Store old sync ts
+ if self.__last_sync_ts is None and self.__sync_ts is not None:
+
+ self.__last_sync_ts = self.__sync_ts
+
+ # Store sync event timestamp
+ self.__sync_data_ts = data_ts
+ self.__sync_ts = float(data_object.tag) * self.__sync_event_factor
+
+ # Monitor delay between data ts and sync ts
+ if self.__last_sync_data_ts is not None and self.__last_sync_ts is not None:
+
+ diff_data_ts = self.__sync_data_ts - self.__last_sync_data_ts
+ diff_sync_ts = (self.__sync_ts - self.__last_sync_ts)
+
+ # Correct sync ts
+ self.__sync_ts += diff_data_ts-diff_sync_ts
+
+ if abs(diff_data_ts-diff_sync_ts) > 0:
+
+ logging.info('Difference between data and sync event timestamps is %i ms', diff_data_ts-diff_sync_ts)
+
+ # Don't process gaze positions if sync is required but sync event not happened yet
+ if self.__sync_event is not None and self.__sync_ts is None:
+
+ continue
+
+ # Otherwise, synchronize timestamp with sync event
+ else:
+
+ data_ts = int(self.__sync_ts + data_ts - self.__sync_data_ts)
+
+ # Catch inconsistent timestamps
+ if self.__last_data_ts is not None:
+
+ if data_ts - self.__last_data_ts <= 0:
+
+ logging.error('! %i gaze position is not more recent than the previous one', data_ts)
+
+ self.__last_data_ts = data_ts
+
+ # Process gaze positions
match data_object_type:
case 'GazePosition':
@@ -1280,7 +1385,7 @@ class PostProcessing(ArFeatures.ArContext):
# Process empty gaze position
self._process_gaze_position(timestamp=data_ts)
-
+
def __iter__(self):
self.__data_file = gzip.open(os.path.join(self.__segment, TOBII_SEGMENT_DATA_FILENAME))
@@ -1366,3 +1471,20 @@ class PostProcessing(ArFeatures.ArContext):
# Return millisecond timestamp, data object and type
return ts * 1e-3, data_object, data_object_type
+
+ @DataFeatures.PipelineStepMethod
+ def pause(self):
+ """Pause pipeline processing."""
+
+ self.__pause_event.set()
+
+ def is_paused(self) -> bool:
+ """Is pipeline processing paused?"""
+
+ return self.__pause_event.is_set()
+
+ @DataFeatures.PipelineStepMethod
+ def resume(self):
+ """Resume pipeline processing."""
+
+ self.__pause_event.clear() \ No newline at end of file