 src/argaze/ArFeatures.py                          | 29
 src/argaze/ArUcoMarkers/ArUcoCamera.py            |  6
 src/argaze/DataFeatures.py                        | 37
 src/argaze/utils/eyetrackers/TobiiProGlasses2.py  | 68
 4 files changed, 98 insertions(+), 42 deletions(-)
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index b70cc40..8e9e63e 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -469,7 +469,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
         self.__filter_in_progress_identification = True
         self.__scan_path = None
         self.__scan_path_analyzers = []
-        self.__background = numpy.full((1, 1, 3), 127).astype(numpy.uint8)
+        self.__background = DataFeatures.TimestampedImage( numpy.full((1, 1, 3), 127).astype(numpy.uint8) )
         self.__heatmap = None
         self.__calibrated_gaze_position = GazeFeatures.GazePosition()
         self.__identified_gaze_movement = GazeFeatures.GazeMovement()
@@ -616,10 +616,18 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
 
     @background.setter
     @DataFeatures.PipelineStepAttributeSetter
-    def background(self, background: numpy.array):
+    def background(self, background: DataFeatures.TimestampedImage):
 
-        # Resize image to frame size
-        self.__background = cv2.resize(background, dsize = self.size, interpolation = cv2.INTER_CUBIC)
+        assert(isinstance(background, DataFeatures.TimestampedImage))
+
+        if background.size != self.size:
+
+            # Resize image to frame size
+            self.__background = DataFeatures.TimestampedImage( cv2.resize(background, dsize = self.size, interpolation = cv2.INTER_CUBIC), background.timestamp)
+
+        else:
+
+            self.__background = background
 
     @property
     def heatmap(self) -> AOIFeatures.Heatmap:
@@ -886,7 +894,7 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
 
                 self.__calibrated_gaze_position.draw(image, **draw_gaze_positions)
 
-        return image
+        return DataFeatures.TimestampedImage(image, timestamp = self.__background.timestamp)
 
     def image(self, **kwargs: dict) -> numpy.array:
         """
@@ -1363,7 +1371,7 @@ class ArContext(DataFeatures.PipelineStepObject):
 
         # Init private attributes
         self.__pipeline = None
-        self.__exceptions = DataFeatures.TimeStampedExceptions()
+        self.__exceptions = DataFeatures.TimestampedExceptions()
 
         # Init protected attributes
         self._image_parameters = DEFAULT_ARCONTEXT_IMAGE_PARAMETERS
@@ -1450,7 +1458,7 @@ class ArContext(DataFeatures.PipelineStepObject):
 
             raise(TypeError('Pipeline is not ArFrame instance.'))
 
-    def _process_camera_image(self, timestamp: int|float, image: numpy.ndarray):
+    def _process_camera_image(self, timestamp: int|float, image: numpy.array):
         """Request pipeline to process new camera image at a timestamp."""
 
         logging.debug('%s._process_camera_image', type(self).__name__)
@@ -1460,7 +1468,7 @@ class ArContext(DataFeatures.PipelineStepObject):
         height, width, _ = image.shape
 
         # Compare image size with ArCamera frame size
-        if width != self.__pipeline.size[0] or height != self.__pipeline.size[1]:
+        if list(image.shape[0:2][::-1]) != self.__pipeline.size:
 
             logging.warning('%s._process_camera_image: image size (%i x %i) is different of ArCamera frame size (%i x %i)', type(self).__name__ , width, height, self.__pipeline.size[0], self.__pipeline.size[1])
             return
@@ -1469,7 +1477,7 @@ class ArContext(DataFeatures.PipelineStepObject):
 
                 logging.debug('\t> watch image (%i x %i)', width, height)
 
-                self.__pipeline.watch( image, timestamp = timestamp)
+                self.__pipeline.watch( DataFeatures.TimestampedImage(image, timestamp = timestamp))
 
             except DataFeatures.TimestampedException as e:
@@ -1499,7 +1507,8 @@ class ArContext(DataFeatures.PipelineStepObject):
 
         if draw_times:
 
-
+            info_stack += 1
+            cv2.putText(image, f'{image.timestamp}ms', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
 
         if issubclass(type(self.__pipeline), ArCamera):
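
The reworked size check in _process_camera_image compares the incoming camera image against the ArCamera frame size by reversing numpy's row-major (height, width) ordering. A minimal sketch of that comparison, with a made-up frame size:

import numpy

# Hypothetical frame size expressed as [width, height], the same convention as a frame size.
frame_size = [1920, 1080]

# An OpenCV/numpy image stores rows first: shape == (height, width, channels).
image = numpy.zeros((1080, 1920, 3), dtype=numpy.uint8)

# Reversing the first two shape entries yields [width, height],
# so it can be compared directly against the frame size.
print(list(image.shape[0:2][::-1]) == frame_size)  # True
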
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index 80e3f94..e84d71a 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -129,7 +129,7 @@ class ArUcoCamera(ArFeatures.ArCamera):
                 self._image_parameters['draw_layers'][layer_name] = ArFeatures.DEFAULT_ARLAYER_DRAW_PARAMETERS
 
     @DataFeatures.PipelineStepMethod
-    def watch(self, image: numpy.array):
+    def watch(self, image: DataFeatures.TimestampedImage):
         """Detect environment aruco markers from image and project scenes into camera frame."""
 
         # Use camera frame locker feature
@@ -144,9 +144,9 @@ class ArUcoCamera(ArFeatures.ArCamera):
             cv2.rectangle(image, (width - self.__sides_mask, 0), (width, height), (0, 0, 0), -1)
 
         # Detect aruco markers
-        self.__aruco_detector.detect_markers(image, timestamp=self.timestamp)
+        self.__aruco_detector.detect_markers(image)
 
-        # Fill camera frame background with image
+        # Fill camera frame background with timestamped image
         self.background = image
 
         # Clear former layers projection into camera frame
diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py
index f573f1c..a8ede6f 100644
--- a/src/argaze/DataFeatures.py
+++ b/src/argaze/DataFeatures.py
@@ -572,7 +572,7 @@ class TimestampedException(Exception, TimestampedObject):
         Exception.__init__(self, exception)
         TimestampedObject.__init__(self, timestamp)
 
-class TimeStampedExceptions(TimestampedObjectsList):
+class TimestampedExceptions(TimestampedObjectsList):
     """Handle timestamped exceptions into a list."""
 
     def __init__(self, exceptions: list = []):
@@ -591,6 +591,33 @@ class PipelineStepLoadingFailed(Exception):
 
         super().__init__(message)
 
+class TimestampedImage(numpy.ndarray, TimestampedObject):
+    """Wrap numpy.array to timestamp image."""
+
+    def __new__(cls, array: numpy.array, timestamp: int|float = math.nan):
+
+        return numpy.ndarray.__new__(cls, array.shape, dtype = array.dtype, buffer = array)
+
+    def __init__(self, array: numpy.array, timestamp: int|float = math.nan):
+
+        TimestampedObject.__init__(self, timestamp)
+
+    def __array_finalize__(self, obj):
+
+        pass
+
+    @property
+    def size(self) -> list:
+        """Return list with width and heigth."""
+        return list(self.shape[0:2][::-1])
+
+class TimestampedImages(TimestampedObjectsList):
+    """Handle timestamped images into a list."""
+
+    def __init__(self, images: list = []):
+
+        TimestampedObjectsList.__init__(self, TimestampedImage, images)
+
 def PipelineStepInit(method):
     """Define a decorator use into PipelineStepObject class to declare pipeline step init method."""
@@ -653,7 +680,7 @@ def PipelineStepAttributeSetter(method):
 
                 # Load image from JPG and PNG formats
                 if file_format == 'JPG' or file_format == 'PNG':
 
-                    return method(self, cv2.imread(filepath))
+                    return method(self, TimestampedImage(cv2.imread(filepath)))
 
                 # Load image from OBJ formats
                 elif file_format == 'OBJ':
@@ -954,10 +981,14 @@ def PipelineStepMethod(method):
         """
         if timestamp is None and len(args) > 0:
 
-            if isinstance(args[0], TimestampedObject):
+            if issubclass(type(args[0]), TimestampedObject):
 
                 timestamp = args[0].timestamp
 
+            else:
+
+                logging.error('%s.%s: %s is not a TimestampedObject subclass. You must pass a timestamp argument.', type(self).__name__, method.__name__, type(args[0]).__name__)
+
         if unwrap:
 
             return method(self, *args, **kwargs)
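
The TimestampedImage class added here relies on numpy's documented ndarray-subclassing pattern to carry a timestamp next to the pixel buffer. A simplified, self-contained sketch of the same idea (StampedImage is an illustrative stand-in, not the argaze class, and it stores the timestamp directly instead of inheriting TimestampedObject):

import math

import numpy


class StampedImage(numpy.ndarray):
    """Illustrative ndarray subclass carrying a timestamp (not the argaze class)."""

    def __new__(cls, array: numpy.ndarray, timestamp: float = math.nan):
        # View the same pixel buffer through the subclass type.
        obj = numpy.asarray(array).view(cls)
        obj.timestamp = timestamp
        return obj

    def __array_finalize__(self, obj):
        # Propagate the timestamp to views and slices.
        self.timestamp = getattr(obj, 'timestamp', math.nan)

    @property
    def size(self) -> list:
        # numpy shape is (height, width, ...), so reverse it to get [width, height].
        return list(self.shape[0:2][::-1])


pixels = numpy.full((1080, 1920, 3), 127, dtype=numpy.uint8)
image = StampedImage(pixels, timestamp=1234)
print(image.size, image.timestamp)  # [1920, 1080] 1234

The class in the diff instead builds on numpy.ndarray.__new__ with buffer=array, which shares the source buffer rather than copying it.
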
diff --git a/src/argaze/utils/eyetrackers/TobiiProGlasses2.py b/src/argaze/utils/eyetrackers/TobiiProGlasses2.py
index f46ddd8..7240714 100644
--- a/src/argaze/utils/eyetrackers/TobiiProGlasses2.py
+++ b/src/argaze/utils/eyetrackers/TobiiProGlasses2.py
@@ -168,8 +168,6 @@ class TobiiJsonDataParser():
 
     def __init__(self):
 
-        self.__first_ts = 0
-
         self.__parse_data_map = {
             'dir': self.__parse_dir_sig,
             'pts': self.__parse_pts,
@@ -207,12 +205,6 @@ class TobiiJsonDataParser():
         data_object = self.__parse_data_map[first_key](status, json_data)
         data_object_type = type(data_object).__name__
 
-        # Keep first timestamp to offset all timestamps
-        if self.__first_ts == 0:
-            self.__first_ts = data_ts
-
-        data_ts -= self.__first_ts
-
         return data_ts, data_object, data_object_type
 
     def __parse_pupill_or_gaze(self, status, json_data):
@@ -496,9 +488,10 @@ class LiveStream(ArFeatures.ArContext):
 
             logging.info('%s: %s', key, str(value))
 
-        # Store video stream dimension
+        # Store video stream info
         self.__video_width = configuration['sys_sc_width']
         self.__video_height = configuration['sys_sc_height']
+        self.__video_fps = configuration['sys_sc_fps']
 
         # Bind to project if required
         if self.__project_name is not None:
@@ -521,10 +514,6 @@ class LiveStream(ArFeatures.ArContext):
 
         # Create stop event
         self.__stop_event = threading.Event()
 
-        # Create a video buffer with a lock
-        self.__video_buffer = collections.OrderedDict()
-        self.__video_buffer_lock = threading.Lock()
-
         # Open data stream
         self.__data_socket = self.__make_socket()
         self.__data_thread = threading.Thread(target = self.__stream_data)
@@ -541,13 +530,6 @@ class LiveStream(ArFeatures.ArContext):
 
         logging.debug('> starting video thread...')
         self.__video_thread.start()
 
-        # Open video buffer reader
-        self.__video_buffer_read_thread = threading.Thread(target = self.__video_buffer_read)
-        self.__video_buffer_read_thread.daemon = False
-
-        logging.debug('> starting video buffer reader thread...')
-        self.__video_buffer_read_thread.start()
-
         # Keep connection alive
         self.__keep_alive_msg = "{\"type\": \"live.data.unicast\", \"key\": \""+ str(uuid.uuid4()) +"\", \"op\": \"start\"}"
         self.__keep_alive_thread = threading.Thread(target = self.__keep_alive)
@@ -642,6 +624,9 @@ class LiveStream(ArFeatures.ArContext):
 
         logging.debug('%s.__stream_data', type(self).__name__)
 
+        # First timestamp to offset all timestamps
+        first_ts = 0
+
         while not self.__stop_event.is_set():
 
             try:
@@ -658,13 +643,20 @@ class LiveStream(ArFeatures.ArContext):
 
                 # Parse json into timestamped data object
                 data_ts, data_object, data_object_type = self.__parser.parse(data)
 
+                # Store first timestamp
+                if first_ts == 0:
+
+                    first_ts = data_ts
+
                 # Edit millisecond timestamp
-                timestamp = int(data_ts * 1e-3)
+                timestamp = int((data_ts - first_ts) * 1e-3)
 
                 match data_object_type:
 
                     case 'GazePosition':
 
+                        logging.debug('> received %s at %i timestamp', data_object_type, timestamp)
+
                         # When gaze position is valid
                         if data_object.validity == 0:
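
The data loop above and the video loop below now normalise timestamps the same way: remember the first raw timestamp and express everything after it in milliseconds. The scale factors suggest the data stream reports microseconds and the decoded frames report seconds; the sketch below assumes those units and uses made-up values.

def to_ms(raw_ts: float, first_ts: float, scale: float) -> int:
    # Offset against the first sample, then scale to milliseconds.
    return int((raw_ts - first_ts) * scale)

data_first = 82_113_000                       # first data timestamp (assumed µs)
print(to_ms(82_146_300, data_first, 1e-3))    # 33 -> 33 ms after the first sample

video_first = 12.0                            # first decoded frame time (assumed s)
print(to_ms(12.5, video_first, 1e3))          # 500 -> 500 ms after the first frame
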
@@ -684,9 +676,24 @@ class LiveStream(ArFeatures.ArContext):
 
         logging.debug('%s.__stream_video', type(self).__name__)
 
+        # Open video stream
         container = av.open(f'rtsp://{self.__address}:8554/live/scene', options={'rtsp_transport': 'tcp'})
         self.__stream = container.streams.video[0]
 
+        # Create a video buffer with a lock
+        self.__video_buffer = collections.OrderedDict()
+        self.__video_buffer_lock = threading.Lock()
+
+        # Open video buffer reader
+        self.__video_buffer_read_thread = threading.Thread(target = self.__video_buffer_read)
+        self.__video_buffer_read_thread.daemon = False
+
+        logging.debug('> starting video buffer reader thread...')
+        self.__video_buffer_read_thread.start()
+
+        # First timestamp to offset all timestamps
+        first_ts = 0
+
         for image in container.decode(self.__stream):
 
             logging.debug('> new image decoded')
@@ -701,9 +708,15 @@ class LiveStream(ArFeatures.ArContext):
 
             if image.time is not None:
 
-                timestamp = int(image.time * 1e6)
+                # Store first timestamp
+                if first_ts == 0:
 
-                logging.debug('> store image at %f timestamp', timestamp)
+                    first_ts = image.time
+
+                # Edit millisecond timestamp
+                timestamp = int((image.time - first_ts) * 1e3)
+
+                logging.debug('> store image at %i timestamp', timestamp)
 
                 # Lock buffer access
                 self.__video_buffer_lock.acquire()
@@ -724,7 +737,8 @@ class LiveStream(ArFeatures.ArContext):
 
         # Can't read image while it is locked
         while self.__video_buffer_lock.locked():
 
-            time.sleep(1e-6)
+            # Check 10 times per frame
+            time.sleep(1 / (10 * self.__video_fps))
 
         # Lock buffer access
         self.__video_buffer_lock.acquire()
@@ -732,12 +746,14 @@ class LiveStream(ArFeatures.ArContext):
 
         # Video buffer not empty
         if len(self.__video_buffer) > 0:
 
+            logging.debug('> %i images in buffer', len(self.__video_buffer))
+
             # Get last stored image
             try:
 
                 timestamp, image = self.__video_buffer.popitem(last=True)
 
-                logging.debug('> read image at %f timestamp', timestamp)
+                logging.debug('> read image at %i timestamp', timestamp)
 
                 if len(self.__video_buffer) > 0:
@@ -750,7 +766,7 @@ class LiveStream(ArFeatures.ArContext):
 
                     self._process_camera_image( timestamp = timestamp, image = image)
 
-                    
+
             except Exception as e:
 
                 logging.warning('%s.__video_buffer_read: %s', type(self).__name__, e)
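
The buffer handling that moves into __stream_video and __video_buffer_read follows a simple producer/consumer scheme: the decoder keeps inserting frames into an ordered dict keyed by timestamp, and a reader thread repeatedly takes the newest frame and drops the backlog. A minimal sketch of that scheme (names and the fake frame source are illustrative; it uses `with lock` where the code above polls locked() and acquires explicitly):

import collections
import threading
import time

buffer = collections.OrderedDict()   # timestamp -> frame, insertion-ordered
lock = threading.Lock()
stop = threading.Event()

def decoder():
    # Producer: push a fake frame every 40 ms (~25 fps).
    ts = 0
    while not stop.is_set():
        with lock:
            buffer[ts] = f'frame-{ts}'
        ts += 40
        time.sleep(0.04)

def reader():
    # Consumer: poll several times per frame period, always take the newest frame.
    while not stop.is_set():
        time.sleep(0.004)
        with lock:
            if not buffer:
                continue
            ts, frame = buffer.popitem(last=True)   # most recent entry
            buffer.clear()                          # discard older frames
        print('processing', ts, frame)              # stand-in for _process_camera_image()

threading.Thread(target=decoder, daemon=True).start()
threading.Thread(target=reader, daemon=True).start()
time.sleep(0.2)
stop.set()

Always reading the newest entry keeps the pipeline close to real time even when processing is slower than the incoming stream.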