author     Théo de la Hogue    2024-04-02 17:48:47 +0200
committer  Théo de la Hogue    2024-04-02 17:48:47 +0200
commit     9a86189fbc3df8f8258cdc7ea8862fe70911f5f8 (patch)
tree       f373551a638a078a0c338217eb205e2029340aba
parent     d05ad01b0d4960297e59e177e6ecb078f25eec40 (diff)
Refactoring image and drawing features.
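The duplicated fallback logic ("use explicit kwargs when given, otherwise the stored image_parameters / draw_parameters dictionary") moves out of every __image()/draw() method and into shared PipelineStepImage and PipelineStepDraw decorators on PipelineStepObject. A minimal, self-contained sketch of the pattern, assuming a hypothetical DemoFrame class for illustration (the real decorators in argaze.DataFeatures additionally log which branch is taken):

    import numpy

    def PipelineStepImage(method):
        """Fall back to the object's stored image_parameters when no kwargs are given."""
        def wrapper(self, **kwargs) -> numpy.array:
            if kwargs:
                # Caller passed explicit parameters: use them.
                return method(self, **kwargs)
            # No explicit parameters: use the defaults stored on the object.
            return method(self, **self.image_parameters)
        return wrapper

    class DemoFrame:
        """Hypothetical pipeline step holding default image parameters."""
        def __init__(self):
            self.image_parameters = {'background_weight': 1.0}

        @PipelineStepImage
        def image(self, background_weight: float = None) -> numpy.array:
            weight = background_weight if background_weight is not None else 0.0
            return numpy.full((2, 2), weight)

    frame = DemoFrame()
    frame.image()                       # uses stored image_parameters (weight 1.0)
    frame.image(background_weight=0.5)  # explicit kwargs take precedence

The commit also renames the per-call debug flag of PipelineStepMethod to catch_exceptions (default True), so pipeline exceptions are collected and displayed unless a context explicitly disables catching.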
-rw-r--r--src/argaze/ArFeatures.py141
-rw-r--r--src/argaze/ArUcoMarkers/ArUcoCamera.py42
-rw-r--r--src/argaze/AreaOfInterest/AOIFeatures.py2
-rw-r--r--src/argaze/DataFeatures.py79
-rw-r--r--src/argaze/utils/UtilsFeatures.py51
-rw-r--r--src/argaze/utils/contexts/TobiiProGlasses2.py25
6 files changed, 168 insertions(+), 172 deletions(-)
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 56ea41d..da39c0b 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -116,8 +116,8 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
self.__looked_aoi_name = None
self.__aoi_scan_path_analyzed = False
- # Init protected attributes
- self._draw_parameters = DEFAULT_ARLAYER_DRAW_PARAMETERS
+ # Init pipeline step object attributes
+ self.draw_parameters = DEFAULT_ARLAYER_DRAW_PARAMETERS
@property
def aoi_scene(self) -> AOIFeatures.AOIScene:
@@ -265,17 +265,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
for analyzer in self.__aoi_scan_path_analyzers:
analyzer.parent = self
-
- @property
- def draw_parameters(self) -> dict:
- """Default draw method parameters dictionary."""
- return self._draw_parameters
-
- @draw_parameters.setter
- @DataFeatures.PipelineStepAttributeSetter
- def draw_parameters(self, draw_parameters: dict):
-
- self._draw_parameters = draw_parameters
def last_looked_aoi_name(self) -> str:
"""Get last looked aoi name."""
@@ -398,6 +387,7 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
self.__aoi_scan_path.append_saccade(gaze_movement)
+ @DataFeatures.PipelineStepDraw
def draw(self, image: numpy.array, draw_aoi_scene: dict = None, draw_aoi_matching: dict = None):
"""
Draw into image.
@@ -407,11 +397,6 @@ class ArLayer(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
draw_aoi_matching: AOIMatcher.draw parameters (which depend on the loaded aoi matcher module; if None, no aoi matching is drawn)
"""
- # Use draw_parameters attribute if no parameters
- if draw_aoi_scene is None and draw_aoi_matching is None:
-
- return self.draw(image, **self._draw_parameters)
-
# Use layer lock feature
with self._lock:
@@ -670,17 +655,6 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
for name, layer in self._layers.items():
layer.parent = self
-
- @property
- def image_parameters(self) -> dict:
- """Default image method parameters dictionary."""
- return self._image_parameters
-
- @image_parameters.setter
- @DataFeatures.PipelineStepAttributeSetter
- def image_parameters(self, image_parameters: dict):
-
- self._image_parameters = image_parameters
def last_gaze_position(self) -> object:
"""Get last calibrated gaze position"""
@@ -825,7 +799,8 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
layer.look(self.__identified_gaze_movement)
- def __image(self, background_weight: float = None, heatmap_weight: float = None, draw_gaze_position_calibrator: dict = None, draw_scan_path: dict = None, draw_layers: dict = None, draw_gaze_positions: dict = None, draw_fixations: dict = None, draw_saccades: dict = None) -> numpy.array:
+ @DataFeatures.PipelineStepImage
+ def image(self, background_weight: float = None, heatmap_weight: float = None, draw_gaze_position_calibrator: dict = None, draw_scan_path: dict = None, draw_layers: dict = None, draw_gaze_positions: dict = None, draw_fixations: dict = None, draw_saccades: dict = None) -> numpy.array:
"""
Get background image with overlaid visualisations.
@@ -840,11 +815,11 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
draw_saccades: [GazeFeatures.Saccade.draw](argaze.md/#argaze.GazeFeatures.Saccade.draw) parameters (if None, no saccade is drawn)
"""
+ logging.debug('ArFrame.image %s', self.name)
+
# Use frame lock feature
with self._lock:
- logging.debug('ArFrame.__image %s', self.name)
-
# Draw background only
if background_weight is not None and (heatmap_weight is None or self.__heatmap is None):
@@ -929,24 +904,10 @@ class ArFrame(DataFeatures.SharedObject, DataFeatures.PipelineStepObject):
self.__calibrated_gaze_position.draw(image, **draw_gaze_positions)
- logging.debug('\t> returning image (%i x %i)', *image.shape[:2])
+ logging.debug('\t> returning image (%i x %i)', image.shape[1], image.shape[0])
return DataFeatures.TimestampedImage(image, timestamp = self.__background.timestamp)
- def image(self, **kwargs: dict) -> numpy.array:
- """
- Get frame image.
-
- Parameters:
- kwargs: ArFrame.__image parameters
- """
- # Use image_parameters attribute if no kwargs
- if kwargs:
-
- return self.__image(**kwargs)
-
- return self.__image(**self._image_parameters)
-
class ArScene(DataFeatures.PipelineStepObject):
"""
Define abstract Augmented Reality scene with ArLayers and ArFrames inside.
@@ -1152,16 +1113,6 @@ class ArScene(DataFeatures.PipelineStepObject):
# Project layer aoi scene
yield name, aoi_scene_copy.project(tvec, rvec, self.parent.aruco_detector.optic_parameters.K)
- def draw(self, image: numpy.array, **kwargs: dict):
- """
- Draw scene into image.
-
- Parameters:
- image: where to draw
- """
-
- raise NotImplementedError('draw() method not implemented')
-
class ArCamera(ArFrame):
"""
Define abstract Augmented Reality camera as ArFrame with ArScenes inside.
@@ -1417,14 +1368,13 @@ class ArCamera(ArFrame):
width, height = frame.size
destination = numpy.float32([[0, 0], [width, 0], [width, height], [0, height]])
mapping = cv2.getPerspectiveTransform(aoi_2d.astype(numpy.float32), destination)
- frame.background = cv2.warpPerspective(self.background, mapping, (width, height))
+ frame.background = DataFeatures.TimestampedImage( cv2.warpPerspective(self.background, mapping, (width, height)), timestamp = self.background.timestamp)
# Ignore missing frame projection
except KeyError:
pass
-
# Define default ArContext image parameters
DEFAULT_ARCONTEXT_IMAGE_PARAMETERS = {
"draw_times": True,
@@ -1441,12 +1391,16 @@ class ArContext(DataFeatures.PipelineStepObject):
# Init private attributes
self.__pipeline = None
- self.__debug = False
+ self.__catch_exceptions = True
self.__exceptions = DataFeatures.TimestampedExceptions()
# Init gaze position processing assessment
self.__process_gaze_position_chrono = UtilsFeatures.TimeProbe()
self.__process_gaze_position_frequency = 0
+
+ # Init camera image processing assessment
+ self.__process_camera_image_chrono = UtilsFeatures.TimeProbe()
+ self.__process_camera_image_frequency = 0
# Init protected attributes
self._image_parameters = DEFAULT_ARCONTEXT_IMAGE_PARAMETERS
@@ -1465,25 +1419,14 @@ class ArContext(DataFeatures.PipelineStepObject):
self.__pipeline = pipeline
@property
- def debug(self) -> bool:
- """Disable pipeline exception catching to make it crash instead."""
- return self.__debug
-
- @debug.setter
- def debug(self, debug: bool):
-
- self.__debug = debug
-
- @property
- def image_parameters(self) -> dict:
- """Default image method parameters dictionary."""
- return self._image_parameters
+ def catch_exceptions(self) -> bool:
+ """Catch pipeline exception to display them instead of crashing execution."""
+ return self.__catch_exceptions
- @image_parameters.setter
- @DataFeatures.PipelineStepAttributeSetter
- def image_parameters(self, image_parameters: dict):
+ @catch_exceptions.setter
+ def catch_exceptions(self, catch_exceptions: bool):
- self._image_parameters = image_parameters
+ self.__catch_exceptions = catch_exceptions
def exceptions(self) -> DataFeatures.TimestampedException:
"""Get exceptions list"""
@@ -1495,6 +1438,7 @@ class ArContext(DataFeatures.PipelineStepObject):
return {
**DataFeatures.PipelineStepObject.as_dict(self),
"pipeline": self.__pipeline,
+ "catch_exceptions": self.__catch_exceptions,
"image_parameters": self._image_parameters
}
@@ -1503,6 +1447,7 @@ class ArContext(DataFeatures.PipelineStepObject):
"""Enter into ArContext."""
self.__process_gaze_position_chrono.start()
+ self.__process_camera_image_chrono.start()
return self
@@ -1514,7 +1459,7 @@ class ArContext(DataFeatures.PipelineStepObject):
def _process_gaze_position(self, timestamp: int|float, x: int|float = None, y: int|float = None, precision: int|float = None):
"""Request pipeline to process new gaze position at a timestamp."""
- logging.debug('%s._process_gaze_position', type(self).__name__)
+ logging.debug('ArContext._process_gaze_position %s', self.name)
# Assess gaze position processing frequency
lap_time, nb_laps, elapsed_time = self.__process_gaze_position_chrono.lap()
@@ -1531,12 +1476,12 @@ class ArContext(DataFeatures.PipelineStepObject):
if x is None and y is None:
# Edit empty gaze position
- self.__pipeline.look( GazeFeatures.GazePosition( timestamp = timestamp), debug = self.__debug )
+ self.__pipeline.look( GazeFeatures.GazePosition( timestamp = timestamp), catch_exceptions = self.__catch_exceptions )
else:
# Edit gaze position
- self.__pipeline.look( GazeFeatures.GazePosition( (x, y), precision = precision, timestamp = timestamp), debug = self.__debug)
+ self.__pipeline.look( GazeFeatures.GazePosition( (x, y), precision = precision, timestamp = timestamp), catch_exceptions = self.__catch_exceptions)
except DataFeatures.TimestampedException as e:
@@ -1549,7 +1494,15 @@ class ArContext(DataFeatures.PipelineStepObject):
def _process_camera_image(self, timestamp: int|float, image: numpy.array):
"""Request pipeline to process new camera image at a timestamp."""
- logging.debug('%s._process_camera_image', type(self).__name__)
+ logging.debug('ArContext._process_camera_image %s', self.name)
+
+ # Assess camera image processing frequency
+ lap_time, nb_laps, elapsed_time = self.__process_camera_image_chrono.lap()
+
+ if elapsed_time > 1e3:
+
+ self.__process_camera_image_frequency = nb_laps
+ self.__process_camera_image_chrono.restart()
if issubclass(type(self.__pipeline), ArCamera):
@@ -1565,7 +1518,10 @@ class ArContext(DataFeatures.PipelineStepObject):
logging.debug('\t> watch image (%i x %i)', width, height)
- self.__pipeline.watch( DataFeatures.TimestampedImage(image, timestamp = timestamp), debug = self.__debug )
+ self.__pipeline.watch( DataFeatures.TimestampedImage(image, timestamp = timestamp), catch_exceptions = self.__catch_exceptions )
+
+ # TODO: make this step optional
+ self.__pipeline.map(timestamp = timestamp, catch_exceptions = self.__catch_exceptions)
except DataFeatures.TimestampedException as e:
@@ -1577,14 +1533,15 @@ class ArContext(DataFeatures.PipelineStepObject):
raise(TypeError('Pipeline is not ArCamera instance.'))
- def __image(self, draw_times: bool, draw_exceptions: bool):
+ @DataFeatures.PipelineStepImage
+ def image(self, draw_times: bool = None, draw_exceptions: bool = None):
"""
Get pipeline image with execution information.
Parameters:
draw_exceptions: ...
"""
- logging.debug('%s.__image', type(self).__name__)
+ logging.debug('ArContext.image %s', self.name)
image = self.__pipeline.image()
height, width, _ = image.shape
@@ -1609,7 +1566,7 @@ class ArContext(DataFeatures.PipelineStepObject):
watch_time = math.nan
info_stack += 1
- cv2.putText(image, f'Watch {watch_time}ms', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Watch {watch_time}ms at {self.__process_camera_image_frequency}Hz', (20, info_stack * 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
if issubclass(type(self.__pipeline), ArFrame):
@@ -1636,17 +1593,3 @@ class ArContext(DataFeatures.PipelineStepObject):
cv2.putText(image, f'error: {e}', (20, height-(i+1)*50+25), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
return image
-
- def image(self, **kwargs: dict) -> numpy.array:
- """
- Get pipeline image.
-
- Parameters:
- kwargs: ArContext.__image parameters
- """
- # Use image_parameters attribute if no kwargs
- if kwargs:
-
- return self.__image(**kwargs)
-
- return self.__image(**self._image_parameters)
diff --git a/src/argaze/ArUcoMarkers/ArUcoCamera.py b/src/argaze/ArUcoMarkers/ArUcoCamera.py
index f32e4b6..93a0b8f 100644
--- a/src/argaze/ArUcoMarkers/ArUcoCamera.py
+++ b/src/argaze/ArUcoMarkers/ArUcoCamera.py
@@ -54,6 +54,8 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Init private attribute
self.__aruco_detector = None
self.__sides_mask = 0
+
+ # Init protected attributes
self._image_parameters = {**ArFeatures.DEFAULT_ARFRAME_IMAGE_PARAMETERS, **DEFAULT_ARUCOCAMERA_IMAGE_PARAMETERS}
@property
@@ -115,20 +117,6 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Update expected and excluded aoi
self._update_expected_and_excluded_aoi()
- @ArFeatures.ArCamera.image_parameters.setter
- @DataFeatures.PipelineStepAttributeSetter
- def image_parameters(self, image_parameters: dict):
-
- self._image_parameters = image_parameters
-
- if 'draw_layers' not in self._image_parameters:
-
- self._image_parameters['draw_layers'] = {}
-
- for layer_name in self.layers.keys():
-
- self._image_parameters['draw_layers'][layer_name] = ArFeatures.DEFAULT_ARLAYER_DRAW_PARAMETERS
-
@DataFeatures.PipelineStepMethod
def watch(self, image: DataFeatures.TimestampedImage):
"""Detect environment aruco markers from image and project scenes into camera frame."""
@@ -206,7 +194,8 @@ class ArUcoCamera(ArFeatures.ArCamera):
pass
- def __image(self, draw_detected_markers: dict = None, draw_scenes: dict = None, draw_optic_parameters_grid: dict = None, **kwargs: dict) -> numpy.array:
+ @DataFeatures.PipelineStepImage
+ def image(self, draw_detected_markers: dict = None, draw_scenes: dict = None, draw_optic_parameters_grid: dict = None, **kwargs: dict) -> numpy.array:
"""Get frame image with ArUco detection visualisation.
Parameters:
@@ -216,6 +205,8 @@ class ArUcoCamera(ArFeatures.ArCamera):
kwargs: ArCamera.image parameters
"""
+ logging.debug('ArUcoCamera.image %s', self.name)
+
# Get camera frame image
# Note: don't lock/unlock camera frame here as super().image manage it.
image = super().image(**kwargs)
@@ -226,6 +217,8 @@ class ArUcoCamera(ArFeatures.ArCamera):
# Draw optic parameters grid if required
if draw_optic_parameters_grid is not None:
+ logging.debug('\t> drawing optic parameters')
+
self.__aruco_detector.optic_parameters.draw(image, **draw_optic_parameters_grid)
# Draw scenes if required
@@ -233,26 +226,15 @@ class ArUcoCamera(ArFeatures.ArCamera):
for scene_name, draw_scenes_parameters in draw_scenes.items():
+ logging.debug('\t> drawing %s scene', scene_name)
+
self.scenes[scene_name].draw(image, **draw_scenes_parameters)
# Draw detected markers if required
if draw_detected_markers is not None:
+ logging.debug('\t> drawing detected markers')
+
self.__aruco_detector.draw_detected_markers(image, draw_detected_markers)
return image
-
- def image(self, **kwargs: dict) -> numpy.array:
- """
- Get frame image.
-
- Parameters:
- kwargs: ArUcoCamera.__image parameters
- """
-
- # Use image_parameters attribute if no kwargs
- if kwargs:
-
- return self.__image(**kwargs)
-
- return self.__image(**self._image_parameters)
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 680397b..6c5f589 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -636,7 +636,7 @@ class Heatmap(DataFeatures.PipelineStepObject):
gray = (255 * self.__point_spread_sum / numpy.max(self.__point_spread_sum)).astype(numpy.uint8)
self.__image = cv2.applyColorMap(gray, cv2.COLORMAP_JET)
- @property
+ @DataFeatures.PipelineStepImage
def image(self):
"""Get heatmap image."""
diff --git a/src/argaze/DataFeatures.py b/src/argaze/DataFeatures.py
index 032e7e5..703c521 100644
--- a/src/argaze/DataFeatures.py
+++ b/src/argaze/DataFeatures.py
@@ -628,12 +628,12 @@ def PipelineStepInit(method):
kwargs: any arguments defined by PipelineStepMethodInit.
"""
- # Init class attributes
- method(self, **kwargs)
-
# Init pipeline step object attributes
PipelineStepObject.__init__(self)
+ # Init class attributes
+ method(self, **kwargs)
+
# Update all attributes
self.update_attributes(kwargs)
@@ -747,6 +747,46 @@ def PipelineStepAttributeSetter(method):
return wrapper
+def PipelineStepImage(method):
+ """Define a decorator use into PipelineStepObject class to wrap pipeline step image method."""
+
+ def wrapper(self, **kwargs) -> numpy.array:
+ """Wrap pipeline step image method."""
+
+ if kwargs:
+
+ logging.debug('\t> using kwargs')
+
+ return method(self, **kwargs)
+
+ else:
+
+ logging.debug('\t> using image_parameters')
+
+ return method(self, **self.image_parameters)
+
+ return wrapper
+
+def PipelineStepDraw(method):
+ """Define a decorator use into PipelineStepObject class to wrap pipeline step draw method."""
+
+ def wrapper(self, image: numpy.array, **kwargs):
+ """Wrap pipeline step draw method."""
+
+ if kwargs:
+
+ logging.debug('\t> using kwargs')
+
+ method(self, image, **kwargs)
+
+ else:
+
+ logging.debug('\t> using draw_parameters')
+
+ method(self, image, **self.draw_parameters)
+
+ return wrapper
+
class PipelineStepObject():
"""
Define a class to assess pipeline step method execution times and observe them.
@@ -761,7 +801,12 @@ class PipelineStepObject():
self.__name = None
self.__observers = []
self.__execution_times = {}
+ self.__image_parameters = {}
+ # Init protected attributes
+ self._image_parameters = {}
+ self._draw_parameters = {}
+
# Parent attribute will be setup later by parent it self
self.__parent = None
@@ -845,6 +890,28 @@ class PipelineStepObject():
"""Get pipeline step object observers execution times dictionary."""
return self.__execution_times
+ @property
+ def image_parameters(self) -> dict:
+ """image method parameters dictionary."""
+ return self._image_parameters
+
+ @image_parameters.setter
+ @PipelineStepAttributeSetter
+ def image_parameters(self, image_parameters: dict):
+
+ self._image_parameters = image_parameters
+
+ @property
+ def draw_parameters(self) -> dict:
+ """draw method parameters dictionary."""
+ return self._draw_parameters
+
+ @draw_parameters.setter
+ @PipelineStepAttributeSetter
+ def draw_parameters(self, draw_parameters: dict):
+
+ self._draw_parameters = draw_parameters
+
def as_dict(self) -> dict:
"""Export PipelineStepObject attributes as dictionary.
@@ -999,14 +1066,14 @@ def PipelineStepMethod(method):
PipelineStepMethod must have a timestamp as first argument.
"""
- def wrapper(self, *args, timestamp: int|float = None, unwrap: bool = False, debug: bool = False, **kwargs):
+ def wrapper(self, *args, timestamp: int|float = None, unwrap: bool = False, catch_exceptions: bool = True, **kwargs):
"""Wrap pipeline step method to measure execution time.
Parameters:
args: any arguments defined by PipelineStepMethod.
timestamp: optional method call timestamp (unit doesn't matter) if first args parameter is not a TimestampedObject instance.
unwrap: extra arguments used in wrapper function to call wrapped method directly.
- debug: extra arguments used in wrapper function to not catch exception.
+ catch_exceptions: extra arguments used in wrapper function to catch exceptions.
"""
if timestamp is None and len(args) > 0:
@@ -1027,7 +1094,7 @@ def PipelineStepMethod(method):
exception = None
result = None
- if debug:
+ if not catch_exceptions:
# Execute wrapped method without catching exceptions
result = method(self, *args, **kwargs)
diff --git a/src/argaze/utils/UtilsFeatures.py b/src/argaze/utils/UtilsFeatures.py
index f7d74c7..05821b1 100644
--- a/src/argaze/utils/UtilsFeatures.py
+++ b/src/argaze/utils/UtilsFeatures.py
@@ -249,7 +249,7 @@ class FileWriter(DataFeatures.PipelineStepObject):
# Write into file
print(log, file=self.__file, flush=True)
-class VideoWriter(DataFeatures.PipelineStepObject):
+class VideoWriter(DataFeatures.PipelineStepObject, DataFeatures.SharedObject):
"""Open ffmpeg application as sub-process.
FFmpeg input PIPE: RAW images in BGR color format
FFmpeg output MP4 file encoded with HEVC codec.
@@ -269,6 +269,9 @@ class VideoWriter(DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepInit
def __init__(self, **kwargs):
+
+ # Init parent classes
+ DataFeatures.SharedObject.__init__(self)
# Init private attributes
self.__path = None
@@ -319,36 +322,48 @@ class VideoWriter(DataFeatures.PipelineStepObject):
@DataFeatures.PipelineStepEnter
def __enter__(self):
"""Check that folder structure exist then, open ffmpeg subprocess."""
+
+ # Use lock feature
+ with self._lock:
- import subprocess as sp
- import shlex
+ import subprocess as sp
+ import shlex
- if not os.path.exists(self.__path.parent.absolute()):
-
- os.makedirs(self.__path.parent.absolute())
+ if not os.path.exists(self.__path.parent.absolute()):
+
+ os.makedirs(self.__path.parent.absolute())
- self.__process = sp.Popen(shlex.split(f'ffmpeg -hide_banner -loglevel error -y -s {self.__width}x{self.__height} -pixel_format bgr24 -f rawvideo -r {self.__fps} -i pipe: -vcodec libx265 -x265-params log-level=error -pix_fmt yuv420p -crf 24 {self.__path}'), stdin=sp.PIPE)
-
+ self.__process = sp.Popen(shlex.split(f'ffmpeg -hide_banner -loglevel error -y -s {self.__width}x{self.__height} -pixel_format bgr24 -f rawvideo -r {self.__fps} -i pipe: -vcodec libx265 -x265-params log-level=error -pix_fmt yuv420p -crf 24 {self.__path}'), stdin=sp.PIPE)
+
@DataFeatures.PipelineStepExit
def __exit__(self, exception_type, exception_value, exception_traceback):
- # Close and flush stdin
- self.__process.stdin.close()
+ # Use lock feature
+ with self._lock:
+
+ # Close and flush stdin
+ self.__process.stdin.close()
- # Wait for sub-process to finish
- self.__process.wait()
+ # Wait for sub-process to finish
+ self.__process.wait()
- # Terminate the sub-process
- # Note: We don't have to terminate the sub-process (after process.wait(), the sub-process is supposed to be closed).
- self.__process.terminate()
+ # Terminate the sub-process
+ # Note: We don't have to terminate the sub-process (after process.wait(), the sub-process is supposed to be closed).
+ self.__process.terminate()
def write(self, image: numpy.array):
"""Write raw video frame to input stream of ffmpeg sub-process."""
- # Resize image to adapt to video resolution
- output = cv2.resize(image, dsize=(self.__width, self.__height), interpolation=cv2.INTER_LINEAR)
+ # Use lock feature
+ with self._lock:
+
+ # Check if subprocess still alive
+ if self.__process.poll() is None:
+
+ # Resize image to adapt to video resolution
+ output = cv2.resize(image, dsize=(self.__width, self.__height), interpolation=cv2.INTER_LINEAR)
- self.__process.stdin.write(output.tobytes())
+ self.__process.stdin.write(output.tobytes())
def PrintCallStack(method):
"""Define a decorator to print call stack until the decorated method."""
diff --git a/src/argaze/utils/contexts/TobiiProGlasses2.py b/src/argaze/utils/contexts/TobiiProGlasses2.py
index 7830036..fc2e3f0 100644
--- a/src/argaze/utils/contexts/TobiiProGlasses2.py
+++ b/src/argaze/utils/contexts/TobiiProGlasses2.py
@@ -329,6 +329,7 @@ class LiveStream(ArFeatures.ArContext):
self.__parser = TobiiJsonDataParser()
+ # Init protected attributes
self._image_parameters = {**ArFeatures.DEFAULT_ARCONTEXT_IMAGE_PARAMETERS, **DEFAULT_TOBII_IMAGE_PARAMETERS}
@property
@@ -585,15 +586,17 @@ class LiveStream(ArFeatures.ArContext):
# Stop video streaming
threading.Thread.join(self.__video_thread)
- def __image(self, draw_something: bool, **kwargs: dict) -> numpy.array:
+ @DataFeatures.PipelineStepImage
+ def image(self, draw_something: bool = None) -> numpy.array:
"""Get Tobbi visualisation.
Parameters:
+ draw_something: example
kwargs: ArContext.image parameters
"""
# Get context image
- image = super().image(**kwargs)
+ image = super().image()
if draw_something:
@@ -601,21 +604,6 @@ class LiveStream(ArFeatures.ArContext):
return image
- def image(self, **kwargs: dict) -> numpy.array:
- """
- Get Tobbi visualisation.
-
- Parameters:
- kwargs: LiveStream.__image parameters
- """
-
- # Use image_parameters attribute if no kwargs
- if kwargs:
-
- return self.__image(**kwargs)
-
- return self.__image(**self._image_parameters)
-
def __make_socket(self):
"""Create a socket to enable network communication."""
@@ -1224,8 +1212,9 @@ class PostProcessing(ArFeatures.ArContext):
self.__data_list = []
+ # Init protected attributes
self._image_parameters = {**ArFeatures.DEFAULT_ARCONTEXT_IMAGE_PARAMETERS, **DEFAULT_TOBII_IMAGE_PARAMETERS}
-
+
@property
def segment(self) -> str:
"""Path to segment folder."""