author    Théo de la Hogue    2024-07-09 13:42:21 +0200
committer Théo de la Hogue    2024-07-09 13:42:21 +0200
commit    94ccab6f91c00b1f669b09445bd5af8c32957e72 (patch)
tree      a910239e1892ae420ae1f33442d17454c6096902 /docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
parent    2753c71f0121cd380b67b150e1ea296bd7e39600 (diff)
Replacing the word "processing" with the words "capture" or "playback".
Diffstat (limited to 'docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md')
-rw-r--r--  docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md | 72
1 file changed, 36 insertions(+), 36 deletions(-)
diff --git a/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md b/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
index c163696..0702c8e 100644
--- a/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
+++ b/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
@@ -3,27 +3,27 @@ Define a context class
The [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) class defines a generic base class interface to handle incoming eye tracker data before passing it to a processing pipeline, following the [Python context manager protocol](https://docs.python.org/3/reference/datamodel.html#context-managers).
-The [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) class interface provides playback features to stop or pause processings, performance assement features to measure how many times processings are called and the time spent by the process.
+The [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) class interface provides control features to stop or pause working threads, and performance assessment features to measure how many times processing is called and the time spent by the process.
-Besides, there is also a [LiveProcessingContext](../../../argaze.md/#argaze.ArFeatures.LiveProcessingContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and that defines an abstract *calibrate* method to write specific device calibration process.
+Besides, there is also a [DataCaptureContext](../../../argaze.md/#argaze.ArFeatures.DataCaptureContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and that defines an abstract *calibrate* method for implementing a device-specific calibration process.
-In the same way, there is a [PostProcessingContext](../../../argaze.md/#argaze.ArFeatures.PostProcessingContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and that defines abstract *previous* and *next* playback methods to move into record's frames and also defines *duration* and *progression* properties to get information about a record length and processing advancement.
+In the same way, there is a [DataPlaybackContext](../../../argaze.md/#argaze.ArFeatures.DataPlaybackContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and that defines abstract *previous* and *next* playback methods to move through a recording's frames, and also defines *duration* and *progression* properties to get information about the recording's length and the playback progress.
-Finally, a specific eye tracking context can be defined into a Python file by writing a class that inherits either from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext), [LiveProcessingContext](../../../argaze.md/#argaze.ArFeatures.LiveProcessingContext) or [PostProcessingContext](../../../argaze.md/#argaze.ArFeatures.PostProcessingContext) class.
+Finally, a specific eye tracking context can be defined in a Python file by writing a class that inherits from either the [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext), [DataCaptureContext](../../../argaze.md/#argaze.ArFeatures.DataCaptureContext) or [DataPlaybackContext](../../../argaze.md/#argaze.ArFeatures.DataPlaybackContext) class.
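To make the hierarchy concrete, here is a rough skeleton of the two specialized cases. The class names are hypothetical, the members shown are only the ones named in the paragraphs above, and whether *duration* and *progression* have to be overridden is an assumption of this sketch, not a statement about the library; the context manager and initialization boilerplate is omitted here and shown in the full examples below.

```python
from argaze import ArFeatures

# Hypothetical skeletons: pick the base class matching the kind of context to write.

class MyDeviceCapture(ArFeatures.DataCaptureContext):
    """Capture data from a live eye tracker device."""

    def calibrate(self):
        """Run the device-specific calibration process."""
        ...

class MyRecordPlayback(ArFeatures.DataPlaybackContext):
    """Play back data from a recorded session."""

    def previous(self):
        """Go to the previous frame of the record."""
        ...

    def next(self):
        """Go to the next frame of the record."""
        ...

    @property
    def duration(self):
        """Get the record length."""
        ...

    @property
    def progression(self):
        """Get the playback advancement."""
        ...
```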
-## Write live processing context
+## Write data capture context
-Here is a live processing context example that processes gaze positions and camera images in two separated threads:
+Here is a data capture context example that processes gaze positions and camera images in two separate threads:
```python
from argaze import ArFeatures, DataFeatures
-class LiveProcessingExample(ArFeatures.LiveProcessingContext):
+class DataCaptureExample(ArFeatures.DataCaptureContext):
@DataFeatures.PipelineStepInit
def __init__(self, **kwargs):
- # Init LiveProcessingContext class
+ # Init DataCaptureContext class
super().__init__()
# Init private attribute
@@ -45,23 +45,23 @@ class LiveProcessingExample(ArFeatures.LiveProcessingContext):
# Start context according to any specific parameter
... self.parameter
- # Start a gaze position processing thread
- self.__gaze_thread = threading.Thread(target = self.__gaze_position_processing)
+ # Start a gaze position capture thread
+ self.__gaze_thread = threading.Thread(target = self.__gaze_position_capture)
self.__gaze_thread.start()
- # Start a camera image processing thread if applicable
- self.__camera_thread = threading.Thread(target = self.__camera_image_processing)
+ # Start a camera image capture thread if applicable
+ self.__camera_thread = threading.Thread(target = self.__camera_image_capture)
self.__camera_thread.start()
return self
- def __gaze_position_processing(self):
- """Process gaze position."""
+ def __gaze_position_capture(self):
+ """Capture gaze position."""
- # Processing loop
+ # Capture loop
while self.is_running():
- # Pause processing
+ # Pause capture
if not self.is_paused():
# Assuming that timestamp, x and y values are available
@@ -73,13 +73,13 @@ class LiveProcessingExample(ArFeatures.LiveProcessingContext):
# Wait some time if needed
...
- def __camera_image_processing(self):
- """Process camera image if applicable."""
+ def __camera_image_capture(self):
+ """Capture camera image if applicable."""
- # Processing loop
+ # Capture loop
while self.is_running():
- # Pause processing
+ # Pause capture
if not self.is_paused():
# Assuming that timestamp and camera_image are available
@@ -95,10 +95,10 @@ class LiveProcessingExample(ArFeatures.LiveProcessingContext):
def __exit__(self, exception_type, exception_value, exception_traceback):
"""End context."""
- # Stop processing loops
+ # Stop capture loops
self.stop()
- # Stop processing threads
+ # Stop capture threads
threading.Thread.join(self.__gaze_thread)
threading.Thread.join(self.__camera_thread)
@@ -108,19 +108,19 @@ class LiveProcessingExample(ArFeatures.LiveProcessingContext):
...
```
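As a usage sketch only (the module path, the sleep loop and the keyboard-interrupt handling are illustrative assumptions, not part of the class above), such a capture context is typically driven through a `with` statement so that `__enter__` starts the capture threads and `__exit__` stops them:

```python
import time

# Hypothetical import: assumes DataCaptureExample lives in a module of this name
from data_capture_example import DataCaptureExample

with DataCaptureExample() as context:

    try:

        # Let the capture threads work until the context stops or the user interrupts the script
        while context.is_running():

            time.sleep(1)

    except KeyboardInterrupt:

        # Leaving the with block calls __exit__, which stops the threads
        pass
```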
-## Write post processing context
+## Write data playback context
-Here is a post processing context example that processes gaze positions and camera images in a same thread:
+Here is a data playback context example that reads gaze positions and camera images in the same thread:
```python
from argaze import ArFeatures, DataFeatures
-class PostProcessingExample(ArFeatures.PostProcessingContext):
+class DataPlaybackExample(ArFeatures.DataPlaybackContext):
@DataFeatures.PipelineStepInit
def __init__(self, **kwargs):
- # Init LiveProcessingContext class
+ # Init DataPlaybackContext class
super().__init__()
# Init private attribute
@@ -142,19 +142,19 @@ class PostProcessingExample(ArFeatures.PostProcessingContext):
# Start context according to any specific parameter
... self.parameter
- # Start a reading data thread
- self.__read_thread = threading.Thread(target = self.__data_reading)
- self.__read_thread.start()
+ # Start a data playback thread
+ self.__data_thread = threading.Thread(target = self.__data_playback)
+ self.__data_thread.start()
return self
- def __data_reading(self):
- """Process gaze position and camera image if applicable."""
+ def __data_playback(self):
+ """Playback gaze position and camera image if applicable."""
- # Processing loop
+ # Playback loop
while self.is_running():
- # Pause processing
+ # Pause playback
if not self.is_paused():
# Assuming that timestamp and camera_image are available
@@ -176,11 +176,11 @@ class PostProcessingExample(ArFeatures.PostProcessingContext):
def __exit__(self, exception_type, exception_value, exception_traceback):
"""End context."""
- # Stop processing loops
+ # Stop playback loop
self.stop()
- # Stop processing threads
- threading.Thread.join(self.__read_thread)
+ # Stop playback thread
+ threading.Thread.join(self.__data_thread)
def previous(self):
"""Go to previous camera image frame."""