author     Théo de la Hogue   2024-07-03 17:14:43 +0200
committer  Théo de la Hogue   2024-07-03 17:14:43 +0200
commit     8fc18a434da400f0fe82707e23838d6cc40a787d (patch)
tree       9e42c9f7edb9364e9a0afedab30194820987a907 /docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
parent     7b82b09e87d1475acf5040c67323421699a3ad06 (diff)
Rewriting eye tracking context and gaze analysis sections.
Diffstat (limited to 'docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md')
-rw-r--r--  docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md  193
1 file changed, 193 insertions(+), 0 deletions(-)
diff --git a/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md b/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
new file mode 100644
index 0000000..99b6c7a
--- /dev/null
+++ b/docs/user_guide/eye_tracking_context/advanced_topics/context_definition.md
@@ -0,0 +1,193 @@
+Define a context class
+======================
+
+The [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) class defines a generic base class interface to handle incoming eye tracker data before passing them to a processing pipeline, following the [Python context manager protocol](https://docs.python.org/3/reference/datamodel.html#context-managers).
+
+The [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) class interface provides playback features to stop or pause processing, and performance assessment features to measure how many times processing steps are called and how much time they take.
+
+Besides, there is also a [LiveProcessingContext](../../../argaze.md/#argaze.ArFeatures.LiveProcessingContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and defines an abstract *calibrate* method to implement a device-specific calibration process.
+
+In the same way, there is a [PostProcessingContext](../../../argaze.md/#argaze.ArFeatures.PostProcessingContext) class that inherits from [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) and defines abstract *previous* and *next* playback methods to move through a recording's frames; it also defines *duration* and *progression* properties to get information about the recording length and the processing advancement.
+
+Finally, a specific eye tracking context can be defined in a Python file by writing a class that inherits from either the [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext), [LiveProcessingContext](../../../argaze.md/#argaze.ArFeatures.LiveProcessingContext) or [PostProcessingContext](../../../argaze.md/#argaze.ArFeatures.PostProcessingContext) class.
+
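+Since [ArContext](../../../argaze.md/#argaze.ArFeatures.ArContext) relies on the context manager protocol, a context defined this way is typically driven with a *with* statement. The snippet below is a minimal usage sketch; the *example_context* module and *ExampleContext* class are hypothetical names standing for a context written as in the sections below:
+
+```python
+import time
+
+# Hypothetical module and class names, used for illustration only
+from example_context import ExampleContext
+
+# Entering the context calls __enter__ and starts its processing
+with ExampleContext() as context:
+
+    # Keep the script alive while the context is running
+    while context.is_running():
+        time.sleep(1)
+
+# Leaving the context calls __exit__ and stops its processing
+```
+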
+## Write live processing context
+
+Here is a live processing context example that processes gaze positions and camera images in two separate threads:
+
+```python
+import threading
+
+from argaze import ArFeatures, DataFeatures
+
+class LiveProcessingExample(ArFeatures.LiveProcessingContext):
+
+    @DataFeatures.PipelineStepInit
+    def __init__(self, **kwargs):
+
+        # Init LiveProcessingContext class
+        super().__init__()
+
+        # Init private attribute
+        self.__parameter = ...
+
+    @property
+    def parameter(self):
+        """Any context specific parameter."""
+        return self.__parameter
+
+    @parameter.setter
+    def parameter(self, parameter):
+        self.__parameter = parameter
+
+    @DataFeatures.PipelineStepEnter
+    def __enter__(self):
+        """Start context."""
+
+        # Start context according to any specific parameter (e.g. self.parameter)
+        ...
+
+        # Start a gaze position processing thread
+        self.__gaze_thread = threading.Thread(target = self.__gaze_position_processing)
+        self.__gaze_thread.start()
+
+        # Start a camera image processing thread if applicable
+        self.__camera_thread = threading.Thread(target = self.__camera_image_processing)
+        self.__camera_thread.start()
+
+        return self
+
+    def __gaze_position_processing(self):
+        """Process gaze position."""
+
+        # Processing loop
+        while self.is_running():
+
+            # Process only when not paused
+            if not self.is_paused():
+
+                # Assuming that timestamp, x and y values are available
+                ...
+
+                # Process timestamped gaze position
+                self._process_gaze_position(timestamp = timestamp, x = x, y = y)
+
+            # Wait some time if needed
+            ...
+
+    def __camera_image_processing(self):
+        """Process camera image if applicable."""
+
+        # Processing loop
+        while self.is_running():
+
+            # Process only when not paused
+            if not self.is_paused():
+
+                # Assuming that timestamp and camera_image values are available
+                ...
+
+                # Process timestamped camera image
+                self._process_camera_image(timestamp = timestamp, image = camera_image)
+
+            # Wait some time if needed
+            ...
+
+    @DataFeatures.PipelineStepExit
+    def __exit__(self, exception_type, exception_value, exception_traceback):
+        """End context."""
+
+        # Stop processing loops
+        self.stop()
+
+        # Wait for processing threads to terminate
+        self.__gaze_thread.join()
+        self.__camera_thread.join()
+
+    def calibrate(self):
+        """Handle device calibration process."""
+
+        ...
+```
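+
+The *...* placeholders above must be filled according to the device's own API. As a purely illustrative sketch, here is how the *__gaze_position_processing* method might look for a hypothetical device that streams gaze samples as JSON datagrams over UDP; the socket address and the datagram layout are assumptions, not part of ArGaze:
+
+```python
+import json
+import socket
+import time
+
+def __gaze_position_processing(self):
+    """Read gaze samples from a hypothetical UDP stream."""
+
+    # Hypothetical socket address, chosen for illustration only
+    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as stream:
+
+        stream.bind(("0.0.0.0", 5555))
+        stream.settimeout(0.1)
+
+        # Processing loop
+        while self.is_running():
+
+            # Process only when not paused
+            if not self.is_paused():
+
+                try:
+                    # Assumed datagram layout: {"ts": ..., "x": ..., "y": ...}
+                    data, _ = stream.recvfrom(1024)
+                    sample = json.loads(data)
+
+                except socket.timeout:
+                    continue
+
+                # Process timestamped gaze position
+                self._process_gaze_position(timestamp = sample["ts"], x = sample["x"], y = sample["y"])
+
+            else:
+                # Avoid busy waiting while paused
+                time.sleep(0.01)
+```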
+
+## Write post processing context
+
+Here is a post processing context example that processes gaze positions and camera images in the same thread:
+
+```python
+import threading
+
+from argaze import ArFeatures, DataFeatures
+
+class PostProcessingExample(ArFeatures.PostProcessingContext):
+
+    @DataFeatures.PipelineStepInit
+    def __init__(self, **kwargs):
+
+        # Init PostProcessingContext class
+        super().__init__()
+
+        # Init private attribute
+        self.__parameter = ...
+
+    @property
+    def parameter(self):
+        """Any context specific parameter."""
+        return self.__parameter
+
+    @parameter.setter
+    def parameter(self, parameter):
+        self.__parameter = parameter
+
+    @DataFeatures.PipelineStepEnter
+    def __enter__(self):
+        """Start context."""
+
+        # Start context according to any specific parameter (e.g. self.parameter)
+        ...
+
+        # Start a data reading thread
+        self.__read_thread = threading.Thread(target = self.__data_reading)
+        self.__read_thread.start()
+
+        return self
+
+    def __data_reading(self):
+        """Process gaze position and camera image if applicable."""
+
+        # Processing loop
+        while self.is_running():
+
+            # Process only when not paused
+            if not self.is_paused():
+
+                # Assuming that timestamp and camera_image values are available
+                ...
+
+                # Process timestamped camera image
+                self._process_camera_image(timestamp = timestamp, image = camera_image)
+
+                # Assuming that timestamp, x and y values are available
+                ...
+
+                # Process timestamped gaze position
+                self._process_gaze_position(timestamp = timestamp, x = x, y = y)
+
+            # Wait some time if needed
+            ...
+
+    @DataFeatures.PipelineStepExit
+    def __exit__(self, exception_type, exception_value, exception_traceback):
+        """End context."""
+
+        # Stop processing loop
+        self.stop()
+
+        # Wait for the reading thread to terminate
+        self.__read_thread.join()
+
+    def previous(self):
+        """Go to previous camera image frame."""
+
+        ...
+
+    def next(self):
+        """Go to next camera image frame."""
+
+        ...
+```
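+
+In the same way, the *...* placeholders of the post processing example depend on how the record is stored. The sketch below is purely illustrative and assumes a hypothetical CSV record holding *timestamp*, *x* and *y* columns; the file name and column layout are assumptions, not part of ArGaze:
+
+```python
+import csv
+import time
+
+def __data_reading(self):
+    """Read timestamped gaze positions from a hypothetical CSV record."""
+
+    # Hypothetical record file with 'timestamp', 'x' and 'y' columns
+    with open("gaze_record.csv", newline = "") as record:
+
+        for row in csv.DictReader(record):
+
+            # Wait while the processing is paused
+            while self.is_paused() and self.is_running():
+                time.sleep(0.01)
+
+            # Leave the reading loop once the context is stopped
+            if not self.is_running():
+                break
+
+            # Process timestamped gaze position
+            self._process_gaze_position(timestamp = float(row["timestamp"]), x = int(row["x"]), y = int(row["y"]))
+```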
+