From ad97a3e15da2ca72ceb97ad2e19a5c16bcfc2e09 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 10 Apr 2024 17:08:51 +0200
Subject: Replacing visualisation by visualization.

---
 .../gaze_analysis_pipeline/visualisation.md | 116 ---------------------
 1 file changed, 116 deletions(-)
 delete mode 100644 docs/user_guide/gaze_analysis_pipeline/visualisation.md

diff --git a/docs/user_guide/gaze_analysis_pipeline/visualisation.md b/docs/user_guide/gaze_analysis_pipeline/visualisation.md
deleted file mode 100644
index b139f9b..0000000
--- a/docs/user_guide/gaze_analysis_pipeline/visualisation.md
+++ /dev/null
@@ -1,116 +0,0 @@
-Visualize pipeline steps
-========================
-
-Visualization is not a pipeline step in itself, but the output of each [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) pipeline step can be drawn in real time or afterward, depending on the application purpose.
-
-![ArFrame visualization](../../img/visualisation.png)
-
-## Add image parameters to ArFrame JSON configuration file
-
-[ArFrame.image](../../argaze.md/#argaze.ArFeatures.ArFrame.image) method parameters can be configured through a dedicated JSON entry.
-
-Here is an extract from an ArFrame JSON configuration file where image parameters are added:
-
-```json
-{
-    "name": "My FullHD screen",
-    "size": [1920, 1080],
-    ...
-    "image_parameters": {
-        "draw_gaze_positions": {
-            "color": [0, 255, 255],
-            "size": 2
-        },
-        "draw_fixations": {
-            "deviation_circle_color": [255, 255, 255],
-            "duration_border_color": [127, 0, 127],
-            "duration_factor": 1e-2,
-            "draw_positions": {
-                "position_color": [0, 255, 255],
-                "line_color": [0, 0, 0]
-            }
-        },
-        "draw_saccades": {
-            "line_color": [255, 0, 255]
-        },
-        "draw_scan_path": {
-            "draw_fixations": {
-                "deviation_circle_color": [255, 0, 255],
-                "duration_border_color": [127, 0, 127],
-                "duration_factor": 1e-2
-            },
-            "draw_saccades": {
-                "line_color": [255, 0, 255]
-            }
-        },
-        "draw_layers": {
-            "MyLayer": {
-                "draw_aoi_scene": {
-                    "draw_aoi": {
-                        "color": [255, 255, 255],
-                        "border_size": 1
-                    }
-                },
-                "draw_aoi_matching": {
-                    "draw_matched_fixation": {
-                        "deviation_circle_color": [255, 255, 255],
-                        "draw_positions": {
-                            "position_color": [0, 255, 0],
-                            "line_color": [0, 0, 0]
-                        }
-                    },
-                    "draw_matched_region": {
-                        "color": [0, 255, 0],
-                        "border_size": 4
-                    },
-                    "draw_looked_aoi": {
-                        "color": [0, 255, 0],
-                        "border_size": 2
-                    },
-                    "looked_aoi_name_color": [255, 255, 255],
-                    "looked_aoi_name_offset": [0, -10]
-                }
-            }
-        }
-    }
-}
-```
-
-!!! warning
-    Most *image_parameters* entries only take effect if the related ArFrame/ArLayer pipeline steps are enabled.
-    For example, the JSON *draw_scan_path* entry requires the GazeMovementIdentifier and ScanPath steps to be enabled, as sketched below.
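-
-For instance, enabling those two steps could look like the extract below, in the same configuration file. This is only a sketch: the *DispersionThresholdIdentification* identifier and its threshold values are illustrative assumptions, so check the pipeline configuration chapters for the exact entries available in your ArGaze version.
-
-```json
-{
-    "name": "My FullHD screen",
-    "size": [1920, 1080],
-    "gaze_movement_identifier": {
-        "DispersionThresholdIdentification": {
-            "deviation_max_threshold": 50,
-            "duration_min_threshold": 200
-        }
-    },
-    "scan_path": {
-        "duration_max": 10000
-    },
-    ...
-}
-```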
-
-The [ArFrame.image](../../argaze.md/#argaze.ArFeatures.ArFrame.image) method can then be called in various situations.
-
-## Live window display
-
-While timestamped gaze positions are processed by the [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method, the [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) image can be displayed in real time thanks to the [OpenCV package](https://pypi.org/project/opencv-python/).
-
-```python
-import cv2
-
-def main():
-
-    # Assuming ArFrame is loaded
-    ...
-
-    # Create a window to display ArFrame
-    cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)
-
-    # Assuming that timestamped gaze positions are being processed by ArFrame.look method
-    ...
-
-        # Update ArFrame image display
-        cv2.imshow(ar_frame.name, ar_frame.image())
-
-        # Wait 10 ms
-        cv2.waitKey(10)
-
-if __name__ == '__main__':
-
-    main()
-```
-
-!!! note "Export to video file"
-
-    Video export is detailed in the [gaze analysis recording chapter](recording.md).
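-
-For a single snapshot rather than a live window or a video, the same image output can also be written to disk afterward. The sketch below relies on the same assumptions as above (a loaded `ar_frame` whose timestamped gaze positions have already been processed); the output filename is arbitrary.
-
-```python
-import cv2
-
-# Assuming ArFrame is loaded and timestamped gaze positions have been processed
-...
-
-# Write the current ArFrame image to a PNG file for later inspection
-cv2.imwrite('ar_frame_snapshot.png', ar_frame.image())
-```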