From 6cce228412f3f3b074fdaf87775b8d62a5adb060 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 30 Aug 2023 22:26:31 +0200
Subject: Reorganizing gaze analysis pipeline documentation.

---
 .../gaze_analysis_pipeline/visualisation.md | 190 +++++++++++++++++++++
 1 file changed, 190 insertions(+)
 create mode 100644 docs/user_guide/gaze_analysis_pipeline/visualisation.md

diff --git a/docs/user_guide/gaze_analysis_pipeline/visualisation.md b/docs/user_guide/gaze_analysis_pipeline/visualisation.md
new file mode 100644
index 0000000..5eae7f5
--- /dev/null
+++ b/docs/user_guide/gaze_analysis_pipeline/visualisation.md
@@ -0,0 +1,190 @@
Visualize ArFrame and ArLayers
==============================

All [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) and [ArLayers](../../argaze.md/#argaze.ArFeatures.ArLayer) pipeline steps can be drawn in real time or afterward.

![ArFrame visualisation](../../img/ar_frame_visualisation.png)

## Export to PNG file

Once timestamped gaze positions have been processed by the [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method, it is possible to write the [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) image into a file thanks to the [OpenCV package](https://pypi.org/project/opencv-python/).

```python
import cv2

# Assuming that timestamped gaze positions have been processed by ArFrame.look method
...

# Export ArFrame image
cv2.imwrite('./ar_frame.png', ar_frame.image())
```

## Export to MP4 file

While timestamped gaze positions are being processed by the [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method, it is possible to write the [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) image into a video file thanks to the [OpenCV package](https://pypi.org/project/opencv-python/).

```python
import cv2

# Assuming ArFrame is loaded
...

# Create an MP4 video file to save ArFrame images at 10 frames per second
video = cv2.VideoWriter('ar_frame.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 10, ar_frame.size)

# Assuming that timestamped gaze positions are being processed by ArFrame.look method
...

    # Write ArFrame image into video file
    video.write(ar_frame.image())

# Close video file
video.release()
```

## Live window display

While timestamped gaze positions are being processed by the [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method, it is possible to display the [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) image in a window thanks to the [OpenCV package](https://pypi.org/project/opencv-python/).

```python
import cv2

def main():

    # Assuming ArFrame is loaded
    ...

    # Create a window to display ArFrame
    cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)

    # Assuming that timestamped gaze positions are being processed by ArFrame.look method
    ...

        # Update ArFrame image display
        cv2.imshow(ar_frame.name, ar_frame.image())

        # Wait 10 ms
        cv2.waitKey(10)

if __name__ == '__main__':

    main()
```
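The `...` placeholders above stand for the application code that loads the [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame) and produces timestamped gaze positions. For illustration, here is a minimal self-contained sketch that feeds synthetic gaze positions moving along the frame diagonal into the [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method while displaying the result. The `./configuration.json` path and the synthetic positions are assumptions made for this sketch only; the loading call mirrors the configuration step of this pipeline guide.

```python
import cv2
from argaze import ArFeatures, GazeFeatures

def main():

    # Load ArFrame from a JSON configuration file (hypothetical path)
    ar_frame = ArFeatures.ArFrame.from_json('./configuration.json')

    # Create a window to display ArFrame
    cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)

    # Simulate one second of gaze positions sampled every 10 ms
    for timestamp in range(0, 1000, 10):

        # Edit a synthetic gaze position (in pixels) moving along the frame diagonal
        gaze_position = GazeFeatures.GazePosition((timestamp, timestamp))

        # Process timestamped gaze position through ArFrame pipeline
        ar_frame.look(timestamp, gaze_position)

        # Update ArFrame image display
        cv2.imshow(ar_frame.name, ar_frame.image())

        # Wait 10 ms
        cv2.waitKey(10)

    # Close window
    cv2.destroyAllWindows()

if __name__ == '__main__':

    main()
```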
## Edit ArFrame image parameters

[ArFrame.image](../../argaze.md/#argaze.ArFeatures.ArFrame.image) method parameters can be configured with a dictionary.

```python
# Edit ArFrame image parameters
image_parameters = {
    "draw_scan_path": {
        "draw_fixations": {
            "deviation_circle_color": [255, 0, 255],
            "duration_border_color": [127, 0, 127],
            "duration_factor": 1e-2
        },
        "draw_saccades": {
            "line_color": [255, 0, 255]
        },
        "deepness": 0
    },
    "draw_layers": {
        "MyLayer": {
            "draw_aoi_scene": {
                "draw_aoi": {
                    "color": [255, 255, 255],
                    "border_size": 1
                }
            },
            "draw_aoi_matching": {
                "draw_matched_fixation": {
                    "deviation_circle_color": [255, 255, 255]
                },
                "draw_matched_fixation_positions": {
                    "position_color": [0, 255, 255],
                    "line_color": [0, 0, 0]
                },
                "draw_matched_region": {
                    "color": [0, 255, 0],
                    "border_size": 4
                },
                "draw_looked_aoi": {
                    "color": [0, 255, 0],
                    "border_size": 2
                },
                "looked_aoi_name_color": [255, 255, 255],
                "looked_aoi_name_offset": [0, -10]
            }
        }
    },
    "draw_gaze_position": {
        "color": [0, 255, 255]
    }
}

# Pass image parameters to ArFrame
ar_frame_image = ar_frame.image(**image_parameters)

# Do something with ArFrame image
...
```

## Configure ArFrame image parameters

[ArFrame.image](../../argaze.md/#argaze.ArFeatures.ArFrame.image) method parameters can also be configured thanks to a dedicated JSON entry.

Here is a JSON ArFrame configuration file example with image parameters included:

```json
{
    "name": "My FullHD screen",
    "size": [1920, 1080],
    ...
    "image_parameters": {
        "draw_scan_path": {
            "draw_fixations": {
                "deviation_circle_color": [255, 0, 255],
                "duration_border_color": [127, 0, 127],
                "duration_factor": 1e-2
            },
            "draw_saccades": {
                "line_color": [255, 0, 255]
            },
            "deepness": 0
        },
        "draw_layers": {
            "MyLayer": {
                "draw_aoi_scene": {
                    "draw_aoi": {
                        "color": [255, 255, 255],
                        "border_size": 1
                    }
                },
                "draw_aoi_matching": {
                    "draw_matched_fixation": {
                        "deviation_circle_color": [255, 255, 255]
                    },
                    "draw_matched_fixation_positions": {
                        "position_color": [0, 255, 255],
                        "line_color": [0, 0, 0]
                    },
                    "draw_matched_region": {
                        "color": [0, 255, 0],
                        "border_size": 4
                    },
                    "draw_looked_aoi": {
                        "color": [0, 255, 0],
                        "border_size": 2
                    },
                    "looked_aoi_name_color": [255, 255, 255],
                    "looked_aoi_name_offset": [0, -10]
                }
            }
        },
        "draw_gaze_position": {
            "color": [0, 255, 255]
        }
    }
}
```
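Once image parameters are included in the JSON configuration, the [ArFrame.image](../../argaze.md/#argaze.ArFeatures.ArFrame.image) method is expected to use them by default, so nothing has to be passed at call time. A minimal sketch, assuming the configuration above is saved to a hypothetical `./configuration.json` file:

```python
from argaze import ArFeatures

# Load ArFrame with image parameters included in its JSON configuration
ar_frame = ArFeatures.ArFrame.from_json('./configuration.json')

# Assuming that timestamped gaze positions have been processed by ArFrame.look method
...

# Get ArFrame image drawn with the parameters loaded from JSON: no arguments needed
ar_frame_image = ar_frame.image()
```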