From 79872df7900c07ad2f28181fc65b61a52869a5b3 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 30 Aug 2023 20:43:43 +0200
Subject: Renaming utils demo scripts and setup files.

---
 src/argaze/utils/demo_ar_features_run.py           | 111 ---------
 src/argaze/utils/demo_augmented_reality_run.py     | 111 +++++++++
 .../demo_environment/demo_ar_features_setup.json   | 161 -------------
 .../demo_augmented_reality_setup.json              | 161 +++++++++++++
 .../demo_environment/demo_gaze_analysis_setup.json |  98 ++++++++
 .../demo_environment/demo_gaze_features_setup.json |  98 --------
 src/argaze/utils/demo_gaze_analysis_run.py         | 254 +++++++++++++++++++++
 src/argaze/utils/demo_gaze_features_run.py         | 254 ---------------------
 8 files changed, 624 insertions(+), 624 deletions(-)
 delete mode 100644 src/argaze/utils/demo_ar_features_run.py
 create mode 100644 src/argaze/utils/demo_augmented_reality_run.py
 delete mode 100644 src/argaze/utils/demo_environment/demo_ar_features_setup.json
 create mode 100644 src/argaze/utils/demo_environment/demo_augmented_reality_setup.json
 create mode 100644 src/argaze/utils/demo_environment/demo_gaze_analysis_setup.json
 delete mode 100644 src/argaze/utils/demo_environment/demo_gaze_features_setup.json
 create mode 100644 src/argaze/utils/demo_gaze_analysis_run.py
 delete mode 100644 src/argaze/utils/demo_gaze_features_run.py

diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
deleted file mode 100644
index 25d4083..0000000
--- a/src/argaze/utils/demo_ar_features_run.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-
-""" """
-
-__author__ = "Théo de la Hogue"
-__credits__ = []
-__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
-__license__ = "BSD"
-
-import argparse
-import contextlib
-import os
-import time
-
-from argaze import ArFeatures, GazeFeatures
-
-import cv2
-import numpy
-
-def main():
-    """
-    Load AR environment from a .json file, detect ArUco markers in camera device images and project the environment.
-    """
-
-    current_directory = os.path.dirname(os.path.abspath(__file__))
-
-    # Manage arguments
-    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
-    parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
-    parser.add_argument('-s', '--source', metavar='SOURCE', type=str, default='0', help='video capture source (a number to select camera device or a filepath to load a movie)')
-    args = parser.parse_args()
-
-    # Load AR environment
-    ar_environment = ArFeatures.ArEnvironment.from_json(args.environment)
-
-    # Create a window to display AR environment
-    cv2.namedWindow(ar_environment.name, cv2.WINDOW_AUTOSIZE)
-
-    # Init timestamp
-    start_time = time.time()
-
-    # Fake gaze position with mouse pointer
-    def on_mouse_event(event, x, y, flags, param):
-
-        # Edit millisecond timestamp
-        timestamp = int((time.time() - start_time) * 1e3)
-
-        # Project gaze position into environment
-        for frame, look_data in ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y))):
-
-            # Unpack look data
-            movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data
-
-            # Do something with look data
-            # ...
-
-    # Attach mouse callback to window
-    cv2.setMouseCallback(ar_environment.name, on_mouse_event)
-
-    # Enable camera video capture in a separate thread
-    video_capture = cv2.VideoCapture(int(args.source) if args.source.isdecimal() else args.source)
-
-    # Waiting for 'ctrl+C' interruption
-    with contextlib.suppress(KeyboardInterrupt):
-
-        # Capture images
-        while video_capture.isOpened():
-
-            # Read video image
-            success, video_image = video_capture.read()
-
-            if success:
-
-                # Detect and project environment
-                detection_time, exceptions = ar_environment.detect_and_project(video_image)
-
-                # Get environment image
-                environment_image = ar_environment.image()
-
-                # Write detection fps
-                cv2.rectangle(environment_image, (0, 0), (420, 50), (63, 63, 63), -1)
-                cv2.putText(environment_image, f'Detection fps: {1e3/detection_time:.1f}', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
-
-                # Handle exceptions
-                for i, (scene_name, e) in enumerate(exceptions.items()):
-
-                    # Write errors
-                    cv2.rectangle(environment_image, (0, (i+1)*50), (720, (i+2)*50), (127, 127, 127), -1)
-                    cv2.putText(environment_image, f'{scene_name} error: {e}', (20, (i+1)*90), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-                # Display environment
-                cv2.imshow(ar_environment.name, environment_image)
-
-                # Draw and display each frame
-                for frame in ar_environment.frames:
-
-                    # Display frame
-                    cv2.imshow(f'{frame.parent.name}:{frame.name}', frame.image())
-
-            # Stop by pressing 'Esc' key
-            if cv2.waitKey(10) == 27:
-
-                # Close camera video capture
-                video_capture.release()
-
-                # Stop image display
-                cv2.destroyAllWindows()
-
-if __name__ == '__main__':
-
-    main()
diff --git a/src/argaze/utils/demo_augmented_reality_run.py b/src/argaze/utils/demo_augmented_reality_run.py
new file mode 100644
index 0000000..25d4083
--- /dev/null
+++ b/src/argaze/utils/demo_augmented_reality_run.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+
+""" """
+
+__author__ = "Théo de la Hogue"
+__credits__ = []
+__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
+__license__ = "BSD"
+
+import argparse
+import contextlib
+import os
+import time
+
+from argaze import ArFeatures, GazeFeatures
+
+import cv2
+import numpy
+
+def main():
+    """
+    Load AR environment from a .json file, detect ArUco markers in camera device images and project the environment.
+    """
+
+    current_directory = os.path.dirname(os.path.abspath(__file__))
+
+    # Manage arguments
+    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+    parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
+    parser.add_argument('-s', '--source', metavar='SOURCE', type=str, default='0', help='video capture source (a number to select camera device or a filepath to load a movie)')
+    args = parser.parse_args()
+
+    # Load AR environment
+    ar_environment = ArFeatures.ArEnvironment.from_json(args.environment)
+
+    # Create a window to display AR environment
+    cv2.namedWindow(ar_environment.name, cv2.WINDOW_AUTOSIZE)
+
+    # Init timestamp
+    start_time = time.time()
+
+    # Fake gaze position with mouse pointer
+    def on_mouse_event(event, x, y, flags, param):
+
+        # Edit millisecond timestamp
+        timestamp = int((time.time() - start_time) * 1e3)
+
+        # Project gaze position into environment
+        for frame, look_data in ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y))):
+
+            # Unpack look data
+            movement, scan_step_analysis, layer_analysis, execution_times, exception = look_data
+
+            # Do something with look data
+            # ...
+
+    # Attach mouse callback to window
+    cv2.setMouseCallback(ar_environment.name, on_mouse_event)
+
+    # Enable camera video capture in a separate thread
+    video_capture = cv2.VideoCapture(int(args.source) if args.source.isdecimal() else args.source)
+
+    # Waiting for 'ctrl+C' interruption
+    with contextlib.suppress(KeyboardInterrupt):
+
+        # Capture images
+        while video_capture.isOpened():
+
+            # Read video image
+            success, video_image = video_capture.read()
+
+            if success:
+
+                # Detect and project environment
+                detection_time, exceptions = ar_environment.detect_and_project(video_image)
+
+                # Get environment image
+                environment_image = ar_environment.image()
+
+                # Write detection fps
+                cv2.rectangle(environment_image, (0, 0), (420, 50), (63, 63, 63), -1)
+                cv2.putText(environment_image, f'Detection fps: {1e3/detection_time:.1f}', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+                # Handle exceptions
+                for i, (scene_name, e) in enumerate(exceptions.items()):
+
+                    # Write errors
+                    cv2.rectangle(environment_image, (0, (i+1)*50), (720, (i+2)*50), (127, 127, 127), -1)
+                    cv2.putText(environment_image, f'{scene_name} error: {e}', (20, (i+1)*90), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+                # Display environment
+                cv2.imshow(ar_environment.name, environment_image)
+
+                # Draw and display each frame
+                for frame in ar_environment.frames:
+
+                    # Display frame
+                    cv2.imshow(f'{frame.parent.name}:{frame.name}', frame.image())
+
+            # Stop by pressing 'Esc' key
+            if cv2.waitKey(10) == 27:
+
+                # Close camera video capture
+                video_capture.release()
+
+                # Stop image display
+                cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+
+    main()
diff --git a/src/argaze/utils/demo_environment/demo_ar_features_setup.json b/src/argaze/utils/demo_environment/demo_ar_features_setup.json
deleted file mode 100644
index b1c0696..0000000
--- a/src/argaze/utils/demo_environment/demo_ar_features_setup.json
+++ /dev/null
@@ -1,161 +0,0 @@
-{
-    "name": "ArEnvironment Demo",
-    "aruco_detector": {
-        "dictionary": {
-            "name": "DICT_APRILTAG_16h5"
-        },
-        "marker_size": 5,
-        "optic_parameters": "optic_parameters.json",
-        "parameters": {
-            "cornerRefinementMethod": 1,
-            "aprilTagQuadSigma": 2,
-            "aprilTagDeglitch": 1
-        }
-    },
-    "camera_frame": {
-        "layers": {
-            "Camera_layer": {}
-        },
-        "image_parameters": {
-
"background_weight": 1, - "draw_layers": { - "Camera_layer": { - "draw_aoi_scene": { - "draw_aoi": { - "color": [255, 255, 255], - "border_size": 1 - } - } - } - }, - "draw_gaze_position": { - "color": [0, 255, 255], - "size": 4 - } - } - }, - "scenes": { - "ArScene Demo" : { - "aruco_scene": "aruco_scene.obj", - "layers": { - "Camera_layer" : { - "aoi_scene": "aoi_3d_scene.obj" - } - }, - "frames": { - "GrayRectangle": { - "size": [640, 383], - "background": "frame_background.jpg", - "gaze_movement_identifier": { - "DispersionThresholdIdentification": { - "deviation_max_threshold": 25, - "duration_min_threshold": 200 - } - }, - "scan_path": { - "duration_max": 10000 - }, - "layers": { - "GrayRectangle": { - "aoi_scene": "aoi_3d_scene.obj", - "aoi_matcher": { - "FocusPointInside": {} - } - } - }, - "heatmap": { - "size": [320, 240] - }, - "image_parameters": { - "background_weight": 1, - "heatmap_weight": 0.5, - "draw_scan_path": { - "draw_fixations": { - "deviation_circle_color": [0, 255, 255], - "duration_border_color": [0, 127, 127], - "duration_factor": 1e-2 - }, - "draw_saccades": { - "line_color": [0, 255, 255] - } - }, - "draw_layers": { - "GrayRectangle": { - "draw_aoi_scene": { - "draw_aoi": { - "color": [255, 255, 255], - "border_size": 1 - } - }, - "draw_aoi_matching": { - "draw_matched_fixation": { - "deviation_circle_color": [255, 255, 255] - }, - "draw_matched_fixation_positions": { - "position_color": [0, 255, 255], - "line_color": [0, 0, 0] - }, - "draw_looked_aoi": { - "color": [0, 255, 0], - "border_size": 2 - }, - "looked_aoi_name_color": [255, 255, 255], - "looked_aoi_name_offset": [10, 10] - } - } - }, - "draw_gaze_position": { - "color": [0, 255, 255], - "size": 2 - } - } - } - }, - "aruco_axis": { - "lower_left_corner": { - "origin_marker": 2, - "horizontal_axis_marker": 3, - "vertical_axis_marker": 0 - }, - "lower_right_corner": { - "origin_marker": 3, - "horizontal_axis_marker": 2, - "vertical_axis_marker": 1 - }, - "upper_left_corner": { - "origin_marker": 0, - "horizontal_axis_marker": 1, - "vertical_axis_marker": 2 - }, - "upper_right_corner": { - "origin_marker": 1, - "horizontal_axis_marker": 0, - "vertical_axis_marker": 3 - } - }, - "aruco_aoi": { - "GrayRectangle": { - "upper_left_corner": { - "marker_identifier": 0, - "marker_corner_index": 2 - }, - "upper_right_corner": { - "marker_identifier": 1, - "marker_corner_index": 3 - }, - "lower_left_corner": { - "marker_identifier": 2, - "marker_corner_index": 1 - }, - "lower_right_corner": { - "marker_identifier": 3, - "marker_corner_index": 0 - }, - "inner_aoi": "all" - } - }, - "angle_tolerance": 15.0, - "distance_tolerance": 2.54 - } - } -} \ No newline at end of file diff --git a/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json b/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json new file mode 100644 index 0000000..b1c0696 --- /dev/null +++ b/src/argaze/utils/demo_environment/demo_augmented_reality_setup.json @@ -0,0 +1,161 @@ +{ + "name": "ArEnvironment Demo", + "aruco_detector": { + "dictionary": { + "name": "DICT_APRILTAG_16h5" + }, + "marker_size": 5, + "optic_parameters": "optic_parameters.json", + "parameters": { + "cornerRefinementMethod": 1, + "aprilTagQuadSigma": 2, + "aprilTagDeglitch": 1 + } + }, + "camera_frame": { + "layers": { + "Camera_layer": {} + }, + "image_parameters": { + "background_weight": 1, + "draw_layers": { + "Camera_layer": { + "draw_aoi_scene": { + "draw_aoi": { + "color": [255, 255, 255], + "border_size": 1 + } + } + } + }, + 
"draw_gaze_position": { + "color": [0, 255, 255], + "size": 4 + } + } + }, + "scenes": { + "ArScene Demo" : { + "aruco_scene": "aruco_scene.obj", + "layers": { + "Camera_layer" : { + "aoi_scene": "aoi_3d_scene.obj" + } + }, + "frames": { + "GrayRectangle": { + "size": [640, 383], + "background": "frame_background.jpg", + "gaze_movement_identifier": { + "DispersionThresholdIdentification": { + "deviation_max_threshold": 25, + "duration_min_threshold": 200 + } + }, + "scan_path": { + "duration_max": 10000 + }, + "layers": { + "GrayRectangle": { + "aoi_scene": "aoi_3d_scene.obj", + "aoi_matcher": { + "FocusPointInside": {} + } + } + }, + "heatmap": { + "size": [320, 240] + }, + "image_parameters": { + "background_weight": 1, + "heatmap_weight": 0.5, + "draw_scan_path": { + "draw_fixations": { + "deviation_circle_color": [0, 255, 255], + "duration_border_color": [0, 127, 127], + "duration_factor": 1e-2 + }, + "draw_saccades": { + "line_color": [0, 255, 255] + } + }, + "draw_layers": { + "GrayRectangle": { + "draw_aoi_scene": { + "draw_aoi": { + "color": [255, 255, 255], + "border_size": 1 + } + }, + "draw_aoi_matching": { + "draw_matched_fixation": { + "deviation_circle_color": [255, 255, 255] + }, + "draw_matched_fixation_positions": { + "position_color": [0, 255, 255], + "line_color": [0, 0, 0] + }, + "draw_looked_aoi": { + "color": [0, 255, 0], + "border_size": 2 + }, + "looked_aoi_name_color": [255, 255, 255], + "looked_aoi_name_offset": [10, 10] + } + } + }, + "draw_gaze_position": { + "color": [0, 255, 255], + "size": 2 + } + } + } + }, + "aruco_axis": { + "lower_left_corner": { + "origin_marker": 2, + "horizontal_axis_marker": 3, + "vertical_axis_marker": 0 + }, + "lower_right_corner": { + "origin_marker": 3, + "horizontal_axis_marker": 2, + "vertical_axis_marker": 1 + }, + "upper_left_corner": { + "origin_marker": 0, + "horizontal_axis_marker": 1, + "vertical_axis_marker": 2 + }, + "upper_right_corner": { + "origin_marker": 1, + "horizontal_axis_marker": 0, + "vertical_axis_marker": 3 + } + }, + "aruco_aoi": { + "GrayRectangle": { + "upper_left_corner": { + "marker_identifier": 0, + "marker_corner_index": 2 + }, + "upper_right_corner": { + "marker_identifier": 1, + "marker_corner_index": 3 + }, + "lower_left_corner": { + "marker_identifier": 2, + "marker_corner_index": 1 + }, + "lower_right_corner": { + "marker_identifier": 3, + "marker_corner_index": 0 + }, + "inner_aoi": "all" + } + }, + "angle_tolerance": 15.0, + "distance_tolerance": 2.54 + } + } +} \ No newline at end of file diff --git a/src/argaze/utils/demo_environment/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_environment/demo_gaze_analysis_setup.json new file mode 100644 index 0000000..85f704c --- /dev/null +++ b/src/argaze/utils/demo_environment/demo_gaze_analysis_setup.json @@ -0,0 +1,98 @@ +{ + "name": "ArFrame Demo", + "size": [1920, 1149], + "background": "frame_background.jpg", + "gaze_movement_identifier": { + "DispersionThresholdIdentification": { + "deviation_max_threshold": 50, + "duration_min_threshold": 200 + } + }, + "scan_path": { + "duration_max": 10000 + }, + "scan_path_analyzers": { + "Basic": {}, + "KCoefficient": {}, + "NearestNeighborIndex": { + "size": [1920, 1149] + }, + "ExploitExploreRatio": { + "short_fixation_duration_threshold": 0 + } + }, + "heatmap": { + "size": [320, 240] + }, + "layers": { + "GrayRectangle": { + "aoi_scene": "aoi_3d_scene.obj", + "aoi_matcher": { + "DeviationCircleCoverage": { + "coverage_threshold": 0.5 + } + }, + "aoi_scan_path": { + "duration_max": 10000 + }, + 
"aoi_scan_path_analyzers": { + "Basic": {}, + "TransitionMatrix": {}, + "KCoefficient": {}, + "LempelZivComplexity": {}, + "NGram": { + "n_min": 3, + "n_max": 3 + }, + "Entropy":{} + } + } + }, + "image_parameters": { + "background_weight": 1, + "heatmap_weight": 0.5, + "draw_scan_path": { + "draw_fixations": { + "deviation_circle_color": [255, 0, 255], + "duration_border_color": [127, 0, 127], + "duration_factor": 1e-2 + }, + "draw_saccades": { + "line_color": [255, 0, 255] + }, + "deepness": 0 + }, + "draw_layers": { + "GrayRectangle": { + "draw_aoi_scene": { + "draw_aoi": { + "color": [255, 255, 255], + "border_size": 1 + } + }, + "draw_aoi_matching": { + "draw_matched_fixation": { + "deviation_circle_color": [255, 255, 255] + }, + "draw_matched_fixation_positions": { + "position_color": [0, 255, 255], + "line_color": [0, 0, 0] + }, + "draw_matched_region": { + "color": [0, 255, 0], + "border_size": 4 + }, + "draw_looked_aoi": { + "color": [0, 255, 0], + "border_size": 2 + }, + "looked_aoi_name_color": [255, 255, 255], + "looked_aoi_name_offset": [0, -10] + } + } + }, + "draw_gaze_position": { + "color": [0, 255, 255] + } + } +} \ No newline at end of file diff --git a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json deleted file mode 100644 index 85f704c..0000000 --- a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "name": "ArFrame Demo", - "size": [1920, 1149], - "background": "frame_background.jpg", - "gaze_movement_identifier": { - "DispersionThresholdIdentification": { - "deviation_max_threshold": 50, - "duration_min_threshold": 200 - } - }, - "scan_path": { - "duration_max": 10000 - }, - "scan_path_analyzers": { - "Basic": {}, - "KCoefficient": {}, - "NearestNeighborIndex": { - "size": [1920, 1149] - }, - "ExploitExploreRatio": { - "short_fixation_duration_threshold": 0 - } - }, - "heatmap": { - "size": [320, 240] - }, - "layers": { - "GrayRectangle": { - "aoi_scene": "aoi_3d_scene.obj", - "aoi_matcher": { - "DeviationCircleCoverage": { - "coverage_threshold": 0.5 - } - }, - "aoi_scan_path": { - "duration_max": 10000 - }, - "aoi_scan_path_analyzers": { - "Basic": {}, - "TransitionMatrix": {}, - "KCoefficient": {}, - "LempelZivComplexity": {}, - "NGram": { - "n_min": 3, - "n_max": 3 - }, - "Entropy":{} - } - } - }, - "image_parameters": { - "background_weight": 1, - "heatmap_weight": 0.5, - "draw_scan_path": { - "draw_fixations": { - "deviation_circle_color": [255, 0, 255], - "duration_border_color": [127, 0, 127], - "duration_factor": 1e-2 - }, - "draw_saccades": { - "line_color": [255, 0, 255] - }, - "deepness": 0 - }, - "draw_layers": { - "GrayRectangle": { - "draw_aoi_scene": { - "draw_aoi": { - "color": [255, 255, 255], - "border_size": 1 - } - }, - "draw_aoi_matching": { - "draw_matched_fixation": { - "deviation_circle_color": [255, 255, 255] - }, - "draw_matched_fixation_positions": { - "position_color": [0, 255, 255], - "line_color": [0, 0, 0] - }, - "draw_matched_region": { - "color": [0, 255, 0], - "border_size": 4 - }, - "draw_looked_aoi": { - "color": [0, 255, 0], - "border_size": 2 - }, - "looked_aoi_name_color": [255, 255, 255], - "looked_aoi_name_offset": [0, -10] - } - } - }, - "draw_gaze_position": { - "color": [0, 255, 255] - } - } -} \ No newline at end of file diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py new file mode 100644 index 0000000..92fa282 --- /dev/null +++ 
b/src/argaze/utils/demo_gaze_analysis_run.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+
+""" """
+
+__author__ = "Théo de la Hogue"
+__credits__ = []
+__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
+__license__ = "BSD"
+
+import argparse
+import os
+import time
+
+from argaze import ArFeatures, GazeFeatures
+from argaze.AreaOfInterest import AOIFeatures
+from argaze.GazeAnalysis import *
+
+import cv2
+import numpy
+import pandas
+
+def main():
+    """
+    Load ArFrame from a .json file and use the mouse pointer to simulate gaze positions.
+    """
+
+    current_directory = os.path.dirname(os.path.abspath(__file__))
+
+    # Manage arguments
+    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+    parser.add_argument('frame', metavar='FRAME', type=str, help='ar frame filepath')
+    args = parser.parse_args()
+
+    # Load ArFrame
+    ar_frame = ArFeatures.ArFrame.from_json(args.frame)
+
+    # Create a window to display ArFrame
+    cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)
+
+    # Heatmap buffer display option
+    enable_heatmap_buffer = False
+
+    # Init timestamp
+    start_time = time.time()
+
+    # Update pointer position
+    def on_mouse_event(event, x, y, flags, param):
+
+        # Edit millisecond timestamp
+        timestamp = int((time.time() - start_time) * 1e3)
+
+        # Project gaze position into frame
+        movement, scan_step_analysis, layer_analysis, execution_times, exception = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
+
+        # Do something with look data
+        # ...
+
+    # Attach mouse callback to window
+    cv2.setMouseCallback(ar_frame.name, on_mouse_event)
+
+    # Waiting for 'ctrl+C' interruption
+    try:
+
+        # Draw frame and mouse position analysis
+        while True:
+
+            # Get frame image
+            frame_image = ar_frame.image()
+
+            # Write heatmap buffer usage hint
+            buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
+            buffer_display_disable = 'disable' if enable_heatmap_buffer else 'enable'
+            cv2.putText(frame_image, f'Heatmap buffer: {buffer_on_off} (Press \'b\' key to {buffer_display_disable})', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_heatmap_buffer else (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Write last 5 steps of aoi scan path
+            path = ''
+            for step in ar_frame.layers["GrayRectangle"].aoi_scan_path[-5:]:
+
+                path += f'> {step.aoi} '
+
+            path += f'> {ar_frame.layers["GrayRectangle"].aoi_scan_path.current_aoi}'
+
+            cv2.putText(frame_image, path, (20, ar_frame.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Display Transition matrix analysis if loaded
+            try:
+
+                transition_matrix_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.TransitionMatrix"]
+
+                cv2.putText(frame_image, f'Transition matrix density: {transition_matrix_analyzer.transition_matrix_density:.2f}', (20, ar_frame.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+                # Iterate over indexes (departures)
+                for from_aoi, row in transition_matrix_analyzer.transition_matrix_probabilities.iterrows():
+
+                    # Iterate over columns (destinations)
+                    for to_aoi, probability in row.items():
+
+                        if from_aoi != to_aoi and probability > 0.0:
+
+                            from_center = ar_frame.layers['GrayRectangle'].aoi_scene[from_aoi].center.astype(int)
+                            to_center = ar_frame.layers['GrayRectangle'].aoi_scene[to_aoi].center.astype(int)
+                            start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
+
+                            color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
+
+                            cv2.line(frame_image, start_line,
to_center, color, int(probability*10) + 2) + cv2.line(frame_image, from_center, to_center, [55, 55, 55], 2) + + except KeyError: + pass + + # Display aoi scan path basic metrics analysis if loaded + try: + + basic_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Basic"] + + # Write basic analysis + cv2.putText(frame_image, f'Step number: {basic_analyzer.steps_number}', (20, ar_frame.size[1]-440), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Step fixation duration average: {int(basic_analyzer.step_fixation_durations_average)} ms', (20, ar_frame.size[1]-400), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display scan path K Coefficient analysis if loaded + try: + + kc_analyzer = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"] + + # Write raw Kc analysis + if kc_analyzer.K < 0.: + + cv2.putText(frame_image, f'K coefficient: Ambient attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + elif kc_analyzer.K > 0.: + + cv2.putText(frame_image, f'K coefficient: Focal attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display aoi scan path K-modified coefficient analysis if loaded + try: + + aoi_kc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"] + + # Write aoi Kc analysis + if aoi_kc_analyzer.K < 0.: + + cv2.putText(frame_image, f'K-modified coefficient: Ambient attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + elif aoi_kc_analyzer.K > 0.: + + cv2.putText(frame_image, f'K-modified coefficient: Focal attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display Lempel-Ziv complexity analysis if loaded + try: + + lzc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.LempelZivComplexity"] + + cv2.putText(frame_image, f'Lempel-Ziv complexity: {lzc_analyzer.lempel_ziv_complexity}', (20, ar_frame.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display N-Gram analysis if loaded + try: + + ngram_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.NGram"] + + # Display only 3-gram analysis + start = ar_frame.size[1] - ((len(ngram_analyzer.ngrams_count[3]) + 1) * 40) + cv2.putText(frame_image, f'{ngram_analyzer.n_max}-Gram:', (ar_frame.size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + for i, (ngram, count) in enumerate(ngram_analyzer.ngrams_count[3].items()): + + ngram_string = f'{ngram[0]}' + for g in range(1, 3): + ngram_string += f'>{ngram[g]}' + + cv2.putText(frame_image, f'{ngram_string}: {count}', (ar_frame.size[0]-700, start+(i*40)), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + + except KeyError: + pass + + # Display Entropy analysis if loaded + try: + + entropy_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Entropy"] + + cv2.putText(frame_image, f'Stationary entropy: {entropy_analyzer.stationary_entropy:.3f},', (20, ar_frame.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(frame_image, f'Transition entropy: {entropy_analyzer.transition_entropy:.3f},', (20, 
ar_frame.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+            except KeyError:
+                pass
+
+            # Display Nearest Neighbor index analysis if loaded
+            try:
+
+                nni_analyzer = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.NearestNeighborIndex"]
+
+                cv2.putText(frame_image, f'Nearest neighbor index: {nni_analyzer.nearest_neighbor_index:.3f}', (20, ar_frame.size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+            except KeyError:
+                pass
+
+            # Display Exploit/Explore ratio analysis if loaded
+            try:
+
+                xxr_analyser = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.ExploitExploreRatio"]
+
+                cv2.putText(frame_image, f'Exploit explore ratio: {xxr_analyser.exploit_explore_ratio:.3f}', (20, ar_frame.size[1]-360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+            except KeyError:
+
+                pass
+
+            # Display frame image
+            cv2.imshow(ar_frame.name, frame_image)
+
+            key_pressed = cv2.waitKey(10)
+
+            #if key_pressed != -1:
+            #    print(key_pressed)
+
+            # Reload frame with 'r' key
+            if key_pressed == 114:
+
+                ar_frame = ArFeatures.ArFrame.from_json(args.frame)
+
+            # Toggle heatmap buffer with 'b' key
+            if key_pressed == 98:
+
+                enable_heatmap_buffer = not enable_heatmap_buffer
+
+                ar_frame.heatmap.buffer = 10 if enable_heatmap_buffer else 0
+                ar_frame.heatmap.clear()
+
+            # Stop by pressing 'Esc' key
+            if key_pressed == 27:
+                break
+
+    # Stop on 'ctrl+C' interruption
+    except KeyboardInterrupt:
+        pass
+
+    # Stop frame image display
+    cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+
+    main()
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
deleted file mode 100644
index 92fa282..0000000
--- a/src/argaze/utils/demo_gaze_features_run.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-
-""" """
-
-__author__ = "Théo de la Hogue"
-__credits__ = []
-__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
-__license__ = "BSD"
-
-import argparse
-import os
-import time
-
-from argaze import ArFeatures, GazeFeatures
-from argaze.AreaOfInterest import AOIFeatures
-from argaze.GazeAnalysis import *
-
-import cv2
-import numpy
-import pandas
-
-def main():
-    """
-    Load ArFrame from a .json file and use the mouse pointer to simulate gaze positions.
-    """
-
-    current_directory = os.path.dirname(os.path.abspath(__file__))
-
-    # Manage arguments
-    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
-    parser.add_argument('frame', metavar='FRAME', type=str, help='ar frame filepath')
-    args = parser.parse_args()
-
-    # Load ArFrame
-    ar_frame = ArFeatures.ArFrame.from_json(args.frame)
-
-    # Create a window to display ArFrame
-    cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)
-
-    # Heatmap buffer display option
-    enable_heatmap_buffer = False
-
-    # Init timestamp
-    start_time = time.time()
-
-    # Update pointer position
-    def on_mouse_event(event, x, y, flags, param):
-
-        # Edit millisecond timestamp
-        timestamp = int((time.time() - start_time) * 1e3)
-
-        # Project gaze position into frame
-        movement, scan_step_analysis, layer_analysis, execution_times, exception = ar_frame.look(timestamp, GazeFeatures.GazePosition((x, y)))
-
-        # Do something with look data
-        # ...
-
-    # Attach mouse callback to window
-    cv2.setMouseCallback(ar_frame.name, on_mouse_event)
-
-    # Waiting for 'ctrl+C' interruption
-    try:
-
-        # Draw frame and mouse position analysis
-        while True:
-
-            # Get frame image
-            frame_image = ar_frame.image()
-
-            # Write heatmap buffer usage hint
-            buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
-            buffer_display_disable = 'disable' if enable_heatmap_buffer else 'enable'
-            cv2.putText(frame_image, f'Heatmap buffer: {buffer_on_off} (Press \'b\' key to {buffer_display_disable})', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_heatmap_buffer else (255, 255, 255), 1, cv2.LINE_AA)
-
-            # Write last 5 steps of aoi scan path
-            path = ''
-            for step in ar_frame.layers["GrayRectangle"].aoi_scan_path[-5:]:
-
-                path += f'> {step.aoi} '
-
-            path += f'> {ar_frame.layers["GrayRectangle"].aoi_scan_path.current_aoi}'
-
-            cv2.putText(frame_image, path, (20, ar_frame.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
-
-            # Display Transition matrix analysis if loaded
-            try:
-
-                transition_matrix_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.TransitionMatrix"]
-
-                cv2.putText(frame_image, f'Transition matrix density: {transition_matrix_analyzer.transition_matrix_density:.2f}', (20, ar_frame.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-                # Iterate over indexes (departures)
-                for from_aoi, row in transition_matrix_analyzer.transition_matrix_probabilities.iterrows():
-
-                    # Iterate over columns (destinations)
-                    for to_aoi, probability in row.items():
-
-                        if from_aoi != to_aoi and probability > 0.0:
-
-                            from_center = ar_frame.layers['GrayRectangle'].aoi_scene[from_aoi].center.astype(int)
-                            to_center = ar_frame.layers['GrayRectangle'].aoi_scene[to_aoi].center.astype(int)
-                            start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
-
-                            color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
-
-                            cv2.line(frame_image, start_line, to_center, color, int(probability*10) + 2)
-                            cv2.line(frame_image, from_center, to_center, [55, 55, 55], 2)
-
-            except KeyError:
-                pass
-
-            # Display aoi scan path basic metrics analysis if loaded
-            try:
-
-                basic_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Basic"]
-
-                # Write basic analysis
-                cv2.putText(frame_image, f'Step number: {basic_analyzer.steps_number}', (20, ar_frame.size[1]-440), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-                cv2.putText(frame_image, f'Step fixation duration average: {int(basic_analyzer.step_fixation_durations_average)} ms', (20, ar_frame.size[1]-400), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display scan path K Coefficient analysis if loaded
-            try:
-
-                kc_analyzer = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"]
-
-                # Write raw Kc analysis
-                if kc_analyzer.K < 0.:
-
-                    cv2.putText(frame_image, f'K coefficient: Ambient attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-                elif kc_analyzer.K > 0.:
-
-                    cv2.putText(frame_image, f'K coefficient: Focal attention', (20, ar_frame.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display aoi scan path K-modified coefficient analysis if loaded
-            try:
-
-                aoi_kc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"]
-
-                # Write aoi Kc analysis
-                if aoi_kc_analyzer.K < 0.:
-
-                    cv2.putText(frame_image, f'K-modified coefficient: Ambient attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-                elif aoi_kc_analyzer.K > 0.:
-
-                    cv2.putText(frame_image, f'K-modified coefficient: Focal attention', (20, ar_frame.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display Lempel-Ziv complexity analysis if loaded
-            try:
-
-                lzc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.LempelZivComplexity"]
-
-                cv2.putText(frame_image, f'Lempel-Ziv complexity: {lzc_analyzer.lempel_ziv_complexity}', (20, ar_frame.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display N-Gram analysis if loaded
-            try:
-
-                ngram_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.NGram"]
-
-                # Display only 3-gram analysis
-                start = ar_frame.size[1] - ((len(ngram_analyzer.ngrams_count[3]) + 1) * 40)
-                cv2.putText(frame_image, f'{ngram_analyzer.n_max}-Gram:', (ar_frame.size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-                for i, (ngram, count) in enumerate(ngram_analyzer.ngrams_count[3].items()):
-
-                    ngram_string = f'{ngram[0]}'
-                    for g in range(1, 3):
-                        ngram_string += f'>{ngram[g]}'
-
-                    cv2.putText(frame_image, f'{ngram_string}: {count}', (ar_frame.size[0]-700, start+(i*40)), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display Entropy analysis if loaded
-            try:
-
-                entropy_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Entropy"]
-
-                cv2.putText(frame_image, f'Stationary entropy: {entropy_analyzer.stationary_entropy:.3f},', (20, ar_frame.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-                cv2.putText(frame_image, f'Transition entropy: {entropy_analyzer.transition_entropy:.3f},', (20, ar_frame.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display Nearest Neighbor index analysis if loaded
-            try:
-
-                nni_analyzer = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.NearestNeighborIndex"]
-
-                cv2.putText(frame_image, f'Nearest neighbor index: {nni_analyzer.nearest_neighbor_index:.3f}', (20, ar_frame.size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-                pass
-
-            # Display Exploit/Explore ratio analysis if loaded
-            try:
-
-                xxr_analyser = ar_frame.scan_path_analyzers["argaze.GazeAnalysis.ExploitExploreRatio"]
-
-                cv2.putText(frame_image, f'Exploit explore ratio: {xxr_analyser.exploit_explore_ratio:.3f}', (20, ar_frame.size[1]-360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-            except KeyError:
-
-                pass
-
-            # Display frame image
-            cv2.imshow(ar_frame.name, frame_image)
-
-            key_pressed = cv2.waitKey(10)
-
-            #if key_pressed != -1:
-            #    print(key_pressed)
-
-            # Reload frame with 'r' key
-            if key_pressed == 114:
-
-                ar_frame = ArFeatures.ArFrame.from_json(args.frame)
-
-            # Toggle heatmap buffer with 'b' key
-            if key_pressed == 98:
-
-                enable_heatmap_buffer = not enable_heatmap_buffer
-
-                ar_frame.heatmap.buffer = 10 if enable_heatmap_buffer else 0
-                ar_frame.heatmap.clear()
-
-            # Stop by pressing 'Esc' key
-            if key_pressed == 27:
-                break
-
-    # Stop on 'ctrl+C' interruption
-    except KeyboardInterrupt:
-        pass
-
-    # Stop frame image display
-    cv2.destroyAllWindows()
-
-if __name__ == '__main__':
-
-    main()
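
Note: after this rename, both demo scripts keep their command line interfaces; only the
script and setup file names change. A minimal invocation sketch, assuming the renamed
setup files are passed to their matching scripts and that the commands run from the
repository root with argaze importable:

    # Augmented reality demo: load an ArEnvironment and detect ArUco markers from camera device 0
    python src/argaze/utils/demo_augmented_reality_run.py src/argaze/utils/demo_environment/demo_augmented_reality_setup.json -s 0

    # Gaze analysis demo: load an ArFrame and simulate gaze positions with the mouse pointer
    python src/argaze/utils/demo_gaze_analysis_run.py src/argaze/utils/demo_environment/demo_gaze_analysis_setup.json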