about summary refs log tree commit diff
path: root/src/argaze/utils/demo_gaze_analysis_run.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/argaze/utils/demo_gaze_analysis_run.py')
-rw-r--r--src/argaze/utils/demo_gaze_analysis_run.py274
1 files changed, 0 insertions, 274 deletions
diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py
deleted file mode 100644
index 16644ce..0000000
--- a/src/argaze/utils/demo_gaze_analysis_run.py
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/env python
-
-"""Gaze analysis pipeline demo script.
-
-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-You should have received a copy of the GNU General Public License along with
-this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Théo de la Hogue"
-__credits__ = []
-__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
-__license__ = "GPLv3"
-
-import argparse
-import logging
-import contextlib
-import os
-import time
-
-from argaze import ArFeatures, GazeFeatures
-from argaze.GazeAnalysis import *
-from argaze.utils import UtilsFeatures
-
-import cv2
-
# Absolute path of the directory containing this script
current_directory = os.path.dirname(os.path.abspath(__file__))

# Command line interface: one mandatory configuration filepath plus a verbosity flag
parser = argparse.ArgumentParser(description=__doc__.split('-')[0])
parser.add_argument('configuration', metavar='CONFIGURATION', type=str, help='configuration filepath')
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='enable verbose mode to print information in console')
args = parser.parse_args()

# Console logging: debug level when verbose is requested, info otherwise
log_level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
-
def _write_text(image, text, position, color):
    """Overlay one line of text on *image* with the demo's common font settings."""
    cv2.putText(image, text, position, cv2.FONT_HERSHEY_SIMPLEX, 1, color, 1, cv2.LINE_AA)

def _draw_aoi_scan_path(image, ar_frame):
    """Write the last 5 steps of the demo_layer AOI scan path plus the current AOI."""
    layer = ar_frame.layers["demo_layer"]
    path = ''
    for step in layer.aoi_scan_path[-5:]:
        path += f'> {step.aoi} '
    path += f'> {layer.aoi_scan_path.current_aoi}'
    _write_text(image, path, (20, ar_frame.size[1] - 40), (255, 255, 255))

def _draw_frame_analysis(image, ar_frame):
    """Overlay frame-level scan path analyzer results.

    Each analyzer is optional: a KeyError from the analysis mapping means it
    is not loaded in the configuration, and its overlay is simply skipped.
    """
    analysis = ar_frame.analysis()
    height = ar_frame.size[1]

    # K coefficient: negative value means ambient attention, positive means focal
    try:
        kc = analysis[KCoefficient.ScanPathAnalyzer]
        if kc.K < 0.:
            _write_text(image, 'K coefficient: Ambient attention', (20, height - 120), (0, 255, 255))
        elif kc.K > 0.:
            _write_text(image, 'K coefficient: Focal attention', (20, height - 120), (255, 255, 0))
    except KeyError:
        pass

    try:
        nni = analysis[NearestNeighborIndex.ScanPathAnalyzer]
        _write_text(image, f'Nearest neighbor index: {nni.nearest_neighbor_index:.3f}', (20, height - 320), (0, 255, 255))
    except KeyError:
        pass

    try:
        xxr = analysis[ExploreExploitRatio.ScanPathAnalyzer]
        _write_text(image, f'Explore/Exploit ratio: {xxr.explore_exploit_ratio:.3f}', (20, height - 360), (0, 255, 255))
    except KeyError:
        pass

def _draw_layer_analysis(image, ar_frame):
    """Overlay demo_layer AOI scan path analyzer results.

    As in _draw_frame_analysis, a KeyError means the analyzer is not loaded
    and its overlay is skipped.
    """
    layer = ar_frame.layers["demo_layer"]
    analysis = layer.analysis()
    width, height = ar_frame.size[0], ar_frame.size[1]

    # Transition matrix: density value plus one segment per AOI-to-AOI transition
    try:
        tm = analysis[TransitionMatrix.AOIScanPathAnalyzer]
        _write_text(image, f'Transition matrix density: {tm.transition_matrix_density:.2f}', (20, height - 160), (0, 255, 255))

        # Iterate over indexes (departures) then over columns (destinations)
        for from_aoi, row in tm.transition_matrix_probabilities.iterrows():
            for to_aoi, probability in row.items():
                if from_aoi == GazeFeatures.OutsideAOI or to_aoi == GazeFeatures.OutsideAOI:
                    continue
                if from_aoi == to_aoi or probability <= 0.0:
                    continue
                from_center = layer.aoi_scene[from_aoi].center.astype(int)
                to_center = layer.aoi_scene[to_aoi].center.astype(int)
                # Probability-dependent segment starts at the midpoint of the AOI centers
                start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
                # Brighter and thicker line for higher transition probability
                color = [int(probability * 200) + 55, int(probability * 200) + 55, int(probability * 200) + 55]
                cv2.line(image, start_line, to_center, color, int(probability * 10) + 2)
                cv2.line(image, from_center, to_center, [55, 55, 55], 2)
    except KeyError:
        pass

    try:
        basic = analysis[Basic.AOIScanPathAnalyzer]
        _write_text(image, f'Step number: {basic.steps_number}', (20, height - 440), (0, 255, 255))
        _write_text(image, f'Step fixation duration average: {int(basic.step_fixation_durations_average)} ms', (20, height - 400), (0, 255, 255))
    except KeyError:
        pass

    # K-modified coefficient: same sign convention as the frame-level K coefficient
    try:
        aoi_kc = analysis[KCoefficient.AOIScanPathAnalyzer]
        if aoi_kc.K < 0.:
            _write_text(image, 'K-modified coefficient: Ambient attention', (20, height - 80), (0, 255, 255))
        elif aoi_kc.K > 0.:
            _write_text(image, 'K-modified coefficient: Focal attention', (20, height - 80), (255, 255, 0))
    except KeyError:
        pass

    try:
        lzc = analysis[LempelZivComplexity.AOIScanPathAnalyzer]
        _write_text(image, f'Lempel-Ziv complexity: {lzc.lempel_ziv_complexity}', (20, height - 200), (0, 255, 255))
    except KeyError:
        pass

    # N-Gram: only the 3-gram counts are displayed, in a right-hand column
    try:
        ngram = analysis[NGram.AOIScanPathAnalyzer]
        start = height - ((len(ngram.ngrams_count[3]) + 1) * 40)
        _write_text(image, f'{ngram.n_max}-Gram:', (width - 700, start - 40), (0, 255, 255))

        for i, (gram, count) in enumerate(ngram.ngrams_count[3].items()):
            gram_string = f'{gram[0]}'
            for g in range(1, 3):
                gram_string += f'>{gram[g]}'
            _write_text(image, f'{gram_string}: {count}', (width - 700, start + (i * 40)), (0, 255, 255))
    except KeyError:
        pass

    try:
        entropy = analysis[Entropy.AOIScanPathAnalyzer]
        _write_text(image, f'Stationary entropy: {entropy.stationary_entropy:.3f},', (20, height - 280), (0, 255, 255))
        _write_text(image, f'Transition entropy: {entropy.transition_entropy:.3f},', (20, height - 240), (0, 255, 255))
    except KeyError:
        pass

def main():
    """Run the gaze analysis pipeline demo.

    Loads an ArFrame from the configuration file given on the command line,
    feeds it the mouse pointer position as gaze input, and continuously
    displays whatever analyzer results are loaded in the configuration.

    Keyboard controls: 'r' reloads the configuration, 'b' toggles the
    heatmap buffer, 'Esc' quits; Ctrl+C also stops cleanly.
    """

    # Load ArFrame; the context manager handles pipeline start/stop
    with ArFeatures.ArFrame.from_json(args.configuration) as ar_frame:

        if args.verbose:
            print(ar_frame)

        # Create a window to display the frame
        cv2.namedWindow(ar_frame.name, cv2.WINDOW_AUTOSIZE)

        # Heatmap buffer display option
        enable_heatmap_buffer = False

        # Reference time: gaze timestamps are milliseconds since startup
        start_time = time.time()

        def on_mouse_event(event, x, y, flags, param):
            """Project the pointer position into the frame as a gaze position."""
            ar_frame.look(GazeFeatures.GazePosition((x, y), timestamp=int((time.time() - start_time) * 1e3)))

        # Attach mouse callback to window
        cv2.setMouseCallback(ar_frame.name, on_mouse_event)

        # Waiting for Ctrl+C interruption
        with contextlib.suppress(KeyboardInterrupt):

            # Draw frame and mouse position analysis
            while True:

                # Get frame image
                frame_image = ar_frame.image()

                # Write heatmap buffer manual
                buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
                buffer_display_disable = 'disable' if enable_heatmap_buffer else 'enable'
                _write_text(frame_image,
                            f'Heatmap buffer: {buffer_on_off} (Press \'b\' key to {buffer_display_disable})',
                            (20, 40),
                            (0, 255, 255) if enable_heatmap_buffer else (255, 255, 255))

                # Overlay scan path, frame-level and layer-level analyzer results
                _draw_aoi_scan_path(frame_image, ar_frame)
                _draw_frame_analysis(frame_image, ar_frame)
                _draw_layer_analysis(frame_image, ar_frame)

                # Display frame image
                cv2.imshow(ar_frame.name, frame_image)

                key_pressed = cv2.waitKey(10)

                # Reload the pipeline configuration with 'r' key
                # (fixed: the original read the non-existent args.frame attribute,
                # and its comment wrongly named the 'h' key for keycode 114)
                if key_pressed == ord('r'):
                    # NOTE(review): matching original behavior, the reloaded frame
                    # is not entered as a context manager — confirm whether
                    # from_json alone fully starts the pipeline.
                    ar_frame = ArFeatures.ArFrame.from_json(args.configuration)

                # Toggle heatmap buffer with 'b' key
                if key_pressed == ord('b'):
                    enable_heatmap_buffer = not enable_heatmap_buffer
                    # Buffer the last 10 images when enabled, none otherwise
                    # (assumes a heatmap is configured on the frame — TODO confirm)
                    ar_frame.heatmap.buffer = 10 if enable_heatmap_buffer else 0
                    ar_frame.heatmap.clear()

                # Stop by pressing 'Esc' key
                if key_pressed == 27:
                    break

        # Stop frame image display
        cv2.destroyAllWindows()
-
# Script entry point
if __name__ == '__main__':
    main()