author    Théo de la Hogue    2023-05-09 12:08:41 +0200
committer Théo de la Hogue    2023-05-09 12:08:41 +0200
commit    3aa72812a5f45dce412f2f355dcac430e440564b (patch)
tree      1c498b413f229ad57160e78183d3e4db009bc905 /src
parent    b02136fa43289e0b23583d354dc8450761829b58 (diff)
Adding a new utils script
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/README.md                  |  13
-rw-r--r--  src/argaze/utils/demo_gaze_features_run.py  | 171
2 files changed, 181 insertions, 3 deletions
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index c1b09a5..e73b75f 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -36,12 +36,19 @@ python ./src/argaze/utils/camera_calibrate.py 7 5 5 3 DICT_APRILTAG_16h5 -d DEVI
Load AR environment from **setup.json** file, detect ArUco markers in camera device (-d DEVICE) frames and estimate environment pose.
```
-python ./src/argaze/utils/demo_environment_run.py ./src/argaze/utils/demo_environment/setup.json -d DEVICE
+python ./src/argaze/utils/demo_ar_features_run.py ./src/argaze/utils/demo_environment/setup.json -d DEVICE
```
.. note::
This demonstration assumes that the camera calibration step is done and that a **calibration.json** file has been exported into the *./src/argaze/utils/demo_environment/* folder.
-
.. note::
- Use **A3_demo.pdf** file located in *./src/argaze/utils/demo_environment/* folder ready to be printed on A3 paper sheet. \ No newline at end of file
+	Use the **A3_demo.pdf** file located in the *./src/argaze/utils/demo_environment/* folder, ready to be printed on an A3 paper sheet.
+
+# Gaze features demonstration
+
+Simulate gaze positions using the mouse pointer to illustrate gaze features.
+
+```
+python ./src/argaze/utils/demo_gaze_features_run.py ./src/argaze/utils/demo_environment/setup.json
+```
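+
+Fixation identification thresholds can be tuned from the command line: -dev sets the maximal deviation in pixels and -dmin the minimal duration in milliseconds (defaults: 50 px and 200 ms). For instance:
+
+```
+python ./src/argaze/utils/demo_gaze_features_run.py ./src/argaze/utils/demo_environment/setup.json -dev 75 -dmin 300
+```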
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
new file mode 100644
index 0000000..0efb232
--- /dev/null
+++ b/src/argaze/utils/demo_gaze_features_run.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+
+import argparse
+import time
+import threading
+
+from argaze import ArFeatures, GazeFeatures
+from argaze.GazeAnalysis import *
+
+import cv2
+import numpy
+
+def main():
+ """
+ Load AR environment from .json file to project AOI scene on screen and use mouse pointer to simulate gaze positions.
+ """
+
+ # Manage arguments
+ parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
+ parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
+
+ parser.add_argument('-dev', '--deviation_max_threshold', metavar='DEVIATION_MAX_THRESHOLD', type=int, default=50, help='maximal distance for fixation identification in pixel')
+ parser.add_argument('-dmin', '--duration_min_threshold', metavar='DURATION_MIN_THRESHOLD', type=int, default=200, help='minimal duration for fixation identification in millisecond')
+    # Parse WIDTH,HEIGHT into an int tuple (type=tuple would split the string into single characters)
+    parser.add_argument('-s', '--window-size', metavar='WINDOW_SIZE', type=lambda value: tuple(int(v) for v in value.split(',')), default=(1920, 1080), help='size of window in pixels as WIDTH,HEIGHT')
+ args = parser.parse_args()
+
+    # Load AR environment
+ demo_environment = ArFeatures.ArEnvironment.from_json(args.environment)
+
+ # Access to main AR scene
+ demo_scene = demo_environment.scenes["AR Scene Demo"]
+
+    # Project AOI scene onto the window plane in pixels
+ aoi_scene_projection = demo_scene.orthogonal_projection * args.window_size
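+    # (orthogonal_projection is assumed to be normalized, so scaling it by the
+    # window size yields pixel coordinates)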
+
+ # Create a window to display AR environment
+ window_name = "AOI Scene"
+ cv2.namedWindow(window_name, cv2.WINDOW_AUTOSIZE)
+
+ # Init gaze movement identification
+ gaze_position = GazeFeatures.GazePosition()
+ gaze_movement_identifier = DispersionBasedGazeMovementIdentifier.GazeMovementIdentifier(args.deviation_max_threshold, args.duration_min_threshold)
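+    # With this dispersion-based identifier, gaze positions staying within
+    # deviation_max_threshold pixels for at least duration_min_threshold
+    # milliseconds should be merged into a fixation, while larger jumps
+    # between fixations should be identified as saccades.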
+ gaze_movement_lock = threading.Lock()
+
+ # Init timestamp
+ start_ts = time.time()
+
+ # Update pointer position
+ def on_mouse_event(event, x, y, flags, param):
+
+ nonlocal gaze_position
+
+        # Compute millisecond timestamp relative to start time
+ data_ts = int((time.time() - start_ts) * 1e3)
+
+ # Update gaze position with mouse pointer position
+ gaze_position = GazeFeatures.GazePosition((x, y))
+
+        # Don't identify gaze movement while the former identification is still being used by the video loop
+ if gaze_movement_lock.locked():
+ return
+
+ # Lock gaze movement exploitation
+ gaze_movement_lock.acquire()
+
+ # Identify gaze movement
+ gaze_movement = gaze_movement_identifier.identify(data_ts, gaze_position)
+
+ if isinstance(gaze_movement, DispersionBasedGazeMovementIdentifier.Fixation):
+
+ # Does the fixation match an AOI?
+ for name, aoi in aoi_scene_projection.items():
+
+ _, _, circle_ratio = aoi.circle_intersection(gaze_movement.centroid, args.deviation_max_threshold)
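+                # circle_ratio is assumed to be the portion of the fixation
+                # circle overlapping the AOI: at least a quarter is required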
+
+ if circle_ratio > 0.25:
+
+ if name != 'Screen':
+
+ print(f'{data_ts}: gaze step on {name} aoi')
+ #gaze_step = GazeStep(gaze_movement, name)
+
+ # Unlock gaze movement exploitation
+ gaze_movement_lock.release()
+
+ return
+
+ # Attach mouse callback to window
+ cv2.setMouseCallback(window_name, on_mouse_event)
+
+ # Waiting for 'ctrl+C' interruption
+ try:
+
+ # Analyse mouse positions
+ while True:
+
+ aoi_matrix = numpy.full((int(args.window_size[1]), int(args.window_size[0]), 3), 0, dtype=numpy.uint8)
+
+ # Lock gaze movement identification
+ gaze_movement_lock.acquire()
+
+ # Check fixation identification
+            if gaze_movement_identifier.current_fixation is not None:
+
+ current_fixation = gaze_movement_identifier.current_fixation
+
+ # Draw looked AOI
+ aoi_scene_projection.draw_circlecast(aoi_matrix, current_fixation.centroid, current_fixation.deviation_max)
+
+ # Draw current fixation
+ cv2.circle(aoi_matrix, (int(current_fixation.centroid[0]), int(current_fixation.centroid[1])), int(current_fixation.deviation_max), (0, 255, 0), len(current_fixation.positions))
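+                # (the circle thickness encodes the number of gaze positions
+                # aggregated into the current fixation)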
+
+ # Draw current fixation gaze positions
+ gaze_positions = current_fixation.positions.copy()
+ while len(gaze_positions) >= 2:
+
+ ts_start, start_gaze_position = gaze_positions.pop_first()
+ ts_next, next_gaze_position = gaze_positions.first
+
+ # Draw start gaze
+ start_gaze_position.draw(aoi_matrix, draw_precision=False)
+
+ # Draw movement from start to next
+                    cv2.line(aoi_matrix, start_gaze_position.value, next_gaze_position.value, (0, 55, 55), 1)
+
+ else:
+
+ # Draw pointer as gaze position
+ gaze_position.draw(aoi_matrix, draw_precision=False)
+
+ # Draw AOI scene projection
+ aoi_scene_projection.draw(aoi_matrix, color=(0, 0, 255))
+
+ # Check saccade identification
+            if gaze_movement_identifier.current_saccade is not None:
+
+ current_saccade = gaze_movement_identifier.current_saccade
+
+ # Draw current saccade gaze positions
+ gaze_positions = current_saccade.positions.copy()
+ while len(gaze_positions) >= 2:
+
+ ts_start, start_gaze_position = gaze_positions.pop_first()
+ ts_next, next_gaze_position = gaze_positions.first
+
+ # Draw start gaze
+ start_gaze_position.draw(aoi_matrix, draw_precision=False)
+
+ # Draw movement from start to next
+                    cv2.line(aoi_matrix, start_gaze_position.value, next_gaze_position.value, (0, 0, 255), 1)
+
+ # Unlock gaze movement identification
+ gaze_movement_lock.release()
+
+ # Draw frame
+ cv2.imshow(window_name, aoi_matrix)
+
+            # Stop demo by pressing 'Esc' key
+ if cv2.waitKey(10) == 27:
+ break
+
+    # Stop demo on 'ctrl+C' interruption
+ except KeyboardInterrupt:
+ pass
+
+ # Stop frame display
+ cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+
+ main()