author    Théo de la Hogue  2023-06-27 14:56:56 +0200
committer Théo de la Hogue  2023-06-27 14:56:56 +0200
commit    df0af798124b14ccc7188d8d7bbd3c9fd6f49ff0 (patch)
tree      2515c8e1d333605bc476ea3acb045ce5f50c20bd /src
parent    4b4b67a653bd1ae9bddac806e23dbbfc0a1bfabd (diff)
Using new ArEnvironment/ArScreen pipeline.
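
In short, the demo script no longer detects markers, estimates pose and projects the AOI scene by hand: a single detect_and_project() call on the ArEnvironment handles each video image, look() consumes timestamped gaze positions (faked here with the mouse pointer), and screens_image() yields one image per scene screen. The sketch below condenses that new usage from the diff that follows; the environment loading call, the capture source and the plain-print error handling are assumptions for illustration, not part of this commit.

import time

import cv2

from argaze import ArFeatures, GazeFeatures

# Assumption: the environment is loaded from its JSON description, as the
# unchanged part of demo_ar_features_run.py does.
demo_environment = ArFeatures.ArEnvironment.from_json('demo_environment/setup.json')

# Create a window to display the AR environment
cv2.namedWindow(demo_environment.name, cv2.WINDOW_AUTOSIZE)

start_time = time.time()

def on_mouse_event(event, x, y, flags, param):
    """Fake gaze positions with the mouse pointer."""

    # Millisecond timestamp since the script started
    ts = (time.time() - start_time) * 1e3

    # Project the pointer as a gaze position into the environment
    demo_environment.look(ts, GazeFeatures.GazePosition((x, y)))

cv2.setMouseCallback(demo_environment.name, on_mouse_event)

# Assumption: any OpenCV capture source works here (device index or file path)
video_capture = cv2.VideoCapture(0)

while True:

    success, video_image = video_capture.read()

    if success:

        try:

            # Detect markers, estimate pose and project AOI scenes in one call
            demo_environment.detect_and_project(video_image)

            # Show the whole environment image...
            cv2.imshow(demo_environment.name, demo_environment.image)

            # ...and one window per scene screen
            for scene_name, screen_name, screen_image in demo_environment.screens_image():

                cv2.imshow(f'{scene_name}:{screen_name}', screen_image)

        except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:

            print(f'Error: {e}')

    # Stop by pressing 'Esc' key
    if cv2.waitKey(10) == 27:
        break

video_capture.release()
cv2.destroyAllWindows()
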
Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/demo_ar_features_run.py      72
-rw-r--r--  src/argaze/utils/demo_environment/setup.json   9
2 files changed, 25 insertions, 56 deletions
diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
index b241b26..5c96abc 100644
--- a/src/argaze/utils/demo_ar_features_run.py
+++ b/src/argaze/utils/demo_ar_features_run.py
@@ -9,8 +9,9 @@ __license__ = "BSD"
import argparse
import os
+import time
-from argaze import ArFeatures
+from argaze import ArFeatures, GazeFeatures
import cv2
import numpy
@@ -34,25 +35,17 @@ def main():
# Create a window to display AR environment
cv2.namedWindow(demo_environment.name, cv2.WINDOW_AUTOSIZE)
- # Access to main AR scene
- demo_scene = demo_environment.scenes["AR Scene Demo"]
+ # Init timestamp
+ start_time = time.time()
- # Access to main AR screen
- demo_screen = demo_scene.screens["GrayRectangle"]
-
- # Create a window to display AR screen
- cv2.namedWindow(demo_screen.name, cv2.WINDOW_AUTOSIZE)
-
- # Init mouse interaction
- pointer = (0, 0)
-
- # Update pointer position
+ # Fake gaze position with mouse pointer
def on_mouse_event(event, x, y, flags, param):
- nonlocal pointer
+ # Compute millisecond timestamp
+ ts = (time.time() - start_time) * 1e3
- # Update pointer
- pointer = (x, y)
+ # Project gaze position into environment
+ demo_environment.look(ts, GazeFeatures.GazePosition((x, y)))
# Attach mouse callback to window
cv2.setMouseCallback(demo_environment.name, on_mouse_event)
@@ -69,58 +62,27 @@ def main():
# Read video image
success, video_image = video_capture.read()
- # Reset screen image
- demo_screen.init()
- demo_screen.draw_aoi()
-
if success:
- # Detect markers
- demo_environment.aruco_detector.detect_markers(video_image)
-
- # Draw detected markers
- demo_environment.aruco_detector.draw_detected_markers(video_image)
-
- # Try to project scene
+ # Try to detect and project environment
try:
- try:
- # Try to build AOI scene from detected ArUco marker corners
- aoi_scene_projection = demo_scene.build_aruco_aoi_scene(demo_environment.aruco_detector.detected_markers)
+ demo_environment.detect_and_project(video_image)
- except:
+ # Draw environment
+ cv2.imshow(demo_environment.name, demo_environment.image)
- # Estimate scene markers poses
- demo_environment.aruco_detector.estimate_markers_pose(demo_scene.aruco_scene.identifiers)
+ # Draw each screen image
+ for scene_name, screen_name, screen_image in demo_environment.screens_image():
- # Estimate scene pose from detected scene markers
- tvec, rmat, _, _ = demo_scene.estimate_pose(demo_environment.aruco_detector.detected_markers)
+ cv2.imshow(f'{scene_name}:{screen_name}', screen_image)
- # Project AOI scene into video image according estimated pose
- aoi_scene_projection = demo_scene.project(tvec, rmat)
-
- # Draw AOI scene projection
- aoi_scene_projection.draw(video_image, color=(255, 255, 255))
-
- # Project pointer into screen image
- if aoi_scene_projection[demo_screen.name].contains_point(pointer):
-
- inner_x, inner_y = aoi_scene_projection[demo_screen.name].clockwise().inner_axis(pointer)
-
- cv2.circle(demo_screen.image, (int(inner_x * demo_screen.size[0]), int(inner_y * demo_screen.size[1])), 5, (255, 255, 255), -1)
-
- # Catch exceptions raised by estimate_pose and project methods
+ # Catch pose estimation and projection errors
except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:
cv2.rectangle(video_image, (0, 50), (700, 100), (127, 127, 127), -1)
cv2.putText(video_image, f'Error: {e}', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- # Draw video image
- cv2.imshow(demo_environment.name, video_image)
-
- # Draw screen image
- cv2.imshow(demo_screen.name, demo_screen.image)
-
# Stop by pressing 'Esc' key
if cv2.waitKey(10) == 27:
break
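
The companion setup.json change (next hunk) attaches a gaze movement identifier to the GrayRectangle screen, so the gaze positions routed by the new look() call can be classified by a dispersion-threshold (I-DT style) identifier. For reference, the resulting screen entry should read as below; the threshold units (presumably pixels and milliseconds) are not stated by this diff.

"GrayRectangle": {
    "size": [320, 240],
    "gaze_movement_identifier": {
        "type": "DispersionThresholdIdentification",
        "parameters": {
            "deviation_max_threshold": 10,
            "duration_min_threshold": 200
        }
    }
}
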
diff --git a/src/argaze/utils/demo_environment/setup.json b/src/argaze/utils/demo_environment/setup.json
index 582c231..f4f1fbe 100644
--- a/src/argaze/utils/demo_environment/setup.json
+++ b/src/argaze/utils/demo_environment/setup.json
@@ -18,7 +18,14 @@
"aoi_scene": "aoi_scene.obj",
"screens": {
"GrayRectangle": {
- "size": [320, 240]
+ "size": [320, 240],
+ "gaze_movement_identifier": {
+ "type": "DispersionThresholdIdentification",
+ "parameters": {
+ "deviation_max_threshold": 10,
+ "duration_min_threshold": 200
+ }
+ }
}
},
"aruco_axis": {