Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/demo_environment_run.py  115
1 file changed, 0 insertions, 115 deletions
diff --git a/src/argaze/utils/demo_environment_run.py b/src/argaze/utils/demo_environment_run.py
deleted file mode 100644
index efd795b..0000000
--- a/src/argaze/utils/demo_environment_run.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-
-from argaze import ArFeatures, GazeFeatures
-
-import cv2
-
-def main():
- """
- Load AR environment from .json file, detect ArUco markers into camera device frames and estimate environment pose.
- """
-
- # Manage arguments
- parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
-
- parser.add_argument('-d', '--device', metavar='DEVICE', type=int, default=0, help='video capture device id')
- args = parser.parse_args()
-
- # Load AR enviroment
- demo_environment = ArFeatures.ArEnvironment.from_json(args.environment)
-
- print('ArEnvironment:\n', demo_environment)
-
- # Access to main AR scene
- demo_scene = demo_environment.scenes["AR Scene Demo"]
-
- # Create a window to display AR environment
- cv2.namedWindow(demo_environment.name, cv2.WINDOW_AUTOSIZE)
-
- # Init mouse interaction
- pointer = (0, 0)
-
- # Update pointer position
- def on_mouse_event(event, x, y, flags, param):
-
- nonlocal pointer
-
- # Update pointer
- pointer = (x, y)
-
- # Attach mouse callback to window
- cv2.setMouseCallback(demo_environment.name, on_mouse_event)
-
- # Enable camera video capture
- video_capture = cv2.VideoCapture(args.device)
-
- # Waiting for 'ctrl+C' interruption
- try:
-
- # Capture frames
- while video_capture.isOpened():
-
- success, video_frame = video_capture.read()
-
- if success:
-
- # Draw pointer as gaze position
- gaze_position = GazeFeatures.GazePosition(pointer, precision=10)
- gaze_position.draw(video_frame)
-
- # Detect markers
- demo_environment.aruco_detector.detect_markers(video_frame)
-
- # Draw detected markers
- demo_environment.aruco_detector.draw_detected_markers(video_frame)
-
- # Try to project scene
- try:
-
- try:
- # Try to build AOI scene from detected ArUco marker corners
- aoi_scene_projection = demo_scene.build_aruco_aoi_scene(demo_environment.aruco_detector.detected_markers)
-
- except:
-
- # Estimate scene markers poses
- demo_environment.aruco_detector.estimate_markers_pose(demo_scene.aruco_scene.identifiers)
-
- # Estimate scene pose from detected scene markers
- tvec, rmat, _, _ = demo_scene.estimate_pose(demo_environment.aruco_detector.detected_markers)
-
- # Project AOI scene into frame according estimated pose
- aoi_scene_projection = demo_scene.project(tvec, rmat)
-
- # Draw AOI scene projection with gaze
- aoi_scene_projection.draw_circlecast(video_frame, gaze_position)
-
- # Catch exceptions raised by estimate_pose and project methods
- except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:
-
- cv2.rectangle(video_frame, (0, 50), (700, 100), (127, 127, 127), -1)
- cv2.putText(video_frame, f'Error: {e}', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
- # Draw frame
- cv2.imshow(demo_environment.name, video_frame)
-
- # Stop calibration by pressing 'Esc' key
- if cv2.waitKey(10) == 27:
- break
-
- # Stop calibration on 'ctrl+C' interruption
- except KeyboardInterrupt:
- pass
-
- # Close camera video capture
- video_capture.release()
-
- # Stop frame display
- cv2.destroyAllWindows()
-
-if __name__ == '__main__':
-
- main()