-rw-r--r-- | src/argaze/utils/demo_ar_features_run.py | 38 |
1 files changed, 21 insertions, 17 deletions
diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
index 0fa275b..ec42338 100644
--- a/src/argaze/utils/demo_ar_features_run.py
+++ b/src/argaze/utils/demo_ar_features_run.py
@@ -10,7 +10,7 @@ __license__ = "BSD"
 import argparse
 import os
 
-from argaze import ArFeatures, GazeFeatures
+from argaze import ArFeatures
 
 import cv2
 import numpy
@@ -31,17 +31,16 @@ def main():
     demo_environment_filepath = os.path.join(current_directory, 'demo_environment/setup.json')
     demo_environment = ArFeatures.ArEnvironment.from_json(demo_environment_filepath)
 
-    print('ArEnvironment:\n', demo_environment)
+    # Create a window to display AR environment
+    cv2.namedWindow(demo_environment.name, cv2.WINDOW_AUTOSIZE)
 
     # Access to main AR scene
     demo_scene = demo_environment.scenes["AR Scene Demo"]
 
-    # Create a window to display AR environment
-    cv2.namedWindow(demo_environment.name, cv2.WINDOW_AUTOSIZE)
-
-    # Prepare screen AOI projection
+    # Reframe AR scene to a scene bounded by screen AOI
    screen_name = "Screen"
     screen_size = (320, 240)
+    screen_scene = demo_scene.orthogonal_projection.reframe(screen_name, screen_size)
 
     # Create a window to display screen projection
     cv2.namedWindow(screen_name, cv2.WINDOW_AUTOSIZE)
@@ -69,12 +68,13 @@ def main():
         # Capture frames
         while video_capture.isOpened():
 
+            # Read video frame
             success, video_frame = video_capture.read()
 
-            if success:
+            # Create screen frame
+            screen_frame = numpy.zeros((240, 320, 3)).astype(numpy.uint8)
 
-                # Create screen frame
-                screen_frame = numpy.zeros((240, 320, 3)).astype(numpy.uint8)
+            if success:
 
                 # Detect markers
                 demo_environment.aruco_detector.detect_markers(video_frame)
@@ -103,12 +103,13 @@
                     # Draw AOI scene projection
                     aoi_scene_projection.draw(video_frame, color=(255, 255, 255))
 
-                    # Reframe AOI scene to screen AOI
-                    screen_projection = aoi_scene_projection.reframe("Screen", screen_size)
+                    # Project pointer into screen
+                    if aoi_scene_projection[screen_name].contains_point(pointer):
 
-                    # Draw screen projection
-                    screen_projection.draw(screen_frame, color=(255, 255, 255))
+                        inner_x, inner_y = aoi_scene_projection[screen_name].clockwise().inner_axis(pointer)
+                        cv2.circle(screen_frame, (int(inner_x * screen_size[0]), int(inner_y * screen_size[1])), 5, (255, 255, 255), -1)
 
+                # Catch exceptions raised by estimate_pose and project methods
                 except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:
 
 
@@ -118,14 +119,17 @@
             # Draw video frame
             cv2.imshow(demo_environment.name, video_frame)
 
-            # Draw screen frame
-            cv2.imshow(screen_name, screen_frame)
+            # Draw screen scene
+            screen_scene.draw(screen_frame, color=(255, 255, 255))
+
+            # Draw screen frame
+            cv2.imshow(screen_name, screen_frame)
 
-            # Stop calibration by pressing 'Esc' key
+            # Stop by pressing 'Esc' key
             if cv2.waitKey(10) == 27:
                 break
 
-        # Stop calibration on 'ctrl+C' interruption
+        # Stop on 'ctrl+C' interruption
     except KeyboardInterrupt:
         pass
 
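
For reference, the new pointer-to-screen mapping introduced by this change (contains_point, then clockwise().inner_axis() scaled by screen_size) amounts to normalizing a point inside the projected "Screen" quadrilateral and converting it to screen pixels. The following is only an illustrative sketch using a plain OpenCV perspective transform, with made-up corner and pointer values; it does not reproduce the actual ArFeatures/AOIFeatures implementation.

import cv2
import numpy

# Hypothetical values: corners of the projected "Screen" AOI in the video
# frame (clockwise from top-left) and a pointer position inside it.
screen_corners = numpy.array([[200, 150], [440, 160], [430, 330], [210, 320]], dtype=numpy.float32)
pointer = (320, 240)
screen_size = (320, 240)

# Map the projected quadrilateral onto the unit square with a perspective transform.
unit_square = numpy.array([[0, 0], [1, 0], [1, 1], [0, 1]], dtype=numpy.float32)
homography = cv2.getPerspectiveTransform(screen_corners, unit_square)

# Transform the pointer into normalized "inner" coordinates (0..1 on each axis).
point = numpy.array([[pointer]], dtype=numpy.float32)
inner_x, inner_y = cv2.perspectiveTransform(point, homography)[0][0]

# Scale to screen pixels, as the demo does before drawing its cursor circle.
print(int(inner_x * screen_size[0]), int(inner_y * screen_size[1]))

Running this prints the pixel position inside the 320x240 screen frame where the demo would draw the white circle for the pointer.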