author    Théo de la Hogue    2023-06-28 15:48:39 +0200
committer Théo de la Hogue    2023-06-28 15:48:39 +0200
commit    efbca53f612327642a3f4c2bf72ebfdd4da5d5a6 (patch)
tree      9a809c57fecdd4357d3695a8f4cb289423d41b97
parent    9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b (diff)
Drawing screen.
-rw-r--r--  src/argaze/utils/demo_ar_features_run.py | 36
1 file changed, 25 insertions(+), 11 deletions(-)
diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
index 2ce6731..39c69e7 100644
--- a/src/argaze/utils/demo_ar_features_run.py
+++ b/src/argaze/utils/demo_ar_features_run.py
@@ -42,10 +42,10 @@ def main():
def on_mouse_event(event, x, y, flags, param):
# Edit millisecond timestamp
- ts = (time.time() - start_time) * 1e3
+ timestamp = int((time.time() - start_time) * 1e3)
- # Project gaze posiiton into environment
- ar_environment.look(ts, GazeFeatures.GazePosition((x, y)))
+ # Project gaze position into environment
+ ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y)))
# Attach mouse callback to window
cv2.setMouseCallback(ar_environment.name, on_mouse_event)
@@ -69,20 +69,34 @@ def main():
ar_environment.detect_and_project(video_image)
- # Draw environment
- cv2.imshow(ar_environment.name, ar_environment.image)
-
- # Draw each screens
- for scene_name, screen_name, screen_image in ar_environment.screens_image():
-
- cv2.imshow(f'{scene_name}:{screen_name}', screen_image)
-
# Catch errors
except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:
cv2.rectangle(video_image, (0, 50), (700, 100), (127, 127, 127), -1)
cv2.putText(video_image, f'Error: {e}', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ # Draw environment
+ cv2.imshow(ar_environment.name, ar_environment.image)
+
+ # Draw each screens
+ for scene_name, screen_name, screen in ar_environment.screens:
+
+ image = screen.background.copy()
+
+ screen.aoi_2d_scene.draw(image, color=(255, 255, 255))
+ screen.current_gaze_position.draw(image, color=(255, 255, 255))
+
+ screen.current_gaze_movement.draw(image, color=(0, 255, 255))
+ screen.current_gaze_movement.draw_positions(image)
+
+ # Check screen fixation
+ if GazeFeatures.is_fixation(screen.current_gaze_movement):
+
+ # Draw looked AOI
+ screen.aoi_2d_scene.draw_circlecast(image, screen.current_gaze_movement.focus, screen.current_gaze_movement.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
+
+ cv2.imshow(f'{scene_name}:{screen_name}', image)
+
# Stop by pressing 'Esc' key
if cv2.waitKey(10) == 27:
break
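
Read together, the two hunks move the drawing code out of the try/except: the environment image and the per-screen images are now rendered on every loop iteration, even when pose estimation or scene projection fails, and each screen is redrawn from its background with AOIs, the current gaze position and the current gaze movement before being shown in its own window. As a reading aid, the sketch below reconstructs how the affected part of demo_ar_features_run.py reads after this commit. It only uses names visible in the diff above; the import path, the video capture and the prior creation of the ar_environment window are assumptions about the unchanged parts of the demo, not part of this change.

import time

import cv2

# Assumed import path for the modules referenced in the diff.
from argaze import ArFeatures, GazeFeatures


def run_demo_loop(ar_environment, video_capture, start_time):
    """Sketch of the demo loop after this commit. ar_environment, video_capture
    and start_time are assumed to be set up by the unchanged part of main()."""

    def on_mouse_event(event, x, y, flags, param):

        # Edit millisecond timestamp (now an integer, see the first hunk)
        timestamp = int((time.time() - start_time) * 1e3)

        # Project gaze position into environment
        ar_environment.look(timestamp, GazeFeatures.GazePosition((x, y)))

    # Attach mouse callback to window (the window named after the environment
    # is assumed to have been created earlier in the demo)
    cv2.setMouseCallback(ar_environment.name, on_mouse_event)

    while True:

        # Capture a camera image (video_capture is assumed to be a cv2.VideoCapture)
        _, video_image = video_capture.read()

        try:

            # Detect markers and project scenes into the camera image
            ar_environment.detect_and_project(video_image)

        # Catch errors
        except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:

            cv2.rectangle(video_image, (0, 50), (700, 100), (127, 127, 127), -1)
            cv2.putText(video_image, f'Error: {e}', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)

        # Draw environment (now outside the try/except, so it runs even on errors)
        cv2.imshow(ar_environment.name, ar_environment.image)

        # Draw each screen in a dedicated window, starting from its background
        for scene_name, screen_name, screen in ar_environment.screens:

            image = screen.background.copy()

            screen.aoi_2d_scene.draw(image, color=(255, 255, 255))
            screen.current_gaze_position.draw(image, color=(255, 255, 255))

            screen.current_gaze_movement.draw(image, color=(0, 255, 255))
            screen.current_gaze_movement.draw_positions(image)

            # Check screen fixation and highlight the looked AOI
            if GazeFeatures.is_fixation(screen.current_gaze_movement):

                screen.aoi_2d_scene.draw_circlecast(image,
                                                    screen.current_gaze_movement.focus,
                                                    screen.current_gaze_movement.deviation_max,
                                                    base_color=(0, 0, 0),
                                                    matching_color=(255, 255, 255))

            cv2.imshow(f'{scene_name}:{screen_name}', image)

        # Stop by pressing 'Esc' key
        if cv2.waitKey(10) == 27:
            break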