1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
|
#!/usr/bin/env python
import argparse
import os
import time
from argaze import ArFeatures
import cv2
def main():
    """
    Load AR environment from .json file, detect ArUco markers into camera device frames and estimate environment pose.
    """

    # Manage arguments.
    # NOTE: main.__doc__ is consumed here at runtime, so the docstring text must stay stable.
    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
    parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
    parser.add_argument('-d', '--device', metavar='DEVICE', type=int, default=0, help='video capture device id')
    args = parser.parse_args()

    # Load AR environment
    demo_environment = ArFeatures.ArEnvironment.from_json(args.environment)

    print('ArEnvironment:\n', demo_environment)

    # Access to main AR scene
    demo_scene = demo_environment.scenes["AR Scene Demo"]

    # Enable camera video capture
    video_capture = cv2.VideoCapture(args.device)

    # Capture loop
    try:

        # Capture frames with a full displayed board inside
        while video_capture.isOpened():

            success, video_frame = video_capture.read()

            if success:

                # Detect markers
                demo_environment.aruco_detector.detect_markers(video_frame)

                # Draw detected markers
                demo_environment.aruco_detector.draw_detected_markers(video_frame)

                # Process detected markers for the demo scene
                try:

                    try:

                        # Try to build AOI scene from detected ArUco marker corners
                        scene_projection = demo_scene.build_aruco_aoi_scene(demo_environment.aruco_detector.detected_markers)

                    # NOTE(review): was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.
                    # The precise exception raised by build_aruco_aoi_scene is not visible here — confirm
                    # against ArFeatures and narrow further if possible.
                    except Exception:

                        # Fall back to pose estimation: estimate scene markers poses
                        demo_environment.aruco_detector.estimate_markers_pose(demo_scene.aruco_scene.identifiers)

                        # Estimate scene pose from detected scene markers
                        tvec, rmat, _ = demo_scene.estimate_pose(demo_environment.aruco_detector.detected_markers)

                        # Project AOI scene into frame according estimated pose
                        scene_projection = demo_scene.project(tvec, rmat)

                    # Draw AOI
                    scene_projection.draw(video_frame, (0, 0), color=(0, 255, 255))

                # Catch exceptions raised by estimate_pose and project methods
                # and report them on the frame instead of aborting the loop
                except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:

                    cv2.rectangle(video_frame, (0, 50), (700, 100), (127, 127, 127), -1)
                    cv2.putText(video_frame, f'Error: {e}', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)

            # Draw frame
            cv2.imshow(demo_environment.name, video_frame)

            # Stop calibration by pressing 'Esc' key
            if cv2.waitKey(1) == 27:
                break

    # Stop calibration on 'ctrl+C' interruption
    except KeyboardInterrupt:
        pass

    finally:

        # BUGFIX: release the capture device (it was never released before) and
        # always close windows, even if an unexpected exception escapes the loop
        video_capture.release()
        cv2.destroyAllWindows()
# Script entry point: run the demo only when executed directly, not on import
if __name__ == '__main__':

    main()
|