aboutsummaryrefslogtreecommitdiff
path: root/src/argaze/utils/demo_aruco_markers_run.py
blob: f5bc7568c457df909411920574ab90fd9f54db75 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
#!/usr/bin/env python

"""Augmented Reality pipeline demo script.

This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""

__author__ = "Théo de la Hogue"
__credits__ = []
__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
__license__ = "GPLv3"

import argparse
import contextlib
import os
import time

from argaze import ArFeatures, GazeFeatures
from argaze.ArUcoMarkers import ArUcoCamera
from argaze.utils import UtilsFeatures

import cv2

# Directory containing this script.
# NOTE(review): not referenced anywhere in this file — possibly kept for copy/paste
# convenience with the other demo scripts; confirm before removing.
current_directory = os.path.dirname(os.path.abspath(__file__))

# Manage command line arguments.
# The parser description is the module docstring truncated at its first '-' character.
argument_parser = argparse.ArgumentParser(description=__doc__.split('-')[0])
argument_parser.add_argument('configuration', metavar='CONFIGURATION', type=str, help='configuration filepath')
argument_parser.add_argument('-s', '--source', metavar='SOURCE', type=str, default='0', help='video capture source (a number to select camera device or a filepath to load a movie)')
argument_parser.add_argument('-v', '--verbose', action='store_true', default=False, help='enable verbose mode to print information in console')
args = argument_parser.parse_args()

def main():
    """Run the augmented reality demo.

    Loads an ArUcoCamera from the JSON configuration given on the command
    line, captures video from the selected source, detects and projects AR
    features into each image, and fakes gaze positions with the mouse
    pointer over the camera window. Timing statistics are overlaid on the
    displayed image. The loop stops on 'Esc' key or Ctrl+C.
    """

    # Load ArUcoCamera from the configuration file given on the command line
    with ArUcoCamera.ArUcoCamera.from_json(args.configuration) as aruco_camera:

        if args.verbose:

            print(aruco_camera)

        # Create a window to display ArUcoCamera
        cv2.namedWindow(aruco_camera.name, cv2.WINDOW_AUTOSIZE)

        # Init timestamp: gaze timestamps are milliseconds since this instant
        start_time = time.time()

        # Prepare gaze analysis assessment
        call_chrono = UtilsFeatures.TimeProbe()
        call_chrono.start()

        gaze_positions_frequency = 0
        gaze_analysis_time = 0

        # Fake gaze position with mouse pointer
        def on_mouse_event(event, x, y, flags, param):
            """Project the mouse pointer position as a gaze position into the camera."""

            nonlocal gaze_positions_frequency
            nonlocal gaze_analysis_time

            # Assess gaze analysis: refresh the laps count once per second (1e3 ms)
            lap_time, nb_laps, elapsed_time = call_chrono.lap()

            if elapsed_time > 1e3:

                gaze_positions_frequency = nb_laps
                call_chrono.restart()

            # Edit millisecond timestamp
            timestamp = int((time.time() - start_time) * 1e3)

            try:

                # Project gaze position into camera
                aruco_camera.look(GazeFeatures.GazePosition((x, y), timestamp=timestamp))

                # Assess gaze analysis
                gaze_analysis_time = aruco_camera.execution_times['look']

            # Don't let a gaze analysis error kill the OpenCV mouse callback:
            # report it to the console and reset the displayed analysis time.
            except Exception as e:

                print(e)
                gaze_analysis_time = 0

        # Attach mouse callback to window
        cv2.setMouseCallback(aruco_camera.name, on_mouse_event)

        # Prepare video fps assessment
        video_fps = 0
        video_chrono = UtilsFeatures.TimeProbe()
        video_chrono.start()

        # Prepare visualisation time assessment
        visualisation_time = 0

        # Enable camera video capture: a decimal source selects a camera device,
        # anything else is treated as a movie filepath
        video_capture = cv2.VideoCapture(int(args.source) if args.source.isdecimal() else args.source)

        # Waiting for 'ctrl+C' interruption
        with contextlib.suppress(KeyboardInterrupt):

            # Assess capture time
            capture_start = time.time()

            # Capture images
            while video_capture.isOpened():

                # Read video image
                success, video_image = video_capture.read()

                # Assess capture time (milliseconds since capture started)
                capture_time = int((time.time() - capture_start) * 1e3)

                if success:

                    # Assess video fps: refresh the laps count once per second (1e3 ms)
                    lap_time, nb_laps, elapsed_time = video_chrono.lap()

                    if elapsed_time > 1e3:

                        video_fps = nb_laps
                        video_chrono.restart()

                    try:

                        # Detect and project AR features
                        aruco_camera.watch(video_image, timestamp=capture_time)

                        # Detection succeeded
                        exception = None

                    # Keep the error so it can be written over the frame image below
                    except Exception as e:

                        exception = e

                    # Assess visualisation time
                    visualisation_start = time.time()

                    # Get ArUcoCamera frame image
                    aruco_camera_image = aruco_camera.image()

                    # Get execution times: projection time is the watch time minus detection time
                    detection_time = aruco_camera.aruco_detector.execution_times['detect_markers']
                    projection_time = aruco_camera.execution_times['watch'] - detection_time

                    # Write time info over a gray banner at the top of the image
                    cv2.rectangle(aruco_camera_image, (0, 0), (aruco_camera.size[0], 100), (63, 63, 63), -1)
                    cv2.putText(aruco_camera_image, f'{video_fps} FPS | Capture {capture_time}ms | Detection {int(detection_time)}ms | Projection {int(projection_time)}ms | Visualisation {visualisation_time}ms', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
                    cv2.putText(aruco_camera_image, f'{gaze_positions_frequency} gaze positions/s | Gaze analysis {gaze_analysis_time:.2f}ms', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)

                    # Handle exceptions raised by aruco_camera.watch above
                    if exception is not None:

                        # NOTE(review): corners (0, 100)-(size[0], 80) draw a 20px band
                        # while the text below sits at y=140 — the second corner was
                        # possibly meant to be (size[0], 180); confirm intent.
                        cv2.rectangle(aruco_camera_image, (0, 100), (aruco_camera.size[0], 80), (127, 127, 127), -1)
                        cv2.putText(aruco_camera_image, f'error: {exception}', (20, 140), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)

                    # Write hint
                    cv2.putText(aruco_camera_image, 'Move mouse pointer over gray rectangle area', (20, aruco_camera.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)

                    # Display ArUcoCamera frame image
                    cv2.imshow(aruco_camera.name, aruco_camera_image)

                    # Draw and display each scene frames
                    for scene_frame in aruco_camera.scene_frames():

                        # Display scene frame
                        cv2.imshow(f'{scene_frame.parent.name}:{scene_frame.name}', scene_frame.image())

                else:

                    # Assess visualisation time
                    # NOTE(review): on a movie source, read() keeps failing after the
                    # last frame while isOpened() stays true, so the loop spins until
                    # 'Esc' is pressed — confirm whether an automatic stop is wanted.
                    visualisation_start = time.time()

                # Stop by pressing 'Esc' key
                # NOTE: on MacOS, cv2.waitKey(1) waits ~40ms
                if cv2.waitKey(1) == 27:

                    # Close camera video capture: the isOpened() loop condition then exits
                    video_capture.release()

                # Assess visualisation time
                visualisation_time = int((time.time() - visualisation_start) * 1e3)

    # Stop image display
    cv2.destroyAllWindows()

# Script entry point
if __name__ == '__main__':
    main()