aboutsummaryrefslogtreecommitdiff
path: root/src/argaze/utils/export_tobii_segment_aruco_aois.py
blob: 8e8c3fd090babb2f3757b993e89301206a09bd10 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
#!/usr/bin/env python

import argparse
import bisect
import os

from argaze import DataStructures
from argaze import GazeFeatures
from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiVideo
from argaze.ArUcoMarkers import *
from argaze.AreaOfInterest import *
from argaze.utils import MiscFeatures

import numpy

import cv2 as cv

def main():
    """
    Track any ArUco marker into Tobii Glasses Pro 2 segment video file. 
    From a loaded AOI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame. 
    Then, detect if Tobii gaze point is inside any AOI.
    Export AOIs video and data.
    """

    # Manage arguments
    # NOTE: the docstring above is reused as the CLI description (truncated at the
    # first '-'), so its wording is part of runtime behavior.
    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
    parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
    parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)')
    parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
    parser.add_argument('-a', '--aoi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj aoi scene filepath')
    parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
    parser.add_argument('-m', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)')
    parser.add_argument('-i', '--markers_id', metavar='MARKERS_ID', nargs='*', type=int, default=[], help='markers id to track')
    parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
    args = parser.parse_args()

    if args.segment_path is not None:

        # An empty id list means "track every marker of the dictionary"
        empty_marker_set = len(args.markers_id) == 0
        if empty_marker_set:
            print(f'Track any Aruco markers from the {args.dictionary} dictionary')
        else:
            print(f'Track Aruco markers {args.markers_id} from the {args.dictionary} dictionary')

        # Manage destination path
        if args.output is not None:

            output_folder = os.path.dirname(args.output)

            # Guard against an empty dirname (output path without a directory
            # component): os.makedirs('') would raise FileNotFoundError
            if output_folder and not os.path.exists(output_folder):

                os.makedirs(output_folder)
                print(f'{output_folder} folder created')

            aois_filepath = f'{args.output}/aois.json'
            video_filepath = f'{args.output}/fullstream+visu.mp4'

        else:

            aois_filepath = f'{args.segment_path}/aois.json'
            video_filepath = f'{args.segment_path}/fullstream+visu.mp4'

        # Load a tobii segment (time range is given in seconds, converted to microseconds)
        tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1000000), int(args.time_range[1] * 1000000) if args.time_range[1] is not None else None)

        # Load a tobii segment video
        tobii_segment_video = tobii_segment.load_video()
        print(f'Video duration: {tobii_segment_video.get_duration()/1000000}, width: {tobii_segment_video.get_width()}, height: {tobii_segment_video.get_height()}')

        # Load a tobii segment data
        tobii_segment_data = tobii_segment.load_data()
        print(f'Data keys: {tobii_segment_data.keys()}')

        # Access to timestamped gaze position data buffer
        tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp
        print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded')

        # Prepare video exportation at the same format than segment video
        output_video = TobiiVideo.TobiiVideoOutput(video_filepath, tobii_segment_video.get_stream())

        # Create aruco camera
        aruco_camera = ArUcoCamera.ArUcoCamera()
        aruco_camera.load_calibration_file(args.camera_calibration)

        # Create aruco tracker
        aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)

        # Create AOIs 3D scene
        aoi3D_scene = AOI3DScene.AOI3DScene()
        aoi3D_scene.load(args.aoi_scene)
        print(f'AOIs names: {aoi3D_scene.areas()}')

        # Create timestamped buffer to store AOIs scene in time
        ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()

        # Create timestamped buffer to store gaze positions in time
        ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()

        # Zero distorsion matrix, hoisted out of the frame loop (loop invariant).
        # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it.
        # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
        D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])

        # Initialise before the loop so a gaze lookup failure on the very first
        # frame can't trigger a NameError at the AOI check below
        gaze_position = None

        # Video and data replay loop
        try:

            # Count frame to display a progress bar
            MiscFeatures.printProgressBar(0, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100)

            # Iterate on video frames activating video / data synchronisation through vts data buffer
            for video_ts, video_frame in tobii_segment_video.frames(tobii_segment_data.vts):

                try:

                    # Get closest gaze position before video timestamp and remove all gaze positions before
                    closest_gaze_ts, closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)

                    # Draw video synchronized gaze position (gp is normalised: scale to pixels)
                    gaze_position = GazeFeatures.GazePosition(int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
                    cv.circle(video_frame.matrix, tuple(gaze_position), 4, (0, 255, 255), -1)

                    # Store gaze position at this time
                    ts_gaze_positions[video_ts] = gaze_position

                # When expected values can't be found
                except (KeyError, AttributeError, ValueError):

                    pass # keep last gaze position

                # Track markers with pose estimation and draw them
                aruco_tracker.track(video_frame.matrix)
                aruco_tracker.draw(video_frame.matrix)

                # Project 3D scene related to each aruco marker
                if aruco_tracker.get_markers_number():

                    for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):

                        # TODO : Select different 3D scene depending on aruco id

                        # Skip markers outside the requested set (empty set means track all)
                        in_marker_set = marker_id in list(args.markers_id)

                        if not empty_marker_set and not in_marker_set:
                            continue

                        aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
                        aoi3D_scene.translation = aruco_tracker.get_marker_translation(i)

                        # Project with the zero distorsion matrix (see D0 note above)
                        aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)

                        # Check which 2D aois is looked
                        if gaze_position is not None:
                            aoi2D_scene.look_at(gaze_position)

                        # Draw 2D aois
                        aoi2D_scene.draw(video_frame.matrix)

                        # Store 2D aois scene at this time
                        ts_aois_scenes[video_ts] = aoi2D_scene

                # Close window using 'Esc' key
                if cv.waitKey(1) == 27:
                    break

                # Display video
                cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)

                # Write video
                output_video.write(video_frame.matrix)

                # Update Progress Bar
                progress = video_ts - int(args.time_range[0] * 1000000) # - tobii_segment_video.get_vts_offset() ?
                MiscFeatures.printProgressBar(progress, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100)

        # Exit on 'ctrl+C' interruption
        except KeyboardInterrupt:
            pass

        # Stop frame display
        cv.destroyAllWindows()

        # End output video file
        output_video.close()

        print(f'\nAOIs video saved into {video_filepath}')

        # Export 2D aois
        ts_aois_scenes.export_as_json(aois_filepath)

        print(f'Timestamped AOIs positions saved into {aois_filepath}')

# Script entry point: run main() only when executed directly, not when imported
if __name__ == '__main__':

    main()