aboutsummaryrefslogtreecommitdiff
path: root/src/argaze/utils/live_tobii_aruco_aois.py
blob: 51c244cde1b4537a74f6b9e0119600ca1959c01a (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/usr/bin/env python

import argparse
import os

from argaze import DataStructures
from argaze.TobiiGlassesPro2 import *
from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
from argaze.AreaOfInterest import *
from argaze.TobiiGlassesPro2 import *

import cv2 as cv
import numpy

def main():
    """
    Track any ArUco marker into Tobii Glasses Pro 2 camera video stream. 
    From a loaded AOI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame. 
    Then, detect if Tobii gaze point is inside any AOI.
    """

    # Manage arguments
    # NOTE: the docstring is split on '-' so that argparse option help text is not duplicated into the description.
    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
    parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip')
    parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
    parser.add_argument('-r', '--roi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj roi scene filepath')
    parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary')
    parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
    args = parser.parse_args()

    # Create tobii controller
    tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')

    # Calibrate tobii glasses
    tobii_controller.calibrate()

    # Enable tobii data stream
    tobii_data_stream = tobii_controller.enable_data_stream()

    # Enable tobii video stream
    tobii_video_stream = tobii_controller.enable_video_stream()

    # Create aruco camera and load its intrinsic parameters from the given json file
    aruco_camera = ArUcoCamera.ArUcoCamera()
    aruco_camera.load_calibration_file(args.camera_calibration)

    # Create aruco tracker
    aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)

    # Create AOIs 3D scene
    aoi3D_scene = AOI3DScene.AOI3DScene()
    aoi3D_scene.load(args.roi_scene)

    # Start streaming
    tobii_controller.start_streaming()

    # Live video stream capture loop
    try:

        past_gaze_positions = DataStructures.TimeStampedBuffer()

        # BUGFIX: pointer must exist before the loop; otherwise a data stream
        # failure on the very first frame raised NameError at the AOI test below.
        pointer = None

        while tobii_video_stream.is_alive():

            video_ts, video_frame = tobii_video_stream.read()

            try:

                # Read data stream
                data_stream = tobii_data_stream.read()

                # Store received gaze positions
                past_gaze_positions.append(data_stream.gidx_l_gp)

                # Get last gaze position before video timestamp and remove all former gaze positions
                earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)

                # Draw video synchronized gaze pointer
                # (gaze position is normalized [0,1]; scale it to frame pixel coordinates)
                pointer = (int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
                cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)

            # When expected values aren't in data stream
            except (KeyError, AttributeError, ValueError):

                pass # keep last pointer position

            # Track markers with pose estimation and draw them
            aruco_tracker.track(video_frame.matrix)
            aruco_tracker.draw(video_frame.matrix)

            # Project 3D scenes related to each aruco markers
            if aruco_tracker.get_markers_number():

                for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):

                    # TODO : Select different 3D scenes depending on aruco id

                    marker_rotation = aruco_tracker.get_marker_rotation(i)
                    marker_translation = aruco_tracker.get_marker_translation(i)

                    aoi3D_scene.rotation = marker_rotation
                    aoi3D_scene.translation = marker_translation

                    # Edit Zero distorsion matrix
                    D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])

                    # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
                    # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
                    aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)

                    # Check if gaze is inside 2D aois
                    # (identity test against None is the idiomatic form; pointer may still be None
                    # when no gaze position has been received yet)
                    if pointer is not None:
                        aoi2D_scene.inside(pointer)

                    # Draw 2D aois
                    aoi2D_scene.draw(video_frame.matrix)

            # Show the annotated frame before polling the keyboard so the window
            # content is up to date when the key event is processed
            cv.imshow('Live Scene', video_frame.matrix)

            # Close window using 'Esc' key
            if cv.waitKey(1) == 27:
                break

    # Exit on 'ctrl+C' interruption
    except KeyboardInterrupt:
        pass

    # Stop frame display
    cv.destroyAllWindows()

    # Stop streaming
    tobii_controller.stop_streaming()

# Run the live demo only when executed as a script, not when imported.
if __name__ == '__main__':

    main()