path: root/src/argaze/utils/tobii_stream_display.py
#!/usr/bin/env python

import argparse

from argaze import DataStructures, GazeFeatures
from argaze.TobiiGlassesPro2 import *

import cv2 as cv
import numpy

def main():
    """
    Capture video camera and gaze data streams and synchronise them.
    """

    # Manage arguments
    parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
    parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.12', help='tobii glasses ip')
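
    # Example invocation, using the default Tobii Glasses IP address:
    #   python tobii_stream_display.py -t 192.168.1.12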

    args = parser.parse_args()

    # Create tobii controller
    tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')

    # Enable tobii data stream
    tobii_data_stream = tobii_controller.enable_data_stream()

    # Enable tobii video stream
    tobii_video_stream = tobii_controller.enable_video_stream()

    # Start streaming
    tobii_controller.start_streaming()

    # Prepare timestamped gaze position data stream buffering
    tobii_ts_gaze_positions = DataStructures.TimeStampedBuffer()

    # Prepare timestamped 3D gaze position data stream buffering
    tobii_ts_gaze_positions_3d = DataStructures.TimeStampedBuffer()

    # Prepare timestamped head rotation data stream buffering
    tobii_ts_head_rotations = DataStructures.TimeStampedBuffer()
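
    # Gaze and gyroscope samples arrive with their own timestamps and typically at a higher rate
    # than video frames, so they are buffered here; the loop below pops the latest buffered sample
    # for each displayed video frame.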

    # Live video and data stream capture loop
    try:

        while tobii_video_stream.is_alive():

            # Read video stream
            video_ts, video_frame = tobii_video_stream.read()
            video_ts_ms = video_ts / 1e3

            # Read data stream
            data_ts, data_stream = tobii_data_stream.read()
            data_ts_ms = data_ts / 1e3
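
            # Tobii timestamps are assumed to be in microseconds, hence the division by 1e3 to get milliseconds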

            try:

                # Buffer last received gaze positions
                tobii_ts_gaze_positions.append(data_stream['GazePosition'])

                # Buffer last received gaze positions 3d
                tobii_ts_gaze_positions_3d.append(data_stream['GazePosition3D'])

                # Buffer last received head rotations
                tobii_ts_head_rotations.append(data_stream['Gyroscope'])

            # Ignore missing data stream
            except KeyError:
                pass

            try:

                # Get nearest head rotation before video timestamp and remove all head rotations before
                earliest_ts, earliest_head_rotation = tobii_ts_head_rotations.pop_last()

                # Calculate head movement considering only head yaw and pitch
                head_movement = numpy.array(earliest_head_rotation.value)
                head_movement_px = head_movement.astype(int)
                head_movement_norm = numpy.linalg.norm(head_movement[0:2])

                # Draw movement vector
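                # (the yaw component, index 1, is used as the horizontal offset and the negated pitch
                # component, index 0, as the vertical one; axis convention assumed from the drawing code)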
                cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2)), (int(video_frame.width/2) + head_movement_px[1], int(video_frame.height/2) - head_movement_px[0]), (150, 150, 150), 3)
            
            # Wait for head rotation
            except KeyError:
                pass

            try:

                # Get nearest gaze position before video timestamp and remove all gaze positions before
                _, earliest_gaze_position = tobii_ts_gaze_positions.pop_last()

                # Process gaze position only when it is valid (validity == 0)
                if earliest_gaze_position.validity == 0:
                
                    gaze_position_pixel = GazeFeatures.GazePosition( (int(earliest_gaze_position.value[0] * video_frame.width), int(earliest_gaze_position.value[1] * video_frame.height)) )
                    
                    # Get nearest gaze position 3D before video timestamp and remove all gaze positions before
                    _, earliest_gaze_position_3d = tobii_ts_gaze_positions_3d.pop_last()

                    # Process 3D gaze position only when it is valid
                    if earliest_gaze_position_3d.validity == 0:
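                        # Project the Tobii angular accuracy specification at the gaze depth (Z of the
                        # 3D gaze point) to get an accuracy radius in millimetres, then convert it to
                        # pixels using the scene camera horizontal field of view at that same depth.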
                        
                        gaze_accuracy_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.ACCURACY)) * earliest_gaze_position_3d.value[2]
                        tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(TobiiSpecifications.CAMERA_HFOV / 2)) * earliest_gaze_position_3d.value[2]

                        gaze_position_pixel.accuracy = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
                    
                        # Draw gaze
                        gaze_position_pixel.draw(video_frame.matrix)

            # Wait for gaze position
            except KeyError:
                pass

            # Draw center
            cv.line(video_frame.matrix, (int(video_frame.width/2) - 50, int(video_frame.height/2)), (int(video_frame.width/2) + 50, int(video_frame.height/2)), (255, 150, 150), 1)
            cv.line(video_frame.matrix, (int(video_frame.width/2), int(video_frame.height/2) - 50), (int(video_frame.width/2), int(video_frame.height/2) + 50), (255, 150, 150), 1)

            # Write stream timing
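            # (the displayed video delay is the difference between the latest data timestamp and the
            # current video frame timestamp, i.e. how far the video lags behind the data stream)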
            cv.rectangle(video_frame.matrix, (0, 0), (950, 50), (63, 63, 63), -1)
            cv.putText(video_frame.matrix, f'Data stream time: {int(data_ts_ms)} ms', (20, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
            cv.putText(video_frame.matrix, f'Video delay: {int(data_ts_ms - video_ts_ms)} ms', (550, 40), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)

            # Display the annotated video frame
            cv.imshow('Video and data stream', video_frame.matrix)

            # Close window using 'Esc' key
            if cv.waitKey(1) == 27:
                break

    # Exit on 'ctrl+C' interruption
    except KeyboardInterrupt:
        pass

    # Stop frame display
    cv.destroyAllWindows()

    # Stop streaming
    tobii_controller.stop_streaming()
    
if __name__ == '__main__':

    main()