about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/argaze/utils/live_tobii_aruco_aois.py25
-rw-r--r--src/argaze/utils/live_tobii_session.py14
2 files changed, 17 insertions, 22 deletions
diff --git a/src/argaze/utils/live_tobii_aruco_aois.py b/src/argaze/utils/live_tobii_aruco_aois.py
index 7642aaf..8ad458b 100644
--- a/src/argaze/utils/live_tobii_aruco_aois.py
+++ b/src/argaze/utils/live_tobii_aruco_aois.py
@@ -3,7 +3,7 @@
import argparse
import os
-from argaze import DataStructures
+from argaze import DataStructures, GazeFeatures
from argaze.TobiiGlassesPro2 import *
from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
from argaze.AreaOfInterest import *
@@ -61,7 +61,7 @@ def main():
# Start streaming
tobii_controller.start_streaming()
-
+
# Live video stream capture loop
try:
@@ -82,14 +82,13 @@ def main():
# Get last gaze position before video timestamp and remove all former gaze positions
earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)
- # Draw video synchronized gaze position
- gaze_position = GazeFeatures.GazePosition((int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height)))
- cv.circle(video_frame.matrix, gaze_position, 4, (0, 255, 255), -1)
-
- # When expected values aren't in data stream
- except (KeyError, AttributeError, ValueError):
+ # Draw gaze position
+ gaze_position = GazeFeatures.GazePosition(int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
+ cv.circle(video_frame.matrix, gaze_position.as_tuple(), 4, (0, 255, 255), -1)
- pass # keep last gaze position
+ # Wait for gaze position
+ except (AttributeError, ValueError):
+ continue
# Track markers with pose estimation and draw them
aruco_tracker.track(video_frame.matrix)
@@ -117,12 +116,8 @@ def main():
# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
- # Check which 2D aois is looked
- if gaze_position != None:
- aoi2D_scene.look_at(gaze_position)
-
- # Draw 2D aois
- aoi2D_scene.draw(video_frame.matrix)
+ # Draw 2D scene
+ aoi2D_scene.draw(video_frame.matrix, gaze_position)
# Close window using 'Esc' key
if cv.waitKey(1) == 27:
diff --git a/src/argaze/utils/live_tobii_session.py b/src/argaze/utils/live_tobii_session.py
index f5ef0f4..561dc6c 100644
--- a/src/argaze/utils/live_tobii_session.py
+++ b/src/argaze/utils/live_tobii_session.py
@@ -3,7 +3,7 @@
import argparse
import os, time
-from argaze import DataStructures
+from argaze import DataStructures, GazeFeatures
from argaze.TobiiGlassesPro2 import *
import cv2 as cv
@@ -55,13 +55,13 @@ def main():
# Get last gaze position before video timestamp and remove all former gaze positions
earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)
- # Draw video synchronized gaze pointer
- pointer = (int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
- cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
+ # Draw gaze position
+ gaze_position = GazeFeatures.GazePosition(int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
+ cv.circle(video_frame.matrix, gaze_position.as_tuple(), 4, (0, 255, 255), -1)
- # When expected values aren't in data stream
- except (KeyError, AttributeError, ValueError):
- pass
+ # Wait for gaze position
+ except (AttributeError, ValueError):
+ continue
# Close window using 'Esc' key
if cv.waitKey(1) == 27: