aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/argaze/AreaOfInterest/AOI2DScene.py8
-rw-r--r--src/argaze/GazeFeatures.py10
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_visual_scan.py19
-rw-r--r--src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py32
4 files changed, 45 insertions, 24 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index 2d0ff17..9752d30 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -17,7 +17,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
# set dimension member
self.dimension = 2
- def look_at(self, gaze_position: GazeFeatures.GazePosition, gaze_precision: GazeFeatures.GazePrecision):
+ def look_at(self, gaze_position: GazeFeatures.GazePosition, gaze_accuracy: GazeFeatures.GazeAccuracy):
"""Get looked and ignored AOI names."""
looked = {}
@@ -26,7 +26,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
for name, aoi in self.items():
# TODO : use looked_region
- # looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position, gaze_precision)
+ # looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position, gaze_accuracy)
if aoi.looked():
@@ -38,7 +38,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
return looked, ignored
- def draw(self, frame, gaze_position: GazeFeatures.GazePosition, gaze_precision: GazeFeatures.GazePrecision, exclude=[], base_color=(0, 0, 255), looked_color=(0, 255, 0)):
+ def draw(self, frame, gaze_position: GazeFeatures.GazePosition, gaze_accuracy: GazeFeatures.GazeAccuracy, exclude=[], base_color=(0, 0, 255), looked_color=(0, 255, 0)):
"""Draw AOI polygons on frame."""
for name, aoi2D in self.items():
@@ -46,7 +46,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
if name in exclude:
continue
- looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position, gaze_precision)
+ looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position, gaze_accuracy)
# Draw looked region
looked_region.draw(frame, base_color, 4)
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index 18a7e0c..0cf586d 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -18,13 +18,13 @@ class TimeStampedGazePositions(DataStructures.TimeStampedBuffer):
def __setitem__(self, key, value: GazePosition):
super().__setitem__(key, value)
-GazePrecision = float
-"""Define gaze precision as a float number."""
+GazeAccuracy = float
+"""Define gaze accuracy as a float number."""
-class TimeStampedGazePrecisions(DataStructures.TimeStampedBuffer):
- """Define timestamped buffer to store gaze precisions."""
+class TimeStampedGazeAccuracies(DataStructures.TimeStampedBuffer):
+ """Define timestamped buffer to store gaze accuracies."""
- def __setitem__(self, key, value: GazePrecision):
+ def __setitem__(self, key, value: GazeAccuracy):
super().__setitem__(key, value)
@dataclass
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index e28b2e7..2b06bf9 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -181,7 +181,7 @@ def main():
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
# Create timestamped buffer to store gaze precision in time
- ts_gaze_precisions = GazeFeatures.TimeStampedGazePrecisions()
+ ts_gaze_accuracies = GazeFeatures.TimeStampedGazeAccuracies()
# Video and data replay loop
try:
@@ -200,8 +200,9 @@ def main():
try:
# !!! the parameters below are specific to the TobiiGlassesPro2 !!!
- camera_precision = 1.5
- camera_opening = 82
+ # Reference : https://www.biorxiv.org/content/10.1101/299925v1
+ tobii_accuracy = 1.42 # degree
+ tobii_camera_hfov = 82 # degree
# Get nearest gaze position before video timestamp and remove all gaze positions before
_, nearest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
@@ -214,17 +215,17 @@ def main():
gaze_position_pixel = (int(nearest_gaze_position.value[0] * video_frame.width), int(nearest_gaze_position.value[1] * video_frame.height))
- gaze_precision_mm = numpy.sin(numpy.deg2rad(camera_precision)) * nearest_gaze_position_3d.value[2]
- camera_opening_mm = numpy.sin(numpy.deg2rad(camera_opening)) * nearest_gaze_position_3d.value[2]
- gaze_precision_pixel = round(video_frame.width * float(gaze_precision_mm) / float(camera_opening_mm))
+ gaze_accuracy_mm = numpy.sin(numpy.deg2rad(tobii_accuracy)) * nearest_gaze_position_3d.value[2]
+ tobii_camera_hfov_mm = numpy.sin(numpy.deg2rad(tobii_camera_hfov)) * nearest_gaze_position_3d.value[2]
+ gaze_accuracy_pixel = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
# Draw gaze position and precision
cv.circle(video_frame.matrix, gaze_position_pixel, 2, (0, 255, 255), -1)
- cv.circle(video_frame.matrix, gaze_position_pixel, gaze_precision_pixel, (0, 255, 255), 1)
+ cv.circle(video_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, (0, 255, 255), 1)
# Store gaze position and precision at this time in millisecond
ts_gaze_positions[round(video_ts_ms)] = gaze_position_pixel
- ts_gaze_precisions[round(video_ts_ms)] = gaze_precision_pixel
+ ts_gaze_accuracies[round(video_ts_ms)] = gaze_accuracy_pixel
else:
@@ -298,7 +299,7 @@ def main():
for name, aoi_array in aoi2D_dict.items():
aoi2D_merged_scene[name] = numpy.sum(aoi_array, axis=0) / len(aoi_array)
- aoi2D_merged_scene.draw(video_frame.matrix, gaze_position_pixel, gaze_precision_pixel, exclude=['Visualisation_Plan'])
+ aoi2D_merged_scene.draw(video_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, exclude=['Visualisation_Plan'])
# Store 2D merged scene at this time in millisecond
ts_aois_scenes[round(video_ts_ms)] = aoi2D_merged_scene
diff --git a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
index 1ff3835..4966d7d 100644
--- a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
+++ b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
@@ -99,12 +99,32 @@ def main():
# Store received gaze positions
past_gaze_positions.append(data_stream['GazePosition'])
- # Get last gaze position before video timestamp and remove all former gaze positions
- earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)
+ # !!! the parameters below are specific to the TobiiGlassesPro2 !!!
+ tobii_accuracy = 1.5
+ tobii_camera_hfov = 82
- # Draw gaze position
- video_gaze_pixel = (int(earliest_gaze_position.value[0] * video_frame.width), int(earliest_gaze_position.value[1] * video_frame.height))
- cv.circle(video_frame.matrix, video_gaze_pixel, 4, (0, 255, 255), -1)
+ # Get nearest gaze position before video timestamp and remove all gaze positions before
+ _, nearest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
+
+ # Get nearest gaze position 3D before video timestamp and remove all gaze positions before
+ _, nearest_gaze_position_3d = tobii_ts_gaze_positions_3d.pop_first_until(video_ts)
+
+ # Consider gaze position if gaze precision can be evaluated
+ if nearest_gaze_position_3d.value[2] > 0:
+
+ gaze_position_pixel = (int(nearest_gaze_position.value[0] * video_frame.width), int(nearest_gaze_position.value[1] * video_frame.height))
+
+ gaze_accuracy_mm = numpy.sin(numpy.deg2rad(tobii_accuracy)) * nearest_gaze_position_3d.value[2]
+ tobii_camera_hfov_mm = numpy.sin(numpy.deg2rad(tobii_camera_hfov)) * nearest_gaze_position_3d.value[2]
+ gaze_accuracy_pixel = round(video_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
+
+ # Draw gaze position and precision
+ cv.circle(video_frame.matrix, gaze_position_pixel, 2, (0, 255, 255), -1)
+ cv.circle(video_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, (0, 255, 255), 1)
+
+ else:
+
+            raise ValueError('Unable to evaluate gaze precision')
# Wait for gaze position
except (AttributeError, ValueError):
@@ -165,7 +185,7 @@ def main():
for name, aoi_array in aoi2D_dict.items():
aoi2D_merged_scene[name] = numpy.sum(aoi_array, axis=0) / len(aoi_array)
- aoi2D_merged_scene.draw(video_frame.matrix, video_gaze_pixel, exclude=['Visualisation_Plan'])
+            aoi2D_merged_scene.draw(video_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, exclude=['Visualisation_Plan'])
# Send look at aoi pointer
for name, aoi in aoi2D_merged_scene.items():