about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorThéo de la Hogue2022-09-21 11:26:18 +0200
committerThéo de la Hogue2022-09-21 11:26:18 +0200
commit4f9e43776d0bbc560fdb359729a260a29afe8c64 (patch)
tree78b6ed93afcc0ada02e0334b36a6e67fbfcc74b9 /src
parentafde4d6bc58125c2ac7770707c1e5b55fbf84c16 (diff)
downloadargaze-4f9e43776d0bbc560fdb359729a260a29afe8c64.zip
argaze-4f9e43776d0bbc560fdb359729a260a29afe8c64.tar.gz
argaze-4f9e43776d0bbc560fdb359729a260a29afe8c64.tar.bz2
argaze-4f9e43776d0bbc560fdb359729a260a29afe8c64.tar.xz
Redefining GazePosition as a dataclass.
Diffstat (limited to 'src')
-rw-r--r--src/argaze/AreaOfInterest/AOI2DScene.py2
-rw-r--r--src/argaze/AreaOfInterest/AOIFeatures.py2
-rw-r--r--src/argaze/GazeFeatures.py70
-rw-r--r--src/argaze/utils/edit_tobii_segment_aruco_pose.py7
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_visual_scan.py26
-rw-r--r--src/argaze/utils/export_tobii_segment_movements.py31
-rw-r--r--src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py13
7 files changed, 85 insertions, 66 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index 695925c..ae29d6a 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -26,7 +26,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
for name, aoi in self.items():
# TODO : use looked_region
- # looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position.value)
+ # looked_region, aoi_ratio, gaze_ratio = aoi2D.looked_region(gaze_position)
if aoi.looked():
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 14d8166..4b57ed1 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -48,7 +48,7 @@ class AreaOfInterest(numpy.ndarray):
if self.dimension() != 2:
raise RuntimeError(f'Bad area dimension ({self.dimension()})')
- return mpath.Path(self).contains_points([gaze_position])[0]
+ return mpath.Path(self).contains_points([tuple(gaze_position)])[0]
def look_at(self, gaze_pixel):
"""Get where the area is looked using perpespective transformation."""
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index 4d7103b..717e01b 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -9,30 +9,42 @@ from argaze.AreaOfInterest import AOIFeatures
import numpy
import pandas
-GazePosition = tuple
-"""Define gaze position as a tuple of coordinates."""
+@dataclass
+class GazePosition():
+ """Define gaze position as a tuple of coordinates with accuracy."""
-class TimeStampedGazePositions(DataStructures.TimeStampedBuffer):
- """Define timestamped buffer to store gaze positions."""
+ value: tuple
+ accuracy: float = 0.
+
+ def __getitem__(self, key):
+ """Get a position coordinate."""
+ return self.value[key]
- def __setitem__(self, key, value: GazePosition):
- super().__setitem__(key, value)
+ def __setitem__(self, key, coord):
+ """Set a position coordinate."""
+ self.value[name] = coord
+
+ def __iter__(self):
+ return iter(self.value)
+
+ def __len__(self):
+ return len(self.value)
-GazeAccuracy = float
-"""Define gaze accuracy as a float number."""
+ def __array__(self):
+ return numpy.array(self.value)
-class TimeStampedGazeAccuracies(DataStructures.TimeStampedBuffer):
- """Define timestamped buffer to store gaze accuracies."""
+class GazePositionMissing(GazePosition, Exception):
+ """Exception to raise when gaze position is missing."""
- def __setitem__(self, key, value: GazeAccuracy):
- super().__setitem__(key, value)
+ def __init__(self, message):
-class GazeDataMissing(Exception):
- """Exception to raise when gaze position or accuracy is missing."""
- pass
+ super(Exception, self).__init__(message)
-GazePositionMissingItem = GazePosition
-GazeAccuracyMissingItem = GazeAccuracy
+class TimeStampedGazePositions(DataStructures.TimeStampedBuffer):
+ """Define timestamped buffer to store gaze positions."""
+
+ def __setitem__(self, key, value: GazePosition):
+ super().__setitem__(key, value)
@dataclass
class Movement():
@@ -312,12 +324,12 @@ class PointerBasedVisualScan(VisualScanGenerator):
(ts_current, aoi_scene_current) = self.__ts_aoi_scenes.pop_first()
# is aoi scene a missing exception ?
- try: raise aoi_scene_current
+ try:
+
+ raise aoi_scene_current
# when aoi scene is missing
except AOIFeatures.AOISceneMissing as e:
-
- #print(ts_current, e)
pass
# when aoi scene is not missing
@@ -327,6 +339,20 @@ class PointerBasedVisualScan(VisualScanGenerator):
gaze_position = self.__ts_gaze_positions[ts_current]
+ # is aoi scene a missing exception ?
+ raise gaze_position
+
+ # when gaze position is missing
+ except GazePositionMissing as e:
+ pass
+
+ # when there is no gaze position at current time
+ except KeyError as e:
+ pass
+
+ # when gaze position is not missing
+ except:
+
for name, aoi in aoi_scene_current.items():
looked = aoi.looked(gaze_position)
@@ -355,10 +381,6 @@ class PointerBasedVisualScan(VisualScanGenerator):
# forget the aoi
del self.__step_dict[name]
- # ignore missing gaze position
- except KeyError:
- pass
-
# close started steps
for name, step in self.__step_dict.items():
diff --git a/src/argaze/utils/edit_tobii_segment_aruco_pose.py b/src/argaze/utils/edit_tobii_segment_aruco_pose.py
index 9e5aad1..61d695d 100644
--- a/src/argaze/utils/edit_tobii_segment_aruco_pose.py
+++ b/src/argaze/utils/edit_tobii_segment_aruco_pose.py
@@ -207,6 +207,9 @@ def main():
video_frame = last_frame.copy()
+ # Edit fake gaze position from pointer
+ gaze_position = GazeFeatures.GazePosition(pointer, accuracy=2)
+
# Copy video frame to edit visualisation on it with out disrupting aruco tracking
visu_frame = video_frame.copy()
@@ -300,7 +303,7 @@ def main():
aoi2D_video_scene = aoi3D_scene_edited.project(aruco_tracker.get_marker_translation(selected_marker_index), aruco_tracker.get_marker_rotation(selected_marker_index), aruco_camera.get_K())
# Draw scene
- aoi2D_video_scene.draw(visu_frame.matrix, pointer, 2, exclude=['Visualisation_Plan'])
+ aoi2D_video_scene.draw(visu_frame.matrix, gaze_position, exclude=['Visualisation_Plan'])
# Write warning related to marker pose processing
except UserWarning as e:
@@ -321,7 +324,7 @@ def main():
cv.line(visu_frame.matrix, (int(visu_frame.width/2), int(visu_frame.height/2) - 50), (int(visu_frame.width/2), int(visu_frame.height/2) + 50), (255, 150, 150), 1)
# Draw pointer
- cv.circle(visu_frame.matrix, pointer, 2, (0, 255, 255), -1)
+ cv.circle(visu_frame.matrix, gaze_position, gaze_position.accuracy, (0, 255, 255), -1)
# Write segment timing
cv.rectangle(visu_frame.matrix, (0, 0), (550, 50), (63, 63, 63), -1)
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index c252124..a3a31d0 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -186,9 +186,6 @@ def main():
# Create timestamped buffer to store gaze positions in time
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
- # Create timestamped buffer to store gaze accuracies in time
- ts_gaze_accuracies = GazeFeatures.TimeStampedGazeAccuracies()
-
# !!! the parameters below are specific to the TobiiGlassesPro2 !!!
# Reference : https://www.biorxiv.org/content/10.1101/299925v1
tobii_accuracy = 1.42 # degree
@@ -244,9 +241,9 @@ def main():
# Ignore frame when gaze position is not valid
if nearest_gaze_position.validity == 1:
- raise GazeFeatures.GazeDataMissing('Unvalid gaze position')
+ raise GazeFeatures.GazePositionMissing('Unvalid gaze position')
- gaze_position_pixel = (int(nearest_gaze_position.value[0] * visu_frame.width), int(nearest_gaze_position.value[1] * visu_frame.height))
+ gaze_position_pixel = GazeFeatures.GazePosition( (int(nearest_gaze_position.value[0] * visu_frame.width), int(nearest_gaze_position.value[1] * visu_frame.height)) )
# Draw gaze position
cv.circle(visu_frame.matrix, gaze_position_pixel, 2, (0, 255, 255), -1)
@@ -256,18 +253,18 @@ def main():
# Ignore frame when gaze position 3D is not valid
if nearest_gaze_position_3d.validity == 1:
- raise GazeFeatures.GazeDataMissing('Unvalid gaze position 3D')
+ raise GazeFeatures.GazePositionMissing('Unvalid gaze position 3D')
gaze_accuracy_mm = numpy.tan(numpy.deg2rad(tobii_accuracy)) * nearest_gaze_position_3d.value[2]
tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(tobii_camera_hfov / 2)) * nearest_gaze_position_3d.value[2]
- gaze_accuracy_pixel = round(visu_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
+
+ gaze_position_pixel.accuracy = round(visu_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
# Draw gaze accuracy
- cv.circle(visu_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, (0, 255, 255), 1)
+ cv.circle(visu_frame.matrix, gaze_position_pixel, gaze_position_pixel.accuracy, (0, 255, 255), 1)
- # Store gaze position and accuracy in millisecond for further visual scan processing
+ # Store gaze position in millisecond for further visual scan processing
ts_gaze_positions[round(video_ts_ms)] = gaze_position_pixel
- ts_gaze_accuracies[round(video_ts_ms)] = gaze_accuracy_pixel
# Hide frame left and right borders before tracking to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
@@ -334,7 +331,7 @@ def main():
for name, aoi_array in aoi2D_dict.items():
aoi2D_merged_scene[name] = numpy.sum(aoi_array, axis=0) / len(aoi_array)
- aoi2D_merged_scene.draw(visu_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, exclude=['Visualisation_Plan'])
+ aoi2D_merged_scene.draw(visu_frame.matrix, gaze_position_pixel, exclude=['Visualisation_Plan'])
# When the merged scene is empty
if len(aoi2D_merged_scene.keys()) == 0:
@@ -344,12 +341,11 @@ def main():
ts_aois_scenes[round(video_ts_ms)] = aoi2D_merged_scene
# Raised when gaze data is missing
- except GazeFeatures.GazeDataMissing as e:
+ except GazeFeatures.GazePositionMissing as e:
# Store missing gaze data exception
- ts_gaze_positions[round(video_ts_ms)] = GazeFeatures.GazePositionMissingItem
- ts_gaze_accuracies[round(video_ts_ms)] = GazeFeatures.GazeAccuracyMissingItem
-
+ ts_gaze_positions[round(video_ts_ms)] = e
+
cv.rectangle(visu_frame.matrix, (0, 50), (550, 100), (127, 127, 127), -1)
cv.putText(visu_frame.matrix, str(e), (20, 80), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv.LINE_AA)
diff --git a/src/argaze/utils/export_tobii_segment_movements.py b/src/argaze/utils/export_tobii_segment_movements.py
index cb2e607..b0c273a 100644
--- a/src/argaze/utils/export_tobii_segment_movements.py
+++ b/src/argaze/utils/export_tobii_segment_movements.py
@@ -78,18 +78,11 @@ def main():
# Access to timestamped gaze position data buffer
tobii_ts_gaze_positions = tobii_segment_data['GazePosition']
- # Format tobii gaze position in pixel and store them using millisecond unit timestamp
- ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
-
- for ts, tobii_gaze_position in tobii_ts_gaze_positions.items():
- gaze_position_pixel = (int(tobii_gaze_position.value[0] * tobii_segment_video.get_width()), int(tobii_gaze_position.value[1] * tobii_segment_video.get_height()))
- ts_gaze_positions[ts/1000] = gaze_position_pixel
-
# Access to timestamped gaze 3D positions data buffer
tobii_ts_gaze_positions_3d = tobii_segment_data['GazePosition3D']
- # Format gaze accuracies in pixel and store them using millisecond unit timestamp
- ts_gaze_accuracies = GazeFeatures.TimeStampedGazeAccuracies()
+ # Format tobii gaze position in pixel and store them using millisecond unit timestamp
+ ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
# !!! the parameters below are specific to the TobiiGlassesPro2 !!!
# Reference : https://www.biorxiv.org/content/10.1101/299925v1
@@ -97,15 +90,22 @@ def main():
tobii_precision = 0.34 # degree
tobii_camera_hfov = 82 # degree
+ for ts, tobii_gaze_position in tobii_ts_gaze_positions.items():
+
+ if tobii_gaze_position.validity == 0:
+
+ gaze_position_pixel = GazeFeatures.GazePosition( (int(tobii_gaze_position.value[0] * tobii_segment_video.get_width()), int(tobii_gaze_position.value[1] * tobii_segment_video.get_height())) )
+
+ ts_gaze_positions[ts/1000] = gaze_position_pixel
+
for ts, tobii_ts_gaze_position_3d in tobii_ts_gaze_positions_3d.items():
- if tobii_ts_gaze_position_3d.value[2] > 0:
+ if tobii_ts_gaze_position_3d.validity == 0:
gaze_accuracy_mm = numpy.sin(numpy.deg2rad(tobii_accuracy)) * tobii_ts_gaze_position_3d.value[2]
tobii_camera_hfov_mm = numpy.sin(numpy.deg2rad(tobii_camera_hfov)) * tobii_ts_gaze_position_3d.value[2]
- gaze_accuracy_pixel = round(tobii_segment_video.get_width() * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
-
- ts_gaze_accuracies[ts/1000] = gaze_accuracy_pixel
+
+ ts_gaze_positions[ts/1000].accuracy = round(tobii_segment_video.get_width() * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
print(f'Dispersion threshold: {args.dispersion_threshold}')
print(f'Duration threshold: {args.duration_threshold}')
@@ -218,12 +218,9 @@ def main():
# Get closest gaze position before video timestamp and remove all gaze positions before
_, nearest_gaze_position = ts_gaze_positions.pop_first_until(video_ts_ms)
- # Get closest gaze accuracy before video timestamp and remove all gaze accuracies before
- _, nearest_gaze_accuracy = ts_gaze_accuracies.pop_first_until(video_ts_ms)
-
# Draw gaze position and precision
cv.circle(video_frame.matrix, nearest_gaze_position, 2, (0, 255, 255), -1)
- cv.circle(video_frame.matrix, nearest_gaze_position, nearest_gaze_accuracy, (0, 255, 255), 1)
+ cv.circle(video_frame.matrix, nearest_gaze_position, nearest_gaze_position.accuracy, (0, 255, 255), 1)
# Wait for gaze position
except ValueError:
diff --git a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
index c6aa316..070e3ee 100644
--- a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
+++ b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
@@ -143,9 +143,9 @@ def main():
# Ignore frame when gaze position is not valid
if nearest_gaze_position.validity == 1:
- raise GazeFeatures.GazeDataMissing('Unvalid gaze position')
+ raise GazeFeatures.GazePositionMissing('Unvalid gaze position')
- gaze_position_pixel = (int(nearest_gaze_position.value[0] * visu_frame.width), int(nearest_gaze_position.value[1] * visu_frame.height))
+ gaze_position_pixel = GazeFeatures.GazePosition( (int(nearest_gaze_position.value[0] * visu_frame.width), int(nearest_gaze_position.value[1] * visu_frame.height)) )
# Draw gaze position
cv.circle(visu_frame.matrix, gaze_position_pixel, 2, (0, 255, 255), -1)
@@ -155,17 +155,18 @@ def main():
# Ignore frame when gaze position 3D is not valid
if nearest_gaze_position_3d.validity == 1:
- raise GazeFeatures.GazeDataMissing('Unvalid gaze position 3D')
+ raise GazeFeatures.GazePositionMissing('Unvalid gaze position 3D')
gaze_position_pixel = (int(nearest_gaze_position.value[0] * visu_frame.width), int(nearest_gaze_position.value[1] * visu_frame.height))
gaze_accuracy_mm = numpy.tan(numpy.deg2rad(tobii_accuracy)) * nearest_gaze_position_3d.value[2]
tobii_camera_hfov_mm = numpy.tan(numpy.deg2rad(tobii_camera_hfov / 2)) * nearest_gaze_position_3d.value[2]
- gaze_accuracy_pixel = round(visu_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
+
+ gaze_position_pixel.accuracy = round(visu_frame.width * float(gaze_accuracy_mm) / float(tobii_camera_hfov_mm))
# Draw gaze position and accuracy
cv.circle(visu_frame.matrix, gaze_position_pixel, 2, (0, 255, 255), -1)
- cv.circle(visu_frame.matrix, gaze_position_pixel, gaze_accuracy_pixel, (0, 255, 255), 1)
+ cv.circle(visu_frame.matrix, gaze_position_pixel, gaze_position_pixel.accuracy, (0, 255, 255), 1)
# Hide frame left and right borders before tracking to ignore markers outside focus area
cv.rectangle(video_frame.matrix, (0, 0), (int(video_frame.width/6), int(video_frame.height)), (0, 0, 0), -1)
@@ -220,7 +221,7 @@ def main():
for name, aoi_array in aoi2D_dict.items():
aoi2D_merged_scene[name] = numpy.sum(aoi_array, axis=0) / len(aoi_array)
- aoi2D_merged_scene.draw(visu_frame.matrix, video_gaze_pixel, gaze_accuracy_pixel, exclude=['Visualisation_Plan'])
+ aoi2D_merged_scene.draw(visu_frame.matrix, gaze_position_pixel, gaze_position_pixel.accuracy, exclude=['Visualisation_Plan'])
# When the merged scene is empty
if len(aoi2D_merged_scene.keys()) == 0: