Diffstat (limited to 'src')
-rw-r--r--  src/argaze/AreaOfInterest/AOI2DScene.py                     | 38
-rw-r--r--  src/argaze/AreaOfInterest/AOI3DScene.py                     |  8
-rw-r--r--  src/argaze/AreaOfInterest/AOIFeatures.py                    | 67
-rw-r--r--  src/argaze/GazeFeatures.py                                  | 55
-rw-r--r--  src/argaze/utils/export_tobii_segment_aruco_visual_scan.py | 17
5 files changed, 119 insertions, 66 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index ee172f8..6eb0f7c 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -5,7 +5,7 @@ from argaze.AreaOfInterest import AOIFeatures
from argaze import GazeFeatures
import cv2 as cv
-import matplotlib.path as mpath
+import numpy
class AOI2DScene(AOIFeatures.AOIScene):
"""Define AOI 2D scene."""
@@ -18,31 +18,37 @@ class AOI2DScene(AOIFeatures.AOIScene):
self.dimension = 2
def look_at(self, gaze_position: GazeFeatures.GazePosition):
- """Store gaze position as a pointer inside looked AOIs."""
+ """Get looked and ignored AOI names."""
- for name, aoi2D in self.areas.items():
+ looked = {}
+ ignored = {}
+
+ for name, aoi in self.areas.items():
- if mpath.Path(aoi2D.vertices).contains_points([(gaze_position.x, gaze_position.y)])[0]:
+ if aoi.looked(gaze_position):
- # TODO : edit area relative pointer position
- aoi2D.pointer = (gaze_position.x, gaze_position.y)
+ looked[name] = aoi.look_at(gaze_position)
else:
- aoi2D.pointer = None
+ ignored[name] = None
- def draw(self, frame):
+ return looked, ignored
+
+ def draw(self, frame, gaze_position: GazeFeatures.GazePosition):
"""Draw AOI polygons on frame."""
for name, aoi2D in self.areas.items():
- inside = aoi2D.pointer != None
-
- color = (0, 255, 0) if inside else (0, 0, 255)
+ looked = aoi2D.looked(gaze_position)
+ looked_at = aoi2D.look_at(gaze_position)
- if inside:
- cv.putText(frame, name, (aoi2D.vertices[3][0], aoi2D.vertices[3][1]), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ color = (0, 255, 0) if looked else (0, 0, 255)
- cv.line(frame, aoi2D.vertices[-1], aoi2D.vertices[0], color, 1)
- for A, B in zip(aoi2D.vertices, aoi2D.vertices[1:]):
- cv.line(frame, A, B, color, 1)
+ if looked:
+ cv.putText(frame, name, aoi2D[3], cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+ cv.circle(frame, looked_at.astype(int), 10, (255, 255, 255), -1)
+
+ # Draw shape
+ aoi2D.draw(frame, color)
+
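Taken together, the reworked AOI2DScene API now looks like this from a caller's point of view. A minimal usage sketch, assuming the scene can be instantiated with no arguments and populated through append() as elsewhere in this commit; the AOI name and vertex values are made up for illustration:

import numpy
from argaze import GazeFeatures
from argaze.AreaOfInterest import AOI2DScene, AOIFeatures

# Hypothetical scene with one square AOI named 'screen'
scene = AOI2DScene.AOI2DScene()
square = numpy.array([[0., 0.], [100., 0.], [100., 100.], [0., 100.]]).astype('float32').view(AOIFeatures.AreaOfInterest)
scene.append('screen', square)

gaze_position = GazeFeatures.GazePosition(50., 50.)

# look_at now returns two dictionaries instead of mutating a pointer attribute on each AOI
looked, ignored = scene.look_at(gaze_position)
print(looked)   # {'screen': array([...])} : area-relative looked coordinates
print(ignored)  # {} when the gaze falls inside every AOI

# draw now needs the gaze position to highlight looked AOIs and the looked point
frame = numpy.zeros((200, 200, 3), dtype=numpy.uint8)
scene.draw(frame, gaze_position)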
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index d126f45..28150c6 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -87,7 +87,7 @@ class AOI3DScene(AOIFeatures.AOIScene):
# retrieve all aoi3D vertices
for name, face in faces.items():
- aoi3D = AOIFeatures.AreaOfInterest(vertices=[ vertices[i-1] for i in face ])
+ aoi3D = numpy.array([ vertices[i-1] for i in face ]).astype('float32').view(AOIFeatures.AreaOfInterest)
self.append(name, aoi3D)
except IOError:
@@ -101,12 +101,10 @@ class AOI3DScene(AOIFeatures.AOIScene):
for name, aoi3D in self.areas.items():
- vertices_3D = numpy.array(aoi3D.vertices).astype('float32')
-
- vertices_2D, J = cv.projectPoints(vertices_3D, self.rotation, self.translation, K, D)
+ vertices_2D, J = cv.projectPoints(aoi3D, self.rotation, self.translation, K, D)
vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2)).tolist()
- aoi2D = AOIFeatures.AreaOfInterest(vertices_2D)
+ aoi2D = numpy.array(vertices_2D).view(AOIFeatures.AreaOfInterest)
aoi2D_scene.append(name, aoi2D)
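The .view() call used above reinterprets an existing numpy buffer as the AreaOfInterest subclass without copying, which is what lets the loaded and projected vertices keep the new area methods. A small standalone sketch, with made-up vertices and camera parameters, of both the view trick and the cv.projectPoints call that project() now makes directly on the array:

import numpy
import cv2 as cv
from argaze.AreaOfInterest import AOIFeatures

# Made-up face of a unit square in 3D
face_vertices = [(0., 0., 0.), (1., 0., 0.), (1., 1., 0.), (0., 1., 0.)]

# .view() reinterprets the float32 buffer as the AreaOfInterest subclass without copying
aoi3D = numpy.array(face_vertices).astype('float32').view(AOIFeatures.AreaOfInterest)
print(aoi3D.dimension(), aoi3D.center())  # 3, per-axis mean of the vertices

# Projection to 2D as in project(): assumed camera placed 5 units away, no distortion
K = numpy.array([[800., 0., 320.], [0., 800., 240.], [0., 0., 1.]])
D = numpy.zeros(4)
rvec, tvec = numpy.zeros(3), numpy.array([0., 0., 5.])
vertices_2D, _ = cv.projectPoints(aoi3D, rvec, tvec, K, D)
print(vertices_2D.reshape((len(vertices_2D), 2)))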
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index b78a104..a33db26 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -4,32 +4,77 @@ from dataclasses import dataclass, field
from argaze import DataStructures
+import cv2 as cv
+import matplotlib.path as mpath
+import numpy
+
@dataclass
-class AreaOfInterest():
+class AreaOfInterest(numpy.ndarray):
"""Define 2D/3D Area Of Interest."""
- dimension: int = field(init=False, default=None)
- """number of the coordinates to code vertice or pointer positions."""
+ def dimension(self):
+ """Number of coordinates coding area points positions."""
+ return self.shape[1]
+
+ def center(self):
+ """Center of mass"""
+ return self.mean(axis=0)
+
+ def clockwise(self):
+ """Get area points in clocwise order."""
+
+ if self.dimension() != 2:
+ raise RuntimeError(f'Bad area dimension ({self.dimension()})')
+
+ O = self.center()
+ OP = (self - O) / numpy.linalg.norm(self - O)
+ angles = numpy.arctan2(OP[:, 1], OP[:, 0])
+
+ return self[numpy.argsort(angles)]
+
+ def looked(self, gaze_position):
+ """Is gaze position inside area ?"""
+
+ if self.dimension() != 2:
+ raise RuntimeError(f'Bad area dimension ({self.dimension()})')
+
+ return mpath.Path(self).contains_points([(gaze_position.x, gaze_position.y)])[0]
+
+ def look_at(self, gaze_position):
+ """Get where the area is looked."""
+
+ if self.dimension() != 2:
+ raise RuntimeError(f'Bad area dimension ({self.dimension()})')
+
+ P = numpy.array([gaze_position.x, gaze_position.y])
+
+ clockwise_area = self.clockwise()
+
+ O = clockwise_area[0]
+ OX, OY = clockwise_area[1] - O, clockwise_area[-1] - O
+ OP = P - O
- vertices: list(tuple())
- """for each vertices of the area."""
+ return numpy.array([numpy.dot(OP, OX) / numpy.dot(OX, OX), numpy.dot(OP, OY) / numpy.dot(OY, OY)])
- pointer: tuple = None
- """to set where the area is looked."""
+ def draw(self, frame, color):
- def __post_init__(self):
+ # Draw shape
+ cv.line(frame, self[-1], self[0], color, 1)
+ for A, B in zip(self, self[1:]):
+ cv.line(frame, A, B, color, 1)
- self.dimension = len(self.vertices[0])
+ # Draw center
+ cv.circle(frame, self.center().astype(int), 1, color, -1)
@dataclass
class AOIScene():
"""Define 2D/3D AOI scene."""
dimension: int = field(init=False, default=None)
- """dimension of the AOIs in scene."""
+ """Dimension of the AOIs in scene."""
areas: dict = field(init=False, default_factory=dict)
- """all aois in the scene."""
+ """All aois in the scene."""
def append(self, name, aoi: AreaOfInterest):
"""Add an aoi to the scene."""
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index e0c3e73..73dace9 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -17,6 +17,9 @@ class GazePosition():
x: float
y: float
+ def as_tuple(self):
+ return (self.x, self.y)
+
class TimeStampedGazePositions(DataStructures.TimeStampedBuffer):
"""Define timestamped buffer to store gaze positions."""
@@ -206,11 +209,11 @@ class DispersionBasedFixationIdentifier(FixationIdentifier):
@dataclass
class VisualScanStep():
- """Define a visual scan step as a duration, the name of the area of interest and all its frames during the step."""
+ """Define a visual scan step as a duration, the name of the area of interest and where gaze looked at in each frame during the step."""
duration: float
area: str
- frames: DataStructures.TimeStampedBuffer
+ look_at: DataStructures.TimeStampedBuffer
class TimeStampedVisualScanSteps(DataStructures.TimeStampedBuffer):
"""Define timestamped buffer to store visual scan steps."""
@@ -247,14 +250,15 @@ class VisualScanGenerator():
return visual_scan_steps
class PointerBasedVisualScan(VisualScanGenerator):
- """Build visual scan on the basis of AOI's pointer information."""
+ """Build visual scan on the basis of which AOI are looked."""
- def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes): # TODO : add tolerance_to_lacking ?
+ def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes, ts_gaze_positions: TimeStampedGazePositions):
super().__init__(ts_aoi_scenes)
# process identification on a copy
self.__ts_aoi_scenes = ts_aoi_scenes.copy()
+ self.__ts_gaze_positions = ts_gaze_positions.copy()
# a dictionary to store when an aoi starts to be looked
self.__step_dict = {}
@@ -267,35 +271,40 @@ class PointerBasedVisualScan(VisualScanGenerator):
(ts_current, aoi_scene_current) = self.__ts_aoi_scenes.pop_first()
- #if not aoi_scene_current.looked:
- # raise ValueError('TimeStampedAOIScenes must be looked using look_at method.')
+ try:
+
+ gaze_position = self.__ts_gaze_positions[ts_current]
+
+ for name, aoi in aoi_scene_current.areas.items():
- for name, aoi in aoi_scene_current.areas.items():
+ looked = aoi.looked(gaze_position)
- aoi_looked = aoi.pointer != None
+ if looked:
- if aoi_looked:
+ if not name in self.__step_dict.keys():
- if not name in self.__step_dict.keys():
+ # aoi starts to be looked
+ self.__step_dict[name] = {
+ 'start': ts_current,
+ 'look_at': DataStructures.TimeStampedBuffer()
+ }
- # aoi starts to be looked
- self.__step_dict[name] = {
- 'start': ts_current,
- 'frames': DataStructures.TimeStampedBuffer()
- }
+ # store where the aoi is looked
+ self.__step_dict[name]['look_at'][ts_current] = aoi.look_at(gaze_position).tolist()
- # store current aoi
- self.__step_dict[name]['frames'][ts_current] = aoi
+ elif name in self.__step_dict.keys():
- elif name in self.__step_dict.keys():
+ ts_start = self.__step_dict[name]['start']
- ts_start = self.__step_dict[name]['start']
+ # aoi stops to be looked
+ yield ts_start, VisualScanStep(ts_current - ts_start, name, self.__step_dict[name]['look_at'])
- # aoi stops to be looked
- yield ts_start, VisualScanStep(ts_current - ts_start, name, self.__step_dict[name]['frames'])
+ # forget the aoi
+ del self.__step_dict[name]
- # forget the aoi
- del self.__step_dict[name]
+ # ignore missing gaze position
+ except KeyError:
+ pass
class FixationBasedVisualScan(VisualScanGenerator):
"""Build visual scan on the basis of timestamped fixations."""
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 295a0da..4089d2f 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -118,7 +118,6 @@ def main():
# When expected values can't be found
except (KeyError, AttributeError, ValueError):
-
pass # keep last gaze position
# Track markers with pose estimation and draw them
@@ -147,14 +146,10 @@ def main():
# This hack isn't realistic but, as the gaze will mainly focus on centered AOI where the distortion is low, it is acceptable.
aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
- # Check which 2D aois is looked
- if gaze_position != None:
- aoi2D_scene.look_at(gaze_position)
-
- # Draw 2D aois
- aoi2D_scene.draw(video_frame.matrix)
+ # Draw 2D scene
+ aoi2D_scene.draw(video_frame.matrix, gaze_position)
- # Store 2D aois scene at this time in millisecond
+ # Store 2D scene at this time in millisecond
ts_aois_scenes[video_ts/1000] = aoi2D_scene
# Close window using 'Esc' key
@@ -183,13 +178,13 @@ def main():
print(f'\nAOIs video saved into {video_filepath}')
- # Build visual scan based on aoi's pointer
- visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes).build()
+ # Build visual scan based on a pointer position
+ visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes, ts_gaze_positions).build()
print(f'{len(visual_scan)} visual scan steps found')
# Export visual scan
- visual_scan.export_as_csv(visual_scan_filepath, exclude=['frames'])
+ visual_scan.export_as_csv(visual_scan_filepath) #exclude=['frames']
print(f'Visual scan saved into {visual_scan_filepath}')
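For completeness, the containment test the whole pipeline relies on (AreaOfInterest.looked, backed by matplotlib's Path) can be reproduced on its own; the coordinates below are made up:

import matplotlib.path as mpath

square = [(0, 0), (100, 0), (100, 100), (0, 100)]
print(mpath.Path(square).contains_points([(50, 50), (150, 50)]))  # [ True False]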