aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/argaze/GazeFeatures.py86
-rw-r--r--src/argaze/utils/README.md4
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_visual_scan.py (renamed from src/argaze/utils/export_tobii_segment_aruco_aois.py)21
-rw-r--r--src/argaze/utils/export_tobii_segment_fixations.py8
4 files changed, 86 insertions, 33 deletions
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index 13bd9f5..e132849 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -193,7 +193,24 @@ class DispersionBasedFixationIdentifier(FixationIdentifier):
return -1, None
-class VisualScan():
+class VisualScanStep(DataStructures.DictObject):
+ """Define a visual scan step as a duration and an area of interest."""
+
+ def __init__(self, duration, aoi):
+
+ super().__init__(type(self).__name__, **{'duration': duration, 'aoi': aoi})
+
+class TimeStampedVisualScanSteps(DataStructures.TimeStampedBuffer):
+ """Define timestamped buffer to store visual scan steps."""
+
+ def __setitem__(self, key, value: VisualScanStep):
+ """Force value to be a VisualScanStep"""
+ if type(value) != VisualScanStep:
+ raise ValueError('value must be a VisualScanStep')
+
+ super().__setitem__(key, value)
+
+class VisualScanGenerator():
"""Abstract class to define when an aoi starts to be looked and when it stops."""
def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes):
@@ -204,34 +221,68 @@ class VisualScan():
def __iter__(self):
raise NotImplementedError('__iter__() method not implemented')
- def __next__(self):
- raise NotImplementedError('__next__() method not implemented')
+ def build(self):
+
+ visual_scan_steps = TimeStampedVisualScanSteps()
+
+ for ts, step in self:
+
+ if step == None:
+ continue
-class PointerBasedVisualScan(VisualScan):
+ if step.get_type() == 'VisualScanStep':
+
+ visual_scan_steps[ts] = step
+
+ return visual_scan_steps
+
+class PointerBasedVisualScan(VisualScanGenerator):
"""Build visual scan on the basis of AOI's pointer information."""
- def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes, tolerance_to_lacking: int):
+ def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes): # TODO : add tolerance_to_lacking ?
super().__init__(ts_aoi_scenes)
# process identification on a copy
self.__ts_aoi_scenes = ts_aoi_scenes.copy()
- def __iter__(self):
- """Start to build visual scan."""
- return self
+ # a dictionary to store when an aoi starts to be looked
+ self.__start_dict = {}
- def __next__(self):
+ def __iter__(self):
+ """Visual scan generator function."""
# while there is aoi scene to process
- if len(self.__ts_aoi_scenes) > 0:
+ while len(self.__ts_aoi_scenes) > 0:
+
+ (ts_current, aoi_scene_current) = self.__ts_aoi_scenes.pop_first()
- #if not ts_aoi.looked:
+ #if not aoi_scene_current.looked:
# raise ValueError('TimeStampedAOIScenes must be looked using look_at method.')
- return # start timestamp, AOI name, duration
+ for name in aoi_scene_current.areas():
+
+ aoi_looked = aoi_scene_current[name].pointer != None
+
+ if aoi_looked:
+
+ if not name in self.__start_dict.keys():
+
+ # aoi starts to be looked
+ self.__start_dict[name] = ts_current
-class FixationBasedVisualScan(VisualScan):
+ elif name in self.__start_dict.keys():
+
+ # aoi stops to be looked
+ ts_start = self.__start_dict[name]
+ duration = ts_current - ts_start
+
+ # forget the aoi
+ del self.__start_dict[name]
+
+ yield ts_start, VisualScanStep(duration, name)
+
+class FixationBasedVisualScan(VisualScanGenerator):
"""Build visual scan on the basis of timestamped fixations."""
def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes, ts_fixations: TimeStampedFixations):
@@ -246,11 +297,6 @@ class FixationBasedVisualScan(VisualScan):
self.__ts_fixations = ts_fixations.copy()
def __iter__(self):
- """Start to build visual scan."""
- return self
-
- def __next__(self):
+ """Visual scan generator function."""
- # while there is aoi scene to process
- if len(self.__ts_aoi_scenes) > 0:
- return \ No newline at end of file
+ yield -1, None
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index 03dd7ec..3243b26 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -72,10 +72,10 @@ python ./src/argaze/utils/replay_tobii_session.py -s SEGMENT_PATH -r IN OUT
python ./src/argaze/utils/export_tobii_segment_fixations.py -s SEGMENT_PATH -r IN OUT
```
-- Track ArUco markerinto a Tobii camera video segment (replace SEGMENT_PATH) into a time range selection (replace IN OUT). Load an aoi scene (replace AOI_SCENE) .obj file, position it virtually relatively to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any AOI. Export AOIs video and data.
+- Track ArUco marker into a Tobii camera video segment (replace SEGMENT_PATH) into a time range selection (replace IN OUT). Load an aoi scene (replace AOI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any AOI. Export AOIs video and visual scan data.
```
-python ./src/argaze/utils/export_tobii_segment_aruco_aois.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -a AOI_SCENE -r IN OUT
+python ./src/argaze/utils/export_tobii_segment_aruco_visual_scan.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -a AOI_SCENE -r IN OUT
```
- Track ArUco markers (replace MARKER_ID) into a Tobii camera video stream (replace IP_ADDRESS). Load an aoi scene (replace AOI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any AOI.
diff --git a/src/argaze/utils/export_tobii_segment_aruco_aois.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 8e8c3fd..290bdb7 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_aois.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -51,12 +51,12 @@ def main():
os.makedirs(os.path.dirname(args.output))
print(f'{os.path.dirname(args.output)} folder created')
- aois_filepath = f'{args.output}/aois.json'
+ visual_scan_filepath = f'{args.output}/visual_scan.json'
video_filepath = f'{args.output}/fullstream+visu.mp4'
else:
- aois_filepath = f'{args.segment_path}/aois.json'
+ visual_scan_filepath = f'{args.segment_path}/visual_scan.json'
video_filepath = f'{args.segment_path}/fullstream+visu.mp4'
# Load a tobii segment
@@ -98,7 +98,7 @@ def main():
# Video and data replay loop
try:
- # Count frame to display a progress bar
+ # Initialise progress bar
MiscFeatures.printProgressBar(0, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100)
# Iterate on video frames activating video / data synchronisation through vts data buffer
@@ -113,7 +113,7 @@ def main():
gaze_position = GazeFeatures.GazePosition(int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
cv.circle(video_frame.matrix, tuple(gaze_position), 4, (0, 255, 255), -1)
- # Store gaze position at this time
+ # Store gaze position at this time in millisecond
ts_gaze_positions[video_ts] = gaze_position
# When expected values can't be found
@@ -154,7 +154,7 @@ def main():
# Draw 2D aois
aoi2D_scene.draw(video_frame.matrix)
- # Store 2D aois scene at this time
+ # Store 2D aois scene at this time in millisecond
ts_aois_scenes[video_ts] = aoi2D_scene
# Close window using 'Esc' key
@@ -183,10 +183,15 @@ def main():
print(f'\nAOIs video saved into {video_filepath}')
- # Export 2D aois
- ts_aois_scenes.export_as_json(aois_filepath)
+ # Build visual scan based on aoi's pointer
+ visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes).build()
- print(f'Timestamped AOIs positions saved into {aois_filepath}')
+ print(f'{len(visual_scan)} visual scan steps found')
+
+ # Export visual scan
+ visual_scan.export_as_json(visual_scan_filepath)
+
+ print(f'Visual scan saved into {visual_scan_filepath}')
if __name__ == '__main__':
diff --git a/src/argaze/utils/export_tobii_segment_fixations.py b/src/argaze/utils/export_tobii_segment_fixations.py
index f232495..f0a8a4d 100644
--- a/src/argaze/utils/export_tobii_segment_fixations.py
+++ b/src/argaze/utils/export_tobii_segment_fixations.py
@@ -64,11 +64,11 @@ def main():
print(f'Dispersion threshold: {args.dispersion_threshold}')
print(f'Duration threshold: {args.duration_threshold}')
- fixation_analyser = GazeFeatures.DispersionBasedFixationIdentifier(generic_ts_gaze_positions, args.dispersion_threshold, args.duration_threshold)
-
# Start fixation identification
+ fixation_analyser = GazeFeatures.DispersionBasedFixationIdentifier(generic_ts_gaze_positions, args.dispersion_threshold, args.duration_threshold)
ts_fixations = GazeFeatures.TimeStampedFixations()
+ # Initialise progress bar
MiscFeatures.printProgressBar(0, int(tobii_segment_video.get_duration()/1000), prefix = 'Progress:', suffix = 'Complete', length = 100)
for ts, item in fixation_analyser:
@@ -80,7 +80,9 @@ def main():
ts_fixations[ts] = item
- MiscFeatures.printProgressBar(ts-int(args.time_range[0]*1000), int(tobii_segment_video.get_duration()/1000), prefix = 'Progress:', suffix = 'Complete', length = 100)
+ # Update Progress Bar
+ progress = ts - int(args.time_range[0] * 1000)
+ MiscFeatures.printProgressBar(progress, int(tobii_segment_video.get_duration()/1000), prefix = 'Progress:', suffix = 'Complete', length = 100)
print(f'\n{len(ts_fixations)} fixations found')