From a4a0ef9e28a009ad073958891382b3215c8d96f6 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 20 Apr 2022 14:41:12 +0200
Subject: Adding AOIFeatures file to generalize some AOI class definitions.
 Working on new visual scan class.

---
 src/argaze/AreaOfInterest/AOI2DScene.py            | 52 ++++++---------
 src/argaze/AreaOfInterest/AOI3DScene.py            | 43 +++++--------
 src/argaze/AreaOfInterest/AOIFeatures.py           | 50 +++++++++++++++
 src/argaze/AreaOfInterest/__init__.py              |  2 +-
 src/argaze/GazeFeatures.py                         | 75 ++++++++++++++++++++--
 .../utils/export_tobii_segment_aruco_aois.py       | 36 ++++++-----
 src/argaze/utils/export_tobii_segment_fixations.py |  8 +--
 src/argaze/utils/live_tobii_aruco_aois.py          | 17 ++---
 8 files changed, 192 insertions(+), 91 deletions(-)
 create mode 100644 src/argaze/AreaOfInterest/AOIFeatures.py

diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index fc4b3b2..929cc30 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -1,44 +1,42 @@
 #!/usr/bin/env python
 
 from argaze import DataStructures
+from argaze.AreaOfInterest import AOIFeatures
+from argaze import GazeFeatures
 
 import cv2 as cv
 import matplotlib.path as mpath
 
-class AOI2D(DataStructures.DictObject):
-	"""Define Area Of Interest 2D
-	```
+class AOI2DScene(AOIFeatures.AOIScene):
+	"""Define AOI 2D scene as:
+	```
 	{
-		'vertices': array of (x, y) tuples,
-		'pointer': (x, y) tuple or None
+		'dimension': 2,
+		'name 1': AOI 1,
+		'name 2': AOI 2,
+		...
 	}
-	```
+	```
 	"""
 
-	def __init__(self, vertices, pointer = None):
-
-		super().__init__(type(self).__name__, **{'vertices': vertices, 'pointer': pointer})
-
-class AOI2DScene(DataStructures.DictObject):
-	"""Define AOI 2D scene as dictionnary of named AOI2Ds."""
-
 	def __init__(self, **aois_2d):
 
-		super().__init__(type(self).__name__, **aois_2d)
+		super().__init__(**aois_2d)
 
-	def __del__(self):
-		pass
+		# set dimension member
+		self.dimension = 2
 
-	def inside(self, pointer):
-		"""Store pointer position if it is inside AOIs."""
+	def look_at(self, gaze_position: GazeFeatures.GazePosition):
+		"""Store gaze position as a pointer inside looked-at AOIs."""
 
-		for name in self.keys():
+		for name in self.areas():
 
 			aoi2D = self[name]
 
-			if mpath.Path(aoi2D.vertices).contains_points([pointer])[0]:
+			if mpath.Path(aoi2D.vertices).contains_points([(gaze_position.x, gaze_position.y)])[0]:
 
-				aoi2D.pointer = pointer
+				# TODO : edit area relative pointer position
+				aoi2D.pointer = (gaze_position.x, gaze_position.y)
 
 			else:
 
@@ -47,7 +45,7 @@ class AOI2DScene(DataStructures.DictObject):
 	def draw(self, frame):
 		"""Draw AOI polygons on frame."""
 
-		for name in self.keys():
+		for name in self.areas():
 
 			aoi2D = self[name]
 			inside = aoi2D.pointer != None
@@ -60,13 +58,3 @@ class AOI2DScene(DataStructures.DictObject):
 			cv.line(frame, aoi2D.vertices[-1], aoi2D.vertices[0], color, 1)
 			for A, B in zip(aoi2D.vertices, aoi2D.vertices[1:]):
 				cv.line(frame, A, B, color, 1)
-
-class TimeStampedAOI2DScenes(DataStructures.TimeStampedBuffer):
-	"""Define timestamped buffer to store AOI2D scenes"""
-
-	def __setitem__(self, key, value: AOI2DScene):
-		"""Force value to be a AOI2DScene"""
-		if type(value) != AOI2DScene:
-			raise ValueError('value must be a AOI2DScene')
-
-		super().__setitem__(key, value)
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index 73fc755..2930983 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -4,32 +4,20 @@
 import math
import re from argaze import DataStructures -from argaze.AreaOfInterest import AOI2DScene +from argaze.AreaOfInterest import AOIFeatures, AOI2DScene import numpy import cv2 as cv -class AOI3D(DataStructures.DictObject): - """Define Area Of Interest 3D - ``` - { - 'vertices': array of (x, y, z) tuples - } - ``` - """ - - def __init__(self, vertices): - - super().__init__(type(self).__name__, **{'vertices': vertices}) - -class AOI3DScene(DataStructures.DictObject): - """Define AOI 3D scene as dictionnary of named AOI3Ds. +class AOI3DScene(AOIFeatures.AOIScene): + """Define AOI 3D scene as: ``` { + 'dimension': 3, 'rotation': (x, y, z) tuples, 'translation': (x, y, z) tuples, - 'AOI name 1': AOI3D, - 'AOI name 2': AOI3D, + 'name 1': AOI 1, + 'name 2': AOI 2, ... } ``` @@ -41,10 +29,14 @@ class AOI3DScene(DataStructures.DictObject): aois_3d['rotation'] = numpy.asarray([0., 0., 0.]) aois_3d['translation'] = numpy.asarray([0., 0., 0.]) - super().__init__(type(self).__name__, **aois_3d) + super().__init__(**aois_3d) - def __del__(self): - pass + # set dimension member + self.dimension = 3 + + def areas(self): + """Get areas names""" + return self.keys()[3::] def load(self, obj_filepath: str): """Load AOI3D scene from .obj file.""" @@ -111,7 +103,7 @@ class AOI3DScene(DataStructures.DictObject): # retreive all aoi3D vertices for name, face in faces.items(): - self.append(name, AOI3D(**{'vertices': [ vertices[i-1] for i in face ]})) + self.append(name, AOIFeatures.AreaOfInterest(**{'vertices': [ vertices[i-1] for i in face ]})) except IOError: raise IOError(f'File not found: {obj_filepath}') @@ -122,10 +114,7 @@ class AOI3DScene(DataStructures.DictObject): aoi2D_scene = {} - for name in self.keys(): - - if name == 'rotation' or name == 'translation': - continue + for name in self.areas(): aoi3D = self[name] @@ -134,7 +123,7 @@ class AOI3DScene(DataStructures.DictObject): vertices_2D, J = cv.projectPoints(vertices_3D, self.rotation, self.translation, K, D) vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2)).tolist() - aoi2D = AOI2DScene.AOI2D(vertices_2D) + aoi2D = AOIFeatures.AreaOfInterest(vertices_2D) aoi2D_scene[name] = aoi2D diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py new file mode 100644 index 0000000..4788100 --- /dev/null +++ b/src/argaze/AreaOfInterest/AOIFeatures.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +from argaze import DataStructures + +class AreaOfInterest(DataStructures.DictObject): + """Define 2D/3D Area Of Interest + ``` + { + 'vertices': array of (x, y(,z)) tuples for each vertices of the area + 'pointer': None or (x, y(,z)) tuple to set where the area is looked + } + ``` + """ + + def __init__(self, vertices, pointer = None): + + super().__init__(type(self).__name__, **{'dimension': len(vertices[0]),'vertices': vertices, 'pointer': pointer}) + +class AOIScene(DataStructures.DictObject): + """Define AOI scene as: + ``` + { + 'dimension': 2 or 3, + 'name 1': AOI 1, + 'name 2': AOI 2, + ... 
+ } + ``` + """ + + def __init__(self, **aois): + + # append dimension member + aois['dimension'] = None + + super().__init__(type(self).__name__, **aois) + + def areas(self): + """Get areas names.""" + return self.keys()[:-1] + +class TimeStampedAOIScenes(DataStructures.TimeStampedBuffer): + """Define timestamped buffer to store AOI scenes in time.""" + + def __setitem__(self, key, value): + """Force value to inherit from AOIScene.""" + if type(value).__bases__[0] != AOIScene: + raise ValueError(f'value must inherit from AOIScene') + + super().__setitem__(key, value) diff --git a/src/argaze/AreaOfInterest/__init__.py b/src/argaze/AreaOfInterest/__init__.py index 57ce80a..a1e9f47 100644 --- a/src/argaze/AreaOfInterest/__init__.py +++ b/src/argaze/AreaOfInterest/__init__.py @@ -2,4 +2,4 @@ .. include:: README.md """ __docformat__ = "restructuredtext" -__all__ = ['AOI2DScene', 'AOI3DScene'] \ No newline at end of file +__all__ = ['AOIFeatures', 'AOI2DScene', 'AOI3DScene'] \ No newline at end of file diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py index 2f1d167..13bd9f5 100644 --- a/src/argaze/GazeFeatures.py +++ b/src/argaze/GazeFeatures.py @@ -3,20 +3,25 @@ import math from argaze import DataStructures +from argaze.AreaOfInterest import AOIFeatures import numpy FIXATION_MAX_DURATION = 1000 class GazePosition(DataStructures.DictObject): - """Define gaze position""" + """Define gaze position.""" def __init__(self, x, y): super().__init__(type(self).__name__, **{'x': x, 'y': y}) + def __iter__(self): + yield self.x + yield self.y + class TimeStampedGazePositions(DataStructures.TimeStampedBuffer): - """Define timestamped buffer to store gaze positions""" + """Define timestamped buffer to store gaze positions.""" def __setitem__(self, key, value: GazePosition): """Force value to be a GazePosition""" @@ -33,7 +38,7 @@ class Fixation(DataStructures.DictObject): super().__init__(type(self).__name__, **{'duration': duration, 'dispersion': dispersion, 'centroid': [cx, cy]}) class TimeStampedFixations(DataStructures.TimeStampedBuffer): - """Define timestamped buffer to store fixations""" + """Define timestamped buffer to store fixations.""" def __setitem__(self, key, value: Fixation): """Force value to be a Fixation""" @@ -43,7 +48,7 @@ class TimeStampedFixations(DataStructures.TimeStampedBuffer): super().__setitem__(key, value) class FixationIdentifier(): - """Abstract class to define what should provide a fixation identifier""" + """Abstract class to define what should provide a fixation identifier.""" def __init__(self, ts_gaze_positions: TimeStampedGazePositions): @@ -187,3 +192,65 @@ class DispersionBasedFixationIdentifier(FixationIdentifier): raise StopIteration return -1, None + +class VisualScan(): + """Abstract class to define when an aoi starts to be looked and when it stops.""" + + def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes): + + if type(ts_aoi_scenes) != AOIFeatures.TimeStampedAOIScenes: + raise ValueError('argument must be a TimeStampedAOIScenes') + + def __iter__(self): + raise NotImplementedError('__iter__() method not implemented') + + def __next__(self): + raise NotImplementedError('__next__() method not implemented') + +class PointerBasedVisualScan(VisualScan): + """Build visual scan on the basis of AOI's pointer information.""" + + def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes, tolerance_to_lacking: int): + + super().__init__(ts_aoi_scenes) + + # process identification on a copy + self.__ts_aoi_scenes = 
ts_aoi_scenes.copy() + + def __iter__(self): + """Start to build visual scan.""" + return self + + def __next__(self): + + # while there is aoi scene to process + if len(self.__ts_aoi_scenes) > 0: + + #if not ts_aoi.looked: + # raise ValueError('TimeStampedAOIScenes must be looked using look_at method.') + + return # start timestamp, AOI name, duration + +class FixationBasedVisualScan(VisualScan): + """Build visual scan on the basis of timestamped fixations.""" + + def __init__(self, ts_aoi_scenes: AOIFeatures.TimeStampedAOIScenes, ts_fixations: TimeStampedFixations): + + super().__init__(ts_aoi_scenes) + + if type(ts_fixations) != TimeStampedFixations: + raise ValueError('second argument must be a GazeFeatures.TimeStampedFixations') + + # process identification on a copy + self.__ts_aoi_scenes = ts_aoi_scenes.copy() + self.__ts_fixations = ts_fixations.copy() + + def __iter__(self): + """Start to build visual scan.""" + return self + + def __next__(self): + + # while there is aoi scene to process + if len(self.__ts_aoi_scenes) > 0: + return \ No newline at end of file diff --git a/src/argaze/utils/export_tobii_segment_aruco_aois.py b/src/argaze/utils/export_tobii_segment_aruco_aois.py index 8f8412d..8e8c3fd 100644 --- a/src/argaze/utils/export_tobii_segment_aruco_aois.py +++ b/src/argaze/utils/export_tobii_segment_aruco_aois.py @@ -29,7 +29,7 @@ def main(): parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)') parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') parser.add_argument('-a', '--aoi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj aoi scene filepath') - parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') + parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)') parser.add_argument('-m', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)') parser.add_argument('-i', '--markers_id', metavar='MARKERS_ID', nargs='*', type=int, default=[], help='markers id to track') parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)') @@ -87,10 +87,13 @@ def main(): # Create AOIs 3D scene aoi3D_scene = AOI3DScene.AOI3DScene() aoi3D_scene.load(args.aoi_scene) - print(f'AOIs names: {aoi3D_scene.keys()[2::]}') + print(f'AOIs names: {aoi3D_scene.areas()}') - # Create Timestamped buffer to store 2D AOIs - aoi2D_timestamped_buffer = AOI2DScene.TimeStampedAOI2DScenes() + # Create timestamped buffer to store AOIs scene in time + ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes() + + # Create timestamped buffer to store gaze positions in time + ts_gaze_positions = GazeFeatures.TimeStampedGazePositions() # Video and data replay loop try: @@ -106,14 +109,17 @@ def main(): # Get closest gaze position before video timestamp and remove all gaze positions before closest_gaze_ts, 
closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts) - # Draw video synchronized gaze pointer - pointer = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height)) - cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1) + # Draw video synchronized gaze position + gaze_position = GazeFeatures.GazePosition(int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height)) + cv.circle(video_frame.matrix, tuple(gaze_position), 4, (0, 255, 255), -1) + + # Store gaze position at this time + ts_gaze_positions[video_ts] = gaze_position # When expected values can't be found except (KeyError, AttributeError, ValueError): - pass # keep last pointer position + pass # keep last gaze position # Track markers with pose estimation and draw them aruco_tracker.track(video_frame.matrix) @@ -141,15 +147,15 @@ def main(): # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable. aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0) - # Check if gaze is inside 2D aois - if pointer != None: - aoi2D_scene.inside(pointer) + # Check which 2D aois is looked + if gaze_position != None: + aoi2D_scene.look_at(gaze_position) # Draw 2D aois aoi2D_scene.draw(video_frame.matrix) - # Store 2D aois - aoi2D_timestamped_buffer[video_ts] = aoi2D_scene + # Store 2D aois scene at this time + ts_aois_scenes[video_ts] = aoi2D_scene # Close window using 'Esc' key if cv.waitKey(1) == 27: @@ -162,7 +168,7 @@ def main(): output_video.write(video_frame.matrix) # Update Progress Bar - progress = video_ts - tobii_segment_video.get_vts_offset() - int(args.time_range[0] * 1000000) + progress = video_ts - int(args.time_range[0] * 1000000) # - tobii_segment_video.get_vts_offset() ? 
MiscFeatures.printProgressBar(progress, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100) # Exit on 'ctrl+C' interruption @@ -178,7 +184,7 @@ def main(): print(f'\nAOIs video saved into {video_filepath}') # Export 2D aois - aoi2D_timestamped_buffer.export_as_json(aois_filepath) + ts_aois_scenes.export_as_json(aois_filepath) print(f'Timestamped AOIs positions saved into {aois_filepath}') diff --git a/src/argaze/utils/export_tobii_segment_fixations.py b/src/argaze/utils/export_tobii_segment_fixations.py index 91a44e9..f232495 100644 --- a/src/argaze/utils/export_tobii_segment_fixations.py +++ b/src/argaze/utils/export_tobii_segment_fixations.py @@ -67,7 +67,7 @@ def main(): fixation_analyser = GazeFeatures.DispersionBasedFixationIdentifier(generic_ts_gaze_positions, args.dispersion_threshold, args.duration_threshold) # Start fixation identification - fixations = GazeFeatures.TimeStampedFixations() + ts_fixations = GazeFeatures.TimeStampedFixations() MiscFeatures.printProgressBar(0, int(tobii_segment_video.get_duration()/1000), prefix = 'Progress:', suffix = 'Complete', length = 100) @@ -78,14 +78,14 @@ def main(): if item.get_type() == 'Fixation': - fixations[ts] = item + ts_fixations[ts] = item MiscFeatures.printProgressBar(ts-int(args.time_range[0]*1000), int(tobii_segment_video.get_duration()/1000), prefix = 'Progress:', suffix = 'Complete', length = 100) - print(f'\n{len(fixations)} fixations found') + print(f'\n{len(ts_fixations)} fixations found') # Export fixations analysis results - fixations.export_as_json(fixations_filepath) + ts_fixations.export_as_json(fixations_filepath) print(f'Fixations saved into {fixations_filepath}') diff --git a/src/argaze/utils/live_tobii_aruco_aois.py b/src/argaze/utils/live_tobii_aruco_aois.py index e718cee..1bc79eb 100644 --- a/src/argaze/utils/live_tobii_aruco_aois.py +++ b/src/argaze/utils/live_tobii_aruco_aois.py @@ -24,7 +24,7 @@ def main(): parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip') parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') parser.add_argument('-a', '--aoi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj aoi scene filepath') - parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') + parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)') parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)') parser.add_argument('-i', '--markers_id', metavar='MARKERS_ID', nargs='*', type=int, default=[], help='markers id to track') args = parser.parse_args() @@ -57,6 +57,7 @@ def main(): # Create AOIs 3D scene aoi3D_scene = AOI3DScene.AOI3DScene() aoi3D_scene.load(args.aoi_scene) + print(f'AOIs names: {aoi3D_scene.areas()}') # Start streaming tobii_controller.start_streaming() @@ -81,14 +82,14 @@ def main(): # Get last gaze position before video timestamp and remove all former gaze 
positions
			earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts)
 
-			# Draw video synchronized gaze pointer
-			pointer = (int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
-			cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
+			# Draw video synchronized gaze position
+			gaze_position = GazeFeatures.GazePosition(int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height))
+			cv.circle(video_frame.matrix, tuple(gaze_position), 4, (0, 255, 255), -1)
 
 		# When expected values aren't in data stream
 		except (KeyError, AttributeError, ValueError):
 
-			pass # keep last pointer position
+			pass # keep last gaze position
 
 		# Track markers with pose estimation and draw them
 		aruco_tracker.track(video_frame.matrix)
@@ -116,9 +117,9 @@ def main():
 				# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
 				aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
 
-				# Check if gaze is inside 2D aois
-				if pointer != None:
-					aoi2D_scene.inside(pointer)
+				# Check which 2D AOIs are looked at
+				if gaze_position != None:
+					aoi2D_scene.look_at(gaze_position)
 
 				# Draw 2D aois
 				aoi2D_scene.draw(video_frame.matrix)
-- 
cgit v1.1
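
As a reading aid, here is a minimal usage sketch of the classes this patch introduces (AOIFeatures.AreaOfInterest, AOIFeatures.AOIScene, AOIFeatures.TimeStampedAOIScenes and the reworked AOI2DScene.look_at()). The AOI names and vertex values are made up for illustration, and it assumes DataStructures.DictObject and TimeStampedBuffer behave as the existing utils scripts already use them; the PointerBasedVisualScan and FixationBasedVisualScan classes are still stubs in this commit, so the sketch stops at filling the timestamped buffer they are meant to consume.

```
#!/usr/bin/env python

from argaze import GazeFeatures
from argaze.AreaOfInterest import AOIFeatures, AOI2DScene

# build a 2D AOI scene from named areas (vertices are illustrative)
aoi2D_scene = AOI2DScene.AOI2DScene(**{
	'screen': AOIFeatures.AreaOfInterest([(0, 0), (800, 0), (800, 600), (0, 600)]),
	'button': AOIFeatures.AreaOfInterest([(100, 100), (200, 100), (200, 150), (100, 150)])
})

# areas() hides the 'dimension' member and returns AOI names only
print(aoi2D_scene.areas())

# mark which areas the gaze position falls within:
# look_at() sets the 'pointer' member of each matching AOI
gaze_position = GazeFeatures.GazePosition(150, 120)
aoi2D_scene.look_at(gaze_position)

# accumulate looked-at scenes over time; TimeStampedAOIScenes only
# accepts values whose class inherits from AOIFeatures.AOIScene
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
ts_aois_scenes[0] = aoi2D_scene
```

The export_tobii_segment_aruco_aois.py changes above follow the same flow, using video timestamps as buffer keys and AOI2DScene instances obtained by projecting the 3D scene with aoi3D_scene.project().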