From 1f0a4bf66289a6d85e3896248deeb156e6899d2a Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Wed, 13 Apr 2022 23:21:35 +0200
Subject: Using Area Of Interest (AOI) instead of Region Of Interest (ROI)
 everywhere

---
 README.md                                    |   2 +-
 src/argaze/AreaOfInterest/AOI2DScene.py      |  72 +++++++++
 src/argaze/AreaOfInterest/AOI3DScene.py      | 141 ++++++++++++++++
 src/argaze/AreaOfInterest/README.md          |   4 +
 src/argaze/AreaOfInterest/__init__.py        |   5 +
 src/argaze/RegionOfInterest/README.md        |   4 -
 src/argaze/RegionOfInterest/ROI2DScene.py    |  72 ---------
 src/argaze/RegionOfInterest/ROI3DScene.py    | 141 ----------------
 src/argaze/RegionOfInterest/__init__.py      |   5 -
 src/argaze/__init__.py                       |   2 +-
 src/argaze/utils/README.md                   |   8 +-
 .../utils/export_tobii_segment_aruco_aois.py | 178 +++++++++++++++++++++
 .../utils/export_tobii_segment_aruco_rois.py | 178 ---------------------
 src/argaze/utils/live_tobii_aruco_aois.py    | 135 ++++++++++++++++
 src/argaze/utils/live_tobii_aruco_rois.py    | 135 ----------------
 15 files changed, 541 insertions(+), 541 deletions(-)
 create mode 100644 src/argaze/AreaOfInterest/AOI2DScene.py
 create mode 100644 src/argaze/AreaOfInterest/AOI3DScene.py
 create mode 100644 src/argaze/AreaOfInterest/README.md
 create mode 100644 src/argaze/AreaOfInterest/__init__.py
 delete mode 100644 src/argaze/RegionOfInterest/README.md
 delete mode 100644 src/argaze/RegionOfInterest/ROI2DScene.py
 delete mode 100644 src/argaze/RegionOfInterest/ROI3DScene.py
 delete mode 100644 src/argaze/RegionOfInterest/__init__.py
 create mode 100644 src/argaze/utils/export_tobii_segment_aruco_aois.py
 delete mode 100644 src/argaze/utils/export_tobii_segment_aruco_rois.py
 create mode 100644 src/argaze/utils/live_tobii_aruco_aois.py
 delete mode 100644 src/argaze/utils/live_tobii_aruco_rois.py

diff --git a/README.md b/README.md
index 07ccc8f..2da1489 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ An open-source python toolkit to deal with gaze tracking and analysis in Augment
 The ArGaze toolkit provides some generics data structures and algorithms to process gaze analysis and it is divided in submodules dedicated to various specifics features:
 
 * ArUcoMarkers: ArUco markers generator, traking, camera calibration, ...
-* RegionOfInterest: Region Of Interest (ROI) scene management for 2D and 3D environment.
+* AreaOfInterest: Area Of Interest (AOI) scene management for 2D and 3D environments.
 * TobiiGlassesPro2: A gaze tracking device interface.
 * utils: Collection of command-line high level features scripts based on ArGaze toolkit.
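For orientation before the file-by-file diff, here is a minimal sketch of how the renamed 2D API introduced by this patch is meant to be used. It is not part of the commit: the vertex coordinates and pointer position are made-up values, and it relies only on calls that appear in the new `AOI2DScene.py` below (`AOI2D`, `AOI2DScene`, `inside`, key iteration and indexing).

```python
from argaze.AreaOfInterest import AOI2DScene

# One named AOI with four (x, y) corner vertices (made-up values)
screen = AOI2DScene.AOI2D([(100, 100), (500, 100), (500, 400), (100, 400)])
scene = AOI2DScene.AOI2DScene(**{'Screen': screen})

# Store the pointer position on every AOI it falls inside
scene.inside((300, 250))

# Read back the hit-test result for each AOI
for name in scene.keys():
    print(name, scene[name].pointer)  # Screen (300, 250) when inside, Screen None otherwise
```

The pointer ends up stored on each AOI it falls inside, which is what `draw()` later uses to pick the highlight color.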
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
new file mode 100644
index 0000000..fc4b3b2
--- /dev/null
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+from argaze import DataStructures
+
+import cv2 as cv
+import matplotlib.path as mpath
+
+class AOI2D(DataStructures.DictObject):
+    """Define Area Of Interest 2D
+    ```
+    {
+        'vertices': array of (x, y) tuples,
+        'pointer': (x, y) tuple or None
+    }
+    ```
+    """
+
+    def __init__(self, vertices, pointer = None):
+
+        super().__init__(type(self).__name__, **{'vertices': vertices, 'pointer': pointer})
+
+class AOI2DScene(DataStructures.DictObject):
+    """Define AOI 2D scene as dictionary of named AOI2Ds."""
+
+    def __init__(self, **aois_2d):
+
+        super().__init__(type(self).__name__, **aois_2d)
+
+    def __del__(self):
+        pass
+
+    def inside(self, pointer):
+        """Store pointer position if it is inside AOIs."""
+
+        for name in self.keys():
+
+            aoi2D = self[name]
+
+            if mpath.Path(aoi2D.vertices).contains_points([pointer])[0]:
+
+                aoi2D.pointer = pointer
+
+            else:
+
+                aoi2D.pointer = None
+
+    def draw(self, frame):
+        """Draw AOI polygons on frame."""
+
+        for name in self.keys():
+
+            aoi2D = self[name]
+            inside = aoi2D.pointer != None
+
+            color = (0, 255, 0) if inside else (0, 0, 255)
+
+            if inside:
+                cv.putText(frame, name, (aoi2D.vertices[3][0], aoi2D.vertices[3][1]), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
+
+            cv.line(frame, aoi2D.vertices[-1], aoi2D.vertices[0], color, 1)
+            for A, B in zip(aoi2D.vertices, aoi2D.vertices[1:]):
+                cv.line(frame, A, B, color, 1)
+
+class TimeStampedAOI2DScenes(DataStructures.TimeStampedBuffer):
+    """Define timestamped buffer to store AOI2D scenes"""
+
+    def __setitem__(self, key, value: AOI2DScene):
+        """Force value to be an AOI2DScene"""
+        if type(value) != AOI2DScene:
+            raise ValueError('value must be an AOI2DScene')
+
+        super().__setitem__(key, value)
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
new file mode 100644
index 0000000..d2369b4
--- /dev/null
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+
+import math
+import re
+
+from argaze import DataStructures
+from argaze.AreaOfInterest import AOI2DScene
+
+import numpy
+import cv2 as cv
+
+class AOI3D(DataStructures.DictObject):
+    """Define Area Of Interest 3D
+    ```
+    {
+        'vertices': array of (x, y, z) tuples
+    }
+    ```
+    """
+
+    def __init__(self, vertices):
+
+        super().__init__(type(self).__name__, **{'vertices': vertices})
+
+class AOI3DScene(DataStructures.DictObject):
+    """Define AOI 3D scene as dictionary of named AOI3Ds.
+    ```
+    {
+        'rotation': (x, y, z) tuple,
+        'translation': (x, y, z) tuple,
+        'AOI name 1': AOI3D,
+        'AOI name 2': AOI3D,
+        ...
+    }
+    ```
+    """
+
+    def __init__(self, **aois_3d):
+
+        # append rotation and translation matrix
+        aois_3d['rotation'] = [0, 0, 0]
+        aois_3d['translation'] = [0, 0, 0]
+
+        super().__init__(type(self).__name__, **aois_3d)
+
+    def __del__(self):
+        pass
+
+    def load(self, obj_filepath: str):
+        """Load AOI3D scene from .obj file."""
+
+        # regex rules for .obj file parsing
+        OBJ_RX_DICT = {
+            'comment': re.compile(r'#(.*)\n'),
+            'name': re.compile(r'o (\w+)(.*)\n'),
+            'vertice': re.compile(r'v ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'),
+            'face': re.compile(r'f (.*)\n')
+        }
+
+        # regex .obj line parser
+        def __parse_obj_line(line):
+
+            for key, rx in OBJ_RX_DICT.items():
+                match = rx.search(line)
+                if match:
+                    return key, match
+
+            # if there are no matches
+            return None, None
+
+        # start parsing
+        try:
+
+            name = None
+            vertices = []
+            faces = {}
+
+            # open the file and read through it line by line
+            with open(obj_filepath, 'r') as file:
+
+                line = file.readline()
+
+                while line:
+
+                    # at each line check for a match with a regex
+                    key, match = __parse_obj_line(line)
+
+                    # extract comment
+                    if key == 'comment':
+                        pass
+
+                    # extract aoi3D name
+                    elif key == 'name':
+
+                        name = str(match.group(1))
+
+                    # fill vertices array
+                    elif key == 'vertice':
+
+                        vertices.append(tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))]))
+
+                    # extract aoi3D vertex ids
+                    elif key == 'face':
+
+                        faces[name] = [int(i) for i in match.group(1).split()]
+
+                    # go to next line
+                    line = file.readline()
+
+                file.close()
+
+            # retrieve all aoi3D vertices
+            for name, face in faces.items():
+                self.append(name, AOI3D(**{'vertices': [ vertices[i-1] for i in face ]}))
+
+        except IOError:
+            raise IOError(f'File not found: {obj_filepath}')
+
+    def project(self, K, D):
+        """Project 3D scene onto 2D scene according to optical parameters.
+        **Returns:** AOI2DScene"""
+
+        aoi2D_scene = {}
+
+        for name in self.keys():
+
+            if name == 'rotation' or name == 'translation':
+                continue
+
+            aoi3D = self[name]
+
+            vertices_3D = numpy.array(aoi3D.vertices).astype('float32')
+
+            vertices_2D, J = cv.projectPoints(vertices_3D, self.rotation, self.translation, K, D)
+            vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2)).tolist()
+
+            aoi2D = AOI2DScene.AOI2D(vertices_2D)
+
+            aoi2D_scene[name] = aoi2D
+
+        return AOI2DScene.AOI2DScene(**aoi2D_scene)
diff --git a/src/argaze/AreaOfInterest/README.md b/src/argaze/AreaOfInterest/README.md
new file mode 100644
index 0000000..1b1fb9d
--- /dev/null
+++ b/src/argaze/AreaOfInterest/README.md
@@ -0,0 +1,4 @@
+Class interface to manage Areas of Interest (AOI).
+
+
+
diff --git a/src/argaze/AreaOfInterest/__init__.py b/src/argaze/AreaOfInterest/__init__.py
new file mode 100644
index 0000000..57ce80a
--- /dev/null
+++ b/src/argaze/AreaOfInterest/__init__.py
@@ -0,0 +1,5 @@
+"""
+.. include:: README.md
+"""
+__docformat__ = "restructuredtext"
+__all__ = ['AOI2DScene', 'AOI3DScene']
\ No newline at end of file
diff --git a/src/argaze/RegionOfInterest/README.md b/src/argaze/RegionOfInterest/README.md
deleted file mode 100644
index 7c22479..0000000
--- a/src/argaze/RegionOfInterest/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-Class interface to manage [ROIs](https://en.wikipedia.org/wiki/Region_of_interest).
- - - diff --git a/src/argaze/RegionOfInterest/ROI2DScene.py b/src/argaze/RegionOfInterest/ROI2DScene.py deleted file mode 100644 index 066d8f1..0000000 --- a/src/argaze/RegionOfInterest/ROI2DScene.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python - -from argaze import DataStructures - -import cv2 as cv -import matplotlib.path as mpath - -class ROI2D(DataStructures.DictObject): - """Define Region Of Interest 2D - ``` - { - 'vertices': array of (x, y) tuples, - 'pointer': (x, y) tuple or None - } - ``` - """ - - def __init__(self, vertices, pointer = None): - - super().__init__(type(self).__name__, **{'vertices': vertices, 'pointer': pointer}) - -class ROI2DScene(DataStructures.DictObject): - """Define ROI 2D scene as dictionnary of named ROI2Ds.""" - - def __init__(self, **rois_2d): - - super().__init__(type(self).__name__, **rois_2d) - - def __del__(self): - pass - - def inside(self, pointer): - """Store pointer position if it is inside ROIs.""" - - for name in self.keys(): - - roi2D = self[name] - - if mpath.Path(roi2D.vertices).contains_points([pointer])[0]: - - roi2D.pointer = pointer - - else: - - roi2D.pointer = None - - def draw(self, frame): - """Draw ROI polygons on frame.""" - - for name in self.keys(): - - roi2D = self[name] - inside = roi2D.pointer != None - - color = (0, 255, 0) if inside else (0, 0, 255) - - if inside: - cv.putText(frame, name, (roi2D.vertices[3][0], roi2D.vertices[3][1]), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA) - - cv.line(frame, roi2D.vertices[-1], roi2D.vertices[0], color, 1) - for A, B in zip(roi2D.vertices, roi2D.vertices[1:]): - cv.line(frame, A, B, color, 1) - -class TimeStampedROI2DScenes(DataStructures.TimeStampedBuffer): - """Define timestamped buffer to store ROI2D scenes""" - - def __setitem__(self, key, value: ROI2DScene): - """Force value to be a ROI2DScene""" - if type(value) != ROI2DScene: - raise ValueError('value must be a ROI2DScene') - - super().__setitem__(key, value) diff --git a/src/argaze/RegionOfInterest/ROI3DScene.py b/src/argaze/RegionOfInterest/ROI3DScene.py deleted file mode 100644 index 9b753a3..0000000 --- a/src/argaze/RegionOfInterest/ROI3DScene.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -import math -import re - -from argaze import DataStructures -from argaze.RegionOfInterest import ROI2DScene - -import numpy -import cv2 as cv - -class ROI3D(DataStructures.DictObject): - """Define Region Of Interest 3D - ``` - { - 'vertices': array of (x, y, z) tuples - } - ``` - """ - - def __init__(self, vertices): - - super().__init__(type(self).__name__, **{'vertices': vertices}) - -class ROI3DScene(DataStructures.DictObject): - """Define ROI 3D scene as dictionnary of named ROI3Ds. - ``` - { - 'rotation': (x, y, z) tuples, - 'translation': (x, y, z) tuples, - 'ROI name 1': ROI3D, - 'ROI name 2': ROI3D, - ... 
- } - ``` - """ - - def __init__(self, **rois_3d): - - # append rotation and translation matrix - rois_3d['rotation'] = [0, 0, 0] - rois_3d['translation'] = [0, 0, 0] - - super().__init__(type(self).__name__, **rois_3d) - - def __del__(self): - pass - - def load(self, obj_filepath: str): - """Load ROI3D scene from .obj file.""" - - # regex rules for .obj file parsing - OBJ_RX_DICT = { - 'comment': re.compile(r'#(.*)\n'), - 'name': re.compile(r'o (\w+)(.*)\n'), - 'vertice': re.compile(r'v ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+) ([+-]?[0-9]*[.]?[0-9]+)\n'), - 'face': re.compile(r'f (.*)\n') - } - - # regex .obj line parser - def __parse_obj_line(line): - - for key, rx in OBJ_RX_DICT.items(): - match = rx.search(line) - if match: - return key, match - - # if there are no matches - return None, None - - # start parsing - try: - - name = None - vertices = [] - faces = {} - - # open the file and read through it line by line - with open(obj_filepath, 'r') as file: - - line = file.readline() - - while line: - - # at each line check for a match with a regex - key, match = __parse_obj_line(line) - - # extract comment - if key == 'comment': - pass - - # extract roi3D name - elif key == 'name': - - name = str(match.group(1)) - - # fill vertices array - elif key == 'vertice': - - vertices.append(tuple([float(match.group(1)), float(match.group(2)), float(match.group(3))])) - - # extract roi3D vertice id - elif key == 'face': - - faces[name] = [int(i) for i in match.group(1).split()] - - # go to next line - line = file.readline() - - file.close() - - # retreive all roi3D vertices - for name, face in faces.items(): - self.append(name, ROI3D(**{'vertices': [ vertices[i-1] for i in face ]})) - - except IOError: - raise IOError(f'File not found: {obj_filepath}') - - def project(self, K, D): - """Project 3D scene onto 2D scene according optical parameters. - **Returns:** ROI2DScene""" - - roi2D_scene = {} - - for name in self.keys(): - - if name == 'rotation' or name == 'translation': - continue - - roi3D = self[name] - - vertices_3D = numpy.array(roi3D.vertices).astype('float32') - - vertices_2D, J = cv.projectPoints(vertices_3D, self.rotation, self.translation, K, D) - vertices_2D = vertices_2D.astype('int').reshape((len(vertices_2D), 2)).tolist() - - roi2D = ROI2DScene.ROI2D(vertices_2D) - - roi2D_scene[name] = roi2D - - return ROI2DScene.ROI2DScene(**roi2D_scene) diff --git a/src/argaze/RegionOfInterest/__init__.py b/src/argaze/RegionOfInterest/__init__.py deleted file mode 100644 index 8c16ff2..0000000 --- a/src/argaze/RegionOfInterest/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -.. include:: README.md -""" -__docformat__ = "restructuredtext" -__all__ = ['ROI2DScene', 'ROI3DScene'] \ No newline at end of file diff --git a/src/argaze/__init__.py b/src/argaze/__init__.py index dfc3e99..a57d280 100644 --- a/src/argaze/__init__.py +++ b/src/argaze/__init__.py @@ -2,4 +2,4 @@ .. 
include:: ../../README.md
 """
 __docformat__ = "restructuredtext"
-__all__ = ['utils','GazeFeatures','RegionOfInterest','TobiiGlassesPro2','ArUcoMarkers','DataStructures']
\ No newline at end of file
+__all__ = ['utils','GazeFeatures','AreaOfInterest','TobiiGlassesPro2','ArUcoMarkers','DataStructures']
\ No newline at end of file
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index ccf5f3f..8e5341c 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -72,14 +72,14 @@ python ./src/argaze/utils/replay_tobii_session.py -s SEGMENT_PATH
 python ./src/argaze/utils/export_tobii_segment_fixations.py -s SEGMENT_PATH
 ```
 
-- Track ArUco markers into a Tobii camera video segment (replace SEGMENT_PATH). Load an roi scene (replace ROI_SCENE) .obj file, position it virtually relatively to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any ROI. Export ROIs video and data.
+- Track ArUco markers into a Tobii camera video segment (replace SEGMENT_PATH). Load an AOI scene (replace AOI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any AOI. Export AOIs video and data.
 
 ```
-python ./src/argaze/utils/export_tobii_segment_aruco_rois.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -a ROI_SCENE
+python ./src/argaze/utils/export_tobii_segment_aruco_aois.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -a AOI_SCENE
 ```
 
-- Track ArUco markers into Tobii camera video stream (replace IP_ADDRESS). Load an roi scene (replace ROI_SCENE) .obj file, position it virtually relatively to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any ROI.
+- Track ArUco markers into Tobii camera video stream (replace IP_ADDRESS). Load an AOI scene (replace AOI_SCENE) .obj file, position it virtually relative to any detected ArUco markers and project the scene into the camera frame. Then, detect if the Tobii gaze point is inside any AOI.
 
 ```
-python ./src/argaze/utils/live_tobii_aruco_rois.py -t IP_ADDRESS -c export/tobii_camera.json -m 7.5 -a ROI_SCENE
+python ./src/argaze/utils/live_tobii_aruco_aois.py -t IP_ADDRESS -c export/tobii_camera.json -m 7.5 -a AOI_SCENE
 ```
diff --git a/src/argaze/utils/export_tobii_segment_aruco_aois.py b/src/argaze/utils/export_tobii_segment_aruco_aois.py
new file mode 100644
index 0000000..f833111
--- /dev/null
+++ b/src/argaze/utils/export_tobii_segment_aruco_aois.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+
+import argparse
+import bisect
+import os
+
+from argaze import DataStructures
+from argaze import GazeFeatures
+from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiVideo
+from argaze.ArUcoMarkers import *
+from argaze.AreaOfInterest import *
+from argaze.utils import MiscFeatures
+
+import numpy
+
+import cv2 as cv
+
+def main():
+    """
+    Track any ArUco marker in a Tobii Glasses Pro 2 segment video file.
+    From a loaded AOI scene .obj file, position the scene virtually relative to any detected ArUco markers and project the scene into the camera frame.
+    Then, detect if the Tobii gaze point is inside any AOI.
+    Export AOIs video and data.
+ """ + + # Manage arguments + parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) + parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path') + parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)') + parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') + parser.add_argument('-a', '--roi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj roi scene filepath') + parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') + parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)') + parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)') + args = parser.parse_args() + + if args.segment_path != None: + + # Manage destination path + if args.output != None: + + if not os.path.exists(os.path.dirname(args.output)): + + os.makedirs(os.path.dirname(args.output)) + print(f'{os.path.dirname(args.output)} folder created') + + aois_filepath = f'{args.output}/aois.json' + video_filepath = f'{args.output}/fullstream+visu.mp4' + + else: + + aois_filepath = f'{args.segment_path}/aois.json' + video_filepath = f'{args.segment_path}/fullstream+visu.mp4' + + # Load a tobii segment + tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1000000), int(args.time_range[1] * 1000000) if args.time_range[1] != None else None) + + # Load a tobii segment video + tobii_segment_video = tobii_segment.load_video() + print(f'Video duration: {tobii_segment_video.get_duration()/1000000}, width: {tobii_segment_video.get_width()}, height: {tobii_segment_video.get_height()}') + + # Load a tobii segment data + tobii_segment_data = tobii_segment.load_data() + print(f'Data keys: {tobii_segment_data.keys()}') + + # Access to timestamped gaze position data buffer + tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp + print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded') + + # Prepare video exportation at the same format than segment video + output_video = TobiiVideo.TobiiVideoOutput(video_filepath, tobii_segment_video.get_stream()) + + # Create aruco camera + aruco_camera = ArUcoCamera.ArUcoCamera() + aruco_camera.load_calibration_file(args.camera_calibration) + + # Create aruco tracker + aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera) + + # Create AOIs 3D scene + aoi3D_scene = AOI3DScene.AOI3DScene() + aoi3D_scene.load(args.roi_scene) + print(f'AOIs names: {aoi3D_scene.keys()[2::]}') + + # Create Timestamped buffer to store 2D AOIs + aoi2D_timestamped_buffer = AOI2DScene.TimeStampedAOI2DScenes() + + # Video and data replay loop + try: + + # Count frame to display a progress bar + MiscFeatures.printProgressBar(0, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100) + + # Iterate on video frames activating video / data synchronisation through vts data buffer + for video_ts, video_frame in tobii_segment_video.frames(tobii_segment_data.vts): + + try: + + # Get closest gaze position before video timestamp and remove all gaze positions before + closest_gaze_ts, closest_gaze_position = 
+                    closest_gaze_ts, closest_gaze_position = tobii_ts_gaze_positions.pop_first_until(video_ts)
+
+                    # Draw video synchronized gaze pointer
+                    pointer = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
+                    cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
+
+                # When expected values can't be found
+                except (KeyError, AttributeError, ValueError):
+
+                    pass # keep last pointer position
+
+                # Track markers with pose estimation and draw them
+                aruco_tracker.track(video_frame.matrix)
+                aruco_tracker.draw(video_frame.matrix)
+
+                # Project 3D scenes related to each aruco markers
+                if aruco_tracker.get_markers_number():
+
+                    for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
+
+                        # TODO : Select different 3D scenes depending on aruco id
+
+                        marker_rotation = aruco_tracker.get_marker_rotation(i)
+                        marker_translation = aruco_tracker.get_marker_translation(i)
+
+                        aoi3D_scene.rotation = marker_rotation
+                        aoi3D_scene.translation = marker_translation
+
+                        # Edit zero distortion matrix
+                        D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
+
+                        # DON'T APPLY CAMERA DISTORTION: it projects points which are far from the frame into it
+                        # This hack isn't realistic but as the gaze will mainly focus on centered AOIs, where the distortion is low, it is acceptable.
+                        aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0)
+
+                        # Check if gaze is inside 2D aois
+                        if pointer != None:
+                            aoi2D_scene.inside(pointer)
+
+                        # Draw 2D aois
+                        aoi2D_scene.draw(video_frame.matrix)
+
+                        # Store 2D aois
+                        aoi2D_timestamped_buffer[video_ts] = aoi2D_scene
+
+                # Close window using 'Esc' key
+                if cv.waitKey(1) == 27:
+                    break
+
+                # Display video
+                cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)
+
+                # Write video
+                output_video.write(video_frame.matrix)
+
+                # Update Progress Bar
+                progress = video_ts - tobii_segment_video.get_vts_offset() - int(args.time_range[0] * 1000000)
+                MiscFeatures.printProgressBar(progress, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100)
+
+        # Exit on 'ctrl+C' interruption
+        except KeyboardInterrupt:
+            pass
+
+        # Stop frame display
+        cv.destroyAllWindows()
+
+        # End output video file
+        output_video.close()
+
+        print(f'\nAOIs video saved into {video_filepath}')
+
+        # Export 2D aois
+        aoi2D_timestamped_buffer.export_as_json(aois_filepath)
+
+        print(f'Timestamped AOIs positions saved into {aois_filepath}')
+
+if __name__ == '__main__':
+
+    main()
\ No newline at end of file
diff --git a/src/argaze/utils/export_tobii_segment_aruco_rois.py b/src/argaze/utils/export_tobii_segment_aruco_rois.py
deleted file mode 100644
index d31ebb3..0000000
--- a/src/argaze/utils/export_tobii_segment_aruco_rois.py
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import bisect
-import os
-
-from argaze import DataStructures
-from argaze import GazeFeatures
-from argaze.TobiiGlassesPro2 import TobiiEntities, TobiiVideo
-from argaze.ArUcoMarkers import *
-from argaze.RegionOfInterest import *
-from argaze.utils import MiscFeatures
-
-import numpy
-
-import cv2 as cv
-
-def main():
-    """
-    Track any ArUco marker into Tobii Glasses Pro 2 segment video file.
-    From a loaded ROI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame.
-    Then, detect if Tobii gaze point is inside any ROI.
-    Export ROIs video and data.
- """ - - # Manage arguments - parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) - parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path') - parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)') - parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') - parser.add_argument('-a', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath') - parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') - parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)') - parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)') - args = parser.parse_args() - - if args.segment_path != None: - - # Manage destination path - if args.output != None: - - if not os.path.exists(os.path.dirname(args.output)): - - os.makedirs(os.path.dirname(args.output)) - print(f'{os.path.dirname(args.output)} folder created') - - rois_filepath = f'{args.output}/rois.json' - video_filepath = f'{args.output}/fullstream+visu.mp4' - - else: - - rois_filepath = f'{args.segment_path}/rois.json' - video_filepath = f'{args.segment_path}/fullstream+visu.mp4' - - # Load a tobii segment - tobii_segment = TobiiEntities.TobiiSegment(args.segment_path, int(args.time_range[0] * 1000000), int(args.time_range[1] * 1000000) if args.time_range[1] != None else None) - - # Load a tobii segment video - tobii_segment_video = tobii_segment.load_video() - print(f'Video duration: {tobii_segment_video.get_duration()/1000000}, width: {tobii_segment_video.get_width()}, height: {tobii_segment_video.get_height()}') - - # Load a tobii segment data - tobii_segment_data = tobii_segment.load_data() - print(f'Data keys: {tobii_segment_data.keys()}') - - # Access to timestamped gaze position data buffer - tobii_ts_gaze_positions = tobii_segment_data.gidx_l_gp - print(f'{len(tobii_ts_gaze_positions)} gaze positions loaded') - - # Prepare video exportation at the same format than segment video - output_video = TobiiVideo.TobiiVideoOutput(video_filepath, tobii_segment_video.get_stream()) - - # Create aruco camera - aruco_camera = ArUcoCamera.ArUcoCamera() - aruco_camera.load_calibration_file(args.camera_calibration) - - # Create aruco tracker - aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera) - - # Create ROIs 3D scene - roi3D_scene = ROI3DScene.ROI3DScene() - roi3D_scene.load(args.roi_scene) - print(f'ROIs names: {roi3D_scene.keys()[2::]}') - - # Create Timestamped buffer to store 2D ROIs - roi2D_timestamped_buffer = ROI2DScene.TimeStampedROI2DScenes() - - # Video and data replay loop - try: - - # Count frame to display a progress bar - MiscFeatures.printProgressBar(0, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100) - - # Iterate on video frames activating video / data synchronisation through vts data buffer - for video_ts, video_frame in tobii_segment_video.frames(tobii_segment_data.vts): - - try: - - # Get closest gaze position before video timestamp and remove all gaze positions before - closest_gaze_ts, closest_gaze_position = 
tobii_ts_gaze_positions.pop_first_until(video_ts)
-
-                    # Draw video synchronized gaze pointer
-                    pointer = (int(closest_gaze_position.gp[0] * video_frame.width), int(closest_gaze_position.gp[1] * video_frame.height))
-                    cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1)
-
-                # When expected values can't be found
-                except (KeyError, AttributeError, ValueError):
-
-                    pass # keep last pointer position
-
-                # Track markers with pose estimation and draw them
-                aruco_tracker.track(video_frame.matrix)
-                aruco_tracker.draw(video_frame.matrix)
-
-                # Project 3D scenes related to each aruco markers
-                if aruco_tracker.get_markers_number():
-
-                    for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
-
-                        # TODO : Select different 3D scenes depending on aruco id
-
-                        marker_rotation = aruco_tracker.get_marker_rotation(i)
-                        marker_translation = aruco_tracker.get_marker_translation(i)
-
-                        roi3D_scene.rotation = marker_rotation
-                        roi3D_scene.translation = marker_translation
-
-                        # Edit Zero distorsion matrix
-                        D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])
-
-                        # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
-                        # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable.
-                        roi2D_scene = roi3D_scene.project(aruco_camera.get_K(), D0)
-
-                        # Check if gaze is inside 2D rois
-                        if pointer != None:
-                            roi2D_scene.inside(pointer)
-
-                        # Draw 2D rois
-                        roi2D_scene.draw(video_frame.matrix)
-
-                        # Store 2D rois
-                        roi2D_timestamped_buffer[video_ts] = roi2D_scene
-
-                # Close window using 'Esc' key
-                if cv.waitKey(1) == 27:
-                    break
-
-                # Display video
-                cv.imshow(f'Segment {tobii_segment.get_id()} video', video_frame.matrix)
-
-                # Write video
-                output_video.write(video_frame.matrix)
-
-                # Update Progress Bar
-                progress = video_ts - tobii_segment_video.get_vts_offset() - int(args.time_range[0] * 1000000)
-                MiscFeatures.printProgressBar(progress, tobii_segment_video.get_duration(), prefix = 'Progress:', suffix = 'Complete', length = 100)
-
-        # Exit on 'ctrl+C' interruption
-        except KeyboardInterrupt:
-            pass
-
-        # Stop frame display
-        cv.destroyAllWindows()
-
-        # End output video file
-        output_video.close()
-
-        print(f'\nROIs video saved into {video_filepath}')
-
-        # Export 2D rois
-        roi2D_timestamped_buffer.export_as_json(rois_filepath)
-
-        print(f'Timestamped ROIs positions saved into {rois_filepath}')
-
-if __name__ == '__main__':
-
-    main()
\ No newline at end of file
diff --git a/src/argaze/utils/live_tobii_aruco_aois.py b/src/argaze/utils/live_tobii_aruco_aois.py
new file mode 100644
index 0000000..51c244c
--- /dev/null
+++ b/src/argaze/utils/live_tobii_aruco_aois.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+from argaze import DataStructures
+from argaze.TobiiGlassesPro2 import *
+from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera
+from argaze.AreaOfInterest import *
+from argaze.TobiiGlassesPro2 import *
+
+import cv2 as cv
+import numpy
+
+def main():
+    """
+    Track any ArUco marker in a Tobii Glasses Pro 2 camera video stream.
+    From a loaded AOI scene .obj file, position the scene virtually relative to any detected ArUco markers and project the scene into the camera frame.
+    Then, detect if the Tobii gaze point is inside any AOI.
+ """ + + # Manage arguments + parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) + parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip') + parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') + parser.add_argument('-r', '--roi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj roi scene filepath') + parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') + parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)') + args = parser.parse_args() + + # Create tobii controller + tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf') + + # Calibrate tobii glasses + tobii_controller.calibrate() + + # Enable tobii data stream + tobii_data_stream = tobii_controller.enable_data_stream() + + # Enable tobii video stream + tobii_video_stream = tobii_controller.enable_video_stream() + + # create aruco camera + aruco_camera = ArUcoCamera.ArUcoCamera() + aruco_camera.load_calibration_file(args.camera_calibration) + + # Create aruco tracker + aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera) + + # Create AOIs 3D scene + aoi3D_scene = AOI3DScene.AOI3DScene() + aoi3D_scene.load(args.roi_scene) + + # Start streaming + tobii_controller.start_streaming() + + # Live video stream capture loop + try: + + past_gaze_positions = DataStructures.TimeStampedBuffer() + + while tobii_video_stream.is_alive(): + + video_ts, video_frame = tobii_video_stream.read() + + try: + + # Read data stream + data_stream = tobii_data_stream.read() + + # Store received gaze positions + past_gaze_positions.append(data_stream.gidx_l_gp) + + # Get last gaze position before video timestamp and remove all former gaze positions + earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts) + + # Draw video synchronized gaze pointer + pointer = (int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height)) + cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1) + + # When expected values aren't in data stream + except (KeyError, AttributeError, ValueError): + + pass # keep last pointer position + + # Track markers with pose estimation and draw them + aruco_tracker.track(video_frame.matrix) + aruco_tracker.draw(video_frame.matrix) + + # Project 3D scenes related to each aruco markers + if aruco_tracker.get_markers_number(): + + for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()): + + # TODO : Select different 3D scenes depending on aruco id + + marker_rotation = aruco_tracker.get_marker_rotation(i) + marker_translation = aruco_tracker.get_marker_translation(i) + + aoi3D_scene.rotation = marker_rotation + aoi3D_scene.translation = marker_translation + + # Edit Zero distorsion matrix + D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0]) + + # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it + # This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable. 
+ aoi2D_scene = aoi3D_scene.project(aruco_camera.get_K(), D0) + + # Check if gaze is inside 2D aois + if pointer != None: + aoi2D_scene.inside(pointer) + + # Draw 2D aois + aoi2D_scene.draw(video_frame.matrix) + + # Close window using 'Esc' key + if cv.waitKey(1) == 27: + break + + cv.imshow('Live Scene', video_frame.matrix) + + # Exit on 'ctrl+C' interruption + except KeyboardInterrupt: + pass + + # Stop frame display + cv.destroyAllWindows() + + # Stop streaming + tobii_controller.stop_streaming() + +if __name__ == '__main__': + + main() \ No newline at end of file diff --git a/src/argaze/utils/live_tobii_aruco_rois.py b/src/argaze/utils/live_tobii_aruco_rois.py deleted file mode 100644 index b32690e..0000000 --- a/src/argaze/utils/live_tobii_aruco_rois.py +++ /dev/null @@ -1,135 +0,0 @@ -#!/usr/bin/env python - -import argparse -import os - -from argaze import DataStructures -from argaze.TobiiGlassesPro2 import * -from argaze.ArUcoMarkers import ArUcoTracker, ArUcoCamera -from argaze.RegionOfInterest import * -from argaze.TobiiGlassesPro2 import * - -import cv2 as cv -import numpy - -def main(): - """ - Track any ArUco marker into Tobii Glasses Pro 2 camera video stream. - From a loaded ROI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame. - Then, detect if Tobii gaze point is inside any ROI. - """ - - # Manage arguments - parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0]) - parser.add_argument('-t', '--tobii_ip', metavar='TOBII_IP', type=str, default='192.168.1.10', help='tobii glasses ip') - parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath') - parser.add_argument('-r', '--roi_scene', metavar='ROI_SCENE', type=str, default='roi3D_scene.obj', help='obj roi scene filepath') - parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary') - parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)') - args = parser.parse_args() - - # Create tobii controller - tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf') - - # Calibrate tobii glasses - tobii_controller.calibrate() - - # Enable tobii data stream - tobii_data_stream = tobii_controller.enable_data_stream() - - # Enable tobii video stream - tobii_video_stream = tobii_controller.enable_video_stream() - - # create aruco camera - aruco_camera = ArUcoCamera.ArUcoCamera() - aruco_camera.load_calibration_file(args.camera_calibration) - - # Create aruco tracker - aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera) - - # Create ROIs 3D scene - roi3D_scene = ROI3DScene.ROI3DScene() - roi3D_scene.load(args.roi_scene) - - # Start streaming - tobii_controller.start_streaming() - - # Live video stream capture loop - try: - - past_gaze_positions = DataStructures.TimeStampedBuffer() - - while tobii_video_stream.is_alive(): - - video_ts, video_frame = tobii_video_stream.read() - - try: - - # Read data stream - data_stream = tobii_data_stream.read() - - # Store received gaze positions - past_gaze_positions.append(data_stream.gidx_l_gp) - - # Get last gaze position before video timestamp and remove all former gaze positions - earliest_ts, earliest_gaze_position = past_gaze_positions.pop_first_until(video_ts) - - # Draw video synchronized gaze pointer - pointer = 
(int(earliest_gaze_position.gp[0] * video_frame.width), int(earliest_gaze_position.gp[1] * video_frame.height)) - cv.circle(video_frame.matrix, pointer, 4, (0, 255, 255), -1) - - # When expected values aren't in data stream - except (KeyError, AttributeError, ValueError): - - pass # keep last pointer position - - # Track markers with pose estimation and draw them - aruco_tracker.track(video_frame.matrix) - aruco_tracker.draw(video_frame.matrix) - - # Project 3D scenes related to each aruco markers - if aruco_tracker.get_markers_number(): - - for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()): - - # TODO : Select different 3D scenes depending on aruco id - - marker_rotation = aruco_tracker.get_marker_rotation(i) - marker_translation = aruco_tracker.get_marker_translation(i) - - roi3D_scene.rotation = marker_rotation - roi3D_scene.translation = marker_translation - - # Edit Zero distorsion matrix - D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0]) - - # DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it - # This hack isn't realistic but as the gaze will mainly focus on centered ROI, where the distorsion is low, it is acceptable. - roi2D_scene = roi3D_scene.project(aruco_camera.get_K(), D0) - - # Check if gaze is inside 2D rois - if pointer != None: - roi2D_scene.inside(pointer) - - # Draw 2D rois - roi2D_scene.draw(video_frame.matrix) - - # Close window using 'Esc' key - if cv.waitKey(1) == 27: - break - - cv.imshow('Live Scene', video_frame.matrix) - - # Exit on 'ctrl+C' interruption - except KeyboardInterrupt: - pass - - # Stop frame display - cv.destroyAllWindows() - - # Stop streaming - tobii_controller.stop_streaming() - -if __name__ == '__main__': - - main() \ No newline at end of file -- cgit v1.1
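To close, a hedged end-to-end sketch of the 3D flow shared by both utils scripts above: load an AOI scene from a .obj file, pose it, project it with a camera matrix and the zero distortion vector (the same no-distortion hack the scripts use), then hit-test a gaze pointer. The .obj path, camera matrix `K` and pose values are placeholder assumptions; in the real scripts the pose comes from `ArUcoTracker.get_marker_rotation/translation` and `K` from `ArUcoCamera.get_K()`.

```python
import numpy
from argaze.AreaOfInterest import AOI3DScene

# Load named AOIs from a .obj file (one 'o' object per AOI) - placeholder path
aoi3D_scene = AOI3DScene.AOI3DScene()
aoi3D_scene.load('aoi3D_scene.obj')

# Pose the scene; real values would come from an ArUco marker estimate
aoi3D_scene.rotation = numpy.array([0., 0., 0.])
aoi3D_scene.translation = numpy.array([0., 0., 50.])

# Placeholder pinhole camera matrix and the zero distortion vector D0
K = numpy.array([[800., 0., 320.], [0., 800., 240.], [0., 0., 1.]])
D0 = numpy.asarray([0.0, 0.0, 0.0, 0.0, 0.0])

# Project 3D AOIs to 2D, then test whether a gaze pointer falls inside any
aoi2D_scene = aoi3D_scene.project(K, D0)
aoi2D_scene.inside((320, 240))

for name in aoi2D_scene.keys():
    print(name, aoi2D_scene[name].pointer)
```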