aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/argaze/AreaOfInterest/AOI2DScene.py6
-rw-r--r--src/argaze/GazeFeatures.py5
-rw-r--r--src/argaze/utils/README.md8
-rw-r--r--src/argaze/utils/export_tobii_segment_aruco_visual_scan.py75
-rw-r--r--src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py51
5 files changed, 87 insertions, 58 deletions
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index a120a12..6106005 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -48,8 +48,10 @@ class AOI2DScene(AOIFeatures.AOIScene):
top_left_corner_pixel = numpy.rint(aoi2D.clockwise()[0]).astype(int)
cv.putText(frame, name, top_left_corner_pixel, cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv.LINE_AA)
- looked_pixel = aoi2D.looked_pixel(aoi2D.look_at(gaze_position))
- cv.circle(frame, looked_pixel, 8, color, 2)
+ # 4 corners aoi only
+ if len(aoi2D) == 4:
+ looked_pixel = aoi2D.looked_pixel(aoi2D.look_at(gaze_position))
+ cv.circle(frame, looked_pixel, 8, color, 2)
# Draw form
aoi2D.draw(frame, color)
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index d7466df..8d3d93f 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -364,8 +364,9 @@ class PointerBasedVisualScan(VisualScanGenerator):
'look_at': DataStructures.TimeStampedBuffer()
}
- # store where the aoi is looked
- self.__step_dict[name]['look_at'][round(ts_current)] = aoi.look_at(gaze_position)
+ # store where the aoi is looked for 4 corners aoi
+ if len(aoi) == 4:
+ self.__step_dict[name]['look_at'][round(ts_current)] = aoi.look_at(gaze_position)
elif name in self.__step_dict.keys():
diff --git a/src/argaze/utils/README.md b/src/argaze/utils/README.md
index 2c4f5e7..abc44dc 100644
--- a/src/argaze/utils/README.md
+++ b/src/argaze/utils/README.md
@@ -72,16 +72,16 @@ python ./src/argaze/utils/replay_tobii_session.py -s SEGMENT_PATH -r IN OUT
python ./src/argaze/utils/export_tobii_segment_movements.py -s SEGMENT_PATH -r IN OUT
```
-- Track ArUco markers (-i MARKERS_ID) into a Tobii camera video segment (-s SEGMENT_PATH) into a time range selection (-r IN OUT). Load an aoi scene (-a AOI_SCENE) .obj file, position it virtually relatively to the detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is focusing onto AOIs to build the segment visual scan and export it as a visual_scan.csv, visual_scan.jpg, visual_scan.mp4 files:
+- Track ArUco markers into a Tobii camera video segment (-s SEGMENT_PATH) into a time range selection (-r IN OUT). Load aoi scene .obj file related to each marker (-mi MARKER_ID_SCENE), position it virtually relative to the detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is focusing onto AOIs to build the segment visual scan and export it as a visual_scan.csv, visual_scan.jpg, visual_scan.mp4 files:
```
-python ./src/argaze/utils/export_tobii_segment_aruco_visual_scan.py -s SEGMENT_PATH -c export/tobii_camera.json -m 7.5 -a AOI_SCENE -r IN OUT -i MARKERS_ID
+python ./src/argaze/utils/export_tobii_segment_aruco_visual_scan.py -s SEGMENT_PATH -c export/tobii_camera.json -ms 5 -mi MARKER_ID_SCENE -r IN OUT
```
-- Track ArUco marker (-i MARKER_ID) into Tobii camera video stream (-t IP_ADDRESS). Load an aoi scene (-a AOI_SCENE) .obj file, position it virtually relatively to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any AOI and send the look at pointer over Ivy default bus:
+- Track ArUco markers into Tobii camera video stream (-t IP_ADDRESS). Load aoi scene .obj file related to each marker (-mi MARKER_ID_SCENE), position it virtually relative to any detected ArUco markers and project the scene into camera frame. Then, detect if Tobii gaze point is inside any AOI and send the look at pointer over Ivy default bus:
```
-python ./src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py -t IP_ADDRESS -c export/tobii_camera.json -m 6 -a AOI_SCENE -i MARKER_ID
+python ./src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py -t IP_ADDRESS -c export/tobii_camera.json -ms 5 -mi MARKER_ID_SCENE
```
- Define AOI scene from a ArUco marker (-a AOI_SCENE) and bind to Ivy default bus to receive live look at pointer data.:
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index e830ed6..c4865f9 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -2,6 +2,7 @@
import argparse
import os
+import json
from argaze import DataStructures
from argaze import GazeFeatures
@@ -15,8 +16,8 @@ import cv2 as cv
def main():
"""
- Track any ArUco marker into Tobii Glasses Pro 2 segment video file.
- From a loaded AOI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame.
+ Track ArUco markers into Tobii Glasses Pro 2 segment video file.
+ For each loaded AOI scene .obj file, position the scene virtually relative to each detected ArUco marker and project the scene into camera frame.
Then, detect if Tobii gaze point is inside any AOI.
Export AOIs video and data.
"""
@@ -26,11 +27,10 @@ def main():
parser.add_argument('-s', '--segment_path', metavar='SEGMENT_PATH', type=str, default=None, help='segment path')
parser.add_argument('-r', '--time_range', metavar=('START_TIME', 'END_TIME'), nargs=2, type=float, default=(0., None), help='start and end time (in second)')
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default=None, help='json camera calibration filepath')
- parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
- parser.add_argument('-a', '--aoi_scene', metavar='AOI_SCENE', type=str, default=None, help='obj aoi scene filepath')
- parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
- parser.add_argument('-m', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)')
- parser.add_argument('-i', '--markers_id', metavar='MARKERS_ID', nargs='*', type=int, default=[], help='markers id to track')
+ parser.add_argument('-p', '--aruco_tracker_configuration', metavar='TRACK_CONFIG', type=str, default=None, help='json aruco tracker configuration filepath')
+ parser.add_argument('-md', '--marker_dictionary', metavar='MARKER_DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
+ parser.add_argument('-ms', '--marker_size', metavar='MARKER_SIZE', type=float, default=6, help='aruco marker size (cm)')
+ parser.add_argument('-mi', '--marker_id_scene', metavar='MARKER_ID_SCENE', type=json.loads, help='{"marker": "aoi scene filepath"} dictionary')
parser.add_argument('-o', '--output', metavar='OUT', type=str, default=None, help='destination folder path (segment folder by default)')
parser.add_argument('-w', '--window', metavar='DISPLAY', type=bool, default=True, help='enable window display', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
@@ -38,11 +38,10 @@ def main():
if args.segment_path != None:
# Manage markers id to track
- empty_marker_set = len(args.markers_id) == 0
- if empty_marker_set:
- print(f'Track any Aruco markers from the {args.dictionary} dictionary')
+ if args.marker_id_scene == None:
+ print(f'Track any Aruco markers from the {args.marker_dictionary} dictionary')
else:
- print(f'Track Aruco markers {args.markers_id} from the {args.dictionary} dictionary')
+ print(f'Track Aruco markers {args.marker_id_scene.keys()} from the {args.marker_dictionary} dictionary')
# Manage destination path
destination_path = '.'
@@ -100,7 +99,7 @@ def main():
raise ValueError('.json camera calibration filepath required. Use -c option.')
# Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.marker_dictionary, args.marker_size, aruco_camera)
# Load specific configuration file
if args.aruco_tracker_configuration != None:
@@ -110,28 +109,29 @@ def main():
print(f'ArUcoTracker configuration for {aruco_tracker.get_markers_dictionay().get_markers_format()} markers detection:')
aruco_tracker.print_configuration()
- # Create AOIs 3D scene
- aoi3D_scene = AOI3DScene.AOI3DScene()
+ # Load AOI 3D scene for each marker
+ aoi3D_scenes = {}
- # Load AOI 3D scene file
- if args.aoi_scene != None:
+ for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
- aoi3D_scene.load(args.aoi_scene)
+ marker_id = int(marker_id)
+
+ aoi3D_scenes[marker_id] = AOI3DScene.AOI3DScene()
+ aoi3D_scenes[marker_id].load(aoi_scene_filepath)
- print(f'AOI names:')
- for name in aoi3D_scene.keys():
- print(f'\t{name}')
+ print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}')
+ for aoi in aoi3D_scenes[marker_id].keys():
+ print(f'\t{aoi}')
- else:
-
- raise ValueError('.json AOI scene filepath required. Use -a option.')
+ def aoi3D_scene_selector(marker_id):
+ return aoi3D_scenes.get(marker_id, None)
# Create timestamped buffer to store AOIs scene in time
ts_aois_scenes = AOIFeatures.TimeStampedAOIScenes()
# Create timestamped buffer to store gaze positions in time
ts_gaze_positions = GazeFeatures.TimeStampedGazePositions()
-
+ '''
# Create a visual scan visualisation frame
visu_width = 1920
visu_height = 1080
@@ -155,7 +155,7 @@ def main():
for name, aoi in aoi2D_visu_scene.items():
aoi.draw(visu_frame, (0, 0, 0))
-
+ '''
# Video and data replay loop
try:
@@ -195,11 +195,10 @@ def main():
for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
- # TODO : Select different 3D scene depending on aruco id
+ # Select 3D scene related to detected marker
+ aoi3D_scene = aoi3D_scene_selector(marker_id)
- in_marker_set = marker_id in list(args.markers_id)
-
- if not empty_marker_set and not in_marker_set:
+ if aoi3D_scene == None:
continue
aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
@@ -214,13 +213,15 @@ def main():
# Store 2D scene at this time in millisecond
ts_aois_scenes[round(video_ts_ms)] = aoi2D_video_scene
+ '''
+ # Draw gaze path for 4 corners aoi
+ if len(aoi) == 4:
- # Draw gaze path
- look_at = aoi2D_video_scene[ref_aoi].look_at(video_gaze_pixel)
-
- visu_gaze_pixel = aoi2D_visu_scene[ref_aoi].looked_pixel(look_at)
- cv.circle(visu_frame, visu_gaze_pixel, 4, (0, 0, 255), -1)
+ look_at = aoi2D_video_scene[ref_aoi].look_at(video_gaze_pixel)
+ visu_gaze_pixel = aoi2D_visu_scene[ref_aoi].looked_pixel(look_at)
+ cv.circle(visu_frame, visu_gaze_pixel, 4, (0, 0, 255), -1)
+ '''
if args.window:
# Close window using 'Esc' key
@@ -231,7 +232,7 @@ def main():
cv.imshow(f'Segment {tobii_segment.get_id()} ArUco AOI', video_frame.matrix)
# Display visual scan frame
- cv.imshow(f'Segment {tobii_segment.get_id()} visual scan', visu_frame)
+ #cv.imshow(f'Segment {tobii_segment.get_id()} visual scan', visu_frame)
# Write video
output_video.write(video_frame.matrix)
@@ -266,8 +267,8 @@ def main():
print(f'Visual scan data saved into {vs_data_filepath}')
# Export visual scan image
- cv.imwrite(vs_visu_filepath, visu_frame)
- print(f'Visual scan image saved into {vs_visu_filepath}')
+ #cv.imwrite(vs_visu_filepath, visu_frame)
+ #print(f'Visual scan image saved into {vs_visu_filepath}')
# Notify where the visual scan video has been exported
print(f'Visual scan video saved into {vs_video_filepath}')
diff --git a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
index 13efe2d..253ae2d 100644
--- a/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
+++ b/src/argaze/utils/live_tobii_aruco_aoi_ivy_controller.py
@@ -2,6 +2,7 @@
import argparse
import os
+import json
from argaze import DataStructures, GazeFeatures
from argaze.TobiiGlassesPro2 import *
@@ -17,7 +18,7 @@ from ivy.std_api import *
def main():
"""
Track any ArUco marker into Tobii Glasses Pro 2 camera video stream.
- From a loaded AOI scene .obj file, position the scene virtually relatively to any detected ArUco markers and project the scene into camera frame.
+ For each loaded AOI scene .obj file, position the scene virtually relative to each detected ArUco marker and project the scene into camera frame.
Then, detect if Tobii gaze point is inside any AOI and send the look at pointer over Ivy default bus.
"""
@@ -27,12 +28,16 @@ def main():
parser.add_argument('-c', '--camera_calibration', metavar='CAM_CALIB', type=str, default='tobii_camera.json', help='json camera calibration filepath')
parser.add_argument('-y', '--ivy_bus', metavar='IVY_BUS', type=str, default='0.0.0.0:2010', help='Ivy bus ip and port')
parser.add_argument('-a', '--aoi_scene', metavar='AOI_SCENE', type=str, default='aoi3D_scene.obj', help='obj aoi scene filepath')
- parser.add_argument('-d', '--dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionnary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
- parser.add_argument('-m', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
- parser.add_argument('-i', '--marker_id', metavar='MARKER_ID', type=int, default=0, help='marker id to track')
+ parser.add_argument('-md', '--marker_dictionary', metavar='DICT', type=str, default='DICT_ARUCO_ORIGINAL', help='aruco marker dictionary (DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL,DICT_APRILTAG_16h5, DICT_APRILTAG_25h9, DICT_APRILTAG_36h10, DICT_APRILTAG_36h11)')
+ parser.add_argument('-ms', '--marker_size', metavar='MKR', type=float, default=6, help='aruco marker size (cm)')
+ parser.add_argument('-mi', '--marker_id_scene', metavar='MARKER_ID_SCENE', type=json.loads, help='{"marker": "aoi scene filepath"} dictionary')
args = parser.parse_args()
- print(f'Track Aruco markers {args.marker_id} from the {args.dictionary} dictionary')
+ # Manage markers id to track
+ if args.marker_id_scene == None:
+ print(f'Track any Aruco markers from the {args.marker_dictionary} dictionary')
+ else:
+ print(f'Track Aruco markers {args.marker_id_scene.keys()} from the {args.marker_dictionary} dictionary')
# Enable Ivy bus
IvyInit(os.path.basename(__file__))
@@ -42,7 +47,7 @@ def main():
tobii_controller = TobiiController.TobiiController(args.tobii_ip, 'myProject', 'mySelf')
# Calibrate tobii glasses
- tobii_controller.calibrate()
+ #tobii_controller.calibrate()
# Enable tobii data stream
tobii_data_stream = tobii_controller.enable_data_stream()
@@ -55,12 +60,24 @@ def main():
aruco_camera.load_calibration_file(args.camera_calibration)
# Create aruco tracker
- aruco_tracker = ArUcoTracker.ArUcoTracker(args.dictionary, args.marker_size, aruco_camera)
+ aruco_tracker = ArUcoTracker.ArUcoTracker(args.marker_dictionary, args.marker_size, aruco_camera)
- # Create AOIs 3D scene
- aoi3D_scene = AOI3DScene.AOI3DScene()
- aoi3D_scene.load(args.aoi_scene)
- print(f'AOIs names: {aoi3D_scene.keys()}')
+ # Load AOI 3D scene for each marker
+ aoi3D_scenes = {}
+
+ for marker_id, aoi_scene_filepath in args.marker_id_scene.items():
+
+ marker_id = int(marker_id)
+
+ aoi3D_scenes[marker_id] = AOI3DScene.AOI3DScene()
+ aoi3D_scenes[marker_id].load(aoi_scene_filepath)
+
+ print(f'AOI in {os.path.basename(aoi_scene_filepath)} scene related to marker #{marker_id}')
+ for aoi in aoi3D_scenes[marker_id].keys():
+ print(f'\t{aoi}')
+
+ def aoi3D_scene_selector(marker_id):
+ return aoi3D_scenes.get(marker_id, None)
# Start streaming
tobii_controller.start_streaming()
@@ -102,7 +119,10 @@ def main():
for (i, marker_id) in enumerate(aruco_tracker.get_markers_ids()):
- if marker_id != args.marker_id:
+ # Select 3D scene related to detected marker
+ aoi3D_scene = aoi3D_scene_selector(marker_id)
+
+ if aoi3D_scene == None:
continue
aoi3D_scene.rotation = aruco_tracker.get_marker_rotation(i)
@@ -121,7 +141,12 @@ def main():
# Send look at aoi pointer
for name, aoi in aoi2D_scene.items():
if aoi.looked(video_gaze_pixel):
- IvySendMsg(f'looking {name} at {aoi.look_at(video_gaze_pixel)}')
+
+ # 4 corners aoi
+ if len(aoi) == 4:
+ IvySendMsg(f'looking {name} at {aoi.look_at(video_gaze_pixel)}')
+ else:
+ IvySendMsg(f'looking {name}')
# Close window using 'Esc' key
if cv.waitKey(1) == 27: