author     Théo de la Hogue    2022-08-31 22:43:30 +0200
committer  Théo de la Hogue    2022-08-31 22:43:30 +0200
commit     aa02c1d073b5ae6d727ca7d475579a14f3178852 (patch)
tree       4c6020e9ee28deb33d0890854b79e0514844f1cc
parent     65c04d8c0d918a1a517e8a31e7216fff25c21527 (diff)
Working on rotation validation. It is still not available, as the check requires extra data to be passed in (the expected orientation of each marker).
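
A note on the approach: planar marker pose estimation suffers from an orientation ambiguity (see https://github.com/opencv/opencv/issues/8813), so one way to validate a pose is to compare the marker's estimated z axis against an expected direction supplied by the caller — the "extra data" mentioned above. A minimal standalone sketch of such a test (the helper name, the normalization and the clamping are choices made here, not part of this commit):

    import numpy
    import cv2 as cv

    def z_axis_angle(rvec, expected_direction):
        """Return the angle in degrees between a marker's z axis and an
        expected direction, both expressed in camera coordinates."""
        R, _ = cv.Rodrigues(rvec)
        z_axis = R[:, 2]  # third column of R is the marker z axis in camera frame
        expected = expected_direction / numpy.linalg.norm(expected_direction)
        cosine = float(numpy.dot(z_axis, expected))
        # clamp to avoid NaN from floating point drift before arccos
        return numpy.degrees(numpy.arccos(numpy.clip(cosine, -1.0, 1.0)))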
-rw-r--r--  src/argaze/ArUcoMarkers/ArUcoTracker.py                     | 108
-rw-r--r--  src/argaze/utils/export_tobii_segment_aruco_visual_scan.py  |   7
2 files changed, 104 insertions(+), 11 deletions(-)
diff --git a/src/argaze/ArUcoMarkers/ArUcoTracker.py b/src/argaze/ArUcoMarkers/ArUcoTracker.py
index 0f03363..a9cb4d5 100644
--- a/src/argaze/ArUcoMarkers/ArUcoTracker.py
+++ b/src/argaze/ArUcoMarkers/ArUcoTracker.py
@@ -69,6 +69,13 @@ class ArUcoTracker():
self.__tvecs = []
self.__points = []
+ # define rejected markers data
+ self.__rejected_markers_corners = []
+ self.__rejected_markers_ids = []
+ self.__rejected_rvecs = []
+ self.__rejected_tvecs = []
+ self.__rejected_points = []
+
# define tracked board data
self.__board = None
self.__board_corners_number = 0
@@ -78,6 +85,7 @@ class ArUcoTracker():
# define track metrics data
self.__track_count = 0
self.__tracked_markers = []
+ self.__rejected_markers = []
def load_configuration_file(self, configuration_filepath):
"""Load aruco detection parameters from .json file."""
@@ -104,7 +112,7 @@ class ArUcoTracker():
elif print_all:
print(f'\t{parameter}: {getattr(self.__detector_parameters, parameter)}')
- def track(self, frame, estimate_pose = True):
+ def track(self, frame, estimate_pose = True, check_rotation = False):
"""Track ArUco markers in frame."""
# DON'T MIRROR FRAME: it makes marker detection fail
@@ -112,23 +120,93 @@ class ArUcoTracker():
# detect markers from gray picture
gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
self.__markers_corners, self.__markers_ids, rejectedPoints = aruco.detectMarkers(gray, self.__aruco_dict.get_markers(), parameters = self.__detector_parameters)
-
- # update track metrics
- self.__track_count += 1
- for marker_id in self.get_markers_ids():
- self.__tracked_markers.append(marker_id)
+ self.__rejected_markers_corners, self.__rejected_markers_ids = [], []
if len(self.__markers_corners) > 0 and estimate_pose:
# markers pose estimation
self.__rvecs, self.__tvecs, self.__points = aruco.estimatePoseSingleMarkers(self.__markers_corners, self.__marker_length, self.__camera.get_K(), self.__camera.get_D())
+ # optional: check marker rotation as described in [this issue](https://github.com/opencv/opencv/issues/8813)
+ if check_rotation:
+
+ valid_rotation_markers = []
+ bad_rotation_markers = []
+ for i, rvec in enumerate(self.__rvecs):
+
+ tvec = self.__tvecs[i][0]
+ R, _ = cv.Rodrigues(rvec)
+
+ zAxisPoint = (tvec.dot(R) + numpy.array([0., 0., 1.])).dot(R.T)
+ zAxis = zAxisPoint - tvec
+
+ # TODO: How to describe the expected Z axis orientation?
+ # In some situations, you can't provide such information.
+
+ # !!! The description below is specific to the SimOne cockpit !!!
+ zAxisExpectedDict = {
+ 1: numpy.array([0.5, 0.5, -1]),
+ 2: numpy.array([0.5, 0.5, -1]),
+ 3: numpy.array([1, -1, -1]),
+ 4: numpy.array([1, -1, -1]),
+ 5: numpy.array([1, -1, -1]),
+ 6: numpy.array([1, 1, -1]),
+ 7: numpy.array([1, 1, -1]),
+ 8: numpy.array([1, -1, -1])
+ }
+
+ zAxisExpected = zAxisExpectedDict[self.__markers_ids[i][0]]
+
+ cosine_angle = numpy.dot(zAxis/numpy.linalg.norm(zAxis), zAxisExpected)
+ degree_angle = numpy.rad2deg(numpy.arccos(cosine_angle))
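+ # note: zAxisExpected is not normalized, so cosine_angle is not a true cosine
+ # and can fall outside [-1, 1]; numpy.arccos then returns NaN for such values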
+ '''
+ print(self.__markers_ids[i][0])
+ print('marker position: ', tvec)
+ print('zAxisPoint: ', zAxisPoint)
+ print('zAxis: ', zAxis)
+ print('zAxisExpected: ', zAxisExpected)
+ print('cosine_angle: ', cosine_angle)
+ print('degree_angle: ', degree_angle)
+ '''
+ # Is the marker oriented as expected?
+ if cosine_angle < 0 or cosine_angle > 1:
+
+ #print('valid')
+ valid_rotation_markers.append(i)
+
+ else:
+
+ #print('bad')
+ bad_rotation_markers.append(i)
+
+ # update track metrics
+ self.__rejected_markers.append(self.__markers_ids[i][0])
+
+ # keep markers with bad rotation
+ self.__rejected_markers_corners = tuple([self.__markers_corners[i] for i in bad_rotation_markers])
+ self.__rejected_markers_ids = self.__markers_ids[bad_rotation_markers]
+ self.__rejected_rvecs = self.__rvecs[bad_rotation_markers]
+ self.__rejected_tvecs = self.__tvecs[bad_rotation_markers]
+ self.__rejected_points = self.__points[bad_rotation_markers]
+
+ # keep markers with valid rotation
+ self.__markers_corners = tuple([self.__markers_corners[i] for i in valid_rotation_markers])
+ self.__markers_ids = self.__markers_ids[valid_rotation_markers]
+ self.__rvecs = self.__rvecs[valid_rotation_markers]
+ self.__tvecs = self.__tvecs[valid_rotation_markers]
+ self.__points = self.__points[valid_rotation_markers]
+
else:
self.__rvecs = []
self.__tvecs = []
self.__points = []
+ # update track metrics
+ self.__track_count += 1
+ for marker_id in self.get_markers_ids():
+ self.__tracked_markers.append(marker_id)
+
def track_board(self, frame, board, expected_markers_number):
"""Track ArUco markers board in frame setting up the number of detected markers needed to agree detection."""
@@ -157,8 +235,6 @@ class ArUcoTracker():
# draw detected markers square
if len(self.__markers_corners) > 0:
- aruco.drawDetectedMarkers(frame, self.__markers_corners, self.__markers_ids)
-
# draw marker axis if pose has been estimated
if len(self.__rvecs) > 0:
@@ -166,6 +242,20 @@ class ArUcoTracker():
cv.drawFrameAxes(frame, self.__camera.get_K(), self.__camera.get_D(), self.__rvecs[i], self.__tvecs[i], self.__marker_length)
+ aruco.drawDetectedMarkers(frame, self.__markers_corners, self.__markers_ids)
+
+ # draw rejected markers square
+ if len(self.__rejected_markers_corners) > 0:
+
+ # draw marker axis if pose has been estimated
+ if len(self.__rejected_rvecs) > 0:
+
+ for (i, marker_id) in enumerate(self.__rejected_markers_ids):
+
+ cv.drawFrameAxes(frame, self.__camera.get_K(), self.__camera.get_D(), self.__rejected_rvecs[i], self.__rejected_tvecs[i], self.__marker_length)
+
+ aruco.drawDetectedMarkers(frame, self.__rejected_markers_corners, self.__rejected_markers_ids, borderColor=(0, 255, 255))
+
def draw_board(self, frame):
"""Draw tracked board corners in frame."""
@@ -180,7 +270,7 @@ class ArUcoTracker():
def get_track_metrics(self):
"""Get marker tracking metrics."""
- return self.__track_count, Counter(self.__tracked_markers)
+ return self.__track_count, Counter(self.__tracked_markers), Counter(self.__rejected_markers)
def get_markers_dictionay(self):
"""Get tracked aruco markers dictionary."""
diff --git a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
index 25babf6..6b0a41f 100644
--- a/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
+++ b/src/argaze/utils/export_tobii_segment_aruco_visual_scan.py
@@ -212,7 +212,7 @@ def main():
continue
# Track markers with pose estimation and draw them
- aruco_tracker.track(video_frame.matrix)
+ aruco_tracker.track(video_frame.matrix, check_rotation=False)
aruco_tracker.draw(video_frame.matrix)
# Draw focus area
@@ -312,11 +312,14 @@ def main():
# Print aruco tracking metrics
print('\nAruco marker tracking metrics')
- try_count, tracked_counts = aruco_tracker.get_track_metrics()
+ try_count, tracked_counts, rejected_counts = aruco_tracker.get_track_metrics()
for marker_id, tracked_count in tracked_counts.items():
print(f'Marker {marker_id} has been detected in {tracked_count} / {try_count} frames ({round(100 * tracked_count / try_count, 2)} %)')
+ for marker_id, rejected_count in rejected_counts.items():
+ print(f'Marker {marker_id} has been rejected in {rejected_count} / {try_count} frames ({round(100 * rejected_count / try_count, 2)} %)')
+
# Build visual scan based on a pointer position
visual_scan = GazeFeatures.PointerBasedVisualScan(ts_aois_scenes, ts_gaze_positions)
print(f'{len(visual_scan.steps())} visual scan steps found')
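
get_track_metrics() now returns a third value, a Counter of rejected marker ids. A small sketch of combining both counters into per-marker summaries (the loop and variable names are illustrative, not from the commit):

    try_count, tracked_counts, rejected_counts = aruco_tracker.get_track_metrics()

    for marker_id in sorted(set(tracked_counts) | set(rejected_counts)):
        tracked = tracked_counts.get(marker_id, 0)
        rejected = rejected_counts.get(marker_id, 0)
        print(f'Marker {marker_id}: tracked in {tracked} and rejected in {rejected} of {try_count} frames')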