author    Theo De La Hogue  2023-09-22 22:06:20 +0200
committer Theo De La Hogue  2023-09-22 22:06:20 +0200
commit    b947573f7dbccb5b2b13b64677192145f2dbb864 (patch)
tree      20cd0cb471b245445bd493c3e8e24fa7baf45d8b
parent    1f36c34242791145a1b33dd17cf351018456310f (diff)
Working on the AOI frame feature: 2D AOIs in a scene frame are now merged into 3D AOIs in the scene layer.
-rw-r--r--  src/argaze.test/AreaOfInterest/AOIFeatures.py             16
-rw-r--r--  src/argaze/ArFeatures.py                                  94
-rw-r--r--  src/argaze/AreaOfInterest/AOI2DScene.py                   31
-rw-r--r--  src/argaze/AreaOfInterest/AOI3DScene.py                    9
-rw-r--r--  src/argaze/AreaOfInterest/AOIFeatures.py                  81
-rw-r--r--  src/argaze/utils/demo_data/aoi_2d_scene.json               5
-rw-r--r--  src/argaze/utils/demo_data/aoi_3d_scene.obj               50
-rw-r--r--  src/argaze/utils/demo_data/demo_aruco_markers_setup.json  12
-rw-r--r--  src/argaze/utils/demo_data/demo_gaze_analysis_setup.json   7
-rw-r--r--  src/argaze/utils/demo_gaze_analysis_run.py                20
10 files changed, 175 insertions, 150 deletions
diff --git a/src/argaze.test/AreaOfInterest/AOIFeatures.py b/src/argaze.test/AreaOfInterest/AOIFeatures.py
index bced0aa..6df33ca 100644
--- a/src/argaze.test/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze.test/AreaOfInterest/AOIFeatures.py
@@ -118,13 +118,17 @@ class TestAreaOfInterestClass(unittest.TestCase):
aoi_2D = AOIFeatures.AreaOfInterest([[0, 0], [0, 2], [2, 2], [2, 0]])
- self.assertEqual(aoi_2D.inner_axis((1, 1)), (0.5, 0.5))
+ self.assertEqual(aoi_2D.inner_axis(1, 1), (0.5, 0.5))
def test_outter_axis(self):
aoi_2D = AOIFeatures.AreaOfInterest([[0, 0], [0, 2], [2, 2], [2, 0]])
- self.assertEqual(aoi_2D.outter_axis((0.5, 0.5)), (1, 1))
+ self.assertEqual(aoi_2D.outter_axis(0.5, 0.5), (1, 1))
+
+ aoi_3D = AOIFeatures.AreaOfInterest([[1, 0, 0], [1, 0, 2], [1, 2, 2], [1, 2, 0]])
+
+ self.assertEqual(aoi_3D.outter_axis(0.5, 0.5), (1, 1, 1))
def test_circle_intersection(self):
@@ -181,21 +185,15 @@ class TestAOISceneClass(unittest.TestCase):
aoi_2d_scene_AB["A"] = AOIFeatures.AreaOfInterest([[0, 0], [0, 1], [1, 1], [1, 0]])
aoi_2d_scene_AB["B"] = AOIFeatures.AreaOfInterest([[0, 0], [0, 2], [2, 2], [2, 0]])
- print('aoi_2d_scene_AB vars: ', vars(aoi_2d_scene_AB))
-
# Create second scene with C and D aoi
aoi_2d_scene_CD = AOIFeatures.AOIScene(2,)
aoi_2d_scene_CD["C"] = AOIFeatures.AreaOfInterest([[0, 0], [0, 3], [3, 3], [3, 0]])
aoi_2d_scene_CD["D"] = AOIFeatures.AreaOfInterest([[0, 0], [0, 4], [4, 4], [4, 0]])
- print('aoi_2d_scene_CD vars: ', vars(aoi_2d_scene_CD))
-
# Merge first scene and second scene into a third scene
aoi_2d_scene_ABCD = aoi_2d_scene_AB | aoi_2d_scene_CD
- print('aoi_2d_scene_ABCD vars: ', vars(aoi_2d_scene_ABCD))
-
# Check third scene
self.assertEqual(aoi_2d_scene_ABCD.dimension, 2)
self.assertEqual(len(aoi_2d_scene_ABCD.items()), 4)
@@ -205,8 +203,6 @@ class TestAOISceneClass(unittest.TestCase):
# Merge second scene into first scene
aoi_2d_scene_AB |= aoi_2d_scene_CD
- print('aoi_2d_scene_AB vars: ', vars(aoi_2d_scene_AB))
-
# Check first scene
self.assertEqual(aoi_2d_scene_AB.dimension, 2)
self.assertEqual(len(aoi_2d_scene_AB.items()), 4)
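The updated tests illustrate two behaviours introduced here: inner_axis() and outter_axis() now take separate x and y arguments instead of a point tuple, and AOIScene instances merge with the | and |= operators. A minimal usage sketch based on the same AOIs as the tests:

    from argaze.AreaOfInterest import AOIFeatures

    # Square AOI with points sorted in clockwise order
    aoi = AOIFeatures.AreaOfInterest([[0, 0], [0, 2], [2, 2], [2, 0]])

    aoi.inner_axis(1, 1)        # global point (1, 1) expressed in AOI axis -> (0.5, 0.5)
    aoi.outter_axis(0.5, 0.5)   # AOI point (0.5, 0.5) expressed in global axis -> (1, 1)

    # Scenes behave like dictionaries and merge with | and |=
    scene_ab = AOIFeatures.AOIScene(2)
    scene_ab["A"] = aoi
    scene_cd = AOIFeatures.AOIScene(2)
    scene_cd["C"] = AOIFeatures.AreaOfInterest([[0, 0], [0, 3], [3, 3], [3, 0]])
    merged = scene_ab | scene_cd   # contains both "A" and "C"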
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 96976c2..ad17df2 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -192,6 +192,8 @@ class ArLayer():
except KeyError:
+ pass
+
# Add AOI 2D Scene by default
new_aoi_scene = AOI2DScene.AOI2DScene()
@@ -484,7 +486,7 @@ class ArLayer():
# Draw aoi if required
if draw_aoi_scene is not None:
-
+
self.aoi_scene.draw(image, **draw_aoi_scene)
# Draw aoi matching if required
@@ -728,11 +730,6 @@ class ArFrame():
# Create layer
new_layer = ArLayer.from_dict(layer_data, working_directory)
- # Project 3D aoi scene layer to get only 2D aoi scene
- if new_layer.aoi_scene.dimension == 3:
-
- new_layer.aoi_scene = new_layer.aoi_scene.orthogonal_projection * new_frame_size
-
# Append new layer
new_layers[layer_name] = new_layer
@@ -1099,13 +1096,6 @@ class ArScene():
frame.parent = self
- # Preprocess orthogonal projection to speed up further processings
- self.__orthogonal_projection_cache = {}
-
- for layer_name, layer in self.layers.items():
-
- self.__orthogonal_projection_cache[layer_name] = layer.aoi_scene.orthogonal_projection
-
def __str__(self) -> str:
"""
Returns:
@@ -1184,54 +1174,70 @@ class ArScene():
for frame_name, frame_data in scene_data.pop('frames').items():
- # Append name
- frame_data['name'] = frame_name
+ # str: relative path to file
+ if type(frame_data) == str:
+
+ filepath = os.path.join(working_directory, frame_data)
+ file_format = filepath.split('.')[-1]
+
+ # JSON file format for 2D or 3D dimension
+ if file_format == 'json':
+
+ new_frame = ArFrame.from_json(filepath)
- # Create frame
- new_frame = ArFrame.from_dict(frame_data, working_directory)
+ # dict:
+ else:
- # Look for AOI with same frame name
- aoi_frame = None
- aoi_frame_found = False
- for layer_name, layer in new_layers.items():
+ # Append name
+ frame_data['name'] = frame_name
+
+ new_frame = ArFrame.from_dict(frame_data, working_directory)
+
+ # Look for a scene layer with an AOI named like the frame
+ for scene_layer_name, scene_layer in new_layers.items():
try:
- aoi_frame = layer.aoi_scene[frame_name]
- aoi_frame_found = True
+ frame_3d = scene_layer.aoi_scene[frame_name]
- except KeyError:
+ # Check that the frame has a layer named like this scene layer
+ aoi_2d_scene = new_frame.layers[scene_layer_name].aoi_scene
+
+ # Transform 2D frame layer AOIs into 3D scene layer AOIs
+ # Then, add them to scene layer
+ scene_layer.aoi_scene |= aoi_2d_scene.dimensionalize(frame_3d, new_frame.size)
- # AOI name should be unique
- break
+ '''DEPRECATED: but maybe still useful?
+ # Project and reframe each layers into corresponding frame layers
+ for frame_layer_name, frame_layer in new_frame.layers.items():
- if aoi_frame_found:
+ try:
- # Project and reframe each layers into corresponding frame layers
- for frame_layer_name, frame_layer in new_frame.layers.items():
+ layer = new_layers[frame_layer_name]
+
+ layer_aoi_scene_projection = layer.aoi_scene.orthogonal_projection
+ aoi_frame_projection = layer_aoi_scene_projection[frame_name]
- try:
+ frame_layer.aoi_scene = layer_aoi_scene_projection.reframe(aoi_frame_projection, new_frame.size)
- layer = new_layers[frame_layer_name]
-
- layer_aoi_scene_projection = layer.aoi_scene.orthogonal_projection
- aoi_frame_projection = layer_aoi_scene_projection[frame_name]
+ if frame_layer.aoi_scan_path is not None:
- frame_layer.aoi_scene = layer_aoi_scene_projection.reframe(aoi_frame_projection, new_frame.size)
+ # Edit expected AOI list by removing AOI with name equals to frame layer name
+ expected_aois = list(layer.aoi_scene.keys())
- if frame_layer.aoi_scan_path is not None:
+ if frame_layer_name in expected_aois:
+ expected_aois.remove(frame_layer_name)
- # Edit expected AOI list by removing AOI with name equals to frame layer name
- expected_aois = list(layer.aoi_scene.keys())
+ frame_layer.aoi_scan_path.expected_aois = expected_aois
- if frame_layer_name in expected_aois:
- expected_aois.remove(frame_layer_name)
+ except KeyError:
- frame_layer.aoi_scan_path.expected_aois = expected_aois
+ continue
+ '''
- except KeyError:
+ except KeyError as e:
- continue
+ print(e)
# Append new frame
new_frames[frame_name] = new_frame
@@ -1437,7 +1443,7 @@ class ArCamera(ArFrame):
# TODO?: Should we prefer to use camera frame AOIMatcher object?
if aoi_2d.contains_point(gaze_position.value):
- inner_x, inner_y = aoi_2d.clockwise().inner_axis(gaze_position.value)
+ inner_x, inner_y = aoi_2d.clockwise().inner_axis(*gaze_position.value)
# QUESTION: How to project gaze precision?
inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y))
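In short, ArScene.from_dict() now lifts each frame's 2D layer AOIs onto the 3D plane of the scene-layer AOI that carries the frame's name, then merges them into that scene layer. A condensed sketch of that step, using the variable names from the patch:

    # 3D rectangle AOI acting as the frame plane, found in the scene layer under the frame name
    frame_3d = scene_layer.aoi_scene[frame_name]

    # 2D AOIs defined in frame pixel space, in the frame layer that shares the scene layer name
    aoi_2d_scene = new_frame.layers[scene_layer_name].aoi_scene

    # Transform the 2D frame AOIs into 3D AOIs and merge them into the scene layer
    scene_layer.aoi_scene |= aoi_2d_scene.dimensionalize(frame_3d, new_frame.size)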
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index 73c977f..f6b8dcb 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -10,7 +10,7 @@ __license__ = "BSD"
from typing import TypeVar, Tuple
from argaze import DataStructures
-from argaze.AreaOfInterest import AOIFeatures
+from argaze.AreaOfInterest import AOIFeatures, AOI3DScene
from argaze import GazeFeatures
import cv2
@@ -19,6 +19,9 @@ import numpy
AOI2DSceneType = TypeVar('AOI2DScene', bound="AOI2DScene")
# Type definition for type annotation convenience
+AOI3DSceneType = TypeVar('AOI3DScene', bound="AOI3DScene")
+# Type definition for type annotation convenience
+
class AOI2DScene(AOIFeatures.AOIScene):
"""Define AOI 2D scene."""
@@ -89,6 +92,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
yield name, aoi, matched_region, aoi_ratio, circle_ratio
+ '''DEPRECATED: but maybe still useful?
def reframe(self, aoi: AOIFeatures.AreaOfInterest, size: tuple) -> AOI2DSceneType:
"""
Reframe whole scene to a scene bounded by a 4 vertices 2D AOI.
@@ -120,3 +124,28 @@ class AOI2DScene(AOIFeatures.AOIScene):
aoi2D_scene[name] = numpy.matmul(aoi2D - Src_origin, M.T)
return aoi2D_scene
+ '''
+ def dimensionalize(self, frame_3d: AOIFeatures.AreaOfInterest, size: tuple) -> AOI3DSceneType:
+ """
+ Convert to a 3D scene, considering it lies inside a 3D rectangular frame.
+
+ Parameters:
+ frame_3d: rectangular 3D AOI to use as the reference plane
+ size: size of the frame in pixels
+
+ Returns:
+ AOI 3D scene
+ """
+
+ # Vectorize outter_axis function
+ vfunc = numpy.vectorize(frame_3d.outter_axis)
+
+ # Prepare new AOI 3D scene
+ aoi3D_scene = AOI3DScene.AOI3DScene()
+
+ for name, aoi2D in self.items():
+
+ X, Y = (aoi2D / size).T
+ aoi3D_scene[name] = numpy.array(vfunc(X, Y)).T.view(AOIFeatures.AreaOfInterest)
+
+ return aoi3D_scene
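dimensionalize() divides each 2D vertex by the frame size and maps the normalized coordinates through outter_axis() onto the 3D frame plane: a pixel (px, py) in a frame of size (w, h) becomes O + (px / w) * H + (py / h) * V, where O, H and V are the origin and the two edge vectors of the 3D rectangle. A sketch with illustrative values (the 25 x 14.96 plane and the 1920 x 1149 size mirror the demo data; the vertex ordering here is an assumption):

    from argaze.AreaOfInterest import AOIFeatures, AOI2DScene

    # Rectangular 3D frame lying in the z=0 plane, points sorted in clockwise order
    frame_3d = AOIFeatures.AreaOfInterest([[0., 0., 0.], [25., 0., 0.],
                                           [25., 14.96, 0.], [0., 14.96, 0.]])

    # One AOI expressed in pixels of a 1920 x 1149 frame
    scene_2d = AOI2DScene.AOI2DScene()
    scene_2d["RedSquare"] = AOIFeatures.AreaOfInterest([[268, 203], [576, 203],
                                                        [576, 510], [268, 510]])

    scene_3d = scene_2d.dimensionalize(frame_3d, (1920, 1149))
    # e.g. pixel (268, 203) -> (268/1920 * 25, 203/1149 * 14.96, 0) ≈ (3.49, 2.64, 0)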
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index 8ea6048..bfe189a 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -108,9 +108,9 @@ class AOI3DScene(AOIFeatures.AOIScene):
file.close()
- # retreive all aoi3D vertices
+ # retrieve all aoi3D vertices and sort them in clockwise order
for name, face in faces.items():
- aoi3D = AOIFeatures.AreaOfInterest([ vertices[i-1] for i in face ])
+ aoi3D = AOIFeatures.AreaOfInterest([ vertices[i-1] for i in reversed(face) ])
aois_3d[name] = aoi3D
except IOError:
@@ -149,8 +149,9 @@ class AOI3DScene(AOIFeatures.AOIScene):
file.write('s off\n')
file.write(vertices_ids + '\n')
+ '''DEPRECATED: but maybe still useful?
@property
- def orthogonal_projection(self) -> AOI2DScene.AOI2DScene:
+ def orthogonal_projection(self) -> AOI2DSceneType:
"""
Orthogonal projection of whole scene.
@@ -169,7 +170,7 @@ class AOI3DScene(AOIFeatures.AOIScene):
K = numpy.array([[scene_size[1]/scene_size[0], 0.0, 0.5], [0.0, 1., 0.5], [0.0, 0.0, 1.0]])
return self.project(tvec, rvec, K)
-
+ '''
def vision_cone(self, cone_radius, cone_height, cone_tip=[0., 0., 0.], cone_direction=[0., 0., 1.]) -> Tuple[AOI3DSceneType, AOI3DSceneType]:
"""Get AOI which are inside and out a given cone field.
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index e5585c5..ffaf882 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -127,8 +127,8 @@ class AreaOfInterest(numpy.ndarray):
return mpath.Path(self).contains_points([point])[0]
- def inner_axis(self, point: tuple) -> tuple:
- """Transform the coordinates from the global axis to the AOI's axis.
+ def inner_axis(self, x: float, y: float) -> tuple:
+ """Transform a point coordinates from global axis to AOI axis.
!!! warning
Available for 2D AOI only.
!!! danger
@@ -143,35 +143,30 @@ class AreaOfInterest(numpy.ndarray):
Dst = numpy.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]]).astype(numpy.float32)
P = cv2.getPerspectiveTransform(Src, Dst)
- X = numpy.append(numpy.array(numpy.array(point) - Src_origin), [1.0]).astype(numpy.float32)
+ X = numpy.append(numpy.array(numpy.array([x, y]) - Src_origin), [1.0]).astype(numpy.float32)
Y = numpy.dot(P, X)
La = (Y/Y[2])[:-1]
return tuple(numpy.around(La, 4))
- def outter_axis(self, point: tuple) -> tuple:
- """Transform the coordinates from the AOI's axis to the global axis.
- !!! warning
- Available for 2D AOI only.
+ def outter_axis(self, x: float, y: float) -> tuple:
+ """Transform a point coordinates from AOI axis to global axis.
!!! danger
- The AOI points must be sorted in clockwise order."""
-
- assert(self.dimension == 2)
+ The AOI points must be sorted in clockwise order.
+ !!! danger
+ The AOI must be a rectangle."""
- Src = numpy.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]]).astype(numpy.float32)
+ # Origin point
+ O = self[0]
- Dst = self.astype(numpy.float32)
- Dst_origin = Dst[0]
- Dst = (Dst - Dst_origin).reshape((len(Dst)), 2)
+ # Horizontal axis vector
+ H = self[1] - self[0]
- P = cv2.getPerspectiveTransform(Src, Dst)
- X = numpy.array([point[0], point[1], 1.0]).astype(numpy.float32)
- Y = numpy.dot(P, X)
+ # Vertical axis vector
+ V = self[3] - self[0]
- Lp = Dst_origin + (Y/Y[2])[:-1]
-
- return tuple(numpy.rint(Lp).astype(int))
+ return tuple(O + x * H + y * V)
def circle_intersection(self, center: tuple, radius: float) -> Tuple[numpy.array, float, float]:
"""Get intersection shape with a circle, intersection area / AOI area ratio and intersection area / circle area ratio.
@@ -353,6 +348,42 @@ class AOIScene():
return output
+ def __add__(self, add_vector) -> AOISceneType:
+ """Add vector to scene."""
+
+ assert(len(add_vector) == self.__dimension)
+
+ for name, area in self.__areas.items():
+
+ self.__areas[name] = self.__areas[name] + add_vector
+
+ return self
+
+ # Allow n + scene operation
+ __radd__ = __add__
+
+ def __sub__(self, sub_vector) -> AOISceneType:
+ """Sub vector to scene."""
+
+ assert(len(sub_vector) == self.__dimension)
+
+ for name, area in self.__areas.items():
+
+ self.__areas[name] = self.__areas[name] - sub_vector
+
+ return self
+
+ def __rsub__(self, rsub_vector) -> AOISceneType:
+ """RSub vector to scene."""
+
+ assert(len(rsub_vector) == self.__dimension)
+
+ for name, area in self.__areas.items():
+
+ self.__areas[name] = rsub_vector - self.__areas[name]
+
+ return self
+
def __mul__(self, scale_vector) -> AOISceneType:
"""Scale scene by a vector."""
@@ -367,6 +398,16 @@ class AOIScene():
# Allow n * scene operation
__rmul__ = __mul__
+ def __truediv__(self, div_vector) -> AOISceneType:
+
+ assert(len(div_vector) == self.__dimension)
+
+ for name, area in self.__areas.items():
+
+ self.__areas[name] = self.__areas[name] / div_vector
+
+ return self
+
def items(self) -> Tuple[str, AreaOfInterest]:
"""Iterate over areas."""
diff --git a/src/argaze/utils/demo_data/aoi_2d_scene.json b/src/argaze/utils/demo_data/aoi_2d_scene.json
new file mode 100644
index 0000000..a0726e8
--- /dev/null
+++ b/src/argaze/utils/demo_data/aoi_2d_scene.json
@@ -0,0 +1,5 @@
+{
+ "RedSquare": [[268, 203], [576, 203], [576, 510], [268, 510]],
+ "BlueTriangle":[[960, 664], [1113, 971], [806, 971]],
+ "GreenCircle":[[1497, 203], [1527, 206], [1556, 215], [1582, 229], [1605, 248], [1624, 271], [1639, 298], [1647, 327], [1650, 357], [1647, 387], [1639, 415], [1624, 442], [1605, 465], [1582, 484], [1556, 498], [1527, 507], [1497, 510], [1467, 507], [1438, 498], [1411, 484], [1388, 465], [1369, 442], [1355, 415], [1346, 387], [1343, 357], [1346, 327], [1355, 298], [1369, 271], [1388, 248], [1411, 229], [1438, 215], [1467, 206]]
+} \ No newline at end of file
diff --git a/src/argaze/utils/demo_data/aoi_3d_scene.obj b/src/argaze/utils/demo_data/aoi_3d_scene.obj
index d32e235..0ce97de 100644
--- a/src/argaze/utils/demo_data/aoi_3d_scene.obj
+++ b/src/argaze/utils/demo_data/aoi_3d_scene.obj
@@ -1,5 +1,3 @@
-# Blender v3.0.1 OBJ File: 'ar_camera.blend'
-# www.blender.org
o GrayRectangle
v 0.000000 0.000000 0.000000
v 25.000000 0.000000 0.000000
@@ -7,51 +5,3 @@ v 0.000000 14.960000 0.000000
v 25.000000 14.960000 0.000000
s off
f 1 2 4 3
-o RedSquare
-v 3.497026 8.309391 0.000000
-v 7.504756 8.309391 0.000000
-v 3.497026 12.314838 0.001030
-v 7.504756 12.314838 0.001030
-s off
-f 5 6 8 7
-o BlueTriangle
-v 10.500295 2.307687 0.000000
-v 14.503224 2.306344 0.000000
-v 12.502419 6.312207 0.001030
-s off
-f 9 10 11
-o GreenCircle
-v 19.495552 12.311101 0.000000
-v 19.105371 12.272672 0.000000
-v 18.730185 12.158860 0.000000
-v 18.384411 11.974040 0.000000
-v 18.081339 11.725314 0.000000
-v 17.832613 11.422241 0.000000
-v 17.647793 11.076468 0.000000
-v 17.533981 10.701282 0.000000
-v 17.495552 10.311101 0.000000
-v 17.533981 9.920920 0.000000
-v 17.647793 9.545734 0.000000
-v 17.832613 9.199961 0.000000
-v 18.081339 8.896888 0.000000
-v 18.384411 8.648162 0.000000
-v 18.730185 8.463342 0.000000
-v 19.105371 8.349530 0.000000
-v 19.495552 8.311101 0.000000
-v 19.885733 8.349530 0.000000
-v 20.260920 8.463342 0.000000
-v 20.606693 8.648162 0.000000
-v 20.909765 8.896887 0.000000
-v 21.158491 9.199960 0.000000
-v 21.343311 9.545733 0.000000
-v 21.457123 9.920920 0.000000
-v 21.495552 10.311101 0.000000
-v 21.457123 10.701282 0.000000
-v 21.343311 11.076468 0.000000
-v 21.158491 11.422241 0.000000
-v 20.909765 11.725314 0.000000
-v 20.606693 11.974040 0.000000
-v 20.260920 12.158860 0.000000
-v 19.885733 12.272672 0.000000
-s off
-f 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 12
diff --git a/src/argaze/utils/demo_data/demo_aruco_markers_setup.json b/src/argaze/utils/demo_data/demo_aruco_markers_setup.json
index 9a3b79f..5168297 100644
--- a/src/argaze/utils/demo_data/demo_aruco_markers_setup.json
+++ b/src/argaze/utils/demo_data/demo_aruco_markers_setup.json
@@ -53,7 +53,7 @@
},
"frames": {
"GrayRectangle": {
- "size": [640, 383],
+ "size": [1920, 1149],
"background": "frame_background.jpg",
"gaze_movement_identifier": {
"DispersionThresholdIdentification": {
@@ -65,12 +65,10 @@
"duration_max": 10000
},
"layers": {
- "GrayRectangle": {
- "aoi_scene": "aoi_3d_scene.obj",
+ "main_layer": {
+ "aoi_scene": "aoi_2d_scene.json",
"aoi_matcher": {
- "FocusPointInside": {
- "exclude": ["GrayRectangle"]
- }
+ "FocusPointInside": {}
}
}
},
@@ -91,7 +89,7 @@
}
},
"draw_layers": {
- "GrayRectangle": {
+ "main_layer": {
"draw_aoi_scene": {
"draw_aoi": {
"color": [255, 255, 255],
diff --git a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
index 414a6fe..52945ae 100644
--- a/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
+++ b/src/argaze/utils/demo_data/demo_gaze_analysis_setup.json
@@ -25,11 +25,10 @@
"size": [320, 240]
},
"layers": {
- "GrayRectangle": {
- "aoi_scene": "aoi_3d_scene.obj",
+ "main_layer": {
+ "aoi_scene": "aoi_2d_scene.json",
"aoi_matcher": {
"DeviationCircleCoverage": {
- "exclude": ["GrayRectangle"],
"coverage_threshold": 0.5
}
},
@@ -64,7 +63,7 @@
"deepness": 0
},
"draw_layers": {
- "GrayRectangle": {
+ "main_layer": {
"draw_aoi_scene": {
"draw_aoi": {
"color": [255, 255, 255],
diff --git a/src/argaze/utils/demo_gaze_analysis_run.py b/src/argaze/utils/demo_gaze_analysis_run.py
index 465c5db..789657b 100644
--- a/src/argaze/utils/demo_gaze_analysis_run.py
+++ b/src/argaze/utils/demo_gaze_analysis_run.py
@@ -74,18 +74,18 @@ def main():
# Write last 5 steps of aoi scan path
path = ''
- for step in ar_frame.layers["GrayRectangle"].aoi_scan_path[-5:]:
+ for step in ar_frame.layers["main_layer"].aoi_scan_path[-5:]:
path += f'> {step.aoi} '
- path += f'> {ar_frame.layers["GrayRectangle"].aoi_scan_path.current_aoi}'
+ path += f'> {ar_frame.layers["main_layer"].aoi_scan_path.current_aoi}'
cv2.putText(frame_image, path, (20, ar_frame.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
# Display Transition matrix analysis if loaded
try:
- transition_matrix_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.TransitionMatrix"]
+ transition_matrix_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.TransitionMatrix"]
cv2.putText(frame_image, f'Transition matrix density: {transition_matrix_analyzer.transition_matrix_density:.2f}', (20, ar_frame.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
@@ -97,8 +97,8 @@ def main():
if from_aoi != to_aoi and probability > 0.0:
- from_center = ar_frame.layers['GrayRectangle'].aoi_scene[from_aoi].center.astype(int)
- to_center = ar_frame.layers['GrayRectangle'].aoi_scene[to_aoi].center.astype(int)
+ from_center = ar_frame.layers["main_layer"].aoi_scene[from_aoi].center.astype(int)
+ to_center = ar_frame.layers["main_layer"].aoi_scene[to_aoi].center.astype(int)
start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
@@ -112,7 +112,7 @@ def main():
# Display aoi scan path basic metrics analysis if loaded
try:
- basic_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Basic"]
+ basic_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.Basic"]
# Write basic analysis
cv2.putText(frame_image, f'Step number: {basic_analyzer.steps_number}', (20, ar_frame.size[1]-440), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
@@ -141,7 +141,7 @@ def main():
# Display aoi scan path K-modified coefficient analysis if loaded
try:
- aoi_kc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"]
+ aoi_kc_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.KCoefficient"]
# Write aoi Kc analysis
if aoi_kc_analyzer.K < 0.:
@@ -158,7 +158,7 @@ def main():
# Display Lempel-Ziv complexity analysis if loaded
try:
- lzc_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.LempelZivComplexity"]
+ lzc_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.LempelZivComplexity"]
cv2.putText(frame_image, f'Lempel-Ziv complexity: {lzc_analyzer.lempel_ziv_complexity}', (20, ar_frame.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
@@ -168,7 +168,7 @@ def main():
# Display N-Gram analysis if loaded
try:
- ngram_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.NGram"]
+ ngram_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.NGram"]
# Display only 3-gram analysis
start = ar_frame.size[1] - ((len(ngram_analyzer.ngrams_count[3]) + 1) * 40)
@@ -188,7 +188,7 @@ def main():
# Display Entropy analysis if loaded
try:
- entropy_analyzer = ar_frame.layers['GrayRectangle'].aoi_scan_path_analyzers["argaze.GazeAnalysis.Entropy"]
+ entropy_analyzer = ar_frame.layers["main_layer"].aoi_scan_path_analyzers["argaze.GazeAnalysis.Entropy"]
cv2.putText(frame_image, f'Stationary entropy: {entropy_analyzer.stationary_entropy:.3f},', (20, ar_frame.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
cv2.putText(frame_image, f'Transition entropy: {entropy_analyzer.transition_entropy:.3f},', (20, ar_frame.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)