-rw-r--r--  docs/user_guide/areas_of_interest/aoi_frame.md    43
-rw-r--r--  docs/user_guide/areas_of_interest/heatmap.md      40
-rw-r--r--  mkdocs.yml                                          2
-rw-r--r--  src/argaze/ArFeatures.py                           90
-rw-r--r--  src/argaze/AreaOfInterest/AOI2DScene.py             2
-rw-r--r--  src/argaze/AreaOfInterest/AOI3DScene.py            21
-rw-r--r--  src/argaze/AreaOfInterest/AOIFeatures.py           41
-rw-r--r--  src/argaze/utils/demo_ar_features_run.py           28
-rw-r--r--  src/argaze/utils/demo_environment/aoi_scene.obj     2
-rw-r--r--  src/argaze/utils/demo_environment/setup.json        5
-rw-r--r--  src/argaze/utils/demo_heatmap_run.py               11
11 files changed, 170 insertions, 115 deletions
diff --git a/docs/user_guide/areas_of_interest/aoi_frame.md b/docs/user_guide/areas_of_interest/aoi_frame.md
deleted file mode 100644
index 350efa8..0000000
--- a/docs/user_guide/areas_of_interest/aoi_frame.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-title: AOI frame
----
-
-AOI Frame
-=========
-
-[AOIFeatures](../../../argaze/#argaze/AreaOfInterest.AOIFeatures) provides [AOIFrame](../../../argaze/#argaze/AreaOfInterest.AOIFeatures.AOIFrame) class to draw into an 2D AOI.
-
-## Point spread
-
-The **point_spread** method draw a gaussian point spread into the frame at a given pointer position.
-
-![Point spread](../../img/point_spread.png)
-
-## Heatmap
-
-Heatmap visualisation allows to show where a pointer is most of the time.
-
-![Heatmap](../../img/heatmap.png)
-
-```python
-from argaze.AreaOfInterest import AOIFeatures
-
-# Create an AOI
-aoi = AOIFeatures.AreaOfInterest([[0, 0], [1, 0], [1, 1], [0, 1]])
-
-# Create AOIFrame related to this AOI with 800px * 600px resolution
-aoi_frame = AOIFeatures.AOIFrame(aoi, (800, 600))
-
-# Initialize heatmap
-aoi_frame.heatmap_init()
-
-# Assuming a pointer position (x, y) is moving inside frame
-...:
-
- # Update heatmap at pointer position
- aoi_frame.heatmap_update((x, y), sigma=0.05)
-
- # Do something with heatmap picture
- ... aoi_frame.heatmap
-
-```
\ No newline at end of file
diff --git a/docs/user_guide/areas_of_interest/heatmap.md b/docs/user_guide/areas_of_interest/heatmap.md
new file mode 100644
index 0000000..6142c5f
--- /dev/null
+++ b/docs/user_guide/areas_of_interest/heatmap.md
@@ -0,0 +1,40 @@
+---
+title: Heatmap
+---
+
+Heatmap
+=========
+
+[AOIFeatures](../../../argaze/#argaze.AreaOfInterest.AOIFeatures) provides a [Heatmap](../../../argaze/#argaze.AreaOfInterest.AOIFeatures.Heatmap) class to draw a heatmap image.
+
+## Point spread
+
+The **point_spread** method draws a Gaussian point spread into the heatmap image at a given pointer position.
+
+![Point spread](../../img/point_spread.png)
+
+## Heatmap
+
+Heatmap visualisation shows where a pointer spends most of its time.
+
+![Heatmap](../../img/heatmap.png)
+
+```python
+from argaze.AreaOfInterest import AOIFeatures
+
+# Create heatmap of 800px * 600px resolution
+heatmap = AOIFeatures.Heatmap((800, 600))
+
+# Initialize heatmap
+heatmap.init()
+
+# Assuming a pointer position (x, y) is moving inside the heatmap image
+...:
+
+ # Update heatmap at pointer position
+ heatmap.update((x, y), sigma=0.05)
+
+ # Do something with heatmap image
+ ... heatmap.image
+
+```
\ No newline at end of file
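
For reference, a minimal runnable variant of the example above, using only the `Heatmap` calls introduced by this commit (`init`, `update` and the `image` property); the diagonal pointer path and the output filename are illustrative only:

```python
import cv2

from argaze.AreaOfInterest import AOIFeatures

# Create heatmap of 800px * 600px resolution
heatmap = AOIFeatures.Heatmap((800, 600))

# Initialize heatmap
heatmap.init()

# Simulate a pointer moving along a diagonal inside the image (illustrative path)
for step in range(100):

    x, y = 8 * step, 6 * step

    # Update heatmap at pointer position
    heatmap.update((x, y), sigma=0.05)

# Save the resulting heatmap image
cv2.imwrite('heatmap.png', heatmap.image)
```
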
diff --git a/mkdocs.yml b/mkdocs.yml
index 4912cc5..a77d2ad 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -18,7 +18,7 @@ nav:
- user_guide/areas_of_interest/aoi_scene_projection.md
- user_guide/areas_of_interest/vision_cone_filtering.md
- user_guide/areas_of_interest/aoi_matching.md
- - user_guide/areas_of_interest/aoi_frame.md
+ - user_guide/areas_of_interest/heatmap.md
- Augmented Reality environment:
- user_guide/ar_environment/introduction.md
- user_guide/ar_environment/environment_setup.md
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 4aedb2b..3101a45 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -24,6 +24,9 @@ ArEnvironmentType = TypeVar('ArEnvironment', bound="ArEnvironment")
ArSceneType = TypeVar('ArScene', bound="ArScene")
# Type definition for type annotation convenience
+ArScreenType = TypeVar('ArScreen', bound="ArScreen")
+# Type definition for type annotation convenience
+
@dataclass
class ArEnvironment():
"""
@@ -95,6 +98,7 @@ class ArEnvironment():
new_aruco_detector = ArUcoDetector.ArUcoDetector(new_aruco_dictionary, new_marker_size, new_optic_parameters, new_aruco_detector_parameters)
+ # Build scenes
new_scenes = {}
for scene_name, scene_data in data.pop('scenes').items():
@@ -129,7 +133,17 @@ class ArEnvironment():
new_aoi_scene = AOI3DScene.AOI3DScene(aoi_scene_value)
- new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_scene, **scene_data)
+ # Build screens
+ new_screens = {}
+ for screen_name, screen_data in scene_data.pop('screens').items():
+
+ new_screen_size = screen_data.pop('size')
+
+ # Append new screen
+ new_screens[screen_name] = ArScreen.from_scene(new_aoi_scene, screen_name, new_screen_size)
+
+ # Append new scene
+ new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_scene, new_screens, **scene_data)
return ArEnvironment(new_name, new_aruco_detector, new_scenes)
@@ -184,6 +198,8 @@ class ArScene():
aoi_scene: AOI 3D scene description that will be projected onto estimated scene once its pose will be estimated : see [project][argaze.ArFeatures.ArScene.project] function below.
+ screens: All scene screens
+
aruco_axis: Optional dictionary to define orthogonal axis where each axis is defined by list of 3 markers identifier (first is origin). \
This pose estimation strategy is used by [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function when at least 3 markers are detected.
@@ -196,6 +212,7 @@ class ArScene():
aruco_scene: ArUcoScene.ArUcoScene = field(default_factory=ArUcoScene.ArUcoScene)
aoi_scene: AOI3DScene.AOI3DScene = field(default_factory=AOI3DScene.AOI3DScene)
+ screens: dict = field(default_factory=dict)
aruco_axis: dict = field(default_factory=dict)
aruco_aoi: dict = field(default_factory=dict)
angle_tolerance: float = field(default=0.)
@@ -207,7 +224,11 @@ class ArScene():
self._environment = None
# Preprocess orthogonal projection to speed up further aruco aoi processings
- self.__orthogonal_projection_cache = self.orthogonal_projection
+ self.__orthogonal_projection_cache = self.aoi_scene.orthogonal_projection
+
+ # Set the parent scene reference of each screen after creation
+ for name, screen in self.screens.items():
+ screen._scene = self
def __str__(self) -> str:
"""
@@ -221,27 +242,6 @@ class ArScene():
return output
- @property
- def orthogonal_projection(self) -> AOI2DScene.AOI2DScene:
- """
- Orthogonal projection of whole AOI scene.
-
- Returns:
- projected AOI 2D scene
- """
-
- scene_size = self.aoi_scene.size
- scene_center = self.aoi_scene.center
-
- # Center, step back and rotate pose to get whole scene into field of view
- tvec = scene_center*[-1, 1, 0] + [0, 0, scene_size[1]]
- rvec = numpy.array([[-numpy.pi, 0.0, 0.0]])
-
- # Edit optic intrinsic parameter to capture whole scene
- K = numpy.array([[scene_size[1]/scene_size[0], 0.0, 0.5], [0.0, 1., 0.5], [0.0, 0.0, 1.0]])
-
- return self.aoi_scene.project(tvec, rvec, K)
-
def estimate_pose(self, detected_markers) -> Tuple[numpy.array, numpy.array, str, dict]:
"""Estimate scene pose from detected ArUco markers.
@@ -405,4 +405,46 @@ class ArScene():
self.aruco_scene.draw_places(image, self._environment.aruco_detector.optic_parameters.K, self._environment.aruco_detector.optic_parameters.D)
-
+@dataclass
+class ArScreen():
+ """
+ Define Augmented Reality screen as an AOI2DScene made from a projected then reframed parent AOI3DScene.
+
+ Parameters:
+ name: name of the screen
+ size: screen dimension in pixels.
+ aoi_screen: AOI 2D scene description made by projecting then reframing the parent AOI 3D scene: see [orthogonal_projection][argaze.AreaOfInterest.AOI3DScene.orthogonal_projection] and [reframe][argaze.AreaOfInterest.AOI2DScene.reframe] functions.
+ """
+
+ name: str
+ size: tuple[int] = field(default=(1, 1))
+ aoi_screen: AOI2DScene.AOI2DScene = field(default_factory=AOI2DScene.AOI2DScene)
+
+ def __post_init__(self):
+
+ # Define scene attribute: it will be setup by parent scene later
+ self._scene = None
+
+ # Init screen
+ self.init()
+
+ @classmethod
+ def from_scene(cls, aoi_scene, aoi_name, size) -> ArScreenType:
+
+ return cls(aoi_name, size, aoi_scene.orthogonal_projection.reframe(aoi_name, size))
+
+ @property
+ def image(self):
+ """Get screen image."""
+
+ return self.__image
+
+ def init(self):
+ """Initialize screen image."""
+
+ self.__image = numpy.zeros((self.size[1], self.size[0], 3)).astype(numpy.uint8)
+
+ def draw_aoi(self, color=(255, 255, 255)):
+ """Draw aoi into screen image."""
+
+ self.aoi_screen.draw(self.__image, color)
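
A minimal sketch of the new `ArScreen` API introduced above, assuming `aoi_scene` is an already loaded `AOI3DScene` containing a "GrayRectangle" AOI as in the demo environment:

```python
import cv2

from argaze import ArFeatures

# Build a 320px * 240px screen reframed around the "GrayRectangle" AOI
# (aoi_scene is assumed to be an already loaded AOI3DScene instance)
screen = ArFeatures.ArScreen.from_scene(aoi_scene, "GrayRectangle", (320, 240))

# Reset the screen image then draw the reframed AOI into it
screen.init()
screen.draw_aoi(color=(255, 255, 255))

# Display the rendered screen image
cv2.imshow(screen.name, screen.image)
cv2.waitKey(0)
```
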
diff --git a/src/argaze/AreaOfInterest/AOI2DScene.py b/src/argaze/AreaOfInterest/AOI2DScene.py
index 1d4624a..cb9dd1d 100644
--- a/src/argaze/AreaOfInterest/AOI2DScene.py
+++ b/src/argaze/AreaOfInterest/AOI2DScene.py
@@ -133,7 +133,7 @@ class AOI2DScene(AOIFeatures.AOIScene):
M = cv2.getAffineTransform(Src[:3], Dst[:3])[:, :2]
- # Apply perspective matrix to each AOI
+ # Apply affine transformation to each AOI
aoi2D_scene = AOI2DScene()
for name, aoi2D in self.items():
diff --git a/src/argaze/AreaOfInterest/AOI3DScene.py b/src/argaze/AreaOfInterest/AOI3DScene.py
index cb5b5a3..3888d75 100644
--- a/src/argaze/AreaOfInterest/AOI3DScene.py
+++ b/src/argaze/AreaOfInterest/AOI3DScene.py
@@ -149,6 +149,27 @@ class AOI3DScene(AOIFeatures.AOIScene):
file.write('s off\n')
file.write(vertices_ids + '\n')
+ @property
+ def orthogonal_projection(self) -> AOI2DScene.AOI2DScene:
+ """
+ Orthogonal projection of whole scene.
+
+ Returns:
+ projected AOI 2D scene
+ """
+
+ scene_size = self.size
+ scene_center = self.center
+
+ # Center, step back and rotate pose to get whole scene into field of view
+ tvec = scene_center*[-1, 1, 0] + [0, 0, scene_size[1]]
+ rvec = numpy.array([[-numpy.pi, 0.0, 0.0]])
+
+ # Edit optic intrinsic parameter to capture whole scene
+ K = numpy.array([[scene_size[1]/scene_size[0], 0.0, 0.5], [0.0, 1., 0.5], [0.0, 0.0, 1.0]])
+
+ return self.project(tvec, rvec, K)
+
def vision_cone(self, cone_radius, cone_height, cone_tip=[0., 0., 0.], cone_direction=[0., 0., 1.]) -> Tuple[AOI3DSceneType, AOI3DSceneType]:
"""Get AOI which are inside and out a given cone field.
diff --git a/src/argaze/AreaOfInterest/AOIFeatures.py b/src/argaze/AreaOfInterest/AOIFeatures.py
index 07ef7c4..68a5b6b 100644
--- a/src/argaze/AreaOfInterest/AOIFeatures.py
+++ b/src/argaze/AreaOfInterest/AOIFeatures.py
@@ -369,18 +369,13 @@ class TimeStampedAOIScenes(DataStructures.TimeStampedBuffer):
super().__setitem__(ts, scene)
-AOIFrameType = TypeVar('AOIFrame', bound="AOIFrame")
+HeatmapType = TypeVar('Heatmap', bound="Heatmap")
# Type definition for type annotation convenience
-class AOIFrame():
- """Define image to draw into 2D AOI."""
+class Heatmap():
+ """Define image to draw heatmap."""
- def __init__(self, aoi: AreaOfInterestType, size: tuple):
- """
- !!! warning
- Available for 2D AOI only."""
-
- assert(aoi.dimension == 2)
+ def __init__(self, size: tuple):
self.__rX, self.__rY = size
@@ -389,7 +384,7 @@ class AOIFrame():
self.__Sy = numpy.linspace(0., 1., self.__rY)
# Init heatmap
- self.heatmap_init()
+ self.init()
def point_spread(self, point: tuple, sigma: float):
"""Draw gaussian point spread into image."""
@@ -406,18 +401,18 @@ class AOIFrame():
return numpy.exp((v_dX + v_dY) / div).reshape(self.__rY, self.__rX)
- def heatmap_init(self, buffer_size: int = 0):
- """Initialize heatmap matrix."""
+ def init(self, buffer_size: int = 0):
+ """Initialize heatmap image."""
self.__point_spread_sum = numpy.zeros((self.__rY, self.__rX))
self.__point_spread_buffer = []
self.__point_spread_buffer_size = buffer_size
- def heatmap_update(self, point: tuple, sigma: float):
- """Update heatmap matrix.
+ def update(self, point: tuple, sigma: float):
+ """Update heatmap image.
!!! danger
- Call heatmap_init() method before any update."""
+ Call init() method before any update."""
point_spread = self.point_spread(point, sigma)
@@ -435,29 +430,29 @@ class AOIFrame():
self.__point_spread_sum -= self.__point_spread_buffer.pop(0)
# Edit heatmap
- heatmap_gray = (255 * self.__point_spread_sum / numpy.max(self.__point_spread_sum)).astype(numpy.uint8)
- self.__heatmap_matrix = cv2.applyColorMap(heatmap_gray, cv2.COLORMAP_JET)
+ gray = (255 * self.__point_spread_sum / numpy.max(self.__point_spread_sum)).astype(numpy.uint8)
+ self.__image = cv2.applyColorMap(gray, cv2.COLORMAP_JET)
@property
- def heatmap_buffer(self) -> int:
+ def buffer(self) -> int:
"""Get size of heatmap buffer."""
return self.__point_spread_buffer_size
- @heatmap_buffer.setter
- def heatmap_buffer(self, size: int):
+ @buffer.setter
+ def buffer(self, size: int):
"""Set size of heatmap buffer (0 means no buffering)."""
self.__point_spread_buffer = []
self.__point_spread_buffer_size = size
@property
- def heatmap(self):
- """Get heatmap matrix."""
+ def image(self):
+ """Get heatmap image."""
try:
- return self.__heatmap_matrix
+ return self.__image
except AttributeError:
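
In short, the former `AOIFrame` heatmap API is now exposed directly by `Heatmap`. A minimal sketch, including the optional point-spread buffer (the 100-sample window size is only an example):

```python
from argaze.AreaOfInterest import AOIFeatures

# Heatmap no longer takes an AOI argument, only a pixel resolution
heatmap = AOIFeatures.Heatmap((800, 600))

# Keep only the last 100 point spreads (0, the default, means no buffering)
heatmap.init(buffer_size=100)

# heatmap_update() becomes update() and the heatmap property becomes image
heatmap.update((400, 300), sigma=0.05)
image = heatmap.image
```
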
diff --git a/src/argaze/utils/demo_ar_features_run.py b/src/argaze/utils/demo_ar_features_run.py
index 990e234..b241b26 100644
--- a/src/argaze/utils/demo_ar_features_run.py
+++ b/src/argaze/utils/demo_ar_features_run.py
@@ -37,13 +37,11 @@ def main():
# Access to main AR scene
demo_scene = demo_environment.scenes["AR Scene Demo"]
- # Reframe AR scene to a scene bounded by screen AOI
- screen_name = "Screen"
- screen_size = (320, 240)
- screen_scene = demo_scene.orthogonal_projection.reframe(screen_name, screen_size)
+ # Access to main AR screen
+ demo_screen = demo_scene.screens["GrayRectangle"]
- # Create a window to display screen projection
- cv2.namedWindow(screen_name, cv2.WINDOW_AUTOSIZE)
+ # Create a window to display AR screen
+ cv2.namedWindow(demo_screen.name, cv2.WINDOW_AUTOSIZE)
# Init mouse interaction
pointer = (0, 0)
@@ -71,8 +69,9 @@ def main():
# Read video image
success, video_image = video_capture.read()
- # Create screen image
- screen_image = numpy.zeros((240, 320, 3)).astype(numpy.uint8)
+ # Reset screen image
+ demo_screen.init()
+ demo_screen.draw_aoi()
if success:
@@ -103,12 +102,12 @@ def main():
# Draw AOI scene projection
aoi_scene_projection.draw(video_image, color=(255, 255, 255))
- # Project pointer into screen
- if aoi_scene_projection[screen_name].contains_point(pointer):
+ # Project pointer into screen image
+ if aoi_scene_projection[demo_screen.name].contains_point(pointer):
- inner_x, inner_y = aoi_scene_projection[screen_name].clockwise().inner_axis(pointer)
+ inner_x, inner_y = aoi_scene_projection[demo_screen.name].clockwise().inner_axis(pointer)
- cv2.circle(screen_image, (int(inner_x * screen_size[0]), int(inner_y * screen_size[1])), 5, (255, 255, 255), -1)
+ cv2.circle(demo_screen.image, (int(inner_x * demo_screen.size[0]), int(inner_y * demo_screen.size[1])), 5, (255, 255, 255), -1)
# Catch exceptions raised by estimate_pose and project methods
except (ArFeatures.PoseEstimationFailed, ArFeatures.SceneProjectionFailed) as e:
@@ -119,11 +118,8 @@ def main():
# Draw video image
cv2.imshow(demo_environment.name, video_image)
- # Draw screen scene
- screen_scene.draw(screen_image, color=(255, 255, 255))
-
# Draw screen image
- cv2.imshow(screen_name, screen_image)
+ cv2.imshow(demo_screen.name, demo_screen.image)
# Stop by pressing 'Esc' key
if cv2.waitKey(10) == 27:
diff --git a/src/argaze/utils/demo_environment/aoi_scene.obj b/src/argaze/utils/demo_environment/aoi_scene.obj
index e29d94c..8922e78 100644
--- a/src/argaze/utils/demo_environment/aoi_scene.obj
+++ b/src/argaze/utils/demo_environment/aoi_scene.obj
@@ -1,6 +1,6 @@
# Blender v3.0.1 OBJ File: 'ar_environment.blend'
# www.blender.org
-o Screen
+o GrayRectangle
v 0.000000 0.000000 0.000000
v 25.000000 0.000000 0.000000
v 0.000000 14.960000 0.000000
diff --git a/src/argaze/utils/demo_environment/setup.json b/src/argaze/utils/demo_environment/setup.json
index 58c7c0d..582c231 100644
--- a/src/argaze/utils/demo_environment/setup.json
+++ b/src/argaze/utils/demo_environment/setup.json
@@ -16,6 +16,11 @@
"AR Scene Demo" : {
"aruco_scene": "aruco_scene.obj",
"aoi_scene": "aoi_scene.obj",
+ "screens": {
+ "GrayRectangle": {
+ "size": [320, 240]
+ }
+ },
"aruco_axis": {
"lower_left_corner": {
"origin_marker": 2,
diff --git a/src/argaze/utils/demo_heatmap_run.py b/src/argaze/utils/demo_heatmap_run.py
index e4672d4..be31d31 100644
--- a/src/argaze/utils/demo_heatmap_run.py
+++ b/src/argaze/utils/demo_heatmap_run.py
@@ -12,30 +12,29 @@ def main():
window_name = 'Heatmap'
image_size = (800, 600)
- aoi = AOIFeatures.AreaOfInterest([[0, 0], [1, 0], [1, 1], [0, 1]])
- aoi_image = AOIFeatures.AOIFrame(aoi, image_size)
+ heatmap = AOIFeatures.Heatmap(image_size)
- aoi_image.heatmap_init()
+ heatmap.init()
cv2.namedWindow(window_name, cv2.WINDOW_AUTOSIZE)
# Update pointer position
def on_mouse_event(event, x, y, flags, param):
- aoi_image.heatmap_update((x, y), sigma=0.05)
+ heatmap.update((x, y), sigma=0.05)
# Attach mouse callback to window
cv2.setMouseCallback(window_name, on_mouse_event)
while True:
- cv2.imshow(window_name, aoi_image.heatmap)
+ cv2.imshow(window_name, heatmap.image)
# Stop and save picture by pressing 'Esc' key
if cv2.waitKey(10) == 27:
current_directory = os.path.dirname(os.path.abspath(__file__))
- cv2.imwrite(os.path.join(current_directory,'heatmap.png'), aoi_image.heatmap)
+ cv2.imwrite(os.path.join(current_directory,'heatmap.png'), heatmap.image)
break