diff options
author | Théo de la Hogue | 2023-06-28 15:47:41 +0200 |
---|---|---|
committer | Théo de la Hogue | 2023-06-28 15:47:41 +0200 |
commit | 9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b (patch) | |
tree | d791fb6bb7d9f0fceb16451548707a2be9d38483 /src | |
parent | 40c339f7f153537a419dade38fd7cbafa2d411ba (diff) | |
download | argaze-9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b.zip argaze-9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b.tar.gz argaze-9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b.tar.bz2 argaze-9078d44f26c0234a9b2cfdf1d5e0bb280ac6e75b.tar.xz |
Renaming aoi_scene into aoi_3d_scene in ArScene class.
Diffstat (limited to 'src')
-rw-r--r-- | src/argaze/ArFeatures.py | 68 | ||||
-rw-r--r-- | src/argaze/utils/demo_environment/aoi_3d_scene.obj (renamed from src/argaze/utils/demo_environment/aoi_scene.obj) | 0 | ||||
-rw-r--r-- | src/argaze/utils/demo_environment/demo_ar_features_setup.json | 6 | ||||
-rw-r--r-- | src/argaze/utils/demo_environment/demo_gaze_features_setup.json | 2 |
4 files changed, 38 insertions, 38 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py index 76d048d..86feb48 100644 --- a/src/argaze/ArFeatures.py +++ b/src/argaze/ArFeatures.py @@ -52,7 +52,7 @@ class ArEnvironment(): scene._environment = self # Init AOI scene projections - self.__aoi_scene_projections = {} + self.__aoi_2d_scenes = {} @classmethod def from_json(self, json_filepath: str) -> ArSceneType: @@ -137,19 +137,19 @@ class ArEnvironment(): new_aruco_scene = None - # Check aoi_scene value type - aoi_scene_value = scene_data.pop('aoi_scene') + # Check aoi_3d_scene value type + aoi_3d_scene_value = scene_data.pop('aoi_3d_scene') # str: relative path to .obj file - if type(aoi_scene_value) == str: + if type(aoi_3d_scene_value) == str: - obj_filepath = os.path.join(working_directory, aoi_scene_value) - new_aoi_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath) + obj_filepath = os.path.join(working_directory, aoi_3d_scene_value) + new_aoi_3d_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath) # dict: else: - new_aoi_scene = AOI3DScene.AOI3DScene(aoi_scene_value) + new_aoi_3d_scene = AOI3DScene.AOI3DScene(aoi_3d_scene_value) # Build screens new_screens = {} @@ -184,10 +184,10 @@ class ArEnvironment(): gaze_movement_identifier = None # Append new screen - new_screens[screen_name] = ArScreen.from_scene(new_aoi_scene, screen_name, new_screen_size, new_screen_background, gaze_movement_identifier, **screen_data) + new_screens[screen_name] = ArScreen.from_scene(new_aoi_3d_scene, screen_name, new_screen_size, new_screen_background, gaze_movement_identifier, **screen_data) # Append new scene - new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_scene, new_screens, **scene_data) + new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_3d_scene, new_screens, **scene_data) return ArEnvironment(new_name, new_aruco_detector, new_scenes) @@ -213,10 +213,10 @@ class ArEnvironment(): self.aruco_detector.draw_detected_markers(self.__image) # Draw each AOI scene - for scene_name, 
aoi_scene_projection in self.__aoi_scene_projections.items(): + for scene_name, aoi_2d_scene in self.__aoi_2d_scenes.items(): # Draw AOI scene projection - aoi_scene_projection.draw(self.__image, color=(255, 255, 255)) + aoi_2d_scene.draw(self.__image, color=(255, 255, 255)) return self.__image @@ -241,14 +241,14 @@ class ArEnvironment(): self.aruco_detector.detect_markers(self.__image) # Project each AOI scene - self.__aoi_scene_projections = {} + self.__aoi_2d_scenes = {} for scene_name, scene in self.scenes.items(): # Project scene try: # Try to build AOI scene from detected ArUco marker corners - self.__aoi_scene_projections[scene_name] = scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers) + self.__aoi_2d_scenes[scene_name] = scene.build_aruco_aoi_scene(self.aruco_detector.detected_markers) except: @@ -259,7 +259,7 @@ class ArEnvironment(): tvec, rmat, _, _ = scene.estimate_pose(self.aruco_detector.detected_markers) # Project AOI scene into video image according estimated pose - self.__aoi_scene_projections[scene_name] = scene.project(tvec, rmat) + self.__aoi_2d_scenes[scene_name] = scene.project(tvec, rmat) def look(self, timestamp: int|float, gaze_position: GazeFeatures.GazePosition): """Project gaze position into environment at particular time.""" @@ -269,15 +269,15 @@ class ArEnvironment(): try: - aoi_scene_projection = self.__aoi_scene_projections[scene_name] + aoi_2d_scene = self.__aoi_2d_scenes[scene_name] # For each scene screens for screen_name, screen in scene.screens.items(): # TODO: Add option to use gaze precision circle - if aoi_scene_projection[screen.name].contains_point(gaze_position.value): + if aoi_2d_scene[screen.name].contains_point(gaze_position.value): - inner_x, inner_y = self.__aoi_scene_projections[scene_name][screen.name].clockwise().inner_axis(gaze_position.value) + inner_x, inner_y = self.__aoi_2d_scenes[scene_name][screen.name].clockwise().inner_axis(gaze_position.value) # QUESTION: How to project gaze precision? 
inner_gaze_position = GazeFeatures.GazePosition((inner_x, inner_y)) @@ -324,7 +324,7 @@ class ArScene(): Parameters: aruco_scene: ArUco markers 3D scene description used to estimate scene pose from detected markers: see [estimate_pose][argaze.ArFeatures.ArScene.estimate_pose] function below. - aoi_scene: AOI 3D scene description that will be projected onto estimated scene once its pose will be estimated : see [project][argaze.ArFeatures.ArScene.project] function below. + aoi_3d_scene: AOI 3D scene description that will be projected onto estimated scene once its pose will be estimated : see [project][argaze.ArFeatures.ArScene.project] function below. screens: All scene screens @@ -339,7 +339,7 @@ class ArScene(): """ aruco_scene: ArUcoScene.ArUcoScene = field(default_factory=ArUcoScene.ArUcoScene) - aoi_scene: AOI3DScene.AOI3DScene = field(default_factory=AOI3DScene.AOI3DScene) + aoi_3d_scene: AOI3DScene.AOI3DScene = field(default_factory=AOI3DScene.AOI3DScene) screens: dict = field(default_factory=dict) aruco_axis: dict = field(default_factory=dict) aruco_aoi: dict = field(default_factory=dict) @@ -352,7 +352,7 @@ class ArScene(): self._environment = None # Preprocess orthogonal projection to speed up further aruco aoi processings - self.__orthogonal_projection_cache = self.aoi_scene.orthogonal_projection + self.__orthogonal_projection_cache = self.aoi_3d_scene.orthogonal_projection # Setup screens scene after screen creation for name, screen in self.screens.items(): @@ -366,7 +366,7 @@ class ArScene(): output = f'ArEnvironment:\n{self._environment.name}\n' output += f'ArUcoScene:\n{self.aruco_scene}\n' - output += f'AOIScene:\n{self.aoi_scene}\n' + output += f'AOI3DScene:\n{self.aoi_3d_scene}\n' return output @@ -446,29 +446,29 @@ class ArScene(): if visual_hfov > 0: # Transform scene into camera referential - aoi_scene_camera_ref = self.aoi_scene.transform(tvec, rvec) + aoi_3d_scene_camera_ref = self.aoi_3d_scene.transform(tvec, rvec) # Get aoi inside vision cone 
field cone_vision_height_cm = 200 # cm cone_vision_radius_cm = numpy.tan(numpy.deg2rad(visual_hfov / 2)) * cone_vision_height_cm - _, aoi_outside = aoi_scene_camera_ref.vision_cone(cone_vision_radius_cm, cone_vision_height_cm) + _, aoi_outside = aoi_3d_scene_camera_ref.vision_cone(cone_vision_radius_cm, cone_vision_height_cm) # Keep only aoi inside vision cone field - aoi_scene_copy = self.aoi_scene.copy(exclude=aoi_outside.keys()) + aoi_3d_scene_copy = self.aoi_3d_scene.copy(exclude=aoi_outside.keys()) else: - aoi_scene_copy = self.aoi_scene.copy() + aoi_3d_scene_copy = self.aoi_3d_scene.copy() - aoi_scene_projection = aoi_scene_copy.project(tvec, rvec, self._environment.aruco_detector.optic_parameters.K) + aoi_2d_scene = aoi_3d_scene_copy.project(tvec, rvec, self._environment.aruco_detector.optic_parameters.K) # Warn user when the projected scene is empty - if len(aoi_scene_projection) == 0: + if len(aoi_2d_scene) == 0: raise SceneProjectionFailed('AOI projection is empty') - return aoi_scene_projection + return aoi_2d_scene def build_aruco_aoi_scene(self, detected_markers) -> AOI2DScene.AOI2DScene: """ @@ -504,7 +504,7 @@ class ArScene(): aruco_aoi_scene[aruco_aoi_name] = AOIFeatures.AreaOfInterest(aoi_corners) # Then each inner aoi is projected from the current aruco aoi - for inner_aoi_name, inner_aoi in self.aoi_scene.items(): + for inner_aoi_name, inner_aoi in self.aoi_3d_scene.items(): if aruco_aoi_name != inner_aoi_name: @@ -567,17 +567,17 @@ class ArScreen(): self.heatmap.init() @classmethod - def from_scene(self, aoi_scene, aoi_name, size, background, gaze_movement_identifier, scan_path: bool = False, aoi_scan_path: bool = False, heatmap: bool = False) -> ArScreenType: + def from_scene(self, aoi_3d_scene, aoi_name, size, background, gaze_movement_identifier, scan_path: bool = False, aoi_scan_path: bool = False, heatmap: bool = False) -> ArScreenType: - aoi_scene_projection = aoi_scene.orthogonal_projection.reframe(aoi_name, size) + aoi_2d_scene = 
aoi_3d_scene.orthogonal_projection.reframe(aoi_name, size) return ArScreen(aoi_name, \ size, \ background, \ - aoi_scene_projection, \ + aoi_2d_scene, \ gaze_movement_identifier, \ GazeFeatures.ScanPath() if scan_path else None, \ - GazeFeatures.AOIScanPath(aoi_scene_projection.keys()) if aoi_scan_path else None, \ + GazeFeatures.AOIScanPath(aoi_2d_scene.keys()) if aoi_scan_path else None, \ AOIFeatures.Heatmap(size) if heatmap else None \ ) @@ -667,4 +667,4 @@ class ArScreen(): # Update heatmap if self.heatmap: - self.heatmap.update(gaze_position.value, sigma=0.05) + self.heatmap.update(self.__gaze_position.value, sigma=0.05) diff --git a/src/argaze/utils/demo_environment/aoi_scene.obj b/src/argaze/utils/demo_environment/aoi_3d_scene.obj index 8922e78..8922e78 100644 --- a/src/argaze/utils/demo_environment/aoi_scene.obj +++ b/src/argaze/utils/demo_environment/aoi_3d_scene.obj diff --git a/src/argaze/utils/demo_environment/demo_ar_features_setup.json b/src/argaze/utils/demo_environment/demo_ar_features_setup.json index 2044e35..3c1d512 100644 --- a/src/argaze/utils/demo_environment/demo_ar_features_setup.json +++ b/src/argaze/utils/demo_environment/demo_ar_features_setup.json @@ -15,15 +15,15 @@ "scenes": { "AR Scene Demo" : { "aruco_scene": "aruco_scene.obj", - "aoi_scene": "aoi_scene.obj", + "aoi_3d_scene": "aoi_3d_scene.obj", "screens": { "GrayRectangle": { - "size": [320, 240], + "size": [640, 480], "background": "screen_background.jpg", "gaze_movement_identifier": { "type": "DispersionThresholdIdentification", "parameters": { - "deviation_max_threshold": 10, + "deviation_max_threshold": 25, "duration_min_threshold": 200 } } diff --git a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json index 02e21bc..56d5c72 100644 --- a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json +++ b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json @@ -2,7 +2,7 @@ "name": "AR 
Environment Demo", "scenes": { "AR Scene Demo" : { - "aoi_scene": "aoi_scene.obj", + "aoi_3d_scene": "aoi_3d_scene.obj", "screens": { "GrayRectangle": { "size": [1920, 1149], |