#!/usr/bin/env python

from typing import TypeVar
from dataclasses import dataclass, field
import json
import os

from argaze.ArUcoMarkers import *
from argaze.AreaOfInterest import *

import numpy

ArSceneType = TypeVar('ArScene', bound="ArScene")
# Type definition for type annotation convenience

@dataclass
class ArScene():
	"""Define an Augmented Reality environnement thanks to ArUco markers and project it onto incoming frames."""

	name: str
	"""Project name."""

	aruco_dictionary: ArUcoMarkersDictionary.ArUcoMarkersDictionary = field(init=False, default_factory=ArUcoMarkersDictionary.ArUcoMarkersDictionary)
	"""ArUco markers dictionary."""

	aruco_marker_size: float = field(init=False)
	"""Size of ArUco markers in centimeter."""

	aruco_camera: ArUcoCamera.ArUcoCamera = field(init=False, default_factory=ArUcoCamera.ArUcoCamera)
	"""ArUco camera ..."""

	aruco_tracker: ArUcoTracker.ArUcoTracker = field(init=False, default_factory=ArUcoTracker.ArUcoTracker)
	"""ArUco tracker ..."""

	aruco_scene: ArUcoScene.ArUcoScene = field(init=False, default_factory=ArUcoScene.ArUcoScene)
	"""ArUco scene ..."""

	aoi_scene: AOI3DScene.AOI3DScene = field(init=False, default_factory=AOI3DScene.AOI3DScene)
	"""AOI 3D scene ..."""

	def __init__(self, **kwargs):
		"""Build the scene from keyword arguments (as parsed from a .json configuration file).

		Expected keys: aruco_dictionary, aruco_marker_size, aruco_camera,
		aruco_tracker, aruco_scene, aoi_scene. Any remaining key is stored
		as an instance attribute as-is.

		NOTE(review): relative 'places'/'aoi_scene' paths rely on
		_ArScene__current_directory, which is only set by from_json();
		constructing ArScene directly with relative paths would raise
		AttributeError — TODO confirm this is intended.
		"""

		self.aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary(kwargs.pop('aruco_dictionary'))
		self.aruco_marker_size = kwargs.pop('aruco_marker_size')
		self.aruco_camera = ArUcoCamera.ArUcoCamera(**kwargs.pop('aruco_camera'))
		self.aruco_tracker = ArUcoTracker.ArUcoTracker(self.aruco_dictionary, self.aruco_marker_size, self.aruco_camera, **kwargs.pop('aruco_tracker'))

		# Check aruco_scene places value type
		aruco_scene_places_value = kwargs['aruco_scene']['places']

		# str: relative path to .obj file
		if isinstance(aruco_scene_places_value, str):

			kwargs['aruco_scene']['places'] = os.path.join(self.__current_directory, aruco_scene_places_value)

		self.aruco_scene = ArUcoScene.ArUcoScene(self.aruco_dictionary, self.aruco_marker_size, **kwargs.pop('aruco_scene'))

		# Check aoi_scene value type
		aoi_scene_value = kwargs.pop('aoi_scene')

		# str: relative path to .obj file
		if isinstance(aoi_scene_value, str):

			obj_filepath = os.path.join(self.__current_directory, aoi_scene_value)
			self.aoi_scene = AOI3DScene.AOI3DScene.from_obj(obj_filepath)

		# dict: all AOI
		else:

			self.aoi_scene = AOI3DScene.AOI3DScene(aoi_scene_value)

		# Keep any extra configuration entries as plain attributes
		self.__dict__.update(kwargs)

	@classmethod
	def from_json(cls, json_filepath: str) -> ArSceneType:
		"""Load ArGaze project from .json file.

		* **json_filepath**: path to the .json configuration file.

		Returns a new ArScene built from the parsed configuration.
		"""

		with open(json_filepath) as configuration_file:

			# Store current directory (as a class attribute, read by __init__
			# through name mangling) to allow relative path loading
			cls.__current_directory = os.path.dirname(os.path.abspath(json_filepath))

			return cls(**json.load(configuration_file))

	def __str__(self) -> str:
		"""String display"""

		output = ''
		output += f'\nArUcoCamera: {self.aruco_camera}'
		output += f'\n\nArUcoTracker tracking data: {self.aruco_tracker.tracking_data}'
		output += f'\n\nArUcoScene: {self.aruco_scene}'
		output += f'\n\nAOIScene: {self.aoi_scene}'

		return output

	def project(self, frame, consistent_markers_number: int = 1, visual_hfov: float = 0):
		"""Project ArScene into frame.

		* **frame**: image where to track ArUco markers.
		* **consistent_markers_number**: minimal number of consistent markers to accept the pose estimation.
		* **visual_hfov**: optional visual horizontal field of view (in degree) used to clip out-of-view AOI; 0 disables clipping.

		Returns the AOI scene projection.

		Raises UserWarning when no marker is detected, when pose estimation
		fails or is not consistent enough, or when the AOI projection is empty.
		"""

		# Track markers with pose estimation and draw them
		self.aruco_tracker.track(frame)

		# When no marker is detected, no AOI scene projection can be done
		if len(self.aruco_tracker.tracked_markers) == 0:

			raise UserWarning('No marker detected')

		# Estimate set pose from tracked markers
		tvec, rvec, success, consistent_markers, unconsistencies = self.aruco_scene.estimate_pose(self.aruco_tracker.tracked_markers)

		# When pose estimation fails, ignore AOI scene projection
		if not success:

			# DEBUG: print unconsistencies distances or angles
			for key, value in unconsistencies.items():
				print(f'Unconsistent {key}: {value}')

			raise UserWarning('Pose estimation fails')

		# Consider pose estimation only if there is a given number of consistent markers at least
		# (previously this case silently fell through without returning a projection)
		if len(consistent_markers) < consistent_markers_number:

			raise UserWarning('Pose estimation is not consistent enough')

		# Clip AOI out of the visual horizontal field of view (optional)
		if visual_hfov > 0:

			# Transform scene into camera referential
			aoi_scene_camera_ref = self.aoi_scene.transform(tvec, rvec)

			# Get aoi inside vision cone field
			cone_vision_height_cm = 200 # cm
			cone_vision_radius_cm = numpy.tan(numpy.deg2rad(visual_hfov / 2)) * cone_vision_height_cm

			_, aoi_outside = aoi_scene_camera_ref.vision_cone(cone_vision_radius_cm, cone_vision_height_cm)

			# Keep only aoi inside vision cone field
			aoi_scene_copy = self.aoi_scene.copy(exclude=aoi_outside.keys())

		else:

			aoi_scene_copy = self.aoi_scene.copy()

		# DON'T APPLY CAMERA DISTORSION : it projects points which are far from the frame into it
		# This hack isn't realistic but as the gaze will mainly focus on centered AOI, where the distorsion is low, it is acceptable.
		aoi_scene_projection = aoi_scene_copy.project(tvec, rvec, self.aruco_camera.K)

		# Warn user when the merged scene is empty
		if len(aoi_scene_projection.keys()) == 0:

			raise UserWarning('AOI projection is empty')

		return aoi_scene_projection