From 913b72b3176dfd4a613f9fb9de1c985fb13b8ad8 Mon Sep 17 00:00:00 2001
From: Théo de la Hogue
Date: Tue, 19 Sep 2023 13:49:02 +0200
Subject: Working on aruco markers pipeline.

---
 .../optic_parameters_calibration.md              | 133 +++++++++++++++++++++
 .../aruco_markers_pipeline/aoi_description.md    |  62 ++++++++++
 .../aruco_camera_configuration_and_execution.md  |  41 ++++---
 .../aruco_markers_description.md                 |  64 +++++-----
 .../aruco_markers_pipeline/aruco_scene.md        | 128 ++++++++++++++++----
 .../aruco_markers_pipeline/introduction.md       |   7 +-
 .../optic_parameters_calibration.md              | 133 ---------------------
 7 files changed, 357 insertions(+), 211 deletions(-)
 create mode 100644 docs/user_guide/aruco_markers_pipeline/advanced_topics/optic_parameters_calibration.md
 create mode 100644 docs/user_guide/aruco_markers_pipeline/aoi_description.md
 delete mode 100644 docs/user_guide/aruco_markers_pipeline/optic_parameters_calibration.md

diff --git a/docs/user_guide/aruco_markers_pipeline/advanced_topics/optic_parameters_calibration.md b/docs/user_guide/aruco_markers_pipeline/advanced_topics/optic_parameters_calibration.md
new file mode 100644
index 0000000..455d95a
--- /dev/null
+++ b/docs/user_guide/aruco_markers_pipeline/advanced_topics/optic_parameters_calibration.md
@@ -0,0 +1,133 @@
+Calibrate optic parameters
+==========================
+
+A camera device has to be calibrated to compensate for its optical distortion.
+
+![Optic parameters calibration](../../img/optic_calibration.png)
+
+## Print calibration board
+
+The first step to calibrate a camera is to create an [ArUcoBoard](../../argaze.md/#argaze.ArUcoMarkers.ArUcoBoard) like in the code below:
+
+``` python
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoBoard
+
+# Create ArUco dictionary
+aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_APRILTAG_16h5')
+
+# Create an ArUco board of 7 columns and 5 rows with 5 cm squares and 3 cm ArUco markers inside
+aruco_board = ArUcoBoard.ArUcoBoard(7, 5, 5, 3, aruco_dictionary)
+
+# Export ArUco board with 300 dpi resolution
+aruco_board.save('./calibration_board.png', 300)
+```
+
+!!! note
+    There is an **A3_DICT_APRILTAG_16h5_3cm_35cmx25cm.pdf** file located in the *./src/argaze/ArUcoMarkers/utils/* folder, ready to be printed on an A3 paper sheet.
+
+Let's print the calibration board before going further.
+
+## Capture board pictures
+
+Then, the calibration process needs many different captures of the [ArUcoBoard](../../argaze.md/#argaze.ArUcoMarkers.ArUcoBoard) through the camera. Each capture is passed to an [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector.ArUcoDetector) instance to detect the board corners, which are stored as calibration data into an [ArUcoOpticCalibrator](../../argaze.md/#argaze.ArUcoMarkers.ArUcoOpticCalibrator) for the final calibration process.
+
+![Calibration step](../../img/optic_calibration_step.png)
+
+The sample of code below illustrates how to:
+
+* load all required ArGaze objects,
+* detect board corners in a Full HD camera video stream,
+* store detected corners as calibration data, then,
+* once enough captures are made, process them to find optic parameters and,
+* finally, save optic parameters into a JSON file.
+
+``` python
+from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoOpticCalibrator, ArUcoBoard, ArUcoDetector
+
+# Create ArUco dictionary
+aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_APRILTAG_16h5')
+
+# Create ArUco optic calibrator
+aruco_optic_calibrator = ArUcoOpticCalibrator.ArUcoOpticCalibrator()
+
+# Create an ArUco board of 7 columns and 5 rows with 5 cm squares and 3 cm ArUco markers inside
+# Note: this board is the one expected during further board tracking
+expected_aruco_board = ArUcoBoard.ArUcoBoard(7, 5, 5, 3, aruco_dictionary)
+
+# Create ArUco detector
+aruco_detector = ArUcoDetector.ArUcoDetector(dictionary=aruco_dictionary, marker_size=3)
+
+# Assuming that live Full HD (1920x1080) video stream is enabled
+...
+
+# Assuming there is a way to escape the while loop
+...
+
+while video_stream.is_alive():
+
+    # Capture image from video stream
+    image = video_stream.read()
+
+    # Detect all board corners in image
+    aruco_detector.detect_board(image, expected_aruco_board, expected_aruco_board.markers_number)
+
+    # If all board corners are detected
+    if aruco_detector.board_corners_number == expected_aruco_board.corners_number:
+
+        # Draw board corners to show that board tracking succeeded
+        aruco_detector.draw_board(image)
+
+        # Append tracked board data for further calibration processing
+        aruco_optic_calibrator.store_calibration_data(aruco_detector.board_corners, aruco_detector.board_corners_identifier)
+
+# Start optic calibration processing for Full HD image resolution
+print('Calibrating optic...')
+optic_parameters = aruco_optic_calibrator.calibrate(expected_aruco_board, dimensions=(1920, 1080))
+
+if optic_parameters:
+
+    # Export optic parameters
+    optic_parameters.to_json('./optic_parameters.json')
+
+    print('Calibration succeeded: optic_parameters.json file exported.')
+
+else:
+
+    print('Calibration failed.')
+```
+
+Below is an *optic_parameters.json* file example:
+
+```json
+{
+    "rms": 0.6688921504088245,
+    "dimensions": [
+        1920,
+        1080
+    ],
+    "K": [
+        [
+            1135.6524381415752,
+            0.0,
+            956.0685325355497
+        ],
+        [
+            0.0,
+            1135.9272506869524,
+            560.059099810324
+        ],
+        [
+            0.0,
+            0.0,
+            1.0
+        ]
+    ],
+    "D": [
+        0.01655492265003404,
+        0.1985524264972037,
+        0.002129965902489484,
+        -0.0019528582922179365,
+        -0.5792910353639452
+    ]
+}
+```
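+
+To check the exported file independently of ArGaze, the parameters can be reloaded and applied to a capture with OpenCV. The snippet below is only an illustrative sketch: *capture.png* is a hypothetical image taken with the same camera at the same Full HD resolution.
+
+``` python
+import json
+
+import cv2
+import numpy
+
+# Reload the exported optic parameters
+with open('./optic_parameters.json') as file:
+    optic_parameters = json.load(file)
+
+K = numpy.array(optic_parameters['K'])
+D = numpy.array(optic_parameters['D'])
+
+# Undistort a capture to visually check the calibration result
+image = cv2.imread('./capture.png')
+undistorted = cv2.undistort(image, K, D)
+
+cv2.imwrite('./undistorted_capture.png', undistorted)
+```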
diff --git a/docs/user_guide/aruco_markers_pipeline/aoi_description.md b/docs/user_guide/aruco_markers_pipeline/aoi_description.md
new file mode 100644
index 0000000..8c57cd1
--- /dev/null
+++ b/docs/user_guide/aruco_markers_pipeline/aoi_description.md
@@ -0,0 +1,62 @@
+Describe AOI scene
+==================
+
+Once [ArUco markers are placed into a scene](aruco_markers_description.md), areas of interest need to be described in the same 3D referential.
+
+In the example scene, each screen is considered as an area of interest, as well as the blue triangle area inside the top screen.
+
+![AOI description](../../img/aoi_description.png)
+
+All AOIs need to be described from the same origin as the markers, in a [right-handed 3D axis](https://robotacademy.net.au/lesson/right-handed-3d-coordinate-frame/) where:
+
+* +X is pointing to the right,
+* +Y is pointing to the top,
+* +Z is pointing backward.
+
+!!! warning
+    All AOI spatial values must be given in **centimeters**.
+
+### Edit OBJ file description
+
+The OBJ file format can be exported from most 3D editors.
+
+``` obj
+o YellowSquare
+v 6.200003 -7.275252 25.246159
+v 31.200003 -7.275252 25.246159
+v 6.200003 1.275252 1.753843
+v 31.200003 1.275252 1.753843
+s off
+f 1 2 4 3
+o GrayRectangle
+v 2.500000 2.500000 -0.500000
+v 37.500000 2.500000 -0.500000
+v 2.500000 27.500000 -0.500000
+v 37.500000 27.500000 -0.500000
+s off
+f 5 6 8 7
+o BlueTriangle
+v 12.500002 7.500000 -0.500000
+v 27.500002 7.500000 -0.500000
+v 20.000002 22.500000 -0.500000
+s off
+f 9 10 11
+```
+
+Here are the common OBJ file features needed to describe AOIs:
+
+* Object lines (starting with the *o* key) indicate the AOI name.
+* Vertex lines (starting with the *v* key) indicate the AOI vertices.
+* Face lines (starting with the *f* key) link vertices together.
+
+### Edit JSON file description
+
+The JSON file format also allows describing AOI vertices.
+
+``` json
+{
+    "YellowSquare": [[6.2, -7.275252, 25.246159], [31.2, -7.275252, 25.246159], [6.2, 1.275252, 1.753843], [31.2, 1.275252, 1.753843]],
+    "GrayRectangle": [[2.5, 2.5, -0.5], [37.5, 2.5, -0.5], [2.5, 27.5, -0.5], [37.5, 27.5, -0.5]],
+    "BlueTriangle": [[12.5, 7.5, -0.5], [27.5, 7.5, -0.5], [20, 22.5, -0.5]]
+}
+```
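+
+Before using such a description in a pipeline, it can help to check it quickly. The snippet below is a minimal standard-library sketch, assuming the JSON description above was saved as a hypothetical *aoi_description.json* file.
+
+``` python
+import json
+
+# Load the AOI description
+with open('./aoi_description.json') as file:
+    aoi_scene = json.load(file)
+
+# Print each AOI name, its vertex count and its centroid (in centimeters)
+for name, vertices in aoi_scene.items():
+
+    centroid = [round(sum(axis) / len(vertices), 3) for axis in zip(*vertices)]
+
+    print(f'{name}: {len(vertices)} vertices, centroid at {centroid}')
+```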
diff --git a/docs/user_guide/aruco_markers_pipeline/aruco_camera_configuration_and_execution.md b/docs/user_guide/aruco_markers_pipeline/aruco_camera_configuration_and_execution.md
index 7b30fd1..824e466 100644
--- a/docs/user_guide/aruco_markers_pipeline/aruco_camera_configuration_and_execution.md
+++ b/docs/user_guide/aruco_markers_pipeline/aruco_camera_configuration_and_execution.md
@@ -1,11 +1,11 @@
 Configure and execute ArUcoCamera
 =================================
 
-Once [ArUco markers are placed into a scene](aruco_scene_creation.md) and [the camera optic have been calibrated](optic_parameters_calibration.md), everything is ready to setup an ArUco marker pipeline thanks to [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) class.
+Once [ArUco markers are placed into a scene](aruco_markers_description.md) and [areas of interest are described](aoi_description.md), everything is ready to set up an ArUco markers pipeline thanks to the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) class.
 
 As it inherits from [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame), the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) class benefits from all the services described in [gaze analysis pipeline section](./user_guide/gaze_analysis_pipeline/introduction.md).
 
-![ArUco camera frame](../../img/aruco_camera_frame.png)
+![ArUco camera markers detection](../../img/aruco_camera_markers_detection.png)
 
 ## Load JSON configuration file
 
@@ -19,11 +19,11 @@ Here is a simple JSON [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCa
     "size": [1920, 1080],
     "aruco_detector": {
         "dictionary": "DICT_APRILTAG_16h5",
-        "marker_size": 5,
-        "optic_parameters": "optic_parameters.json",
+        "marker_size": 5
     },
     "image_parameters": {
         "background_weight": 1,
+        ...
         "draw_detected_markers": {
             "color": [0, 255, 0],
             "draw_axes": {
@@ -55,36 +55,39 @@ The size of the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera)
 
 ### ArUco Detector
 
-The first [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) pipeline step is to detect ArUco markers inside input image.
+The first [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) pipeline step is to detect ArUco markers inside the input image and estimate their poses.
 
-The [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector) is in charge to detect ... 
+The [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector) is in charge of detecting all markers of a specific dictionary with a given size in centimeters.
 
 !!! warning
     JSON *aruco_detector* entry is mandatory.
 
-### Image parameters (inherited from ArFrame)
+### Image parameters - *inherited from [ArFrame](../../argaze.md/#argaze.ArFeatures.ArFrame)*
 
-...
+The usual [ArFrame visualisation parameters](./user_guide/gaze_analysis_pipeline/visualisation.md) apply, plus one additional *draw_detected_markers* entry.
 
 ## Pipeline execution
 
-Timestamped gaze positions have to be passed one by one to [ArFrame.look](../../argaze.md/#argaze.ArFeatures.ArFrame.look) method to execute the whole intanciated pipeline.
+Pass each camera image to the [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method to execute the whole instantiated pipeline.
 
 ```python
-# Assuming that live Full HD (1920x1080) video stream is enabled
+# Assuming that a Full HD (1920x1080) video stream or file is opened
 ...
 
 # Assuming there is a way to escape the while loop
-...
-
-	while video_stream.is_alive():
+while ...:
 
-		# Capture image from video stream
-		image = video_stream.read()
+    # Capture image from video stream or file
+    image = video_capture.read()
 
-		# Detect ArUco markers in image
-		aruco_camera.watch(image)
+    # Detect ArUco markers and more...
+    aruco_camera.watch(image)
 
-		# Do something with ArUcoCamera frame image
-		...
+    # Display the ArUcoCamera frame image to check that ArUco markers are well detected and the scene is well projected
+    ... aruco_camera.image()
 ```
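+
+The snippet below is a more complete sketch of this loop based on OpenCV video capture. It assumes that the *aruco_camera* object has already been loaded from the JSON configuration file; the device index, window name and escape key are illustrative choices.
+
+```python
+import cv2
+
+# Open the default camera device (device index 0 is an assumption)
+video_capture = cv2.VideoCapture(0)
+
+while video_capture.isOpened():
+
+    # Capture image from video stream or file
+    success, image = video_capture.read()
+
+    if not success:
+        break
+
+    # Detect ArUco markers and more...
+    aruco_camera.watch(image)
+
+    # Display the ArUcoCamera frame image
+    cv2.imshow('ArUcoCamera', aruco_camera.image())
+
+    # Exit when the Esc key is pressed
+    if cv2.waitKey(1) == 27:
+        break
+
+video_capture.release()
+cv2.destroyAllWindows()
+```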
+
+!!! warning
+    The ArUco markers pose estimation algorithm can lead to errors due to geometric ambiguities, as explained in [this article](https://ieeexplore.ieee.org/document/1717461). To discard such ambiguous cases, markers should **not be parallel to the camera plane**.
+
+At this point, the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) only detects ArUco markers as no scene description is provided.
\ No newline at end of file
diff --git a/docs/user_guide/aruco_markers_pipeline/aruco_markers_description.md b/docs/user_guide/aruco_markers_pipeline/aruco_markers_description.md
index 061fb19..1c13013 100644
--- a/docs/user_guide/aruco_markers_pipeline/aruco_markers_description.md
+++ b/docs/user_guide/aruco_markers_pipeline/aruco_markers_description.md
@@ -1,7 +1,11 @@
-Setup ArUco markers into a scene
-================================
+Set up ArUco markers
+====================
 
-First of all, ArUco markers needs to be printed and placed into the scene.
+First of all, ArUco markers need to be printed and placed into the scene.
+
+Here is an example scene where markers surround a multi-screen workspace with a triangle area inside one of them.
+
+![Scene](../../img/scene.png)
 
 ## Print ArUco markers from a ArUco dictionary
 
@@ -40,7 +44,7 @@ Let's print some of them before to go further.
 
 Once [ArUcoMarkers](../../argaze.md/#argaze.ArUcoMarkers.ArUcoMarker) pictures are placed into a scene it is possible to describe their 3D places into a file.
 
-![ArUco scene](../../img/aruco_scene.png)
+![ArUco markers description](../../img/aruco_markers_description.png)
 
 Where ever the origin point is, all markers places need to be described in a [right-handed 3D axis](https://robotacademy.net.au/lesson/right-handed-3d-coordinate-frame/) where:
 
@@ -57,37 +61,29 @@ OBJ file format could be exported from most 3D editors.
 
 ``` obj
 o DICT_APRILTAG_16h5#0_Marker
-v -5.000000 14.960000 0.000000
-v 0.000000 14.960000 0.000000
-v -5.000000 19.959999 0.000000
-v 0.000000 19.959999 0.000000
+v 0.000000 0.000000 0.000000
+v 5.000000 0.000000 0.000000
+v 0.000000 5.000000 0.000000
+v 5.000000 5.000000 0.000000
 vn 0.0000 0.0000 1.0000
 s off
 f 1//1 2//1 4//1 3//1
 o DICT_APRILTAG_16h5#1_Marker
-v 25.000000 14.960000 0.000000
-v 30.000000 14.960000 0.000000
-v 25.000000 19.959999 0.000000
-v 30.000000 19.959999 0.000000
-vn 0.0000 0.0000 1.0000
+v -1.767767 23.000002 3.767767
+v 1.767767 23.000002 0.232233
+v -1.767767 28.000002 3.767767
+v 1.767767 28.000002 0.232233
+vn 0.7071 0.0000 0.7071
 s off
 f 5//2 6//2 8//2 7//2
 o DICT_APRILTAG_16h5#2_Marker
-v -5.000000 -5.000000 0.000000
-v 0.000000 -5.000000 0.000000
-v -5.000000 0.000000 0.000000
-v 0.000000 0.000000 0.000000
-vn 0.0000 0.0000 1.0000
+v 33.000000 -1.767767 4.767767
+v 38.000000 -1.767767 4.767767
+v 33.000000 1.767767 1.232233
+v 38.000000 1.767767 1.232233
+vn 0.0000 0.7071 0.7071
 s off
 f 9//3 10//3 12//3 11//3
-o DICT_APRILTAG_16h5#3_Marker
-v 25.000000 -5.000000 0.000000
-v 30.000000 -5.000000 0.000000
-v 25.000000 0.000000 0.000000
-v 30.000000 0.000000 0.000000
-vn 0.0000 0.0000 1.0000
-s off
-f 13//4 14//4 16//4 15//4
 ```
 
 Here are common OBJ file features needed to describe ArUco markers places:
@@ -110,20 +106,16 @@ JSON file format allows to describe markers places using translation and euler a
     "marker_size": 5,
     "places": {
         "0": {
-            "translation": [-2.5, 17.5, 0],
-            "rotation": [0.0, 0.0, 0.0]
+            "translation": [2.5, 2.5, 0],
+            "rotation": [0, 0, 0]
         },
         "1": {
-            "translation": [27.5, 17.5, 0],
-            "rotation": [0.0, 0.0, 0.0]
+            "translation": [0, 25.5, 2],
+            "rotation": [0, 45, 0]
         },
         "2": {
-            "translation": [-2.5, -2.5, 0],
-            "rotation": [0.0, 0.0, 0.0]
-        },
-        "3": {
-            "translation": [27.5, -2.5, 0],
-            "rotation": [0.0, 0.0, 0.0]
+            "translation": [35.5, 0, 3],
+            "rotation": [-45, 0, 0]
         }
     }
 }
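+
+To double-check that the JSON places above match the OBJ vertices, the marker corners can be recomputed from translation and rotation. This is only a sketch: the Euler angle order is an assumption that does not matter here because each example place rotates around a single axis.
+
+``` python
+import numpy
+from scipy.spatial.transform import Rotation
+
+# Places copied from the JSON example above
+places = {
+    '0': {'translation': [2.5, 2.5, 0], 'rotation': [0, 0, 0]},
+    '1': {'translation': [0, 25.5, 2], 'rotation': [0, 45, 0]},
+    '2': {'translation': [35.5, 0, 3], 'rotation': [-45, 0, 0]}
+}
+marker_size = 5
+
+# Corners of a marker centered on its own origin, in the same order as the OBJ vertices
+half = marker_size / 2
+local_corners = numpy.array([[-half, -half, 0], [half, -half, 0], [-half, half, 0], [half, half, 0]])
+
+for identifier, place in places.items():
+
+    # Euler angles assumed to be in degrees, applied in XYZ order
+    rotation = Rotation.from_euler('xyz', place['rotation'], degrees=True).as_matrix()
+
+    # Rotate then translate each local corner
+    corners = local_corners @ rotation.T + numpy.array(place['translation'])
+
+    print(f'marker {identifier} corners:\n{corners}')
+```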
diff --git a/docs/user_guide/aruco_markers_pipeline/aruco_scene.md b/docs/user_guide/aruco_markers_pipeline/aruco_scene.md
index 2f37fa7..b47fefb 100644
--- a/docs/user_guide/aruco_markers_pipeline/aruco_scene.md
+++ b/docs/user_guide/aruco_markers_pipeline/aruco_scene.md
@@ -1,9 +1,7 @@
 Add an ArUcoScene
 =================
 
-An [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) class defines ... 
-
-Besides, the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) class projects [ArUcoScenes](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene)'s layers into its own layers thanks to ArUco markers pose estimations made by its [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector).
+An [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) class defines a space where the [areas of interest description](aoi_description.md) is mapped onto the [ArUco markers description](aruco_markers_description.md).
 
 ![ArUco scene](../../img/aruco_scene.png)
 
@@ -11,7 +9,7 @@ An [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) instance can contains multiples [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene).
 
-Here is an extract from the JSON ArUcoCamera configuration file with a sample where one scene is added:
+Here is an extract from the JSON [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) configuration file with a sample where one scene is added:
 
 ```json
 {
@@ -20,16 +18,25 @@ Here is an extract from the JSON ArUcoCamera configuration file with a sample wh
     "scenes": {
         "MyScene" : {
-            "aruco_markers_group": "aruco_description.json"
-        },
-        "layers": {
-            "MyLayer": {
-                "aoi_scene": "aoi_description.obj"
-            }
+            "aruco_markers_group": {
+                "dictionary": "DICT_APRILTAG_16h5",
+                "marker_size": 5,
+                "places": {
+                    "0": {
+                        "translation": [2.5, 2.5, 0],
+                        "rotation": [0, 0, 0]
+                    },
+                    "1": {
+                        "translation": [0, 25.5, 2],
+                        "rotation": [0, 45, 0]
+                    },
+                    "2": {
+                        "translation": [35.5, 0, 3],
+                        "rotation": [-45, 0, 0]
+                    }
+                }
+            }
         }
-    },
-    "layers": {
-        "MyLayer": {}
     }
     ...
 }
@@ -37,20 +44,101 @@ Here is an extract from the JSON ArUcoCamera configuration file with a sample wh
 
 Now, let's understand the meaning of each JSON entry.
 
-### "MyLayer"
+### "MyScene"
 
 The name of the [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene). Basically useful for visualisation purpose.
 
 ### ArUco markers group
 
-...
+The 3D places of the ArUco markers in the scene, as defined in the [ArUco markers description chapter](aruco_markers_description.md). Thanks to this description, it is possible to estimate the pose of the [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) in the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) frame.
+
+!!! note
+
+    [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) pose estimation is done when calling the [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method.
+
+## Add ArLayer to ArUcoScene to load AOI
+
+The [ArLayer](../../argaze.md/#argaze.ArFeatures.ArLayer) class allows loading an areas of interest description.
+
+An [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) instance can contain multiple [ArLayers](../../argaze.md/#argaze.ArFeatures.ArLayer).
+
+Here is the previous extract where one layer is added to the [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene):
+
+```json
+{
+    "name": "My FullHD camera",
+    "size": [1920, 1080],
+    ...
+    "scenes": {
+        "MyScene" : {
+            "aruco_markers_group": {
+                ...
+            },
+            "layers": {
+                "MyLayer": {
+                    "aoi_scene": {
+                        "YellowSquare": [[6.2, -7.275252, 25.246159], [31.2, -7.275252, 25.246159], [6.2, 1.275252, 1.753843], [31.2, 1.275252, 1.753843]],
+                        "GrayRectangle": [[2.5, 2.5, -0.5], [37.5, 2.5, -0.5], [2.5, 27.5, -0.5], [37.5, 27.5, -0.5]],
+                        "BlueTriangle": [[12.5, 7.5, -0.5], [27.5, 7.5, -0.5], [20, 22.5, -0.5]]
+                    }
+                }
+            }
+        }
+    }
+    ...
+}
+```
+
+Now, let's understand the meaning of each JSON entry.
+
+### "MyLayer"
+
+The name of the [ArLayer](../../argaze.md/#argaze.ArFeatures.ArLayer). Basically useful for visualisation purposes.
+
+### AOI Scene
+
+The [AOIScene](../../argaze.md/#argaze.AreaOfInterest.AOIFeatures.AOIScene) defines a set of 3D [AreaOfInterest](../../argaze.md/#argaze.AreaOfInterest.AOIFeatures.AreaOfInterest) registered by name.
+
+## Add ArLayer to ArUcoCamera to project AOI
+
+Here is the previous extract where one layer is added to the [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera):
+
+```json
+{
+    "name": "My FullHD camera",
+    "size": [1920, 1080],
+    ...
+    "scenes": {
+        "MyScene" : {
+            "aruco_markers_group": {
+                ...
+            },
+            "layers": {
+                "MyLayer": {
+                    "aoi_scene": {
+                        ...
+                    }
+                }
+            }
+        }
+    },
+    "layers": {
+        "MyLayer": {}
+    }
+    ...
+}
+```
+
+Now, let's understand the meaning of each JSON entry.
+
+### "MyLayer"
 
-### Layers
+The name of the [ArLayer](../../argaze.md/#argaze.ArFeatures.ArLayer). Basically useful for visualisation purposes.
 
-...
+!!! warning
 
-### AOI scene
+    An [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) layer is projected into an [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) layer, **provided they have the same name**.
 
-The [AOIScene](../../argaze.md/#argaze.AreaOfInterest.AOIFeatures.AOIScene) defines a set of 2D [AreaOfInterest](../../argaze.md/#argaze.AreaOfInterest.AOIFeatures.AreaOfInterest) registered by name.
+!!! note
 
-![AOI Scene](../../img/ar_layer_aoi_scene.png)
+    [ArUcoScene](../../argaze.md/#argaze.ArUcoMarkers.ArUcoScene) layers are projected into their dedicated [ArUcoCamera](../../argaze.md/#argaze.ArUcoMarkers.ArUcoCamera) layers when calling the [ArUcoCamera.watch](../../argaze.md/#argaze.ArFeatures.ArCamera.watch) method.
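+
+To catch mismatched layer names early, the snippet below is a minimal standard-library sketch that compares scene layer names with camera layer names. It assumes the complete configuration (without the elided parts) was saved as a hypothetical *configuration.json* file.
+
+``` python
+import json
+
+with open('./configuration.json') as file:
+    configuration = json.load(file)
+
+camera_layers = set(configuration.get('layers', {}))
+
+for scene_name, scene in configuration.get('scenes', {}).items():
+
+    for layer_name in scene.get('layers', {}):
+
+        if layer_name not in camera_layers:
+
+            print(f'{scene_name} layer {layer_name} will not be projected: no camera layer with the same name.')
+```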
diff --git a/docs/user_guide/aruco_markers_pipeline/introduction.md b/docs/user_guide/aruco_markers_pipeline/introduction.md
index 77b93a2..a10ca28 100644
--- a/docs/user_guide/aruco_markers_pipeline/introduction.md
+++ b/docs/user_guide/aruco_markers_pipeline/introduction.md
@@ -16,12 +16,13 @@ First, let's look at the schema below: it gives an overview of the main notions
 To build your own ArUco markers pipeline, you need to know:
 
 * [How to setup ArUco markers into a scene](aruco_markers_description.md),
-* [How to calibrate optic parameters](optic_parameters_calibration.md),
+* [How to describe areas of interest in the same scene](aoi_description.md),
 * [How to deal with an ArUcoCamera instance](aruco_camera_configuration_and_execution.md),
-* [How to add ArLayer instance](ar_layer.md),
-* [How to visualize ArUcoCamera and ArLayers](visualisation.md)
+* [How to add an ArUcoScene instance](aruco_scene.md),
+* [How to visualize ArUcoCamera and ArUcoScenes](visualisation.md)
 
 More advanced features are also explained like:
 
 * [How to script ArUco markers pipeline](advanced_topics/scripting.md)
+* [How to calibrate optic parameters](advanced_topics/optic_parameters_calibration.md)
 * [How to improve ArUco markers detection](advanced_topics/aruco_detector_configuration.md)
diff --git a/docs/user_guide/aruco_markers_pipeline/optic_parameters_calibration.md b/docs/user_guide/aruco_markers_pipeline/optic_parameters_calibration.md
deleted file mode 100644
index 455d95a..0000000
--- a/docs/user_guide/aruco_markers_pipeline/optic_parameters_calibration.md
+++ /dev/null
@@ -1,133 +0,0 @@
-Calibrate optic parameters
-==========================
-
-A camera device have to be calibrated to compensate its optical distorsion.
-
-![Optic parameters calibration](../../img/optic_calibration.png)
-
-## Print calibration board
-
-The first step to calibrate a camera is to create an [ArUcoBoard](../../argaze.md/#argaze.ArUcoMarkers.ArUcoBoard) like in the code below:
-
-``` python
-from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoBoard
-
-# Create ArUco dictionary
-aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_APRILTAG_16h5')
-
-# Create an ArUco board of 7 columns and 5 rows with 5 cm squares with 3cm ArUco markers inside
-aruco_board = ArUcoBoard.ArUcoBoard(7, 5, 5, 3, aruco_dictionary)
-
-# Export ArUco board with 300 dpi resolution
-aruco_board.save('./calibration_board.png', 300)
-```
-
-!!!
note - There is **A3_DICT_APRILTAG_16h5_3cm_35cmx25cm.pdf** file located in *./src/argaze/ArUcoMarkers/utils/* folder ready to be printed on A3 paper sheet. - -Let's print the calibration board before to go further. - -## Capture board pictures - -Then, the calibration process needs to make many different captures of an [ArUcoBoard](../../argaze.md/#argaze.ArUcoMarkers.ArUcoBoard) through the camera and then, pass them to an [ArUcoDetector](../../argaze.md/#argaze.ArUcoMarkers.ArUcoDetector.ArUcoDetector) instance to detect board corners and store them as calibration data into an [ArUcoOpticCalibrator](../../argaze.md/#argaze.ArUcoMarkers.ArUcoOpticCalibrator) for final calibration process. - -![Calibration step](../../img/optic_calibration_step.png) - -The sample of code below illustrates how to: - -* load all required ArGaze objects, -* detect board corners into a Full HD camera video stream, -* store detected corners as calibration data then, -* once enough captures are made, process them to find optic parameters and, -* finally, save optic parameters into a JSON file. - -``` python -from argaze.ArUcoMarkers import ArUcoMarkersDictionary, ArUcoOpticCalibrator, ArUcoBoard, ArUcoDetector - -# Create ArUco dictionary -aruco_dictionary = ArUcoMarkersDictionary.ArUcoMarkersDictionary('DICT_APRILTAG_16h5') - -# Create ArUco optic calibrator -aruco_optic_calibrator = ArUcoOpticCalibrator.ArUcoOpticCalibrator() - -# Create ArUco board of 7 columns and 5 rows with 5 cm squares with 3cm aruco markers inside -# Note: This board is the one expected during further board tracking -expected_aruco_board = ArUcoBoard.ArUcoBoard(7, 5, 5, 3, aruco_dictionary) - -# Create ArUco detector -aruco_detector = ArUcoDetector.ArUcoDetector(dictionary=aruco_dictionary, marker_size=3) - -# Assuming that live Full HD (1920x1080) video stream is enabled -... - -# Assuming there is a way to escape the while loop -... - - while video_stream.is_alive(): - - # Capture image from video stream - image = video_stream.read() - - # Detect all board corners in image - aruco_detector.detect_board(image, expected_aruco_board, expected_aruco_board.markers_number) - - # If all board corners are detected - if aruco_detector.board_corners_number == expected_aruco_board.corners_number: - - # Draw board corners to show that board tracking succeeded - aruco_detector.draw_board(image) - - # Append tracked board data for further calibration processing - aruco_optic_calibrator.store_calibration_data(aruco_detector.board_corners, aruco_detector.board_corners_identifier) - -# Start optic calibration processing for Full HD image resolution -print('Calibrating optic...') -optic_parameters = aruco_optic_calibrator.calibrate(aruco_board, dimensions=(1920, 1080)) - -if optic_parameters: - - # Export optic parameters - optic_parameters.to_json('./optic_parameters.json') - - print('Calibration succeeded: optic_parameters.json file exported.') - -else: - - print('Calibration failed.') -``` - -Below, an optic_parameters JSON file example: - -```json -{ - "rms": 0.6688921504088245, - "dimensions": [ - 1920, - 1080 - ], - "K": [ - [ - 1135.6524381415752, - 0.0, - 956.0685325355497 - ], - [ - 0.0, - 1135.9272506869524, - 560.059099810324 - ], - [ - 0.0, - 0.0, - 1.0 - ] - ], - "D": [ - 0.01655492265003404, - 0.1985524264972037, - 0.002129965902489484, - -0.0019528582922179365, - -0.5792910353639452 - ] -} -``` -- cgit v1.1