author     Théo de la Hogue  2024-09-10 17:03:14 +0200
committer  Théo de la Hogue  2024-09-10 17:03:14 +0200
commit     53d31973c4ad5f989c750fb41967b89aeff5814b (patch)
tree       3c02a96abfd5ce83f9d11d4ee938dcc7f687b463
parent     baec212c1527259dc3bd0e0ea961e2b8154d4825 (diff)
parent     616f874c393a7500e9032c6b119ae1c350893ad1 (diff)
Merge branch 'main' of ssh://git.recherche.enac.fr/interne-ihm-aero/eye-tracking/argaze
5 files changed, 20 insertions, 9 deletions
diff --git a/docs/use_cases/air_controller_gaze_study/context.md b/docs/use_cases/air_controller_gaze_study/context.md
index 5b13ca5..8bb4ef8 100644
--- a/docs/use_cases/air_controller_gaze_study/context.md
+++ b/docs/use_cases/air_controller_gaze_study/context.md
@@ -19,4 +19,4 @@ While *segment* entry is specific to the [TobiiProGlasses2.SegmentPlayback](../.
 }
 ```

-The [post_processing_pipeline.json](pipeline.md) file mentioned aboved is described in the next chapter.
+The [post_processing_pipeline.json](pipeline.md) file mentioned above is described in the next chapter.
diff --git a/docs/use_cases/air_controller_gaze_study/pipeline.md b/docs/use_cases/air_controller_gaze_study/pipeline.md
index 5b90769..69fdd2c 100644
--- a/docs/use_cases/air_controller_gaze_study/pipeline.md
+++ b/docs/use_cases/air_controller_gaze_study/pipeline.md
@@ -197,7 +197,7 @@ For this use case we need to detect ArUco markers to enable gaze mapping: **ArGa
 }
 ```

-All the files mentioned aboved are described below.
+All the files mentioned above are described below.

 The *ScanPathAnalysisRecorder* and *AOIScanPathAnalysisRecorder* observers objects are defined into the [observers.py](observers.md) file that is described in the next chapter.

diff --git a/docs/use_cases/pilot_gaze_monitoring/context.md b/docs/use_cases/pilot_gaze_monitoring/context.md
index 477276d..8839cb6 100644
--- a/docs/use_cases/pilot_gaze_monitoring/context.md
+++ b/docs/use_cases/pilot_gaze_monitoring/context.md
@@ -36,6 +36,6 @@ While *address*, *project*, *participant* and *configuration* entries are specif
 }
 ```

-The [live_processing_pipeline.json](pipeline.md) file mentioned aboved is described in the next chapter.
+The [live_processing_pipeline.json](pipeline.md) file mentioned above is described in the next chapter.

 The *IvyBus* observer object is defined into the [observers.py](observers.md) file that is described in a next chapter.
\ No newline at end of file
diff --git a/docs/use_cases/pilot_gaze_monitoring/pipeline.md b/docs/use_cases/pilot_gaze_monitoring/pipeline.md
index f7c555f..65fccc3 100644
--- a/docs/use_cases/pilot_gaze_monitoring/pipeline.md
+++ b/docs/use_cases/pilot_gaze_monitoring/pipeline.md
@@ -122,7 +122,7 @@ For this use case we need to detect ArUco markers to enable gaze mapping: **ArGa
 }
 ```

-All the files mentioned aboved are described below.
+All the files mentioned above are described below.

 The *ArUcoCameraLogger* observer object is defined into the [observers.py](observers.md) file that is described in the next chapter.

diff --git a/src/argaze/utils/contexts/TobiiProGlasses2.py b/src/argaze/utils/contexts/TobiiProGlasses2.py
index 081e225..21843a0 100644
--- a/src/argaze/utils/contexts/TobiiProGlasses2.py
+++ b/src/argaze/utils/contexts/TobiiProGlasses2.py
@@ -599,6 +599,12 @@ class LiveStream(ArFeatures.DataCaptureContext):
             logging.debug('> starting battery status thread...')
             self.__check_battery_thread.start()

+        # Init calibration status
+        self.__calibration_status = 'uncalibrated'
+
+        # Init recording status
+        self.__recording_status = 'stopped'
+
         return self

     @DataFeatures.PipelineStepExit
@@ -930,7 +936,7 @@ class LiveStream(ArFeatures.DataCaptureContext):
         """Handle whole Tobii glasses calibration process."""

         # Reset calibration
-        self.__calibration_status = None
+        self.__calibration_status = 'uncalibrated'
         self.__calibration_id = None

         # Calibration have to be done for a project and a participant
@@ -988,7 +994,7 @@ class LiveStream(ArFeatures.DataCaptureContext):
         """Create a new recording on the Tobii interface's SD Card."""

         # Reset recording
-        self.__recording_status = None
+        self.__recording_status = 'stopped'
         self.__recording_id = None

         # Recording have to be done for a participant
@@ -1032,6 +1038,11 @@ class LiveStream(ArFeatures.DataCaptureContext):
         self.__recording_status = self.__wait_for_recording_status(self.__recording_id, ['paused'])

     def get_recording_status(self) -> str:
+        """Get recording status.
+
+        Returns:
+            status: 'init', 'starting', 'recording', 'pausing', 'paused', 'stopping', 'stopped', 'done', 'stale' or 'failed' string
+        """

         return self.__recording_status

@@ -1076,7 +1087,7 @@ class LiveStream(ArFeatures.DataCaptureContext):
         # Display calibration status
         calibration_panel = ((int(width/2), 0), (width, 50))

-        if self.__calibration_status is None:
+        if self.__calibration_status == 'uncalibrated':

             cv2.rectangle(image, calibration_panel[0], calibration_panel[1], (0, 0, 0), -1)
             cv2.putText(image, 'Calibration required', (calibration_panel[0][0]+20, calibration_panel[0][1]+40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
@@ -1089,7 +1100,7 @@ class LiveStream(ArFeatures.DataCaptureContext):
         elif self.__calibration_status != 'calibrated':

             cv2.rectangle(image, calibration_panel[0], calibration_panel[1], (0, 0, 127), -1)
-            cv2.putText(image, f'Calibration {calibration_status}', (calibration_panel[0][0]+20, calibration_panel[0][1]+40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+            cv2.putText(image, f'Calibration {self.__calibration_status}', (calibration_panel[0][0]+20, calibration_panel[0][1]+40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)

         else:

@@ -1114,7 +1125,7 @@ class LiveStream(ArFeatures.DataCaptureContext):
         cv2.putText(image, f'Battery {self.__battery_level}%', (width - 220, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, text_color, 1, cv2.LINE_AA)

         # Display recording status
-        if self.__recording_status is None:
+        if self.__recording_status == 'stopped':

            circle_color = (0, 0, 0)
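As a quick illustration of the string-based statuses introduced by this commit, here is a minimal, hypothetical polling helper (not part of the change). It only relies on the `LiveStream.get_recording_status()` method and the status strings listed in the docstring added above; the function name `wait_until_recording`, the duck-typed `context` parameter, and the timeout/poll values are assumptions made for the example.

```python
import time


def wait_until_recording(context, timeout: float = 10.0) -> bool:
    """Poll a LiveStream-like context until its recording status is 'recording'.

    Hypothetical sketch: assumes get_recording_status() returns one of the
    strings documented in the commit ('init', 'starting', 'recording',
    'pausing', 'paused', 'stopping', 'stopped', 'done', 'stale', 'failed')
    rather than None before any recording exists.
    """
    deadline = time.monotonic() + timeout

    while time.monotonic() < deadline:

        status = context.get_recording_status()

        if status == 'recording':
            return True

        if status in ('failed', 'stale'):
            return False

        # 'stopped', 'init', 'starting', ...: keep polling
        time.sleep(0.5)

    return False
```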