author     Théo de la Hogue    2023-06-28 21:17:58 +0200
committer  Théo de la Hogue    2023-06-28 21:17:58 +0200
commit     dcb7739179acbe7844984b0a65dc46032355e0c8 (patch)
tree       5949c261711d287f48917e896763355d5125c356
parent     efbca53f612327642a3f4c2bf72ebfdd4da5d5a6 (diff)
Loading scan path and AOI scan path analyzers from JSON file.
-rw-r--r--  src/argaze/ArFeatures.py                                          144
-rw-r--r--  src/argaze/GazeAnalysis/Entropy.py                                 40
-rw-r--r--  src/argaze/GazeAnalysis/ExploitExploreRatio.py                     28
-rw-r--r--  src/argaze/GazeAnalysis/KCoefficient.py                            21
-rw-r--r--  src/argaze/GazeAnalysis/LempelZivComplexity.py                     12
-rw-r--r--  src/argaze/GazeAnalysis/NGram.py                                   20
-rw-r--r--  src/argaze/GazeAnalysis/NearestNeighborIndex.py                    19
-rw-r--r--  src/argaze/GazeAnalysis/TransitionMatrix.py                        22
-rw-r--r--  src/argaze/GazeFeatures.py                                          2
-rw-r--r--  src/argaze/utils/demo_environment/demo_ar_features_setup.json      7
-rw-r--r--  src/argaze/utils/demo_environment/demo_gaze_features_setup.json   23
-rw-r--r--  src/argaze/utils/demo_gaze_features_run.py                        513
12 files changed, 422 insertions, 429 deletions
diff --git a/src/argaze/ArFeatures.py b/src/argaze/ArFeatures.py
index 86feb48..7f1618c 100644
--- a/src/argaze/ArFeatures.py
+++ b/src/argaze/ArFeatures.py
@@ -12,6 +12,7 @@ from dataclasses import dataclass, field
import json
import os
import importlib
+from inspect import getmembers
from argaze import DataStructures, GazeFeatures
from argaze.ArUcoMarkers import *
@@ -30,6 +31,15 @@ ArSceneType = TypeVar('ArScene', bound="ArScene")
ArScreenType = TypeVar('ArScreen', bound="ArScreen")
# Type definition for type annotation convenience
+class EnvironmentJSONLoadingFailed(Exception):
+ """
+ Exception raised by ArEnvironment when JSON loading fails.
+ """
+
+ def __init__(self, message):
+
+ super().__init__(message)
+
@dataclass
class ArEnvironment():
"""
@@ -164,7 +174,7 @@ class ArEnvironment():
new_screen_background = cv2.imread(os.path.join(working_directory, new_screen_background_value))
new_screen_background = cv2.resize(new_screen_background, dsize=(new_screen_size[0], new_screen_size[1]), interpolation=cv2.INTER_CUBIC)
- except:
+ except KeyError:
new_screen_background = numpy.zeros((new_screen_size[1], new_screen_size[0], 3)).astype(numpy.uint8)
@@ -173,18 +183,105 @@ class ArEnvironment():
gaze_movement_identifier_value = screen_data.pop('gaze_movement_identifier')
- gaze_movement_identifier_type = gaze_movement_identifier_value['type']
- gaze_movement_identifier_parameters = gaze_movement_identifier_value['parameters']
+ gaze_movement_identifier_type, gaze_movement_identifier_parameters = gaze_movement_identifier_value.popitem()
gaze_movement_identifier_module = importlib.import_module(f'argaze.GazeAnalysis.{gaze_movement_identifier_type}')
gaze_movement_identifier = gaze_movement_identifier_module.GazeMovementIdentifier(**gaze_movement_identifier_parameters)
- except:
+ except KeyError:
gaze_movement_identifier = None
+ # Load scan path analyzers
+ new_scan_path_analyzers = {}
+
+ try:
+
+ new_scan_path_analyzers_value = screen_data.pop('scan_path_analyzers')
+
+ for scan_path_analyzer_type, scan_path_analyzer_parameters in new_scan_path_analyzers_value.items():
+
+ scan_path_analyzer_module = importlib.import_module(f'argaze.GazeAnalysis.{scan_path_analyzer_type}')
+
+ # Check scan path analyzer parameters type
+ members = getmembers(scan_path_analyzer_module.ScanPathAnalyzer)
+
+ for member in members:
+
+ if '__annotations__' in member:
+
+ for parameter, parameter_type in member[1].items():
+
+ # Check if parameter is part of argaze.GazeAnalysis module
+ parameter_module_path = parameter_type.__module__.split('.')
+
+ if len(parameter_module_path) == 3:
+
+ if parameter_module_path[0] == 'argaze' and parameter_module_path[1] == 'GazeAnalysis':
+
+ # Try to get an existing analyzer instance to pass as a parameter
+ try:
+
+ scan_path_analyzer_parameters[parameter] = new_scan_path_analyzers[parameter_module_path[2]]
+
+ except KeyError:
+
+ raise EnvironmentJSONLoadingFailed(f'{scan_path_analyzer_type} scan path analyzer loading fails because {parameter_module_path[2]} scan path analyzer is missing.')
+
+ scan_path_analyzer = scan_path_analyzer_module.ScanPathAnalyzer(**scan_path_analyzer_parameters)
+
+ new_scan_path_analyzers[scan_path_analyzer_type] = scan_path_analyzer
+
+ except KeyError:
+
+ pass
+
+ # Load AOI scan path analyzers
+ new_aoi_scan_path_analyzers = {}
+
+ try:
+
+ new_aoi_scan_path_analyzers_value = screen_data.pop('aoi_scan_path_analyzers')
+
+ for aoi_scan_path_analyzer_type, aoi_scan_path_analyzer_parameters in new_aoi_scan_path_analyzers_value.items():
+
+ aoi_scan_path_analyzer_module = importlib.import_module(f'argaze.GazeAnalysis.{aoi_scan_path_analyzer_type}')
+
+ # Check aoi scan path analyzer parameters type
+ members = getmembers(aoi_scan_path_analyzer_module.AOIScanPathAnalyzer)
+
+ for member in members:
+
+ if '__annotations__' in member:
+
+ for parameter, parameter_type in member[1].items():
+
+ # Check if parameter is part of argaze.GazeAnalysis module
+ parameter_module_path = parameter_type.__module__.split('.')
+
+ if len(parameter_module_path) == 3:
+
+ if parameter_module_path[0] == 'argaze' and parameter_module_path[1] == 'GazeAnalysis':
+
+ # Try to get an existing analyzer instance to pass as a parameter
+ try:
+
+ aoi_scan_path_analyzer_parameters[parameter] = new_aoi_scan_path_analyzers[parameter_module_path[2]]
+
+ except KeyError:
+
+ raise EnvironmentJSONLoadingFailed(f'{aoi_scan_path_analyzer_type} aoi scan path analyzer loading fails because {parameter_module_path[2]} aoi scan path analyzer is missing.')
+
+ aoi_scan_path_analyzer = aoi_scan_path_analyzer_module.AOIScanPathAnalyzer(**aoi_scan_path_analyzer_parameters)
+
+ new_aoi_scan_path_analyzers[aoi_scan_path_analyzer_type] = aoi_scan_path_analyzer
+
+ except KeyError:
+
+ pass
+
# Append new screen
- new_screens[screen_name] = ArScreen.from_scene(new_aoi_3d_scene, screen_name, new_screen_size, new_screen_background, gaze_movement_identifier, **screen_data)
+ new_screens[screen_name] = ArScreen.from_scene(new_aoi_3d_scene, screen_name, new_screen_size, new_screen_background, gaze_movement_identifier, new_scan_path_analyzers, new_aoi_scan_path_analyzers, **screen_data)
# Append new scene
new_scenes[scene_name] = ArScene(new_aruco_scene, new_aoi_3d_scene, new_screens, **scene_data)
@@ -543,6 +640,7 @@ class ArScreen():
size: screen dimension in pixel.
background: image to draw behind
aoi_2d_scene: AOI 2D scene description ... : see [orthogonal_projection][argaze.ArFeatures.ArScene.orthogonal_projection] and [reframe][argaze.AreaOfInterest.AOI2DScene.reframe] functions.
+ ...
"""
name: str
@@ -551,7 +649,9 @@ class ArScreen():
aoi_2d_scene: AOI2DScene.AOI2DScene = field(default_factory=AOI2DScene.AOI2DScene)
gaze_movement_identifier: GazeFeatures.GazeMovementIdentifier = field(default_factory=GazeFeatures.GazeMovementIdentifier)
scan_path: GazeFeatures.ScanPath = field(default_factory=GazeFeatures.ScanPath)
+ scan_path_analyzers: dict = field(default_factory=dict)
aoi_scan_path: GazeFeatures.AOIScanPath = field(default_factory=GazeFeatures.AOIScanPath)
+ aoi_scan_path_analyzers: dict = field(default_factory=dict)
heatmap: AOIFeatures.Heatmap = field(default_factory=AOIFeatures.Heatmap)
def __post_init__(self):
@@ -567,7 +667,7 @@ class ArScreen():
self.heatmap.init()
@classmethod
- def from_scene(self, aoi_3d_scene, aoi_name, size, background, gaze_movement_identifier, scan_path: bool = False, aoi_scan_path: bool = False, heatmap: bool = False) -> ArScreenType:
+ def from_scene(self, aoi_3d_scene, aoi_name, size, background, gaze_movement_identifier, scan_path_analyzers: list = [], aoi_scan_path_analyzers: list = [], heatmap: bool = False) -> ArScreenType:
aoi_2d_scene = aoi_3d_scene.orthogonal_projection.reframe(aoi_name, size)
@@ -576,8 +676,10 @@ class ArScreen():
background, \
aoi_2d_scene, \
gaze_movement_identifier, \
- GazeFeatures.ScanPath() if scan_path else None, \
- GazeFeatures.AOIScanPath(aoi_2d_scene.keys()) if aoi_scan_path else None, \
+ GazeFeatures.ScanPath() if len(scan_path_analyzers) > 0 else None, \
+ scan_path_analyzers, \
+ GazeFeatures.AOIScanPath(aoi_2d_scene.keys()) if len(aoi_scan_path_analyzers) > 0 else None, \
+ aoi_scan_path_analyzers, \
AOIFeatures.Heatmap(size) if heatmap else None \
)
@@ -621,15 +723,13 @@ class ArScreen():
# Identify gaze movement
gaze_movement = self.gaze_movement_identifier.identify(timestamp, self.__gaze_position)
- # QUESTION: How to notify new gaze movement?
-
if GazeFeatures.is_fixation(gaze_movement):
# Does the fixation match an AOI?
look_at = self.name
for name, aoi in self.aoi_2d_scene.items():
- _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, self.gaze_movement_identifier.deviation_max_threshold)
+ _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, gaze_movement.deviation_max)
if circle_ratio > 0.25:
@@ -639,28 +739,38 @@ class ArScreen():
break
# Append fixation to scan path
- if self.scan_path:
+ if self.scan_path is not None:
self.scan_path.append_fixation(timestamp, gaze_movement)
# Append fixation to aoi scan path
- if self.aoi_scan_path:
+ if self.aoi_scan_path is not None:
self.__aoi_scan_step = self.aoi_scan_path.append_fixation(timestamp, gaze_movement, look_at)
- # QUESTION: How to notify new step?
+ # Analyze aoi scan path
+ if self.__aoi_scan_step and len(self.aoi_scan_path) > 1:
+
+ for aoi_scan_path_analyzer_type, aoi_scan_path_analyzer in self.aoi_scan_path_analyzers.items():
+
+ aoi_scan_path_analyzer.analyze(self.aoi_scan_path)
elif GazeFeatures.is_saccade(gaze_movement):
# Append saccade to scan path
- if self.scan_path:
+ if self.scan_path is not None:
self.__scan_step = self.scan_path.append_saccade(timestamp, gaze_movement)
- # QUESTION: How to notify new step?
+ # Analyze scan path
+ if self.__scan_step and len(self.scan_path) > 1:
+
+ for scan_path_analyzer_type, scan_path_analyzer in self.scan_path_analyzers.items():
+
+ scan_path_analyzer.analyze(self.scan_path)
# Append saccade to aoi scan path
- if self.aoi_scan_path:
+ if self.aoi_scan_path is not None:
self.aoi_scan_path.append_saccade(timestamp, gaze_movement)
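
The loading logic added above follows one pattern for both analyzer kinds: each key under "scan_path_analyzers" or "aoi_scan_path_analyzers" names a module inside argaze.GazeAnalysis, the module is imported with importlib, and the analyzer dataclass's __annotations__ are inspected with inspect.getmembers so that any parameter typed as another argaze.GazeAnalysis analyzer is satisfied from the instances already loaded. A minimal sketch of that pattern, with an illustrative load_analyzers helper that is not part of the ArGaze API:

import importlib
from inspect import getmembers

def load_analyzers(config: dict) -> dict:
    """Instantiate analyzers from a JSON-like dict, injecting parameters
    typed as other argaze.GazeAnalysis analyzers from the ones already
    loaded. Illustrative helper, not part of ArGaze itself."""
    loaded = {}
    for analyzer_type, parameters in config.items():
        module = importlib.import_module(f'argaze.GazeAnalysis.{analyzer_type}')
        for name, value in getmembers(module.ScanPathAnalyzer):
            if name != '__annotations__':
                continue
            for parameter, parameter_type in value.items():
                path = parameter_type.__module__.split('.')
                if len(path) == 3 and path[:2] == ['argaze', 'GazeAnalysis']:
                    # Inject the previously loaded analyzer instance
                    try:
                        parameters[parameter] = loaded[path[2]]
                    except KeyError:
                        raise RuntimeError(f'{analyzer_type} requires {path[2]} to be loaded first')
        loaded[analyzer_type] = module.ScanPathAnalyzer(**parameters)
    return loaded

Declaration order therefore matters: in the demo setup below, TransitionMatrix is listed before Entropy because Entropy declares a TransitionMatrix.AOIScanPathAnalyzer parameter.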
diff --git a/src/argaze/GazeAnalysis/Entropy.py b/src/argaze/GazeAnalysis/Entropy.py
index 861c73a..56f78d9 100644
--- a/src/argaze/GazeAnalysis/Entropy.py
+++ b/src/argaze/GazeAnalysis/Entropy.py
@@ -14,21 +14,29 @@ __copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
__license__ = "BSD"
from typing import Tuple
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from argaze import GazeFeatures
+from argaze.GazeAnalysis import TransitionMatrix
import pandas
import numpy
@dataclass
class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
+ """
+ Parameters:
+ transition_matrix_analyzer: analyzer from which the transition_matrix_probabilities result is read.
+ """
+
+ transition_matrix_analyzer: TransitionMatrix.AOIScanPathAnalyzer = field(default_factory=TransitionMatrix.AOIScanPathAnalyzer)
def __post_init__(self):
- pass
+ self.__stationary_entropy = -1
+ self.__transition_entropy = -1
- def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType, transition_matrix_probabilities: pandas.DataFrame) -> Tuple[float, float]:
+ def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType):
"""Analyze aoi scan path.
Returns:
@@ -44,23 +52,33 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
stationary_probalities = {aoi: count/scan_fixations_count for aoi, count in aoi_fixations_count.items()}
# Stationary entropy
- stationary_entropy = 0
+ self.__stationary_entropy = 0
for aoi, p in stationary_probalities.items():
- stationary_entropy += p * numpy.log(p + 1e-9)
+ self.__stationary_entropy += p * numpy.log(p + 1e-9)
- stationary_entropy *= -1
+ self.__stationary_entropy *= -1
# Transition entropy
- transition_entropy = 0
+ self.__transition_entropy = 0
- destination_p_log_sum = transition_matrix_probabilities.apply(lambda row: row.apply(lambda p: p * numpy.log(p + 1e-9)).sum(), axis=1)
+ destination_p_log_sum = self.transition_matrix_analyzer.transition_matrix_probabilities.apply(lambda row: row.apply(lambda p: p * numpy.log(p + 1e-9)).sum(), axis=1)
for aoi, s in destination_p_log_sum.items():
- transition_entropy += s * stationary_probalities[aoi]
+ self.__transition_entropy += s * stationary_probalities[aoi]
+
+ self.__transition_entropy *= -1
+
+ @property
+ def stationary_entropy(self) -> float:
+
+ return self.__stationary_entropy
- transition_entropy *= -1
+ @property
+ def transition_entropy(self) -> float:
- return stationary_entropy, transition_entropy
+ return self.__transition_entropy
+
+ \ No newline at end of file
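
The analyzer now keeps both results and exposes them as properties. Stationary entropy is -sum(p_i * log(p_i)) over per-AOI fixation probabilities, and transition entropy weights each departure row's sum(p_ij * log(p_ij)) by the stationary probability of that AOI. A plain-dict sketch of the same arithmetic (the analyzer itself works on pandas structures):

import numpy

def entropies(stationary_p: dict, transition_p: dict) -> tuple:
    """Stationary entropy H_s = -sum_i(p_i * log(p_i)) and transition
    entropy H_t = -sum_i(p_i * sum_j(p_ij * log(p_ij))), matching the
    analyze() method above."""
    h_s = -sum(p * numpy.log(p + 1e-9) for p in stationary_p.values())
    h_t = -sum(stationary_p[aoi] * sum(p * numpy.log(p + 1e-9) for p in row.values())
               for aoi, row in transition_p.items())
    return h_s, h_t

# Two AOIs fixated equally often, with fully random transitions
print(entropies({'A': 0.5, 'B': 0.5},
                {'A': {'A': 0.5, 'B': 0.5}, 'B': {'A': 0.5, 'B': 0.5}}))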
diff --git a/src/argaze/GazeAnalysis/ExploitExploreRatio.py b/src/argaze/GazeAnalysis/ExploitExploreRatio.py
index 7b6a2a6..a1a2e6b 100644
--- a/src/argaze/GazeAnalysis/ExploitExploreRatio.py
+++ b/src/argaze/GazeAnalysis/ExploitExploreRatio.py
@@ -18,7 +18,7 @@ __credits__ = []
__copyright__ = "Copyright 2023, Ecole Nationale de l'Aviation Civile (ENAC)"
__license__ = "BSD"
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from argaze import GazeFeatures
@@ -26,17 +26,19 @@ import numpy
@dataclass
class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
+ """
+ Parameters:
+ short_fixation_duration_threshold: time below which a fixation is considered short and thus exploratory.
+ """
+
+ short_fixation_duration_threshold: float = field(default=0.)
def __post_init__(self):
- pass
-
- def analyze(self, scan_path: GazeFeatures.ScanPathType, short_fixation_duration_threshold: float = 0.) -> float:
- """Analyze scan path.
+ self.__exploit_explore_ratio = 0.
- Parameters:
- short_fixation_duration_threshold: time below which a fixation is considered to be short and so as exploratory.
- """
+ def analyze(self, scan_path: GazeFeatures.ScanPathType):
+ """Analyze scan path."""
assert(len(scan_path) > 1)
@@ -46,7 +48,7 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
for scan_step in scan_path:
- if scan_step.first_fixation.duration > short_fixation_duration_threshold:
+ if scan_step.first_fixation.duration > self.short_fixation_duration_threshold:
long_fixations_durations.append(scan_step.first_fixation.duration)
@@ -62,4 +64,10 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
assert(saccades_duration + short_fixations_duration > 0)
- return long_fixations_duration / (saccades_duration + short_fixations_duration)
+ self.__exploit_explore_ratio = long_fixations_duration / (saccades_duration + short_fixations_duration)
+
+ @property
+ def exploit_explore_ratio(self) -> float:
+
+ return self.__exploit_explore_ratio
+ \ No newline at end of file
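
With the threshold now a dataclass field, the ratio itself is unchanged: time spent in fixations longer than short_fixation_duration_threshold (exploitation) divided by time spent saccading or in short fixations (exploration). A standalone sketch, using hypothetical (fixation_duration, saccade_duration) pairs in place of ScanPath steps:

def exploit_explore_ratio(steps, short_fixation_duration_threshold=0.):
    """steps: sequence of (fixation_duration, saccade_duration) pairs, a
    hypothetical stand-in for the scan path steps iterated above."""
    long_fixations = sum(f for f, _ in steps if f > short_fixation_duration_threshold)
    short_fixations = sum(f for f, _ in steps if f <= short_fixation_duration_threshold)
    saccades = sum(s for _, s in steps)
    assert saccades + short_fixations > 0
    return long_fixations / (saccades + short_fixations)

print(exploit_explore_ratio([(300, 40), (150, 60)], short_fixation_duration_threshold=200))  # 1.2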
diff --git a/src/argaze/GazeAnalysis/KCoefficient.py b/src/argaze/GazeAnalysis/KCoefficient.py
index 5768d1b..d384a05 100644
--- a/src/argaze/GazeAnalysis/KCoefficient.py
+++ b/src/argaze/GazeAnalysis/KCoefficient.py
@@ -26,9 +26,9 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
def __post_init__(self):
- pass
+ self.__K = 0
- def analyze(self, scan_path: GazeFeatures.ScanPathType) -> float:
+ def analyze(self, scan_path: GazeFeatures.ScanPathType):
"""Analyze scan path."""
assert(len(scan_path) > 1)
@@ -55,9 +55,13 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
Ks.append(((scan_step.duration - duration_mean) / duration_std) - ((scan_step.last_saccade.amplitude - amplitude_mean) / amplitude_std))
- K = numpy.array(Ks).mean()
+ self.__K = numpy.array(Ks).mean()
- return K
+ @property
+ def K(self) -> float:
+
+ return self.__K
+
@dataclass
class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
@@ -71,7 +75,7 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
def __post_init__(self):
- pass
+ self.__K = 0
def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType) -> float:
"""Analyze aoi scan path."""
@@ -100,6 +104,9 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
Ks.append(((aoi_scan_step.duration - duration_mean) / duration_std) - ((aoi_scan_step.last_saccade.amplitude - amplitude_mean) / amplitude_std))
- K = numpy.array(Ks).mean()
+ self.__K = numpy.array(Ks).mean()
+
+ @property
+ def K(self):
- return K \ No newline at end of file
+ return self.__K \ No newline at end of file
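
Both analyzers compute the same statistic, the coefficient K: for each step, the z-scored duration minus the z-scored amplitude of its saccade, averaged over steps; K > 0 reads as focal attention, K < 0 as ambient attention. A compact numpy sketch of that formula:

import numpy

def coefficient_k(durations, amplitudes) -> float:
    """K = mean of z(duration_i) - z(amplitude_i) over steps, as in the
    two analyze() methods above."""
    d = numpy.asarray(durations, dtype=float)
    a = numpy.asarray(amplitudes, dtype=float)
    return (((d - d.mean()) / d.std()) - ((a - a.mean()) / a.std())).mean()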
diff --git a/src/argaze/GazeAnalysis/LempelZivComplexity.py b/src/argaze/GazeAnalysis/LempelZivComplexity.py
index c449414..64a309f 100644
--- a/src/argaze/GazeAnalysis/LempelZivComplexity.py
+++ b/src/argaze/GazeAnalysis/LempelZivComplexity.py
@@ -24,11 +24,17 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
def __post_init__(self):
- pass
+ self.__lempel_ziv_complexity = 0
- def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType) -> int:
+ def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType):
"""Analyze aoi scan path."""
assert(len(aoi_scan_path) > 1)
- return lempel_ziv_complexity(str(aoi_scan_path))
+ self.__lempel_ziv_complexity = lempel_ziv_complexity(str(aoi_scan_path))
+
+ @property
+ def lempel_ziv_complexity(self) -> int:
+
+ return self.__lempel_ziv_complexity
+ \ No newline at end of file
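
The analyzer delegates to the lempel_ziv_complexity() function imported from the package of the same name, applied to the AOI scan path's letter sequence. For reference, a naive sketch of the quantity it computes, the number of distinct phrases met in a single left-to-right parsing of the sequence:

def naive_lempel_ziv_complexity(sequence: str) -> int:
    """Count distinct phrases while scanning the sequence once; a naive
    O(n^2) sketch, not the optimized library implementation."""
    phrases, current = set(), ''
    for symbol in sequence:
        current += symbol
        if current not in phrases:
            phrases.add(current)
            current = ''
    # An unfinished trailing phrase still counts as one
    return len(phrases) + (1 if current else 0)

print(naive_lempel_ziv_complexity('ABABAB'))  # repetitive paths score low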
diff --git a/src/argaze/GazeAnalysis/NGram.py b/src/argaze/GazeAnalysis/NGram.py
index f3f0cca..1ae8a07 100644
--- a/src/argaze/GazeAnalysis/NGram.py
+++ b/src/argaze/GazeAnalysis/NGram.py
@@ -20,19 +20,31 @@ from argaze import GazeFeatures
@dataclass
class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
+ """
+ Parameters:
+ n: length of grams to search.
+ """
+
+ n: int = field(default=2)
def __post_init__(self):
- pass
+ self.__ngrams_count = {}
- def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType, n: int) -> dict:
+ def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType):
"""Analyze aoi scan path."""
assert(len(aoi_scan_path) > 1)
sequence = str(aoi_scan_path)
- ngrams = zip(*[sequence[i:] for i in range(n)])
+ ngrams = zip(*[sequence[i:] for i in range(self.n)])
ngrams = [ngram for ngram in ngrams]
- return {tuple([aoi_scan_path.get_letter_aoi(l) for l in ngram]) : ngrams.count(ngram) for ngram in ngrams}
+ self.__ngrams_count = {tuple([aoi_scan_path.get_letter_aoi(l) for l in ngram]) : ngrams.count(ngram) for ngram in ngrams}
+
+ @property
+ def ngrams_count(self) -> dict:
+
+ return self.__ngrams_count
+ \ No newline at end of file
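
The n-gram extraction zips n shifted copies of the letter sequence, counts each tuple, and maps letters back to AOI names. The counting core, isolated (collections.Counter gives the same tallies as the ngrams.count() comprehension above):

from collections import Counter

def ngrams_count(sequence: str, n: int = 2) -> dict:
    """Tally the n-grams of a letter sequence the way analyze() does."""
    ngrams = list(zip(*[sequence[i:] for i in range(n)]))
    return dict(Counter(ngrams))

print(ngrams_count('ABCABC', 3))
# {('A', 'B', 'C'): 2, ('B', 'C', 'A'): 1, ('C', 'A', 'B'): 1}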
diff --git a/src/argaze/GazeAnalysis/NearestNeighborIndex.py b/src/argaze/GazeAnalysis/NearestNeighborIndex.py
index b9654de..cf29169 100644
--- a/src/argaze/GazeAnalysis/NearestNeighborIndex.py
+++ b/src/argaze/GazeAnalysis/NearestNeighborIndex.py
@@ -24,13 +24,18 @@ from scipy.spatial.distance import cdist
@dataclass
class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
"""Implementation of Nearest Neighbor Index (NNI) as described in Di Nocera et al., 2006
+
+ Parameters:
+ size: screen dimension.
"""
+ size: tuple[float, float]
+
def __post_init__(self):
- pass
+ self.__nearest_neighbor_index = 0
- def analyze(self, scan_path: GazeFeatures.ScanPathType, screen_dimension: tuple[float, float]) -> float:
+ def analyze(self, scan_path: GazeFeatures.ScanPathType):
"""Analyze scan path."""
assert(len(scan_path) > 1)
@@ -50,6 +55,12 @@ class ScanPathAnalyzer(GazeFeatures.ScanPathAnalyzer):
dNN = numpy.sum(minimums / len(fixations_focus))
# Mean random distance
- dran = 0.5 * numpy.sqrt(screen_dimension[0] * screen_dimension[1] / len(fixations_focus))
+ dran = 0.5 * numpy.sqrt(self.size[0] * self.size[1] / len(fixations_focus))
+
+ self.__nearest_neighbor_index = dNN / dran
- return dNN / dran
+ @property
+ def nearest_neighbor_index(self) -> float:
+
+ return self.__nearest_neighbor_index
+
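
The screen dimension moves from an analyze() argument to a size field so it can come from JSON. The index itself compares the observed mean distance to the nearest other fixation against the distance expected for the same number of points spread uniformly over the screen, dran = 0.5 * sqrt(width * height / N). A self-contained sketch:

import numpy
from scipy.spatial.distance import cdist

def nearest_neighbor_index(fixations_focus, size) -> float:
    """NNI = dNN / dran as in Di Nocera et al., 2006: values near 1 suggest
    a random spread, values below 1 a clustered one."""
    points = numpy.asarray(fixations_focus, dtype=float)
    distances = cdist(points, points)
    numpy.fill_diagonal(distances, numpy.inf)  # ignore distance to self
    dNN = distances.min(axis=1).mean()
    dran = 0.5 * numpy.sqrt(size[0] * size[1] / len(points))
    return dNN / dran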
diff --git a/src/argaze/GazeAnalysis/TransitionMatrix.py b/src/argaze/GazeAnalysis/TransitionMatrix.py
index 52bfa3a..6d7451d 100644
--- a/src/argaze/GazeAnalysis/TransitionMatrix.py
+++ b/src/argaze/GazeAnalysis/TransitionMatrix.py
@@ -26,14 +26,16 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
def __post_init__(self):
- pass
+ self.__transition_matrix_probabilities = pandas.DataFrame()
+ self.__transition_matrix_density = 0.
- def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType) -> Tuple[pandas.DataFrame, float]:
+ def analyze(self, aoi_scan_path: GazeFeatures.AOIScanPathType):
"""Analyze aoi scan path.
Returns:
transition matrix probabilities
- transition matrix density"""
+ transition matrix density
+ """
assert(len(aoi_scan_path) > 1)
@@ -42,9 +44,17 @@ class AOIScanPathAnalyzer(GazeFeatures.AOIScanPathAnalyzer):
# Editing transition matrix probabilities
# Note: when no transition starts from an AOI, destination probabilities are equal to 1/S where S is the number of AOIs
- transition_matrix_probabilities = aoi_scan_path.transition_matrix.apply(lambda row: row.apply(lambda p: p / row_sum[row.name] if row_sum[row.name] > 0 else 1 / row_sum.size), axis=1)
+ self.__transition_matrix_probabilities = aoi_scan_path.transition_matrix.apply(lambda row: row.apply(lambda p: p / row_sum[row.name] if row_sum[row.name] > 0 else 1 / row_sum.size), axis=1)
# Calculate matrix density
- transition_matrix_density = (transition_matrix_probabilities != 0.).astype(int).sum(axis=1).sum() / transition_matrix_probabilities.size
+ self.__transition_matrix_density = (self.__transition_matrix_probabilities != 0.).astype(int).sum(axis=1).sum() / self.__transition_matrix_probabilities.size
- return transition_matrix_probabilities, transition_matrix_density
+ @property
+ def transition_matrix_probabilities(self) -> pandas.DataFrame:
+
+ return self.__transition_matrix_probabilities
+
+ @property
+ def transition_matrix_density(self) -> float:
+
+ return self.__transition_matrix_density \ No newline at end of file
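
analyze() row-normalizes the AOI transition count matrix: each row becomes a probability distribution over destinations, rows with no outgoing transition fall back to a uniform 1/S (see the note in the hunk above), and density is the share of non-zero cells. The same normalization, isolated:

import pandas

def transition_probabilities(counts: pandas.DataFrame):
    """Row-normalize a transition count matrix and compute its density,
    mirroring analyze() above."""
    row_sum = counts.sum(axis=1)
    probabilities = counts.apply(
        lambda row: row / row_sum[row.name] if row_sum[row.name] > 0
                    else pandas.Series(1 / len(counts), index=row.index),
        axis=1)
    density = (probabilities != 0.).to_numpy().sum() / probabilities.size
    return probabilities, density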
diff --git a/src/argaze/GazeFeatures.py b/src/argaze/GazeFeatures.py
index 14d4f62..620903a 100644
--- a/src/argaze/GazeFeatures.py
+++ b/src/argaze/GazeFeatures.py
@@ -784,7 +784,7 @@ class AOIScanPath(list):
class AOIScanPathAnalyzer():
"""Abstract class to define what should provide a aoi scan path analyzer."""
- def analyze(self, aoi_scan_path: AOIScanPathType) -> Any:
+ def analyze(self, aoi_scan_path: AOIScanPathType):
"""Analyze aoi scan path."""
raise NotImplementedError('analyze() method not implemented')
diff --git a/src/argaze/utils/demo_environment/demo_ar_features_setup.json b/src/argaze/utils/demo_environment/demo_ar_features_setup.json
index 3c1d512..05b0d0b 100644
--- a/src/argaze/utils/demo_environment/demo_ar_features_setup.json
+++ b/src/argaze/utils/demo_environment/demo_ar_features_setup.json
@@ -21,9 +21,8 @@
"size": [640, 480],
"background": "screen_background.jpg",
"gaze_movement_identifier": {
- "type": "DispersionThresholdIdentification",
- "parameters": {
- "deviation_max_threshold": 25,
+ "DispersionThresholdIdentification": {
+ "deviation_max_threshold": 50,
"duration_min_threshold": 200
}
}
@@ -52,7 +51,7 @@
}
},
"aruco_aoi": {
- "Screen_Plan": {
+ "GrayRectangle": {
"upper_left_corner": {
"marker_identifier": 0,
"marker_corner_index": 2
diff --git a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
index 56d5c72..a00f37b 100644
--- a/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
+++ b/src/argaze/utils/demo_environment/demo_gaze_features_setup.json
@@ -8,14 +8,29 @@
"size": [1920, 1149],
"background": "screen_background.jpg",
"gaze_movement_identifier": {
- "type": "DispersionThresholdIdentification",
- "parameters": {
+ "DispersionThresholdIdentification": {
"deviation_max_threshold": 50,
"duration_min_threshold": 200
}
},
- "scan_path": true,
- "aoi_scan_path": true,
+ "scan_path_analyzers": {
+ "KCoefficient": {},
+ "NearestNeighborIndex": {
+ "size": [1920, 1149]
+ },
+ "ExploitExploreRatio": {
+ "short_fixation_duration_threshold": 0
+ }
+ },
+ "aoi_scan_path_analyzers": {
+ "TransitionMatrix": {},
+ "KCoefficient": {},
+ "LempelZivComplexity": {},
+ "NGram": {
+ "n": 3
+ },
+ "Entropy":{}
+ },
"heatmap": true
}
}
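
With analyzers declared in the setup file, client code no longer instantiates them or keeps their return values: they are reached by type name on the screen and expose results through properties, as the rewritten demo script below does. A condensed usage sketch, assuming it is run from the repository root:

from argaze import ArFeatures

ar_environment = ArFeatures.ArEnvironment.from_json(
    'src/argaze/utils/demo_environment/demo_gaze_features_setup.json')
ar_screen = ar_environment.scenes["AR Scene Demo"].screens["GrayRectangle"]

# ... feed gaze positions with ar_screen.look(timestamp, gaze_position) ...

# Results are read from analyzer properties instead of analyze() return values
print(ar_screen.scan_path_analyzers["KCoefficient"].K)
print(ar_screen.aoi_scan_path_analyzers["NGram"].ngrams_count)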
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
index 2a9474f..03467e9 100644
--- a/src/argaze/utils/demo_gaze_features_run.py
+++ b/src/argaze/utils/demo_gaze_features_run.py
@@ -30,420 +30,257 @@ def main():
# Manage arguments
parser = argparse.ArgumentParser(description=main.__doc__.split('-')[0])
- parser.add_argument('-dev', '--deviation_max_threshold', metavar='DEVIATION_MAX_THRESHOLD', type=int, default=50, help='maximal distance for fixation identification in pixel')
- parser.add_argument('-vel', '--velocity_max_threshold', metavar='VELOCITY_MAX_THRESHOLD', type=int, default=1, help='maximal velocity for fixation identification in pixel/millisecond')
- parser.add_argument('-dmin', '--duration_min_threshold', metavar='DURATION_MIN_THRESHOLD', type=int, default=200, help='minimal duration for fixation identification in millisecond')
+ parser.add_argument('environment', metavar='ENVIRONMENT', type=str, help='ar environment filepath')
args = parser.parse_args()
- # Load AR enviroment
- demo_environment_filepath = os.path.join(current_directory, 'demo_environment/setup.json')
- demo_environment = ArFeatures.ArEnvironment.from_json(demo_environment_filepath)
+ # Load AR environment
+ ar_environment = ArFeatures.ArEnvironment.from_json(args.environment)
- # Access to main AR scene
- demo_scene = demo_environment.scenes["AR Scene Demo"]
-
- # Load aoi scene image
- aoi_scene_filepath = os.path.join(current_directory, 'demo_environment/aoi_scene.jpg')
- aoi_scene_image = cv2.imread(aoi_scene_filepath)
-
- window_size = (aoi_scene_image.shape[1], aoi_scene_image.shape[0])
-
- # Project AOI scene onto Full HD screen
- aoi_scene_projection = demo_scene.aoi_scene.orthogonal_projection * window_size
+ # Select AR screen
+ ar_screen = ar_environment.scenes["AR Scene Demo"].screens["GrayRectangle"]
# Create a window to display AR environment
- window_name = "AOI Scene"
- cv2.namedWindow(window_name, cv2.WINDOW_AUTOSIZE)
-
- # Init gaze processing
- gaze_position = GazeFeatures.GazePosition()
+ cv2.namedWindow(ar_screen.name, cv2.WINDOW_AUTOSIZE)
- heatmap = AOIFeatures.Heatmap(window_size)
- heatmap.init()
-
- enable_heatmap = False
+ # Heatmap buffer display option
clear_heatmap = False
enable_heatmap_buffer = False
- gaze_movement_identifier = {
- 'I-DT': DispersionThresholdIdentification.GazeMovementIdentifier(args.deviation_max_threshold, args.duration_min_threshold),
- 'I-VT': VelocityThresholdIdentification.GazeMovementIdentifier(args.velocity_max_threshold, args.duration_min_threshold)
- }
- fixation_color = {
- 'I-DT': (0, 255, 255),
- 'I-VT': (255, 0, 255)
- }
- current_fixation_color = (255, 255, 0)
- identification_mode = 'I-DT'
-
- raw_scan_path = GazeFeatures.ScanPath()
- aoi_scan_path = GazeFeatures.AOIScanPath(aoi_scene_projection.keys())
-
- tm = TransitionMatrix.AOIScanPathAnalyzer()
- tm_probabilities = pandas.DataFrame()
- tm_density = 0.
- enable_tm_analysis = False
-
- raw_kc_analyzer = KCoefficient.ScanPathAnalyzer()
- raw_kc_analysis = 0
- aoi_kc_analyzer = KCoefficient.AOIScanPathAnalyzer()
- aoi_kc_analysis = 0
- kc_mode = 'raw'
- enable_kc_analysis = False
-
- lzc_analyzer = LempelZivComplexity.AOIScanPathAnalyzer()
- lzc_analysis = 0
- enable_lzc_analysis = False
-
- ngram_analyzer = NGram.AOIScanPathAnalyzer()
- ngram_analysis = {}
- enable_ngram_analysis = False
-
- entropy_analyzer = Entropy.AOIScanPathAnalyzer()
- entropy_analysis = (-1, -1)
- enable_entropy_analysis = False
-
- nni_analyzer = NearestNeighborIndex.ScanPathAnalyzer()
- nni_analysis = 0
- enable_nni_analysis = False
-
- xxr_analyzer = ExploitExploreRatio.ScanPathAnalyzer()
- xxr_analysis = 0.
- enable_xxr_analysis = False
-
- gaze_movement_lock = threading.Lock()
+ ar_screen_lock = threading.Lock()
# Init timestamp
- start_ts = time.time()
+ start_time = time.time()
# Update pointer position
def on_mouse_event(event, x, y, flags, param):
- nonlocal gaze_position
- nonlocal clear_heatmap
- nonlocal tm_probabilities
- nonlocal tm_density
- nonlocal raw_kc_analysis
- nonlocal aoi_kc_analysis
- nonlocal lzc_analysis
- nonlocal ngram_analysis
- nonlocal entropy_analysis
- nonlocal nni_analysis
- nonlocal xxr_analysis
-
- # Edit millisecond timestamp
- data_ts = int((time.time() - start_ts) * 1e3)
-
- # Update gaze position with mouse pointer position
- gaze_position = GazeFeatures.GazePosition((x, y))
-
- # Don't identify gaze movement while former identification is exploited in video loop
- if gaze_movement_lock.locked():
+ # Don't look at the screen while it is being used by the video loop
+ if ar_screen_lock.locked():
return
- # Lock gaze movement exploitation
- gaze_movement_lock.acquire()
-
- # Edit heatmap
- if enable_heatmap:
-
- # Clear heatmap
- if clear_heatmap:
-
- heatmap.init(10 if enable_heatmap_buffer else 0)
- clear_heatmap = False
-
- # Update heatmap
- heatmap.update(gaze_position.value, sigma=0.05)
-
- else:
-
- # Identify gaze movement accordding select identification mode
- gaze_movement = gaze_movement_identifier[identification_mode].identify(data_ts, gaze_position)
-
- if GazeFeatures.is_fixation(gaze_movement):
-
- # Does the fixation match an AOI?
- look_at = 'Screen'
- for name, aoi in aoi_scene_projection.items():
-
- _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, args.deviation_max_threshold)
-
- if circle_ratio > 0.25:
+ # Lock screen exploitation
+ ar_screen_lock.acquire()
- if name != 'Screen':
+ try:
- look_at = name
- break
+ # Edit millisecond timestamp
+ timestamp = int((time.time() - start_time) * 1e3)
- # Append fixation to raw scan path
- raw_scan_path.append_fixation(data_ts, gaze_movement)
+ # Project gaze position into screen
+ ar_screen.look(timestamp, GazeFeatures.GazePosition((x, y)))
- try:
+ except GazeFeatures.AOIScanStepError as e:
- # Append fixation to aoi scan path
- new_step = aoi_scan_path.append_fixation(data_ts, gaze_movement, look_at)
+ print(f'Error on {e.aoi} step:', e)
- # Analyse aoi scan path
- if new_step and len(aoi_scan_path) > 1:
+ # Unlock screen exploitation
+ ar_screen_lock.release()
- if enable_tm_analysis:
-
- tm_probabilities, tm_density = tm.analyze(aoi_scan_path)
-
- if enable_kc_analysis:
-
- aoi_kc_analysis = aoi_kc_analyzer.analyze(aoi_scan_path)
-
- if enable_lzc_analysis:
+ return
- lzc_analysis = lzc_analyzer.analyze(aoi_scan_path)
+ # Attach mouse callback to window
+ cv2.setMouseCallback(ar_screen.name, on_mouse_event)
- if enable_ngram_analysis:
+ # Waiting for 'ctrl+C' interruption
+ try:
- ngram_analysis = ngram_analyzer.analyze(aoi_scan_path, 3)
+ # Analyse mouse positions
+ while True:
- if enable_entropy_analysis and enable_tm_analysis:
+ # Lock screen exploitation
+ ar_screen_lock.acquire()
- entropy_analysis = entropy_analyzer.analyze(aoi_scan_path, tm_probabilities)
+ # Draw screen
+ image = ar_screen.background.copy()
- except GazeFeatures.AOIScanStepError as e:
+ # Draw heatmap
+ if ar_screen.heatmap:
- print(f'Error on {e.aoi} step:', e)
+ # Clear heatmap
+ if clear_heatmap:
- elif GazeFeatures.is_saccade(gaze_movement):
+ ar_screen.heatmap.init(10 if enable_heatmap_buffer else 0)
+ clear_heatmap = False
- # Append saccade to raw scan path
- new_step = raw_scan_path.append_saccade(data_ts, gaze_movement)
+ image = cv2.addWeighted(ar_screen.heatmap.image, 0.5, image, 1., 0)
- # Analyse scan path
- if new_step and len(raw_scan_path) > 1:
+ # Write heatmap buffer help
+ buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
+ buffer_display_disable = 'disable' if enable_heatmap_buffer else 'enable'
+ cv2.putText(image, f'Heatmap buffer: {buffer_on_off} (Press \'b\' key to {buffer_display_disable})', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_heatmap_buffer else (255, 255, 255), 1, cv2.LINE_AA)
- if enable_kc_analysis:
+ # Draw AOI
+ ar_screen.aoi_2d_scene.draw(image, color=(0, 0, 0))
- raw_kc_analysis = raw_kc_analyzer.analyze(raw_scan_path)
+ # Draw gaze position
+ ar_screen.current_gaze_position.draw(image, color=(255, 255, 255))
- if enable_nni_analysis:
+ # Draw gaze movements
+ current_gaze_movement = ar_screen.current_gaze_movement
- nni_analysis = nni_analyzer.analyze(raw_scan_path, window_size)
+ current_gaze_movement.draw(image, color=(0, 255, 255))
+ current_gaze_movement.draw_positions(image)
- if enable_xxr_analysis:
+ # Check screen fixation
+ if GazeFeatures.is_fixation(current_gaze_movement):
- xxr_analysis = xxr_analyzer.analyze(raw_scan_path)
+ # Draw looked AOI
+ ar_screen.aoi_2d_scene.draw_circlecast(image, current_gaze_movement.focus, current_gaze_movement.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
- # Append saccade to aoi scan path
- aoi_scan_path.append_saccade(data_ts, gaze_movement)
+ # Write last 5 steps of aoi scan path
+ path = ''
+ for step in ar_screen.aoi_scan_path[-5:]:
- # Unlock gaze movement exploitation
- gaze_movement_lock.release()
+ path += f'> {step.aoi} '
+
+ path += f'> {ar_screen.aoi_scan_path.current_aoi}'
- return
+ cv2.putText(image, path, (20, ar_screen.size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
- # Attach mouse callback to window
- cv2.setMouseCallback(window_name, on_mouse_event)
+ # Display Transition matrix analysis if loaded
+ try:
- # Waiting for 'ctrl+C' interruption
- try:
+ transition_matrix_analyzer = ar_screen.aoi_scan_path_analyzers["TransitionMatrix"]
- # Analyse mouse positions
- while True:
-
- image = aoi_scene_image.copy()
-
- # Lock gaze movement identification
- gaze_movement_lock.acquire()
-
- # Write heatmap help
- on_off = 'on' if enable_heatmap else 'off'
- enable_disable = 'disable' if enable_heatmap else 'enable'
- buffer_on_off = 'on' if enable_heatmap_buffer else 'off'
- buffer_enable_disable = 'disable' if enable_heatmap_buffer else 'enable'
- cv2.putText(image, f'Heatmap: {on_off} (Press \'h\' key to {enable_disable}), Buffer: {buffer_on_off} (Press \'b\' key to {buffer_enable_disable})', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_heatmap else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Draw gaze spread heatmap
- if enable_heatmap:
-
- image = cv2.addWeighted(heatmap.heatmap, 0.5, image, 1., 0)
-
- else:
-
- # Write identification mode
- cv2.putText(image, f'Gaze movement identification mode: {identification_mode} (Press \'m\' key to switch)', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, fixation_color[identification_mode], 1, cv2.LINE_AA)
-
- # Write TPM help
- on_off = 'on' if enable_tm_analysis else 'off'
- display_hide = 'hide' if enable_tm_analysis else 'display'
- cv2.putText(image, f'Transition matrix: {on_off} (Press \'t\' key to {display_hide})', (20, 120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_tm_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Write Kc help
- on_off = 'on' if enable_kc_analysis else 'off'
- display_hide = 'hide' if enable_kc_analysis else 'display'
- cv2.putText(image, f'coefficient K: {on_off} (Press \'k\' key to {display_hide})', (20, 160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_kc_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Write LZC help
- on_off = 'on' if enable_lzc_analysis else 'off'
- display_hide = 'hide' if enable_lzc_analysis else 'display'
- cv2.putText(image, f'Lempel-Ziv complexity: {on_off} (Press \'z\' key to {display_hide})', (20, 200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_lzc_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Write N-Gram help
- on_off = 'on' if enable_ngram_analysis else 'off'
- display_hide = 'hide' if enable_ngram_analysis else 'display'
- cv2.putText(image, f'Tri-Gram: {on_off} (Press \'n\' key to {display_hide})', (20, 240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_ngram_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Write entropy help
- on_off = 'on' if enable_entropy_analysis else 'off'
- display_hide = 'hide' if enable_entropy_analysis else 'display'
- cv2.putText(image, f'Entropy: {on_off} (Press \'e\' key to {display_hide})', (20, 280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_entropy_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Write nni help
- on_off = 'on' if enable_nni_analysis else 'off'
- display_hide = 'hide' if enable_nni_analysis else 'display'
- cv2.putText(image, f'Nearest neighbor index: {on_off} (Press \'i\' key to {display_hide})', (20, 320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_nni_analysis else (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Transition matrix density: {transition_matrix_analyzer.transition_matrix_density:.2f}', (20, ar_screen.size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- # Write xxr help
- on_off = 'on' if enable_xxr_analysis else 'off'
- display_hide = 'hide' if enable_xxr_analysis else 'display'
- cv2.putText(image, f'Exploit Explore Ratio: {on_off} (Press \'x\' key to {display_hide})', (20, 360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255) if enable_xxr_analysis else (255, 255, 255), 1, cv2.LINE_AA)
-
- # Check fixation identification
- if gaze_movement_identifier[identification_mode].current_fixation != None:
+ # Iterate over indexes (departures)
+ for from_aoi, row in transition_matrix_analyzer.transition_matrix_probabilities.iterrows():
- current_fixation = gaze_movement_identifier[identification_mode].current_fixation
+ # Iterate over columns (destinations)
+ for to_aoi, probability in row.items():
- # Draw looked AOI
- aoi_scene_projection.draw_circlecast(image, current_fixation.focus, current_fixation.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
+ if from_aoi != to_aoi and probability > 0.0:
- # Draw current fixation
- current_fixation.draw(image, color=current_fixation_color)
+ from_center = ar_screen.aoi_2d_scene[from_aoi].center.astype(int)
+ to_center = ar_screen.aoi_2d_scene[to_aoi].center.astype(int)
+ start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
- # Draw current fixation gaze positions
- current_fixation.draw_positions(image)
+ color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
- else:
+ cv2.line(image, start_line, to_center, color, int(probability*10) + 2)
+ cv2.line(image, from_center, to_center, [55, 55, 55], 2)
+
+ except KeyError:
+ pass
- # Draw pointer as gaze position
- gaze_position.draw(image, draw_precision=False)
+ # Display scan path K Coefficient analysis if loaded
+ try:
- # Draw AOI scene projection
- aoi_scene_projection.draw(image, color=(0, 0, 0))
+ kc_analyzer = ar_screen.scan_path_analyzers["KCoefficient"]
+
+ # Write raw Kc analysis
+ if kc_analyzer.K < 0.:
- # Check saccade identification
- if gaze_movement_identifier[identification_mode].current_saccade != None:
+ cv2.putText(image, f'K coefficient: Ambient attention', (20, ar_screen.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+ elif kc_analyzer.K > 0.:
- current_saccade = gaze_movement_identifier[identification_mode].current_saccade
+ cv2.putText(image, f'K coefficient: Focal attention', (20, ar_screen.size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
+
+ except KeyError:
+ pass
- # Draw current saccade gaze positions
- current_saccade.draw_positions(image)
+ # Display aoi scan path K-modified coefficient analysis if loaded
+ try:
- # Draw last 10 steps of raw scan path
- raw_scan_path.draw(image, fixation_color=fixation_color[identification_mode], deepness=10)
+ aoi_kc_analyzer = ar_screen.aoi_scan_path_analyzers["KCoefficient"]
- # Write last 5 steps of aoi scan path
- path = ''
- for step in aoi_scan_path[-5:]:
+ # Write aoi Kc analysis
+ if aoi_kc_analyzer.K < 0.:
- path += f'> {step.aoi} '
+ cv2.putText(image, f'K-modified coefficient: Ambient attention', (20, ar_screen.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- path += f'> {aoi_scan_path.current_aoi}'
+ elif aoi_kc_analyzer.K > 0.:
- cv2.putText(image, path, (20, window_size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
-
- # Draw transition probability matrix
- if enable_tm_analysis:
-
- cv2.putText(image, f'Transition matrix density: {tm_density:.2f}', (20, window_size[1]-160), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
- # Iterate over indexes (departures)
- for from_aoi, row in tm_probabilities.iterrows():
+ cv2.putText(image, f'K-modified coefficient: Focal attention', (20, ar_screen.size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
+
+ except KeyError:
+ pass
- # Iterate over columns (destinations)
- for to_aoi, probability in row.items():
+ # Display Lempel-Ziv complexity analysis if loaded
+ try:
- if from_aoi != to_aoi and probability > 0.0:
+ lzc_analyzer = ar_screen.aoi_scan_path_analyzers["LempelZivComplexity"]
- from_center = aoi_scene_projection[from_aoi].center.astype(int)
- to_center = aoi_scene_projection[to_aoi].center.astype(int)
- start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
+ cv2.putText(image, f'Lempel-Ziv complexity: {lzc_analyzer.lempel_ziv_complexity}', (20, ar_screen.size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
+ except KeyError:
+ pass
- cv2.line(image, start_line, to_center, color, int(probability*10) + 2)
- cv2.line(image, from_center, to_center, [55, 55, 55], 2)
+ # Display N-Gram analysis if loaded
+ try:
- if enable_kc_analysis:
+ ngram_analyzer = ar_screen.aoi_scan_path_analyzers["NGram"]
- # Write raw Kc analysis
- if raw_kc_analysis < 0.:
+ start = ar_screen.size[1] - ((len(ngram_analyzer.ngrams_count) + 1) * 40)
+ cv2.putText(image, f'{ngram_analyzer.n}-Gram:', (ar_screen.size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- cv2.putText(image, f'Raw: Ambient attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
- elif raw_kc_analysis > 0.:
+ for i, (ngram, count) in enumerate(ngram_analyzer.ngrams_count.items()):
- cv2.putText(image, f'Raw: Focal attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
-
- # Write aoi Kc analysis
- if aoi_kc_analysis < 0.:
+ ngram_string = f'{ngram[0]}'
+ for g in range(1, ngram_analyzer.n):
+ ngram_string += f'>{ngram[g]}'
- cv2.putText(image, f'AOI: Ambient attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
- elif aoi_kc_analysis > 0.:
+ cv2.putText(image, f'{ngram_string}: {count}', (ar_screen.size[0]-700, start+(i*40)), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- cv2.putText(image, f'AOI: Focal attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
+ except KeyError:
+ pass
- # Write LZC
- if enable_lzc_analysis:
+ # Display Entropy analysis if loaded
+ try:
- cv2.putText(image, f'Lempel-Ziv complexity: {lzc_analysis}', (20, window_size[1]-200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ entropy_analyzer = ar_screen.aoi_scan_path_analyzers["Entropy"]
- # Write N-Gram
- if enable_ngram_analysis:
+ cv2.putText(image, f'Stationary entropy: {entropy_analyzer.stationary_entropy:.3f},', (20, ar_screen.size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Transition entropy: {entropy_analyzer.transition_entropy:.3f},', (20, ar_screen.size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+ except KeyError:
+ pass
- start = window_size[1] - ((len(ngram_analysis) + 1) * 40)
- cv2.putText(image, f'Tri-Gram:', (window_size[0]-700, start-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ # Display Nearest Neighbor index analysis if loaded
+ try:
- for i, (ngram, count) in enumerate(ngram_analysis.items()):
+ nni_analyzer = ar_screen.scan_path_analyzers["NearestNeighborIndex"]
- trigram = f'{ngram[0]}>{ngram[1]}>{ngram[2]}'
- cv2.putText(image, f'{trigram}: {count}', (window_size[0]-700, start+(i*40)), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Nearest neighbor index: {nni_analyzer.nearest_neighbor_index:.3f}', (20, ar_screen.size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+ except KeyError:
+ pass
- # Write entropy
- if enable_entropy_analysis:
+ # Display Exploit/Explore ratio analysis if loaded
+ try:
- cv2.putText(image, f'Stationary entropy: {entropy_analysis[0]:.3f},', (20, window_size[1]-280), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- cv2.putText(image, f'Transition entropy: {entropy_analysis[1]:.3f},', (20, window_size[1]-240), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
- # Write NNI
- if enable_nni_analysis:
+ xxr_analyser = ar_screen.scan_path_analyzers["ExploitExploreRatio"]
- cv2.putText(image, f'Nearest neighbor index: {nni_analysis:.3f}', (20, window_size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(image, f'Exploit explore ratio: {xxr_analyser.exploit_explore_ratio:.3f}', (20, ar_screen.size[1]-360), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
- # Write XXR
- if enable_xxr_analysis:
+ except KeyError:
- cv2.putText(image, f'Exploit explore ratio: {xxr_analysis:.3f}', (20, window_size[1]-320), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+ pass
- # Unlock gaze movement identification
- gaze_movement_lock.release()
+ # Unlock screen exploitation
+ ar_screen_lock.release()
# Draw image
- cv2.imshow(window_name, image)
+ cv2.imshow(ar_screen.name, image)
key_pressed = cv2.waitKey(10)
#if key_pressed != -1:
# print(key_pressed)
- # Switch identification mode with 'm' key
- if key_pressed == 109:
+ # Reload environment with 'r' key
+ if key_pressed == 114:
- mode_list = list(gaze_movement_identifier.keys())
- current_index = mode_list.index(identification_mode) + 1
- identification_mode = mode_list[current_index % len(mode_list)]
+ # Lock screen exploitation
+ ar_screen_lock.acquire()
- # Enable heatmap with 'h' key
- if key_pressed == 104:
+ ar_environment = ArFeatures.ArEnvironment.from_json(args.environment)
+ ar_screen = ar_environment.scenes["AR Scene Demo"].screens["GrayRectangle"]
- enable_heatmap = not enable_heatmap
+ # Unlock screen exploitation
+ ar_screen_lock.release()
# Enable heatmap buffer with 'b' key
if key_pressed == 98:
@@ -452,46 +289,6 @@ def main():
clear_heatmap = True
- # Enable Kc analysis with 'k' key
- if key_pressed == 107:
-
- enable_kc_analysis = not enable_kc_analysis
-
- # Enable TPM analysis with 't' key
- if key_pressed == 116:
-
- enable_tm_analysis = not enable_tm_analysis
-
- # Enable LZC analysis with 'z' key
- if key_pressed == 122:
-
- enable_lzc_analysis = not enable_lzc_analysis
-
- # Enable ngram analysis with 'n' key
- if key_pressed == 110:
-
- enable_ngram_analysis = not enable_ngram_analysis
-
- # Enable entropy analysis with 'e' key
- if key_pressed == 101:
-
- enable_entropy_analysis = not enable_entropy_analysis
-
- # Transition matrix is needed
- if enable_entropy_analysis:
-
- enable_tm_analysis = True
-
- # Enable NNI analysis with 'i' key
- if key_pressed == 105:
-
- enable_nni_analysis = not enable_nni_analysis
-
- # Enable XXR analysis with 'x' key
- if key_pressed == 120:
-
- enable_xxr_analysis = not enable_xxr_analysis
-
# Stop calibration by pressing 'Esc' key
if key_pressed == 27:
break