Diffstat (limited to 'src')
-rw-r--r--  src/argaze/utils/demo_gaze_features_run.py  259
1 file changed, 153 insertions(+), 106 deletions(-)
diff --git a/src/argaze/utils/demo_gaze_features_run.py b/src/argaze/utils/demo_gaze_features_run.py
index 06c9703..a957ce0 100644
--- a/src/argaze/utils/demo_gaze_features_run.py
+++ b/src/argaze/utils/demo_gaze_features_run.py
@@ -61,6 +61,8 @@ def main():
     gaze_spread_sum = numpy.zeros((aoi_scene_image.shape[0], aoi_scene_image.shape[1]))
     heatmap_matrix = numpy.zeros(aoi_scene_image.shape, dtype=numpy.uint8)

+    enable_heatmap = False
+
     gaze_movement_identifier = {
         'I-DT': DispersionThresholdIdentification.GazeMovementIdentifier(args.deviation_max_threshold, args.duration_min_threshold),
         'I-VT': VelocityThresholdIdentification.GazeMovementIdentifier(args.velocity_max_threshold, args.duration_min_threshold)
@@ -72,6 +74,7 @@ def main():
     tpm = TransitionProbabilityMatrix.AOIScanPathAnalyzer()
     tpm_analysis = pandas.DataFrame()
+    enable_tpm_analysis = False

     raw_cK_analyzer = CoefficientK.ScanPathAnalyzer()
     raw_cK_analysis = 0
@@ -80,6 +83,7 @@ def main():
     aoi_cK_analysis = 0

     ck_mode = 'raw'
+    enable_ck_analysis = False

     gaze_movement_lock = threading.Lock()
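
The toggles above are read by the drawing loop while the gaze callback updates the analysis results, which is why the demo guards both sides with gaze_movement_lock. A minimal sketch of that producer/consumer locking pattern (the demo calls acquire()/release() explicitly; the equivalent `with` form is used here, and the worker/loop names are illustrative, not from the demo):

    import threading
    import time

    state_lock = threading.Lock()
    shared = {'analysis': 0}

    def gaze_worker():
        # Producer: updates shared analysis results under the lock,
        # as the demo's gaze callback does around its scan path updates.
        for i in range(5):
            with state_lock:
                shared['analysis'] = i
            time.sleep(0.01)

    def display_loop():
        # Consumer: reads the shared results under the same lock before drawing.
        for _ in range(5):
            with state_lock:
                value = shared['analysis']
            print('drawing with analysis =', value)
            time.sleep(0.01)

    worker = threading.Thread(target=gaze_worker)
    worker.start()
    display_loop()
    worker.join()
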
@@ -110,59 +114,69 @@ def main():
         gaze_movement_lock.acquire()

         # Edit heatmap
-        gaze_spread_sum += screen_frame.point_spread(gaze_position.value, sigma=0.05)
-        heatmap_gray = (255 * gaze_spread_sum / numpy.max(gaze_spread_sum)).astype(numpy.uint8)
-        heatmap_matrix = cv2.applyColorMap(heatmap_gray, cv2.COLORMAP_JET)
-
-        # Identify gaze movement accordding select identification mode
-        gaze_movement = gaze_movement_identifier[identification_mode].identify(data_ts, gaze_position)
-
-        if GazeFeatures.is_fixation(gaze_movement):
-
-            # Does the fixation match an AOI?
-            look_at = 'Screen'
-            for name, aoi in aoi_scene_projection.items():
-
-                _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, args.deviation_max_threshold)
-
-                if circle_ratio > 0.25:
-
-                    if name != 'Screen':
-
-                        look_at = name
-                        break
-
-            # Append fixation to raw scan path
-            raw_scan_path.append_fixation(data_ts, gaze_movement)
-
-            try:
-
-                # Append fixation to aoi scan path
-                new_step = aoi_scan_path.append_fixation(data_ts, gaze_movement, look_at)
-
-                # Analyse aoi scan path
-                if new_step and len(aoi_scan_path) > 1:
-
-                    tpm_analysis = tpm.analyze(aoi_scan_path)
-
-                    aoi_cK_analysis = aoi_cK_analyzer.analyze(aoi_scan_path)
-
-            except GazeFeatures.AOIScanStepError as e:
-
-                print(f'Error on {e.aoi} step:', e)
-
-        elif GazeFeatures.is_saccade(gaze_movement):
-
-            # Append saccade to raw scan path
-            new_step = raw_scan_path.append_saccade(data_ts, gaze_movement)
-
-            # Analyse scan path
-            if new_step and len(raw_scan_path) > 1:
-
-                raw_cK_analysis = raw_cK_analyzer.analyze(raw_scan_path)
-
-            # Append saccade to aoi scan path
-            aoi_scan_path.append_saccade(data_ts, gaze_movement)
+        if enable_heatmap:
+
+            gaze_spread_sum += screen_frame.point_spread(gaze_position.value, sigma=0.05)
+            heatmap_gray = (255 * gaze_spread_sum / numpy.max(gaze_spread_sum)).astype(numpy.uint8)
+            heatmap_matrix = cv2.applyColorMap(heatmap_gray, cv2.COLORMAP_JET)
+
+        else:
+
+            # Identify gaze movement according to the selected identification mode
+            gaze_movement = gaze_movement_identifier[identification_mode].identify(data_ts, gaze_position)
+
+            if GazeFeatures.is_fixation(gaze_movement):
+
+                # Does the fixation match an AOI?
+                look_at = 'Screen'
+                for name, aoi in aoi_scene_projection.items():
+
+                    _, _, circle_ratio = aoi.circle_intersection(gaze_movement.focus, args.deviation_max_threshold)
+
+                    if circle_ratio > 0.25:
+
+                        if name != 'Screen':
+
+                            look_at = name
+                            break
+
+                # Append fixation to raw scan path
+                raw_scan_path.append_fixation(data_ts, gaze_movement)
+
+                try:
+
+                    # Append fixation to aoi scan path
+                    new_step = aoi_scan_path.append_fixation(data_ts, gaze_movement, look_at)
+
+                    # Analyse aoi scan path
+                    if new_step and len(aoi_scan_path) > 1:
+
+                        if enable_tpm_analysis:
+
+                            tpm_analysis = tpm.analyze(aoi_scan_path)
+
+                        if enable_ck_analysis:
+
+                            aoi_cK_analysis = aoi_cK_analyzer.analyze(aoi_scan_path)
+
+                except GazeFeatures.AOIScanStepError as e:
+
+                    print(f'Error on {e.aoi} step:', e)
+
+            elif GazeFeatures.is_saccade(gaze_movement):
+
+                # Append saccade to raw scan path
+                new_step = raw_scan_path.append_saccade(data_ts, gaze_movement)
+
+                # Analyse scan path
+                if new_step and len(raw_scan_path) > 1:
+
+                    if enable_ck_analysis:
+
+                        raw_cK_analysis = raw_cK_analyzer.analyze(raw_scan_path)
+
+                # Append saccade to aoi scan path
+                aoi_scan_path.append_saccade(data_ts, gaze_movement)

         # Unlock gaze movement exploitation
         gaze_movement_lock.release()
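
The heatmap branch above accumulates a per-pixel spread for every gaze position, normalizes the running sum to 8-bit gray and colorizes it. A self-contained sketch of the same technique, with a hand-rolled Gaussian standing in for argaze's screen_frame.point_spread (the image size, sample points and sigma handling here are illustrative assumptions):

    import numpy
    import cv2

    def point_spread(width, height, point, sigma=0.05):
        # Hypothetical stand-in for argaze's point_spread: a 2D Gaussian
        # centered on the gaze point, sigma given as a fraction of width.
        xs, ys = numpy.meshgrid(numpy.arange(width), numpy.arange(height))
        d2 = (xs - point[0])**2 + (ys - point[1])**2
        return numpy.exp(-d2 / (2 * (sigma * width)**2))

    width, height = 320, 240
    gaze_spread_sum = numpy.zeros((height, width))

    # Accumulate spread for each incoming gaze position (fake samples here)
    for point in [(100, 80), (105, 82), (200, 150)]:
        gaze_spread_sum += point_spread(width, height, point)

    # Normalize to 8-bit gray, then colorize as the demo does
    heatmap_gray = (255 * gaze_spread_sum / numpy.max(gaze_spread_sum)).astype(numpy.uint8)
    heatmap_matrix = cv2.applyColorMap(heatmap_gray, cv2.COLORMAP_JET)

    # Blend over a background image, as the next hunk does with aoi_matrix
    background = numpy.full((height, width, 3), 32, dtype=numpy.uint8)
    overlay = cv2.addWeighted(heatmap_matrix, 0.5, background, 1., 0)
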
@@ -183,108 +197,126 @@ def main():
         # Lock gaze movement identification
         gaze_movement_lock.acquire()

+        # Write heatmap help
+        on_off = 'on' if enable_heatmap else 'off'
+        enable_disable = 'disable' if enable_heatmap else 'enable'
+        cv2.putText(aoi_matrix, f'Heatmap: {on_off} (Press \'h\' key to {enable_disable})', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
         # Draw gaze spread heatmap
-        aoi_matrix = cv2.addWeighted(heatmap_matrix, 0.5, aoi_matrix, 1., 0)
-        #aoi_matrix = numpy.maximum(aoi_matrix, heatmap_matrix)
-
-        # Write identification mode
-        cv2.putText(aoi_matrix, f'Gaze movement identification mode: {identification_mode} (Press \'m\' key to switch)', (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
-
-        # Check fixation identification
-        if gaze_movement_identifier[identification_mode].current_fixation != None:
-
-            current_fixation = gaze_movement_identifier[identification_mode].current_fixation
-
-            # Draw looked AOI
-            aoi_scene_projection.draw_circlecast(aoi_matrix, current_fixation.focus, current_fixation.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
-
-            # Draw current fixation
-            cv2.circle(aoi_matrix, (int(current_fixation.focus[0]), int(current_fixation.focus[1])), int(current_fixation.deviation_max), (255, 255, 255), len(current_fixation.positions))
-
-            # Draw current fixation gaze positions
-            gaze_positions = current_fixation.positions.copy()
-            while len(gaze_positions) >= 2:
-
-                ts_start, start_gaze_position = gaze_positions.pop_first()
-                ts_next, next_gaze_position = gaze_positions.first
-
-                # Draw start gaze
-                start_gaze_position.draw(aoi_matrix, draw_precision=False)
-
-                # Draw movement from start to next
-                cv2.line(aoi_matrix, start_gaze_position, next_gaze_position, (0, 55, 55), 1)
-
-        else:
-
-            # Draw pointer as gaze position
-            gaze_position.draw(aoi_matrix, draw_precision=False)
-
-        # Draw AOI scene projection
-        aoi_scene_projection.draw(aoi_matrix, color=(0, 0, 0))
-
-        # Check saccade identification
-        if gaze_movement_identifier[identification_mode].current_saccade != None:
-
-            current_saccade = gaze_movement_identifier[identification_mode].current_saccade
-
-            # Draw current saccade gaze positions
-            gaze_positions = current_saccade.positions.copy()
-            while len(gaze_positions) >= 2:
-
-                ts_start, start_gaze_position = gaze_positions.pop_first()
-                ts_next, next_gaze_position = gaze_positions.first
-
-                # Draw start gaze
-                start_gaze_position.draw(aoi_matrix, draw_precision=False)
-
-                # Draw movement from start to next
-                cv2.line(aoi_matrix, start_gaze_position, next_gaze_position, (0, 0, 255), 1)
-
-        # Write last 5 steps of aoi scan path
-        path = ''
-        for step in aoi_scan_path[-5:]:
-            path += f'> {step.aoi} '
-        path += f'> {aoi_scan_path.current_aoi}'
-
-        cv2.putText(aoi_matrix, path, (20, window_size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
-
-        # Draw transition probability matrix
-        for from_aoi, column in tpm_analysis.items():
-
-            for to_aoi, probability in column.items():
-
-                if from_aoi != to_aoi and probability > 0.0:
-
-                    from_center = aoi_scene_projection[from_aoi].center.astype(int)
-                    to_center = aoi_scene_projection[to_aoi].center.astype(int)
-                    start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
-
-                    color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
-
-                    cv2.line(aoi_matrix, start_line, to_center, color, int(probability*10) + 2)
-                    cv2.line(aoi_matrix, from_center, to_center, [55, 55, 55], 2)
-
-        # Write raw cK analysis
-        if raw_cK_analysis < 0.:
-
-            cv2.putText(aoi_matrix, f'Raw: Ambient attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-        elif raw_cK_analysis > 0.:
-
-            cv2.putText(aoi_matrix, f'Raw: Focal attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
-
-        # Write aoi cK analysis
-        if aoi_cK_analysis < 0.:
-
-            cv2.putText(aoi_matrix, f'AOI: Ambient attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
-
-        elif aoi_cK_analysis > 0.:
-
-            cv2.putText(aoi_matrix, f'AOI: Focal attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
-
-        ### TEST: GazePosition Heatmap
-        ##############################
+        if enable_heatmap:
+
+            aoi_matrix = cv2.addWeighted(heatmap_matrix, 0.5, aoi_matrix, 1., 0)
+
+        else:
+
+            # Write identification mode
+            cv2.putText(aoi_matrix, f'Gaze movement identification mode: {identification_mode} (Press \'m\' key to switch)', (20, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Write TPM help
+            on_off = 'on' if enable_tpm_analysis else 'off'
+            display_hide = 'hide' if enable_tpm_analysis else 'display'
+            cv2.putText(aoi_matrix, f'Transition probability matrix: {on_off} (Press \'t\' key to {display_hide})', (20, 120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Write cK help
+            on_off = 'on' if enable_ck_analysis else 'off'
+            display_hide = 'hide' if enable_ck_analysis else 'display'
+            cv2.putText(aoi_matrix, f'coefficient K: {on_off} (Press \'k\' key to {display_hide})', (20, 160), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Check fixation identification
+            if gaze_movement_identifier[identification_mode].current_fixation != None:
+
+                current_fixation = gaze_movement_identifier[identification_mode].current_fixation
+
+                # Draw looked AOI
+                aoi_scene_projection.draw_circlecast(aoi_matrix, current_fixation.focus, current_fixation.deviation_max, base_color=(0, 0, 0), matching_color=(255, 255, 255))
+
+                # Draw current fixation
+                cv2.circle(aoi_matrix, (int(current_fixation.focus[0]), int(current_fixation.focus[1])), int(current_fixation.deviation_max), (255, 255, 255), len(current_fixation.positions))
+
+                # Draw current fixation gaze positions
+                gaze_positions = current_fixation.positions.copy()
+                while len(gaze_positions) >= 2:
+
+                    ts_start, start_gaze_position = gaze_positions.pop_first()
+                    ts_next, next_gaze_position = gaze_positions.first
+
+                    # Draw start gaze
+                    start_gaze_position.draw(aoi_matrix, draw_precision=False)
+
+                    # Draw movement from start to next
+                    cv2.line(aoi_matrix, start_gaze_position, next_gaze_position, (0, 55, 55), 1)
+
+            else:
+
+                # Draw pointer as gaze position
+                gaze_position.draw(aoi_matrix, draw_precision=False)
+
+            # Draw AOI scene projection
+            aoi_scene_projection.draw(aoi_matrix, color=(0, 0, 0))
+
+            # Check saccade identification
+            if gaze_movement_identifier[identification_mode].current_saccade != None:
+
+                current_saccade = gaze_movement_identifier[identification_mode].current_saccade
+
+                # Draw current saccade gaze positions
+                gaze_positions = current_saccade.positions.copy()
+                while len(gaze_positions) >= 2:
+
+                    ts_start, start_gaze_position = gaze_positions.pop_first()
+                    ts_next, next_gaze_position = gaze_positions.first
+
+                    # Draw start gaze
+                    start_gaze_position.draw(aoi_matrix, draw_precision=False)
+
+                    # Draw movement from start to next
+                    cv2.line(aoi_matrix, start_gaze_position, next_gaze_position, (0, 0, 255), 1)
+
+            # Write last 5 steps of aoi scan path
+            path = ''
+            for step in aoi_scan_path[-5:]:
+                path += f'> {step.aoi} '
+            path += f'> {aoi_scan_path.current_aoi}'
+
+            cv2.putText(aoi_matrix, path, (20, window_size[1]-40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 1, cv2.LINE_AA)
+
+            # Draw transition probability matrix
+            if enable_tpm_analysis:
+
+                for from_aoi, column in tpm_analysis.items():
+
+                    for to_aoi, probability in column.items():
+
+                        if from_aoi != to_aoi and probability > 0.0:
+
+                            from_center = aoi_scene_projection[from_aoi].center.astype(int)
+                            to_center = aoi_scene_projection[to_aoi].center.astype(int)
+                            start_line = (0.5 * from_center + 0.5 * to_center).astype(int)
+
+                            color = [int(probability*200) + 55, int(probability*200) + 55, int(probability*200) + 55]
+
+                            cv2.line(aoi_matrix, start_line, to_center, color, int(probability*10) + 2)
+                            cv2.line(aoi_matrix, from_center, to_center, [55, 55, 55], 2)
+
+            if enable_ck_analysis:
+
+                # Write raw cK analysis
+                if raw_cK_analysis < 0.:
+
+                    cv2.putText(aoi_matrix, f'Raw: Ambient attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+                elif raw_cK_analysis > 0.:
+
+                    cv2.putText(aoi_matrix, f'Raw: Focal attention', (20, window_size[1]-120), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)
+
+                # Write aoi cK analysis
+                if aoi_cK_analysis < 0.:
+
+                    cv2.putText(aoi_matrix, f'AOI: Ambient attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 1, cv2.LINE_AA)
+
+                elif aoi_cK_analysis > 0.:
+
+                    cv2.putText(aoi_matrix, f'AOI: Focal attention', (20, window_size[1]-80), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 1, cv2.LINE_AA)

         # Unlock gaze movement identification
         gaze_movement_lock.release()
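
tpm_analysis is a pandas DataFrame whose columns the drawing loop treats as the 'from' AOI and whose rows as the 'to' AOI. A rough sketch of how such a transition probability matrix can be built from an AOI fixation sequence, assuming that orientation (argaze's TransitionProbabilityMatrix.AOIScanPathAnalyzer may differ in detail):

    import pandas

    # A toy sequence of fixated AOIs (one entry per AOI scan step)
    sequence = ['A', 'B', 'A', 'C', 'B', 'A']

    # Count transitions from each AOI to the next one
    transitions = pandas.crosstab(
        pandas.Series(sequence[:-1], name='from'),
        pandas.Series(sequence[1:], name='to'))

    # Normalize each 'from' row so outgoing probabilities sum to 1, then
    # transpose so columns are 'from' AOI, as the demo's
    # `for from_aoi, column in tpm_analysis.items()` loop expects.
    tpm_analysis = transitions.div(transitions.sum(axis=1), axis=0).T

    for from_aoi, column in tpm_analysis.items():
        for to_aoi, probability in column.items():
            if from_aoi != to_aoi and probability > 0.0:
                print(f'{from_aoi} -> {to_aoi}: {probability:.2f}')

The ambient/focal labels above come from the sign of coefficient K (Krejtz et al.): the standardized fixation duration minus the standardized amplitude of the following saccade, averaged over the scan path. A sketch under that definition, with made-up sample values (argaze's CoefficientK analyzer may standardize differently):

    import numpy

    def coefficient_k(durations, amplitudes):
        # durations[i]: duration of fixation i; amplitudes[i]: amplitude
        # of the saccade following fixation i (same length assumed).
        d = numpy.asarray(durations, dtype=float)
        a = numpy.asarray(amplitudes, dtype=float)
        return numpy.mean((d - d.mean()) / d.std() - (a - a.mean()) / a.std())

    # K > 0: long fixations, short following saccades -> focal attention
    # K < 0: short fixations, long following saccades -> ambient attention
    k = coefficient_k([200, 350, 180, 400], [5.0, 1.2, 6.5, 0.8])
    print('Focal' if k > 0 else 'Ambient')
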
@@ -294,8 +326,8 @@ def main():
         key_pressed = cv2.waitKey(10)

-        #if key_pressed != -1:
-        #    print(key_pressed)
+        if key_pressed != -1:
+            print(key_pressed)

         # Switch identification mode with 'm' key
         if key_pressed == 109:
@@ -304,6 +336,21 @@ def main():
             current_index = mode_list.index(identification_mode) + 1
             identification_mode = mode_list[current_index % len(mode_list)]

+        # Toggle heatmap with 'h' key
+        if key_pressed == 104:
+
+            enable_heatmap = not enable_heatmap
+
+        # Toggle cK analysis with 'k' key
+        if key_pressed == 107:
+
+            enable_ck_analysis = not enable_ck_analysis
+
+        # Toggle TPM analysis with 't' key
+        if key_pressed == 116:
+
+            enable_tpm_analysis = not enable_tpm_analysis
+
         # Stop calibration by pressing 'Esc' key
         if cv2.waitKey(10) == 27:
             break
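
The key codes in the last hunk are plain ASCII: 109 is 'm', 104 'h', 107 'k', 116 't', and 27 is Esc; cv2.waitKey returns -1 when no key is pending, which is why the re-enabled debug print is guarded. A minimal sketch of the same toggle pattern using ord() instead of magic numbers; note that the unchanged context lines call cv2.waitKey a second time for the Esc test, polling the keyboard twice per frame, so a single poll as below is safer:

    import cv2
    import numpy

    enable_heatmap = False
    frame = numpy.zeros((240, 320, 3), dtype=numpy.uint8)

    while True:
        cv2.imshow('demo', frame)

        key_pressed = cv2.waitKey(10)  # -1 if no key was pressed

        if key_pressed == ord('h'):    # 104: toggle heatmap
            enable_heatmap = not enable_heatmap

        if key_pressed == 27:          # Esc: quit
            break

    cv2.destroyAllWindows()
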