path: root/utils/processTobiiRecords.sh
#!/bin/bash
#
# Load a JSON ArGaze context file ($1), then look for all Tobii record segments in the input folder ($2).
#
# For each Tobii record segment found:
# - the context "segment" field is modified to bind to it,
# - the context "pipeline" field is modified to bind to a temporary patched pipeline
#	resulting from merging the given pipeline with an optional "patch.json" file found in the segment,
# - ArGaze then executes the modified context from inside the output folder ($3) so that all exports are written into it,
# - finally, the context is reset and the temporary patched pipeline is removed.
#
# Arguments:
# $1: ArGaze context file
# $2: folder in which to look for Tobii records
# $3: folder where ArGaze processing outputs are exported
#######################################
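
# A minimal usage sketch (the paths below are purely illustrative placeholders):
#
#	./utils/processTobiiRecords.sh ./my_context.json ./tobii_records ./exports
#
# would process every Tobii record segment found under ./tobii_records and write
# the ArGaze exports under ./exports/<record id>/segments/<segment id>.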

# Check required arguments
if [ "$#" -ne 3 ]; then
  echo "!!! Incorrect number of arguments"
  exit 1
fi

context_file=$1
input_folder=$2
output_folder=$3

# Check requirements: jq is needed for all the JSON reading and editing below
if brew ls --versions jq > /dev/null; then
	: # jq already installed
else
	echo "*** Installing jq package"
	brew install jq
fi
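
# Note: the check above relies on Homebrew, so it assumes a macOS setup; on other
# systems, jq would have to be installed through the local package manager instead.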

# Check context
ctx_folder="$(dirname "$context_file")"
ctx_class=$(jq "keys[0]" "$context_file")
ctx_name=$(jq ".$ctx_class.name" "$context_file")
ctx_segment=$(jq ".$ctx_class.segment" "$context_file")
pipeline_file=$(jq -r ".$ctx_class.pipeline" "$context_file")
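
# For reference, the fields read above imply a context file shaped roughly like the
# sketch below (the class name and values are illustrative assumptions; only the
# keys queried by this script are shown):
#
#	{
#		"SomeContextClass": {
#			"name": "my context",
#			"segment": null,
#			"pipeline": "pipeline.json"
#		}
#	}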

echo "*** Loading $ctx_class context:"
echo "- Name: $ctx_name"

# Move to context folder
cd $ctx_folder

# Check pipeline
if [ -f "$pipeline_file" ]; then
	
	ppl_class=$(jq "keys[0]" $pipeline_file)
	ppl_name=$(jq .$ppl_class.name $pipeline_file)
	echo "- Pipeline: $ppl_name"

else 

	echo "!!! Missing $pipeline_file pipeline file"
	exit 1

fi

#######################################
# Process a Tobii segment folder
# Arguments:
#   $1: Tobii record id
#   $2: Path to the Tobii segment folder
#######################################
function process_segment() {

	local rec_id=$1
	local seg_folder=$2
	local seg_id=$(basename $seg_folder)
	local seg_length=$(jq .seg_length $seg_folder/segment.json)

	local ca_state=$(jq -r .ca_state $seg_folder/calibration.json)

	echo "- Segment $seg_id:"
	echo "	- Lenght: $seg_length"
	echo "	- Calibration: $ca_state"

	# Check patch
	local patch_file="$seg_folder/patch.json"
	if [ -f "$patch_file" ]; then
		
		echo "+ Patch:"
		echo "$(jq . $patch_file)"

		# Edit temporary patched pipeline
		local temp_file=".$rec_id-$seg_id.patch.json"
		echo "$(jq --tab -s ".[0] * .[1]" $pipeline_file $patch_file)" > "$ctx_folder/$temp_file"

		# Modify context pipeline
		echo "$(jq --tab ".$ctx_class.pipeline = \"$temp_file\"" $context_file)" > $context_file

		echo "*** $ctx_folder/$temp_file file created"
	fi

	# Modify context segment
	echo "$(jq --tab ".$ctx_class.segment = \"$seg_folder\"" $context_file)" > $context_file

	# Create the segment output folder, then move into it
	seg_output=$output_folder/$rec_id/segments/$seg_id
	mkdir -p $seg_output
	cd $seg_output

	# Launch argaze with modified context
	echo "*** ArGaze processing starts"

	python -m argaze $context_file

	echo "*** ArGaze processing ends"

	# Move back to context folder
	cd $ctx_folder

	# Reset context segment
	echo "$(jq --tab ".$ctx_class.segment = $ctx_segment" $context_file)" > $context_file

	# Check temporary pipeline
	if [ -f "$temp_file" ]; then

		# Delete temporary patched pipeline
		rm "$ctx_folder/$temp_file"

		# Reset context pipeline
		echo "$(jq --tab ".$ctx_class.pipeline = \"$pipeline_file\"" $context_file)" > $context_file
	
		echo "*** $ctx_folder/$temp_file file removed"
	fi
}

#######################################
# Process Tobii record folder
# Arguments:
#   $1: Path to Tobii record
#######################################
function process_record() {

	local rec_folder=$1
	local rec_id=$(basename $rec_folder)
	local rec_name=$(jq .rec_info.Name $rec_folder/recording.json)

	local pa_id=$(jq .pa_id $rec_folder/participant.json)
	local pa_name=$(jq .pa_info.Name $rec_folder/participant.json)
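
	# These JSON files (recording.json, participant.json, segment.json, calibration.json)
	# appear to be the metadata Tobii writes alongside each recording; only the few
	# fields echoed below are read by this script.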

	echo "*** Loading record $rec_id:"
	echo "- Name: $rec_name"
	echo "- Participant:"
	echo "	- Id: $pa_id"
	echo "	- Name: $pa_name"

	for segment in "$rec_folder"/segments/*; do
		process_segment "$rec_id" "$segment"
	done
}

echo "*** Looking for Tobii records into $2 folder"

# If the input folder is itself a record segment folder, process its parent record
if [ -f "$input_folder/livedata.json.gz" ]; then

	process_record "$(dirname "$(dirname "$input_folder")")"

# Otherwise, look for all Tobii record segments
else

	# Enable recursive globbing so that "**" matches record folders at any depth
	shopt -s globstar

	for tobii_data in "$input_folder"/**/segments/*/livedata.json.gz; do
		process_record "$(dirname "$(dirname "$(dirname "$tobii_data")")")"
	done

fi