... | ... |
@@ -0,0 +1,509 @@ |
1 |
#!/bin/bash

set -e

# Property paths harvested from each aircraft -set.xml, mapped to the SQL
# column type used for them in the `setxml` table.
declare -A datatypes=(
	[/sim/description]=text
	[/sim/long-description]=text
	[/sim/author]=text
	[/sim/flight-model]=text
	[/sim/type]=text
	[/sim/model/path]=text
	[/sim/rating/FDM]="integer DEFAULT 0"
	[/sim/rating/systems]="integer DEFAULT 0"
	[/sim/rating/cockpit]="integer DEFAULT 0"
	[/sim/rating/model]="integer DEFAULT 0"
)

# Columns reported at exit when left empty (see the report in trap_exit).
missing_data_check=( /sim/model/path )

# Database file: overridable via $DB, or via a "<script>.conf" next to the script.
database=${DB:-$0.db}
test -r "$0.conf" && source "$0.conf"

#locale=fr

# Unique suffix shared by every temporary file created by this run.
tempid=$(mktemp --dry-run XXXXXXX)
temppath=/dev/shm   # RAM-backed scratch space

setxml=$temppath/setxml-$tempid                     # scratch copy of the -set.xml being parsed
json_file=$temppath/github_json-$tempid             # scratch JSON (github-based hangars)
in_ram_database=$temppath/${database##*/}-$tempid   # working copy of the database
|
31 |
+ |
|
32 |
# Read the next XML TAG/VALUE pair from stdin into the globals $TAG and $VALUE.
# Intended as the condition of a `while xmlgetnext; do ...; done < file` loop.
# By design the first pair read from a document is empty; to avoid an infinite
# loop at EOF we return non-zero the *second* time an empty TAG is seen.
# Callers must `unset xmlgetnext_empty_tag` before parsing a new file.
function xmlgetnext () {
	local IFS='>'
	read -d '<' TAG VALUE
	# by design, the first TAG/VALUE pair is empty
	# to avoid infinite loops at end of file parsing we return an error
	# the next time we find an empty TAG
	if test -z "$TAG"; then
		test ${xmlgetnext_empty_tag:-0} -gt 0 && return 1
		xmlgetnext_empty_tag=$(( xmlgetnext_empty_tag + 1 ))
	fi
	# process $TAG only if necessary: printf %q yields a leading $'...'
	# quoting only when $TAG contains newlines/control chars; in that case
	# collapse the whitespace to single spaces and strip trailing blanks
	local _TAG=$(printf '%q' $TAG)
	if test ${_TAG:0:1} = '$'; then
		TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//')
	fi
}
|
48 |
+ |
|
49 |
# Apply the jq filter $1 to the JSON file $2 (defaults to $json_file, which
# must then be set) and print the raw, unquoted result.
function json () {
	local filter=$1
	local input=${2:-${json_file:?}}
	jq --raw-output "$filter" < $input
}
|
52 |
+ |
|
53 |
# Start each run with a fresh SQL request log.
rm -f $temppath/sqlite_request

# Execute the SQL statement $1 against the in-RAM working database.
# Every request is also appended, numbered, to $temppath/sqlite_request
# so that a run can be debugged or watched live.
function sqlite_request () {
	echo -e "## REQ $(( ++sqlite_request_count ))\n${1}\n" >> $temppath/sqlite_request
	sqlite3 "$in_ram_database" <<< "$1"
}
|
58 |
+ |
|
59 |
# Strip XML comments and <script> elements from $setxml, in place.
# Pass 1: push each opening marker to the start of a line and each closing
#         marker to the end of a line so pass 2 can address whole spans.
# Pass 2: delete single-line comment/script spans, then multi-line spans.
# Pass 3: drop the UTF-8 BOM and carriage returns.
function xmlremovecomments () {
	sed -ri 's/<(!--|script>)/\n&/;s/(<\/script|--)>/&\n/' $setxml
	sed -ri '/<(script>|!--).*(<\/script|--)>/d;/<(script>|!--)/,/(<\/script|--)>/d' $setxml
	sed -i 's/\xef\xbb\xbf//;s/\r//' $setxml # removes BOM and ^M
}
|
64 |
+ |
|
65 |
# SIGINT handler: swallow any further Ctrl-C and tell the user the run is
# being stopped; the EXIT trap (trap_exit) then performs the cleanup.
function trap_break () {
	trap '' INT
	printf '%s\n' "stop requested"
}
|
69 |
+ |
|
70 |
# EXIT handler: reconcile the setxml "installed" column with what is actually
# present under ${hangar[path]}, report aircraft lacking a -set.xml or lacking
# required data, then persist the in-RAM working database back to $database
# and remove this run's temporary files.
function trap_exit () {
	trapped_rc=$?   # exit status of the command that triggered the trap
	trap '' INT

	# give the current hangar a chance to clean up its own scratch files
	if declare -f on_exit > /dev/null; then
		on_exit
	fi

	if test ! -e $in_ram_database; then
		exit
	fi
	echo "updating installation status"
	# each row is "id:aircraft/setxml"; mark rows whose file vanished as not installed
	for ac in $(sqlite_request 'select printf("%i:%s/%s", aircrafts.id, aircrafts.name, setxml.file)
	            from aircrafts inner join setxml
	            where aircrafts.id = setxml.variantof and setxml.installed != 0;'); do
		ac_path=${ac#*:}
		if test ! -e ${hangar[path]}/$ac_path-set.xml; then
			sqlite_request "update setxml set installed = 0 where file = '${ac_path#*/}' and variantof = ${ac%:*}"
		fi
	done
	# classify every locally present aircraft: 1 = svn checkout, 2 = git clone,
	# 3 = plain directory (manual install)
	for ac in ${hangar[path]}/*/*-set.xml; do
		ac=${ac/${hangar[path]}}
		sx=${ac##*/}
		ac=${ac%/*}
		if test -d ${hangar[path]}/$ac/.svn; then
			install_type=1
		elif test -d ${hangar[path]}/$ac/.git; then
			install_type=2
		else
			install_type=3
		fi
		sqlite_request "update setxml set installed = $install_type
		        where exists (
		            select 1
		            from aircrafts
		            where name = '${ac/\/}' and setxml.variantof = id
		        )"
	done
	# aircraft registered in the DB but with no setxml row at all
	local missing_setxml=$(sqlite_request "select printf(' - %s (%s)', aircrafts.name, hangars.name)
	        from aircrafts inner join hangars
	        where hangars.id = aircrafts.hangar and aircrafts.id not in (select variantof from setxml)")
	if test -n "$missing_setxml"; then
		echo -e "missing setxml config for :\n$missing_setxml"
	fi

	# report aircraft missing the data listed in $missing_data_check
	for data_presence_check in ${missing_data_check[@]}; do
		if [[ -v datatypes[$data_presence_check] ]]; then
			local missing_data=$(sqlite_request "select count(setxml.file)
			        from aircrafts inner join setxml
			        where aircrafts.id = setxml.variantof and setxml.\`$data_presence_check\` = ''")
			if test $missing_data -gt 0; then
				echo "$missing_data aircrafts without $data_presence_check information"
				if test $missing_data -le 10; then
					echo "aircrafts without $data_presence_check information:"
					sqlite_request "select printf(' - %s/%s (%s)', aircrafts.name, setxml.file, hangars.name)
					        from aircrafts inner join setxml, hangars
					        where
					            aircrafts.id = setxml.variantof
					        and
					            aircrafts.hangar = hangars.id
					        and
					            setxml.\`$data_presence_check\` = ''"
				fi
			fi
		fi
	done

	# write back only if the working copy actually changed
	if test -r "$database" && md5sum $in_ram_database | sed "s,$in_ram_database,$database," | md5sum --status -c -; then
		echo "no changes in $database"
	elif test -w "$database"; then
		rm -f "$database"
		sqlite_request '.dump' | sqlite3 "$database"
		echo "database $database updated"
	elif test ! -e "$database" -a -w ${database%/*}; then
		sqlite_request '.dump' | sqlite3 "$database"
		echo "database $database created"
	else
		echo "nothing can be done with $database !"
	fi
	find $temppath -type f -name "*-$tempid" -delete
}
|
151 |
+ |
|
152 |
# Merge the buffered data for aircraft $ac (global) into the main tables:
# upsert the aircrafts row, then for every buffered -set.xml fetch the file
# from the repo, resolve its PropertyList includes, extract the properties
# listed in $datatypes and upsert them into `setxml`.
# Relies on globals: $ac, ${hangar[id]}, $setxml, $data_pattern, $data_test_null.
function update_database () {
	sqlite_request "insert into aircrafts (name, author, revision, date, hangar)
	        select name, author, revision, date, hangar from recover_aircrafts
	        where recover_aircrafts.name = '$ac' and recover_aircrafts.hangar = ${hangar[id]}
	        on conflict (name, hangar) where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]} do
	        update set
	            author = (select author from recover_aircrafts where name = '$ac'),
	            revision = (select revision from recover_aircrafts where name = '$ac'),
	            date = (select date from recover_aircrafts where name = '$ac')
	        where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]}"

	id=$(sqlite_request "select id from aircrafts where name is '${ac}' and hangar = ${hangar[id]}")

	# progress header: "[ processed-setxml/remaining-aircraft ] name"
	echo $(sqlite_request "select printf('[ %i/%i ] $ac', count(sx), count(distinct ac)) from recover_setxml")

	for sx in $(sqlite_request "select distinct sx from recover_setxml where ac = '$ac'"); do
		unset data
		declare -A data   # property path -> extracted value

		printf " -> $sx"
		getfromrepo ${ac}/$sx-set.xml > $setxml

		# Pass 1: inline every <PropertyList include="..."> so pass 2 sees
		# a single flat document. Appending to $setxml while reading it is
		# intentional: the loop also walks the newly appended content.
		unset xmlgetnext_empty_tag property include include_rootpath ac_save
		while xmlgetnext; do
			if [[ "$TAG" =~ ^"PropertyList include=" ]]; then
				# directory of the previous include, used to resolve "../"
				include_rootpath=${include%/*}
				test $include = $include_rootpath && unset include_rootpath

				eval $(echo ${TAG#* }) # include="..."

				if [[ "$include" =~ ^Aircraft/Generic/ ]]; then
					# generic FlightGear data, not part of this aircraft
					unset include include_rootpath
					continue

				elif [[ "$include" =~ ^'../' ]]; then
					# "../" climbs out of the current dir — either strip one
					# level from the rootpath, or leave the aircraft dir itself
					if test -n "$include_rootpath"; then
						if [[ "$include_rootpath" =~ '/' ]]; then
							include_rootpath=${include_rootpath%/*}
						else
							unset include_rootpath
						fi
					else
						ac_save=$ac
						unset ac
					fi
					include=${include/\.\.\/}
				fi
				getfromrepo ${ac}/${include_rootpath:+$include_rootpath/}$include >> $setxml
			fi
		done < $setxml

		test -n "$ac_save" && ac=$ac_save

# some aircrafts (mostly from the helijah's files architecture template)
# break because of infinite loop in middle of file
# I can't find the reason of this infinite loop
# this is the reason of this double-pass
		# Pass 2: track the open-tag path in $property and capture the values
		# of the property paths listed in $datatypes.
		unset xmlgetnext_empty_tag property
		while xmlgetnext; do
			case "${TAG:0:1}" in
				''|'?'|'!')
					continue;;
				/)
					property=${property%/*};;   # closing tag: pop one level
				*)
					if test "${TAG: -1}" != '/'; then
						property+=/${TAG%% *}   # opening tag: push (skip self-closing)
					fi;;
			esac

			# keep only the first occurrence of each wanted property
			if [[ "$property" = /PropertyList@($data_pattern) ]]; then
				if test -z "${data[${property/\/PropertyList}]}"; then
					eval "data[${property/\/PropertyList}]=\"${VALUE//\"/\\\"}\""
					data[${property/\/PropertyList}]=$(tr '\n' ' ' <<< ${data[${property/\/PropertyList}]} | sed -r 's/^\s*//;s/\s+/ /g;s/\s*$//')
				fi
			fi

			# continue parsing (while loop) until everything's found
			for col in ${!datatypes[@]}; do
				test -z "${data[$col]}" && continue 2
			done
			break # everything's found
		done < $setxml

		if eval "test -z \"$data_test_null\""; then
			# not a single property extracted: keep the file for inspection
			printf "\nWARNING: no info found, skipping\n"
			mkdir -p $temppath/no-data-ac
			cp -f $setxml $temppath/no-data-ac/${ac}-${sx}
		else
			# build the INSERT/UPDATE column and value lists, quoting text
			# values and keeping only well-formed integers
			insert_values="'$sx', $id, "
			insert_col='file, variantof, '
			update_values=''
			for col in ${!data[@]}; do
				if test ${datatypes[$col]%% *} = 'text'; then
					single_quote="'"
				elif [[ ${datatypes[$col]%% *} = 'integer' && "${data[$col]// }" = +([0-9]) ]]; then
					single_quote=""
				else
					unset datatypes[$col]
					continue
				fi
				insert_col+="\`$col\`, "
				insert_values+="$single_quote${data[$col]//\'/\'\'}$single_quote, "
				if test -n "${data[$col]}"; then
					update_values+="\`$col\` = $single_quote${data[$col]//\'/\'\'}$single_quote, "
				fi
			done
			local flag_new=
			local flag_status=
			if test $(sqlite_request "select count(file) from setxml where file = '$sx' and variantof = $id") -eq 0; then
				flag_new="NEW"
			fi
			# one star per rating >= 4
			for criteria in FDM model systems cockpit; do
				if test ${data[/sim/rating/$criteria]:-0} -ge 4; then
					flag_status+='*'
				fi
			done
			if test -n "$flag_new" -o -n "$flag_status"; then
				printf " (${flag_new:+$flag_new }$flag_status)"
			fi
			printf "\n"
			sqlite_request "insert into setxml (${insert_col%,*}, installed) values (${insert_values%,*}, 0)
			        on conflict (file, variantof) where file = '$sx' and variantof = $id do
			        update set
			            ${update_values%,*}, installed = 0
			        where
			            file = '$sx' and variantof = $id"
		fi

		sqlite_request "delete from recover_setxml where ac = '$ac' and sx = '$sx'"
	done
}
|
284 |
+ |
|
285 |
# Buffer one piece of aircraft metadata: record value $2 under key $1 in the
# global associative array ac_ass_array (consumed later by add_aircraft).
function add_record () {
	local key=$1 value=$2
	ac_ass_array[$key]="$value"
}
|
288 |
+ |
|
289 |
# With a key argument: print the buffered value for that key.
# Without argument: dump every buffered entry as "key = value" (debug aid).
function get_record () {
	if test -z "$1"; then
		for k in ${!ac_ass_array[@]}; do
			echo $k = ${ac_ass_array[$k]}
		done
	else
		echo "${ac_ass_array[$1]}"
	fi
}
|
298 |
+ |
|
299 |
# Flush the metadata buffered via add_record (keys: name, revision, date,
# author) into the recover_aircrafts staging table for the current hangar,
# inserting a new row or raising an existing row to the newer revision.
# The buffer is cleared afterwards.
function add_aircraft () {
	for key in name revision date author; do
		test -n "${ac_ass_array[$key]}" # exit if missing data (with the help of "set -e")
	done
	local new_revision=$(sqlite_request "select revision from recover_aircrafts
	        where name = '${ac_ass_array[name]}'")
	if test -z "${new_revision}"; then
		# first time we see this aircraft in this run
		sqlite_request "insert into recover_aircrafts (name, revision, date, author, hangar)
		        values (
		            '${ac_ass_array[name]}',
		            ${ac_ass_array[revision]},
		            ${ac_ass_array[date]},
		            '${ac_ass_array[author]}',
		            ${hangar[id]})"
	elif test ${new_revision} -lt ${ac_ass_array[revision]//\"}; then
		# keep only the most recent revision seen so far
		sqlite_request "update recover_aircrafts
		        set
		            revision = ${ac_ass_array[revision]},
		            date = ${ac_ass_array[date]},
		            author = '${ac_ass_array[author]}',
		            hangar = ${hangar[id]}
		        where name = '${ac_ass_array[name]}'"
	fi
	# reset the buffer for the next aircraft
	for key in name revision date author; do
		ac_ass_array[$key]=''
	done
}
|
326 |
+ |
|
327 |
# Register one -set.xml file ($2) for aircraft $1 in the recover_setxml
# staging table; a trailing "-set.xml" is stripped if present and
# duplicate (ac, sx) pairs are silently ignored.
function add_setxml_for_aircraft () {
	sqlite_request "insert into recover_setxml values ('$1', '${2/%-set.xml}')
	        on conflict (ac, sx) where ac = '$1' and sx = '${2/%-set.xml}'
	        do nothing"
}
|
332 |
+ |
|
333 |
# Merge the recover_* staging tables into the main tables, one aircraft at a
# time: purge setxml rows that disappeared from the repo, drop staged aircraft
# with no setxml at all, run update_database, and warn when a local install
# does not actually come from the current hangar's repository.
function apply_revision () {
	for ac in $(sqlite_request "select name from recover_aircrafts"); do
		# delete aircrafts that have been deleted from the repo
		sqlite_request "delete from setxml
		        where (file, variantof) in (
		            select file, variantof from setxml
		            inner join aircrafts
		            where aircrafts.id = setxml.variantof
		            and aircrafts.name = '$ac'
		            and aircrafts.hangar = ${hangar[id]}
		            and setxml.file not in (
		                select sx from recover_setxml where ac = '$ac'
		            )
		        )"

		# delete aircrafts without setxml found
		sqlite_request "delete from recover_aircrafts
		        where name not in (select distinct ac from recover_setxml)"

		update_database
		# warn if the local copy exists but points at a different remote
		# (or is a plain directory not managed by the hangar's VCS at all)
		if test -d ${hangar[path]}/${ac}/.${hangar[type]} \
		&& \
		case ${hangar[type]} in
			svn) test "$(svn info --show-item=url ${hangar[path]}/${ac})" != "${hangar[url]}/${ac}";;
			git) test "$(git -C ${hangar[path]}/${ac} config --get remote.origin.url)" != "${hangar[url]}/${ac}.git";;
		esac \
		|| test -d ${hangar[path]}/${ac} -a ! -d ${hangar[path]}/${ac}/.${hangar[type]}; then
			echo "INFO: local ${ac} installed out from repo" >&2
		fi
		sqlite_request "delete from recover_aircrafts where name = '$ac'"
	done
}
|
365 |
+ |
|
366 |
# Ctrl-C only announces the stop; the real cleanup runs in the EXIT trap.
trap trap_break INT
trap trap_exit EXIT

stty -echoctl   # don't echo ^C on the terminal

declare -A hangar
# extglob alternation of all wanted property paths: "/sim/a|/sim/b|..."
data_pattern=$(printf "%s|" ${!datatypes[@]})
data_pattern=${data_pattern:0:-1}
# "${data[p1]}${data[p2]}..." — evaluates empty when nothing was extracted
data_test_null=$(printf '${data[%s]}' ${!datatypes[@]})
|
375 |
+ |
|
376 |
# Load an existing database into RAM, verify its schema, and resume an
# interrupted run if recover_* tables were left behind.
if test -e $database; then
	cp $database $in_ram_database

	# Compare the on-disk setxml schema with the one this script would
	# create; any mismatch means the database predates a script change.
	sql_cols=$(sqlite_request "pragma table_info(setxml)" | awk -F'|' '{printf("%s %s ", $2, $3)}')
	script_cols="file text variantof integer "
	for col in ${!datatypes[@]}; do
		script_cols+="$col ${datatypes["$col"]%% *} "
	done
	script_cols+="installed integer " # last space is important
	if test "$sql_cols" != "$script_cols"; then
		# fix: message used to read "datbase"
		echo "ALERT: database version mismatch !"
		exit 1
	fi
	# Leftover recover_* tables mean a previous run was interrupted:
	# reload that hangar's definition and resume where it stopped.
	if sqlite_request '.tables' | grep -q 'recover_'; then
		echo "recovering from previous saved state"
		hangar[id]=$(sqlite_request "select hangar from recover_aircrafts limit 1")
		eval $(sqlite_request "select printf('hangar[name]=%s;hangar[url]=%s;hangar[type]=%s;hangar[source]=%s',
		                name, url, type, source)
		        from hangars
		        where id = '${hangar[id]}'")
		source $(grep -l "^\s*hangar\[name\]=${hangar[name]}\s*$" ${0%*/}.d/*.hangar)
		# wrap the hangar-provided getfromrepo so every fetched file is
		# passed through xmlremovecomments
		eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}"
		apply_revision
		exit
	fi
fi
|
402 |
+ |
|
403 |
# Create the schema on first run (all statements are no-ops when the
# tables/indexes already exist).
sqlite_request "create table if not exists hangars (
        id integer primary key,
        name text,
        source text,
        type text,
        url text,
        path text,
        active integer)"

# the url is the unique identifier of a hangar
sqlite_request 'create unique index if not exists "index_hangars" on hangars (url)'

sqlite_request "create table if not exists aircrafts (
        id integer primary key,
        name text,
        revision integer,
        date integer,
        author text,
        hangar integer)"

sqlite_request 'create unique index if not exists "index_aircrafts" on aircrafts (name, hangar)'

# setxml data columns are generated from the $datatypes map declared above
sqlite_request "create table if not exists setxml (
        file text,
        variantof integer,
        $(for col in ${!datatypes[@]}; do printf "'%s' %s, " $col "${datatypes[$col]}"; done)
        installed integer)"

sqlite_request 'create unique index if not exists "index_setxml" on setxml (file, variantof)'
|
431 |
+ |
|
432 |
# Source every "<script>.d/*.hangar" file, validate that it defines a complete
# hangar (metadata + the two required functions), and upsert it into the
# hangars table keyed on its url.
for file in $(find ${0%*/}.d -type f -name "*.hangar"); do
	unset hangar error_message
	unset -f getfromrepo parse_repo_history
	declare -A hangar
	source $file

	test -n "${hangar[name]}" \
	  -a -n "${hangar[source]}" \
	  -a -n "${hangar[type]}" \
	  -a -n "${hangar[url]}" \
	  -a -n "${hangar[active]}" || \
		error_message="${error_message:+$error_message, }missing hangar data"

	declare -f getfromrepo > /dev/null || \
		error_message="${error_message:+$error_message, }missing getfromrepo function"

	declare -f parse_repo_history > /dev/null || \
		error_message="${error_message:+$error_message, }missing parse_repo_history function"

	if test -n "$error_message"; then
		echo "file $file isn't a valid hangar ($error_message)"
		continue
	fi

	sqlite_request "insert into hangars (name, source, type, url, path, active)
	        values (
	            '${hangar[name]}', '${hangar[source]}', '${hangar[type]}',
	            '${hangar[url]}', '${hangar[path]}', ${hangar[active]})
	        on conflict (url) where url = '${hangar[url]}' do
	        update set
	            name = '${hangar[name]}',
	            path = '${hangar[path]}',
	            active = ${hangar[active]}
	        where url = '${hangar[url]}'"
done
|
467 |
+ |
|
468 |
# Main loop: for every active hangar, create the recover_* staging tables,
# load the hangar definition, then let parse_repo_history feed the staging
# tables and merge them. The staging tables are dropped only on successful
# completion, so an interrupted run can be resumed (see the recovery branch
# earlier in the script).
unset hangar
unset -f getfromrepo parse_repo_history
declare -A hangar ac_ass_array
for h_id in $(sqlite_request "select id from hangars where active = 1"); do

	sqlite_request 'create table if not exists recover_aircrafts (
	        name text,
	        revision integer,
	        date integer,
	        author text,
	        hangar integer)'

	sqlite_request 'create table if not exists recover_setxml (
	        ac text,
	        sx text)'

	sqlite_request 'create unique index if not exists "index_recover_setxml" on recover_setxml (ac, sx)'

	eval $(sqlite_request "select printf('hangar[id]=%i;hangar[source]=%s;', id, source)
	        from hangars
	        where id = '${h_id}'")

	# load the .hangar file matching this source
	source $(grep -l "^\s*hangar\[source\]=${hangar[source]}\s*$" ${0%*/}.d/*.hangar)

	# wrap getfromrepo so every fetched file goes through xmlremovecomments
	eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}"

	# banner: hangar name framed by '=' characters
	echo -e "=${hangar[name]//?/=}=\n ${hangar[name]} \n=${hangar[name]//?/=}="

	# resume the history one revision after the last one stored
	latest_revision=$(( $(sqlite_request "select max(revision)
	        from aircrafts inner join hangars
	        where hangars.id = aircrafts.hangar and hangars.name = '${hangar[name]}'") + 1 ))

	parse_repo_history

	if declare -f on_exit > /dev/null; then
		on_exit
	fi
	sqlite_request "drop index 'index_recover_setxml'"
	sqlite_request "drop table recover_aircrafts"
	sqlite_request "drop table recover_setxml"
done
|
509 |
+ |
... | ... |
@@ -0,0 +1,78 @@ |
1 |
+Files with a name ending in .hangar are sourced (bash syntax).
|
2 |
+Please have a look into the two provided to check how it is done |
|
3 |
+ |
|
4 |
+Each file must contain:
|
5 |
+- variable `hangar` definition as an associative array with at least the |
|
6 |
+ following keys: |
|
7 |
+ name: the human identifier of the hangar |
|
8 |
+  url: the url of the remote repository (this is the unique identifier for
|
9 |
+       each hangar; it can't be the same in more than one hangar)
|
10 |
+ type: the type of repo (git, svn, ...) at the moment, only git and svn |
|
11 |
+ repos are known and verified by installation status routine |
|
12 |
+ path: the local path in which the remote repo (or part of it) is cloned |
|
13 |
+ active: is the hangar active (1) or not (0) |
|
14 |
+ |
|
15 |
+- function `parse_repo_history` which describes how to get the hangar content |
|
16 |
+ (initial import and updates) |
|
17 |
+- function `getfromrepo` which describes how to get a single file from the repo
|
18 |
+- optional function `on_exit` to describe what to do when exiting the hangar |
|
19 |
+ |
|
20 |
+Some functions are provided by the main script to ease the database management: |
|
21 |
+- add_record <key> <value> |
|
22 |
+  this function records the key with the value; these
|
23 |
+ keys are intended to be information for aircraft. |
|
24 |
+ Mandatory keys are: |
|
25 |
+ name : the name of the aircraft |
|
26 |
+ revision : the revision from the repo |
|
27 |
+ date : date of the last update |
|
28 |
+ author : author of the commit |
|
29 |
+ |
|
30 |
+- get_record [key] |
|
31 |
+ returns the value recorded for the key if no key is |
|
32 |
+ provided, prints all the recorded keys in the form: |
|
33 |
+ key1 = value1 |
|
34 |
+ key2 = value2 |
|
35 |
+ ... (maybe useful for debugging) |
|
36 |
+ |
|
37 |
+- add_aircraft |
|
38 |
+ adds aircraft in the database in a buffer table, ready to be |
|
39 |
+ integrated into the main aircrafts table by using `apply_revision` function. |
|
40 |
+  If one or more of the mandatory keys described in `add_record` are

41

+  missing, the function may exit with an error and the whole script exits
|
42 |
+ |
|
43 |
+- add_setxml_for_aircraft <aircraft> <-set.xml file> |
|
44 |
+ add in special buffer table the -set.xml entry for aircraft |
|
45 |
+ the trailing "-set.xml" is removed if found |
|
46 |
+ |
|
47 |
+- xmlgetnext |
|
48 |
+ in a while loop to read the XML content of a file, export $TAG |
|
49 |
+  (formatted) and $VALUE (not formatted). By design, the first TAG/VALUE pair is
|
50 |
+  always empty; some files have an unidentified issue that makes the while

51

+  loop enter an infinite loop. To avoid this, please use the following syntax:
|
52 |
+ |
|
53 |
+ unset xmlgetnext_empty_tag |
|
54 |
+ while xmlgetnext; do |
|
55 |
+ # PUT HERE YOUR STUFF |
|
56 |
+ done < /your/xml/file |
|
57 |
+ |
|
58 |
+- sqlite_request <SQLite request> |
|
59 |
+ perform the request on database (actually in |
|
60 |
+ a copied database which will be dumped into the original at the end of |
|
61 |
+ the script). |
|
62 |
+ Don't touch the original database, and always use this |
|
63 |
+ wrapper, unless you will lose your changes at the end. |
|
64 |
+  Moreover this wrapper writes in a file /dev/shm/sqlite_request all the

65

+  requests, so it is useful for debugging, or just watching what it is doing
|
66 |
+ |
|
67 |
+- apply_revision |
|
68 |
+ use the buffered tables to feed the main tables with all the |
|
69 |
+ information it will find by parsing the new or updated aircrafts config files |
|
70 |
+ |
|
71 |
+Some variables are available |
|
72 |
+- $latest_revision the revision from which to start retrieving the remote history,
|
73 |
+ defaults to 1 |
|
74 |
+ |
|
75 |
+- $tempid a unique identifier to create temporary files (useful to get all the
|
76 |
+ files generated by the script ending with the same ID) |
|
77 |
+ |
|
78 |
+Enjoy adding your preferred hangar :) |
... | ... |
@@ -0,0 +1,127 @@ |
1 |
#!/bin/bash

# FGADDON hangar definition — sourced by the main updater script, which
# provides the helper functions (add_record, add_aircraft, sqlite_request, ...).

# local checkout root for this hangar's aircraft
hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft

hangar[name]=FGADDON
hangar[source]=sourceforge
hangar[type]=svn
hangar[url]=https://svn.code.sf.net/p/flightgear/fgaddon/trunk/Aircraft
hangar[active]=1
|
10 |
+ |
|
11 |
# Fetch file $1 (path relative to the hangar root) from the FGADDON
# subversion repository and print it on stdout.
function getfromrepo () {
	local relpath=$1
	svn cat ${hangar[url]}/$relpath
}
|
14 |
+ |
|
15 |
# Hangar cleanup hook, called by the main script: drop the scratch files
# used by parse_repo_history.
function on_exit () {
	rm -f -- $aircrafts $aircraft
}
|
18 |
+ |
|
19 |
# Walk the FGADDON SVN repository and feed the recover_* staging tables:
# - first run (latest_revision == 1): list every aircraft directory,
# - later runs: replay the commit log since $latest_revision.
# Finishes by calling apply_revision to merge the staged data.
function parse_repo_history () {
	# List the -set.xml files at the top level of aircraft directory $1
	# and stage them with add_setxml_for_aircraft.
	function getaircraftinfo () { # $1 = aircraft
		svn list --xml --depth files ${hangar[url]}/$1 > $aircraft
		unset xmlgetnext_empty_tag
		while xmlgetnext; do
			if test "$TAG" = 'name' && test "${VALUE/%-set.xml}" != "$VALUE"; then
				add_setxml_for_aircraft $1 ${VALUE/%-set.xml}
			fi
		done < $aircraft
	}

	aircrafts=$temppath/Aircraft-$tempid   # repo listing / commit log
	aircraft=$temppath/aircraft-$tempid    # per-aircraft file listing

	if test $latest_revision -eq 1; then
		echo getting repository list
		if ! svn list --xml --depth immediates ${hangar[url]} > $aircrafts; then
			echo "error while retrieving list"
			exit
		fi
		total=$(grep -c '<entry' $aircrafts)
		is_ac=0
	else
		if test ${latest_revision:-0} -gt $(svn info --show-item revision ${hangar[url]}); then
			# fix: message used to read "revisison"
			echo "already latest revision"
			return
		fi
		echo "downloading history from revision ${latest_revision:-0}"
		if ! svn log --revision ${latest_revision:-0}:HEAD --xml --verbose ${hangar[url]} > $aircrafts; then
			echo "error while retrieving history"
			exit
		fi
		total=$(grep -c '<logentry' $aircrafts)
	fi

	progress=0

	echo parsing repository

	unset xmlgetnext_empty_tag
	while xmlgetnext; do

		# skip tags outside an aircraft entry, but let the closing
		# /list or /log tag through so the final case can run
		if test $latest_revision -eq 1; then
			if test "$TAG" = 'entry kind="dir"'; then
				is_ac=1
				continue
			elif test $is_ac -eq 0 -a "$TAG" != '/list'; then
				continue
			fi
		else
			if test "${TAG%% *}" = 'logentry'; then
				is_ac=1
			elif test ${is_ac:-0} -eq 0 -a "$TAG" != '/log'; then
				continue
			fi
		fi

		case "$TAG" in
			'name')
				add_record name $VALUE
				;;
			'logentry revision='*|'commit revision='*)
				add_record revision ${TAG#*=}
				;;
			'author')
				# escape single quotes for SQL
				add_record author ${VALUE//\'/\'\'}
				;;
			'date')
				add_record date $(date +%s -d "$VALUE")
				;;
			'path '*)
				# turn the tag attributes into shell variables (kind, action, ...)
				TAG=${TAG#* }
				TAG=${TAG// /;}
				TAG=${TAG//-/_}
				eval $(echo ${TAG// /;})
				path=(${VALUE//\// })
				# a deleted aircraft directory removes the aircraft
				# and its setxml rows from the main tables
				if test $kind = 'dir' -a ${#path[@]} -eq 3 -a $action = 'D'; then
					sqlite_request "delete from setxml
					        where variantof in (
					            select id from aircrafts
					            where name = '${path[2]}'
					            and hangar = ${hangar[id]}
					        )"
					sqlite_request "delete from aircrafts
					        where name = '${path[2]}'
					        and hangar = ${hangar[id]}"
					is_ac=0
					continue
				fi
				is_ac=1
				add_record name ${path[2]}
				;;
			'/logentry'|'/entry')
				# end of one aircraft entry: stage it and report progress
				getaircraftinfo $(get_record name)
				add_aircraft
				newprogress=$((++entry * 100 / $total))
				if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then
					progress=$newprogress
					echo "$progress% ($(sqlite_request 'select count(name) from recover_aircrafts'))"
				fi
				is_ac=0
				;;
			'/list'|'/log')
				apply_revision
				break
				;;
		esac
	done < $aircrafts
}
... | ... |
@@ -0,0 +1,103 @@ |
1 |
#!/bin/bash

# FGMEMBERS (GitHub) hangar definition — sourced by the main updater script,
# which provides the helper functions (add_record, json, sqlite_request, ...).

# local checkout root for this hangar's aircraft
hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft

hangar[name]=FGMEMBERS
hangar[source]=github
hangar[type]=git
hangar[url]=https://github.com/FGMEMBERS
hangar[active]=1
|
10 |
+ |
|
11 |
# Fetch file $1, given as "<repository>/<path>", from the master branch of
# the FGMEMBERS GitHub organisation and print it on stdout.
function getfromrepo () {
	local repo=${1%%/*} repo_path=${1#*/}
	curl -s https://raw.githubusercontent.com/FGMEMBERS/${repo}/master/${repo_path}
}
|
14 |
+ |
|
15 |
# Hangar cleanup hook: remove the scratch files and forget the globals
# pointing at them.
function on_exit () {
	rm -f -- $gh_curl_content $json_file $contents
	unset contents gh_curl_content
}
|
19 |
+ |
|
20 |
# Walk the FGMEMBERS GitHub organisation via the REST API and feed the
# recover_* staging tables, then merge them with apply_revision.
# An optional $githup_token ("user:token" — name spelled this way everywhere,
# including user configuration, so kept as-is) raises the API rate limit.
function parse_repo_history () {
	# With no argument: succeed only if a token is configured.
	# With a URL: fetch it while honouring the API rate limit — when the
	# limit is exhausted, flush what was already staged and sleep until
	# the X-Ratelimit-Reset deadline.
	function github_curl () {
		test $# -eq 0 && return $(test -n "$githup_token")
		curl ${githup_token:+-u $githup_token} -si $1 > $gh_curl_content
		# pull the rate-limit counters out of the response headers
		eval $(sed -rn '1,/^\s*$/{s/^X-Ratelimit-Remaining:\s*([0-9]+).*$/remaining=\1/ip;s/^X-Ratelimit-Reset:\s*([0-9]+).*$/reset=\1/ip}' $gh_curl_content)
		if test ${remaining:-1} -eq 0; then
			((reset = reset + 10)) # just to be prudent
			echo "github API limit: waiting $(date +%H:%M -d@$reset) to continue" >&2
			if [[ $1 =~ '/contents/'$ ]]; then
				echo "process already found data so far" >&2
				apply_revision >&2
			fi
			sleep $(( $reset - $(date +%s) ))
			curl ${githup_token:+-u $githup_token} -s $1 # not sure that was really fetched
		else
			# body only (skip the headers)
			sed -n '/^\s*$/,$p' $gh_curl_content
		fi
	}

	gh_curl_content=$temppath/github-$tempid
	contents=$temppath/contents-$tempid
	github_orga_repos='https://api.github.com/orgs/FGMEMBERS/repos?sort=updated&type=all&per_page=100&page=_page_'
	page=1

	# collect, page by page, the repos updated since $latest_revision
	# into a single JSON array
	echo '[' > ${json_file}_recent

	while github_curl "${github_orga_repos/_page_/$((page++))}" > $json_file; do
		jq_length=$(json 'length')
		test $jq_length -eq 0 && break

		for ((i = 0; i < $jq_length; i++)); do
			if test $(date +%s -d $(json ".[$i].updated_at")) -gt $latest_revision; then
				json ".[$i]" >> ${json_file}_recent
				echo ',' >> ${json_file}_recent
			else
				# repos are sorted by update date: nothing older matters
				break 2
			fi
		done
	done

	# drop the trailing comma and close the array
	sed -i '${/^,/d}' ${json_file}_recent
	echo ']' >> ${json_file}_recent

	mv -f ${json_file}_recent $json_file
	jq_length=$(json 'length')
	local progress=0
	local repo

	if test $latest_revision -eq 1; then
		# 5000 requests/hour with a token, 60 without
		if github_curl; then
			max_requests_per_hour=5000
		else
			max_requests_per_hour=60
		fi
		echo "the initial import may take more than $(($jq_length / $max_requests_per_hour)) hours to perform"
	fi

	for ((i = 0; i < $jq_length; i++)); do
		local repo=$(json ".[$i].name")
		add_record name $repo

		# stage every "*-set.xml" found at the top of the repo
		github_curl "https://api.github.com/repos/FGMEMBERS/${repo}/contents/" > $contents
		for sx in $(json '.[] | select(.type == "file") | .path | capture("(?<setxml>.+)-set.xml") | .setxml' $contents); do
			add_setxml_for_aircraft $repo $sx
		done

		if test -n "$sx"; then
			# no commit numbers on GitHub: use the update timestamp as revision
			add_record revision $(date +%s -d $(json ".[$i].updated_at"))
			add_record date $(date +%s -d $(json ".[$i].updated_at"))
			add_record author ${hangar[name]}
			add_aircraft
		else
			# fix: was ${hangar[i]} (nonexistent key expanding to nothing,
			# producing an invalid/over-broad delete statement)
			sqlite_request "delete from aircrafts where name = '$repo' and hangar = ${hangar[id]}"
		fi

		newprogress=$((i * 100 / $jq_length))
		if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then
			progress=$newprogress
			echo "$progress% ($(sqlite_request 'select count(name) from recover_aircrafts'))"
		fi
	done

	apply_revision
}
... | ... |
@@ -0,0 +1,69 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
# Root of the local FlightGear source checkouts; can be overridden by the
# first positional argument.
source_path=${1:-"$HOME/scripts/flightgear/source"}
|
4 |
+ |
|
5 |
# Pull the next TAG/VALUE pair from an XML stream on stdin.
# Parsing trick: with IFS='>' and read -d '<', each read consumes
# "TAG>VALUE" up to the next '<', so $TAG holds the tag (with attributes)
# and $VALUE the text that follows it.
# Returns non-zero once the stream is exhausted.
function xmlgetnext () {
	local IFS='>'
	read -d '<' TAG VALUE
	# by design, the first TAG/VALUE pair is empty
	# to avoid infinite loops at end of file parsing we return an error
	# the next time we find an empty TAG
	if test -z "$TAG"; then
		test ${xmlgetnext_firstentry:-1} -eq 1 && xmlgetnext_firstentry=0 || return 1;
	fi
	# process $TAG only if necessary:
	# printf %q quotes $TAG; if the quoted form starts with '$' the tag
	# contained newlines/special chars ($'...' quoting), so collapse the
	# embedded newlines and squeeze runs of spaces before using it.
	local _TAG=$(printf '%q' $TAG)
	if test ${_TAG:0:1} = '$'; then
		TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//')
	fi
}
|
20 |
+ |
|
21 |
# Cleanup handler: remove the temporary API-XML file.
# NOTE(review): this script never sets $apixml nor registers this trap —
# it looks like copy-paste from the jenkins-status script; kept for
# interface compatibility, but made safe to call with $apixml unset so a
# stray invocation cannot run a bare "rm" (which would error out).
exit_trap () {
	if [ -n "${apixml:-}" ]; then
		# -f: ignore a missing file; --: protect against names starting with '-'
		rm -f -- "$apixml"
	fi
}
|
24 |
+ |
|
25 |
# Print the commit feed of one SourceForge FlightGear repository ($1:
# simgear, flightgear or fgdata) as a titled list, marking with '*' the
# entry matching the local checkout's HEAD.
function showRSS () {
	# Title in caps, underlined with dashes of the same length.
	echo -e "${1^^}\n${1//[[:alnum:]]/-}"
	# SHA1 of the local working copy, to flag the matching feed item.
	revparse=$(git -C "$source_path/$1" rev-parse HEAD)
	wget -qO- https://sourceforge.net/p/flightgear/$1/feed | while xmlgetnext; do
		# Maintain $property as the /-joined path of currently open tags
		# (a tiny XML cursor): push on open tags, pop on close tags,
		# ignore declarations/comments and self-closing tags.
		case "${TAG:0:1}" in
			''|'?'|'!')
				continue;;
			/)
				property=${property%/*};;
			*)
				if test "${TAG: -1}" != '/'; then
					property+=/${TAG%% *}
				fi;;
		esac

		case "$property" in
			'/rss/channel/item/title')
				title=$VALUE
				;;
			'/rss/channel/item/link')
				# Strip the feed URL prefix; what remains is "<sha1>/"
				# (trailing slash kept, compensated below).
				sha1=${VALUE/https:\/\/sourceforge.net\/p\/flightgear\/${1}\/ci\/}
				;;
			'/rss/channel/item/pubDate')
				# %e pads the day with a leading space for 1-digit days;
				# stripped at display time below.
				pubDate=$(date +'%e %B' -d "$VALUE")
				;;
		esac

		if test "$TAG" = "/item"; then
			# Append '/' to the local SHA1 to match the link remainder.
			if test $revparse/ = $sha1; then
				title="* ${title}"
			else
				title="  ${title}"
			fi
			echo "$title (${pubDate/# })"
		fi
	done
}
|
62 |
+ |
|
63 |
# Emit the three feeds in one block: the command substitutions run each
# showRSS before cat prints, so the whole report appears at once.
cat << EOF
$(showRSS simgear)

$(showRSS flightgear)

$(showRSS fgdata)
EOF
... | ... |
@@ -0,0 +1,525 @@ |
1 |
+#!/bin/zsh |
|
2 |
+ |
|
3 |
+#FGDIR=$HOME/scripts/flightgear |
|
4 |
+#FGADDON=$HOME/.fgfs/flightgear-fgaddon |
|
5 |
# All-in-one FlightGear launcher/manager (zsh function).
# Non-option arguments are interpreted, in order of preference, as an
# aircraft name (local FGAddon dir, official fgdata dir, or downloadable
# from a known hangar via the sqlite database), a multiplayer server
# shortcut (mpNN), the keyword "log" (record an IGC trace), or an
# airport spec "ICAO[:parking|+runway]" ('?' lists choices,
# leading '-' mounts TerraFS scenery).
# --options are either handled locally (--update*, --info, --search,
# --install, --mp-list, --show-thumbnail, --fgcom, --map/--lnm,
# --addon=) or passed through to the fgfs binary.
# Relies on $FGDIR (source/build/install trees) and $FGADDON (hangar
# data) being set in the environment.
function fgfs () {
  local fgfs_source=$FGDIR/source
  local fgfs_build=$FGDIR/build
  local fgfs_install=$FGDIR/install
  # Dispatch one maintenance action; $1 = action, $2 = component set name.
  function update_fg () {
    case $1 in
      fgaddon)
        # Refresh the hangar database (script reads DB from env).
        DB=$FGADDON/fgaddon.db $HOME/.fgfs/fgaddon
        ;;
      check)
        test -r $HOME/.fgfs/jenkins-status && bash $HOME/.fgfs/jenkins-status
        ;;
      rss)
        test -r $HOME/.fgfs/fgcoderss && bash $HOME/.fgfs/fgcoderss
        ;;
      data)
        # zsh: iterate key/value pairs of the associative array,
        # e.g. control_system=git update_command=pull.
        for control_system update_command in ${(kv)control_system_data}; do
          find $FGADDON \
            -maxdepth 3 \
            -mindepth 1 \
            -type d \
            -name .${control_system} \
            -printf "\n[ %h ]\n" \
            -execdir ${control_system} ${update_command} \;
        done
        ;;
      source)
        # $fgfs_source/.$2 lists the components of set $2, one per line.
        for component in $(<$fgfs_source/.$2); do
          for control_system update_command in ${(kv)control_system_data}; do
            find $fgfs_source/$component \
              -maxdepth 1 \
              -type d \
              -name .${control_system} \
              -printf "\n[ %h ]\n" \
              -execdir ${control_system} ${update_command} \;
          done
        done
        ;;
      build)
        local simultaneous=$(nproc)
        local previously_installed=()
        mkdir -p $fgfs_install
        for component in $(<$fgfs_source/.$2); do
          # Work out the current branch for the banner (git or svn).
          if test -d $fgfs_source/$component/.git; then
            local branch=$(git -C $fgfs_source/$component name-rev --name-only --no-undefined --always HEAD)
          elif test -d $fgfs_source/${component}/.svn; then
            local branch=${${(s:/:)$(svn info --show-item relative-url $fgfs_source/$component)}[2]}
          fi

# TODO: handle components that do not use cmake
          if test -r $fgfs_source/$component/CMakeLists.txt; then
            local cmake_options=("-DCMAKE_BUILD_TYPE=Release" "-DCMAKE_INSTALL_PREFIX=$fgfs_install/$component")
            # Optional per-component hook, sourced in this scope so it
            # can append to cmake_options (see *.specific files).
            test -e $fgfs_source/${component}.specific && source $fgfs_source/${component}.specific
            if test ${#previously_installed[@]} -gt 0; then
              # Let cmake find the components already installed in this run.
              cmake_options+=(${(j. .)${:--DCMAKE_PREFIX_PATH=$fgfs_install/${^previously_installed}}})
              cmake_options+=(${(j. .)${:--DCMAKE_INCLUDE_PATH=$fgfs_install/${^previously_installed}/include}})
            fi
            cmake_options+=("-j$simultaneous")

            # Banner: component name framed with '*' lines of equal width.
            title="*** ${component:u}${branch:+ [$branch]} ***"
            printf "\n%s\n%s\n%s\n" "${(l:${#title}::*:)}" "$title" "${(l:${#title}::*:)}"
            mkdir -p $fgfs_build/$component
            cd $fgfs_build/$component

            # Echo the command with $HOME abbreviated, for the log.
            echo cmake ${cmake_options[@]//$HOME/\$HOME} ${fgfs_source/#$HOME/\$HOME}/$component

            cmake ${cmake_options[@]} $fgfs_source/$component > /dev/null \
            && make -j$simultaneous > /dev/null \
            && {
              make install | grep --color=always -v '^-- Up-to-date:'
            } \
            || {
              echo "erreur construction $component"
              cd $FGDIR
              return
            }

            cd - > /dev/null
          fi

          previously_installed+=($component)
        done
        unset component
        ;;
    esac
  }
  # Prepend every $FGDIR/install/*/lib directory to LD_LIBRARY_PATH,
  # skipping those already present.
  function ld_library_path () {
    local IFS=$'\n'
    for lib in $(ls -d $FGDIR/install/*/lib); do
      egrep -q "(^|:)$lib(:|$)" <<< "${LD_LIBRARY_PATH}" || LD_LIBRARY_PATH="${lib}${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
    done
    export LD_LIBRARY_PATH
  }
  local aircrafts="$FGADDON/Aircraft"
  local fgfs_args=("--fg-root=$FGDIR/source/fgdata")
  local aircraft=
  local airport=
  # Remember whether BASH_REMATCH was already on, to restore it after the
  # mpNN regex match below.
  if [[ -o BASH_REMATCH ]]; then
    local bash_rematch_set=1
  fi

  for fgfs_arg in $@; do
    # Anything not starting with '--' is an aircraft/airport/keyword.
    if test ${fgfs_arg#--} = ${fgfs_arg}; then
############ AIRCRAFT IN FGADDON ?
      if test -n "$(find $aircrafts -maxdepth 2 -type f -name ${fgfs_arg}-set.xml -print -quit)"; then
        fgfs_args+=("--aircraft=$fgfs_arg")
        fgfs_args+=("--fg-aircraft=$FGADDON/Aircraft")

############ AIRCRAFT IN FGDIR/source/fgdata ?
      elif official_aircraft=$(find "$fgfs_source/fgdata/Aircraft" -maxdepth 2 -type f -name ${fgfs_arg}-set.xml -printf "%h" -quit) && test -n "$official_aircraft"; then
        fgfs_args+=("--aircraft=$fgfs_arg")
        fgfs_args+=("--aircraft-dir=$official_aircraft")
        unset official_aircraft

############ AIRCRAFT AVAILABLE FROM A KNOWN HANGAR ?
      elif which sqlite3 > /dev/null 2>&1 \
        && test -r $FGADDON/fgaddon.db \
        && test $(sqlite3 $FGADDON/fgaddon.db <<< "select count(file) from setxml where file is '${fgfs_arg}'") -gt 0; then
        local PS3='which aircraft ? '
        local IFS=$'\n'
        # Offer every variant, newest first; "name [hangar, date]".
        select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]',
          aircrafts.name,
          hangars.name,
          date(aircrafts.date, 'unixepoch'))
          from aircrafts
          inner join setxml, hangars
          where
            aircrafts.hangar = hangars.id
          and
            setxml.variantof = aircrafts.id
          and
            setxml.file = '${fgfs_arg}'
          order by aircrafts.date desc"); do
          test -z "$ac" && continue
          # zsh: ${${(@s/ /)ac}[1]} = aircraft name,
          # [2]:1:-1 = hangar name without the surrounding brackets.
          read -q "REPLY?download ${${(@s/ /)ac}[1]} ? (y/N) "
          if test -n "$REPLY" && test ${REPLY:l} = "y"; then
            declare -A hangar
            eval $(sqlite3 $FGADDON/fgaddon.db <<< "select printf('hangar[type]=%s;hangar[url]=%s;', type, url)
              from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'")
            case ${hangar[type]} in
              git)
                git -C $FGADDON/Aircraft clone ${hangar[url]}/${${(@s/ /)ac}[1]}.git;;
              svn)
                svn checkout ${hangar[url]}/${${(@s/ /)ac}[1]} $FGADDON/Aircraft/${${(@s/ /)ac}[1]};;
            esac
            fgfs_args+=("--aircraft=$fgfs_arg")
            fgfs_args+=("--fg-aircraft=$FGADDON/Aircraft")
            unset -v hangar
          else
            echo "falling back to default"
          fi
          break
        done

############ MULTIPLAYER SERVER ?
      elif set -o BASH_REMATCH && [[ $fgfs_arg =~ "^mp([0-9]+)$" ]]; then
        # BASH_REMATCH option: bash-style match array (index 2 = group 1).
        fgfs_args+=("--multiplay=out,10,mpserver${BASH_REMATCH[2]}.flightgear.org,5000")
        fgfs_args+=("--callsign=f-zakh")
        test -z "$bash_rematch_set" && set +o BASH_REMATCH

############ FLIGHT LOG REQUESTED ?
      elif [[ $fgfs_arg == "log" ]]; then
        # option built later, with the aircraft name
        local log_requested=1

############ AIRPORT ?
      else
        # Split "ICAO[:parking]" or "ICAO[+runway]" spec.
        local candidate_airport=${fgfs_arg%:*}; [[ $candidate_airport == $fgfs_arg ]] && candidate_airport=${fgfs_arg%+*}
        local candidate_parking=${fgfs_arg#*:}; [[ $candidate_parking == $fgfs_arg ]] && unset candidate_parking
        local candidate_runway=${fgfs_arg#*+}; [[ $candidate_runway == $fgfs_arg ]] && unset candidate_runway
        # Leading '-' on the ICAO requests TerraFS-mounted scenery.
        local terrafs=${candidate_airport#-}

        if [[ $terrafs != $candidate_airport && -x $HOME/.fgfs/terrafs && -d $HOME/.fgfs/terrafs.d ]]; then
          candidate_airport=$terrafs
          $HOME/.fgfs/terrafs $HOME/.fgfs/terrafs.d
          fgfs_args+=(--fg-scenery=$HOME/.fgfs/terrafs.d)
          fgfs_args+=(--disable-terrasync)
          local scenes_dir=$HOME/.fgfs/terrafs.d
        else
          fgfs_args+=(--terrasync-dir=$HOME/.fgfs/TerraSync)
          fgfs_args+=(--enable-terrasync)
          local scenes_dir=$HOME/.fgfs/TerraSync
        fi

        # TerraSync layout: Airports/I/C/A/ICAO.*.xml.
        local airport_data="$scenes_dir/Airports/$candidate_airport[1]:u/$candidate_airport[2]:u/$candidate_airport[3]:u/${candidate_airport:u}"
        if ! test -r "${airport_data}.threshold.xml"; then
          echo "airport ${candidate_airport:u} not found !"
        fi

        # '?' as parking/runway lists the available choices and returns.
        if test -n "$candidate_parking" && test "$candidate_parking" = '?'; then
          if test -r "${airport_data}.groundnet.xml"; then
            echo "Parkings ${candidate_airport:u}:"
            sed -rn "/<parkingList/,/parkingList>/s/^.* name=\"([^\"]+).*$/\1/p" "${airport_data}.groundnet.xml"
          else
            echo "no information for parkings available on ${candidate_airport:u}"
          fi
          return
        elif test -n "$candidate_runway" && test "$candidate_runway" = '?'; then
          if test -r "${airport_data}.threshold.xml"; then
            echo "Runways ${candidate_airport:u}:"
            sed -rn 's|^.*<rwy>(.+)</rwy>.*$|\1|p' "${airport_data}.threshold.xml"
          else
            echo "no information for runways available on ${candidate_airport:u}"
          fi
          return
        fi

        if test -r "${airport_data}.threshold.xml"; then
          fgfs_args+=(--airport=$candidate_airport)
          if [[ -a "${airport_data}.groundnet.xml" && -n "$candidate_parking" ]]; then
            if sed -rn "/<parkingList/,/parkingList>/p" "${airport_data}.groundnet.xml" | grep -q "name=\"${candidate_parking}\""; then
              fgfs_args+=("--parkpos='$candidate_parking'")
            else
              echo "$candidate_parking isn't a valid parking position"
            fi
          elif test -n "$candidate_runway"; then
            if grep -q "<rwy>${candidate_runway}</rwy>" "${airport_data}.threshold.xml"; then
              fgfs_args+=("--runway=$candidate_runway")
            else
              echo "$candidate_runway isn't a valid runway"
            fi
          fi
        fi
      fi

######## OTHER OPTION
    else
      case $fgfs_arg in
        --update(-data|-source|-build|))
          # $2 names a component set file ($fgfs_source/.<set>).
          if test ! -r $fgfs_source/.${2:-flightgear}; then
            echo "${2:+unknown set $2\n}usage: --update|--update-data|--update-source|--update-build <set>"
            echo "available sets :" ${$(find $fgfs_source -maxdepth 1 -type f -name ".*" -printf "%f ")//#.}
            return 1
          fi
          for component in $(<$fgfs_source/.${2:-flightgear}); do
            if ! test -d $fgfs_source/$component; then
              echo component $component not found
              return 1
            fi
          done
          # Maps VCS name -> update subcommand, consumed by update_fg.
          typeset -A control_system_data=(
            git pull
            svn up
          )
          # Bare --update runs all three phases in order.
          for up in ${${=${fgfs_arg#--update}:-data source build}#-}; do
            update_fg $up ${2:-flightgear}
          done
          unset control_system_data control_system update_command up
          return
          ;;
        --update-(fgaddon|check|rss))
          update_fg ${fgfs_arg#--update-}
          return
          ;;
        --(show-aircraft|help))
          # Extra flag each informational option needs.
          local -A complement=(
            --show-aircraft --fg-aircraft=$FGADDON/Aircraft
            --help --verbose
          )
          ld_library_path
          $FGDIR/install/flightgear/bin/fgfs $fgfs_arg ${complement[$fgfs_arg]} 2>/dev/null | pager
          return
          ;;
        --info)
          local PS3='which aircraft ? '
          local IFS=$'\n'
          # Print one setxml column ($1) labelled $2, if non-empty;
          # uses $file and $ac from the enclosing select loop.
          function _info () {
            local _info=$(sqlite3 $FGADDON/fgaddon.db <<< "select \`$1\`
              from setxml
              inner join aircrafts, hangars
              where
                aircrafts.hangar = hangars.id
              and
                setxml.variantof = aircrafts.id
              and
                setxml.file = '$file'
              and
                hangars.name = '${${(@s/ /)ac}[2]:1:-1}'")
            if test -n "$_info"; then
              printf "%s: %s\n" "$2" "$_info"
            fi
          }
          select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]',
            aircrafts.name,
            hangars.name,
            date(aircrafts.date, 'unixepoch'))
            from aircrafts
            inner join setxml, hangars
            where
              aircrafts.hangar = hangars.id
            and
              setxml.variantof = aircrafts.id
            and
              setxml.file = '$2'
            order by aircrafts.date desc"); do
            test -z "$ac" && continue
            local file=$2
            _info /sim/description "Short description"
            _info /sim/long-description "Long description"
            _info /sim/author "Author(s)"
            _info /sim/flight-model "Flight model"
            _info /sim/type "Type"
            echo Ratings
            for r in FDM systems cockpit model; do
              _info /sim/rating/$r "  $r"
            done
            return
          done
          return
          ;;
        --show-thumbnail)
          local PS3='which aircraft ? '
          local IFS=$'\n'
          select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]',
            aircrafts.name,
            hangars.name,
            date(aircrafts.date, 'unixepoch'))
            from aircrafts
            inner join setxml, hangars
            where
              aircrafts.hangar = hangars.id
            and
              setxml.variantof = aircrafts.id
            and (
              setxml.file like '%$2%'
              or
              aircrafts.name like '%$2%'
            )
            order by aircrafts.date desc"); do
            test -z "$ac" && continue
            local url=$(sqlite3 $FGADDON/fgaddon.db <<< "select printf('%s/${${(@s/ /)ac}[1]}', url)
              from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'")
            # FGMEMBERS thumbnails live on raw.githubusercontent.com.
            if test ${${(@s/ /)ac}[2]:1:-1} = 'FGMEMBERS'; then
              url="https://raw.githubusercontent.com/FGMEMBERS/${${(@s/ /)ac}[1]}/master"
            fi
            if wget --quiet --spider "$url/thumbnail.jpg"; then
              # Upscale and sharpen the tiny thumbnail before display.
              curl -s "$url/thumbnail.jpg" \
              | convert - -resize '200%' -normalize -sharpen '0.0x1.0' - \
              | display
            else
              echo "can't find or get thumbnail for ${${(@s/ /)ac}[1]} (${${(@s/ /)ac}[2]:1:-1}'s hangar)"
            fi
          done
          return
          ;;
        --search)
          command -v sqlite3 > /dev/null || return
          sqlite3 $FGADDON/fgaddon.db <<< "select printf('[%s, %s] %s (%s): %s ',
            hangars.name,
            date(aircrafts.date, 'unixepoch'),
            setxml.file,
            setxml.\`/sim/flight-model\`,
            setxml.\`/sim/description\`)
            from setxml
            inner join aircrafts, hangars
            where
              aircrafts.hangar = hangars.id
            and
              setxml.variantof = aircrafts.id
            and (
              setxml.file like '%$2%'
              or
              aircrafts.name like '%$2%'
            )
            order by aircrafts.date desc"
          return
          ;;
        --mp-list)
          # Query the multiplayer server and bucket the models seen as
          # installed / downloadable from FGAddon / unknown.
          local mplist=$(mktemp --dry-run /dev/shm/XXXXXXXXX)
          declare -A installed_model fgaddon_model unknown_model
          local ac_name=
          telnet mpserver01.flightgear.org 5001 2>/dev/null > $mplist
          grep 'pilot(s) online' $mplist
          echo
          local IFS=$'\n'
          # Count occurrences of each model path ("<path> (<count>)").
          for model in $(awk '/@/{a[$NF]++}END{for (i in a) printf("%s (%i)\n", i, a[i])}' $mplist); do
            if test -r $FGADDON/${model% *} \
              -o -r $FGDIR/source/fgdata/${model% *}; then

              ((++installed_model[${${(s:/:)model}[2]} ${model#* }]))

            elif test -r $FGDIR/source/fgdata/AI/${model% *}; then

              # '*' prefix marks AI-model-only installs.
              ((++installed_model[*${${(s:/:)model}[2]} ${model#* }]))

            elif test -n "$(command -v sqlite3)" -a -r $FGADDON/fgaddon.db; then
              ac_name=$(sqlite3 $FGADDON/fgaddon.db <<< 'select printf("%s/%s", aircrafts.name, setxml.file)
                from aircrafts
                inner join setxml
                where aircrafts.id = setxml.variantof
                and setxml.`/sim/model/path` = "'${model% *}'"
                limit 1')
              if test -n "$ac_name"; then
                ((++fgaddon_model[${ac_name} ${model#* }]))
              else
                ((++unknown_model[${model}]))
              fi

            else
              ((++unknown_model[${model}]))
            fi
          done
          if test ${#installed_model[@]} -gt 0; then
            echo "${(j:\n:)${(Ok)installed_model[@]}}" > $mplist
            echo -e "${#installed_model[@]} models installed (*AI model only):\n$(column -c$(tput cols) $mplist)\n"
          fi
          if test ${#fgaddon_model[@]} -gt 0; then
            echo "${(j:\n:)${(Ok)fgaddon_model[@]}}" > $mplist
            echo -e "${#fgaddon_model[@]} models available in FGADDON:\n$(column -c$(tput cols) $mplist)\n"
          fi
          if test ${#unknown_model[@]} -gt 0; then
            echo "${(j:\n:)${(Ok)unknown_model[@]}}" > $mplist
            echo -e "${#unknown_model[@]} unknown models:\n$(column -c$(tput cols) $mplist)"
          fi
          unset installed_model unknown_model fgaddon_model
          rm $mplist
          return
          ;;
        --install)
          local PS3='which aircraft ? '
          local IFS=$'\n'
          select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]',
            aircrafts.name,
            hangars.name,
            date(aircrafts.date, 'unixepoch'))
            from aircrafts
            inner join setxml, hangars
            where
              aircrafts.hangar = hangars.id
            and
              setxml.variantof = aircrafts.id
            and (
              setxml.file like '%$2%'
              or
              aircrafts.name like '%$2%'
            )
            order by aircrafts.date desc"); do
            test -z "$ac" && continue
            # NOTE(review): no 'break' here — the select loop re-prompts
            # even after a valid choice and only exits on EOF (Ctrl-D);
            # looks like a missing 'break' — confirm intended behavior.
          done
          test -z "$ac" && return
          declare -A hangar
          eval $(sqlite3 $FGADDON/fgaddon.db <<< "select printf('hangar[type]=%s;hangar[url]=%s;', type, url)
            from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'")
          case ${hangar[type]} in
            git)
              git -C $FGADDON/Aircraft clone ${hangar[url]}/${${(@s/ /)ac}[1]}.git;;
            svn)
              svn checkout ${hangar[url]}/${${(@s/ /)ac}[1]} $FGADDON/Aircraft/${${(@s/ /)ac}[1]};;
          esac
          unset hangar
          return
          ;;
        --fgcom)
          # Launch mumble with the fgcom-mumble plugin and wire FG to it.
          if test \
            -r $FGADDON/Addons/fgcom-mumble/FGData/Protocol/fgcom-mumble.xml \
            -a -r $HOME/.local/share/Mumble/Mumble/Plugins/fgcom-mumble.so \
            -a -h $fgfs_install/mumble/lib/libPocoZip.so \
            -a -x $fgfs_install/mumble/bin/mumble; then
            nohup sh -c "LD_LIBRARY_PATH=$fgfs_install/mumble/lib $fgfs_install/mumble/bin/mumble" > /dev/null 2>&1 &
            fgfs_args+=("--addon=$FGADDON/Addons/fgcom-mumble")
            fgfs_args+=("--generic=socket,out,10,localhost,16661,udp,fgcom-mumble")
          else
            echo "can't find protocol definition, or fgcom-mumble plugin"
            return 1
          fi
          ;;
        --map|--lnm)
          # Start Little FGconnect (if not running) and add the
          # LittleNavMap addon.  NOTE(review): "lauched" typo below is a
          # runtime message, left as-is.
          if ! pgrep -u $USER -fx "$fgfs_install/littlefgconnect/Little\ FGconnect/littlefgconnect" > /dev/null; then
            if test \
              -x $fgfs_install/littlefgconnect/"Little FGconnect"/littlefgconnect \
              -a -r $FGADDON/Addons/littlenavmap/FGData/Protocol/littlenavmap.xml; then
              nohup sh -c "$fgfs_install/littlefgconnect/Little\ FGconnect/littlefgconnect" > /dev/null 2>&1 &
            else
              echo "can't find Little FGconnect"
              return 1
            fi
          else
            echo "Little FGconnect already lauched"
          fi
          fgfs_args+=("--addon=$FGADDON/Addons/littlenavmap")
          ;;
        --addon=*)
          # Accept an absolute addon path or a name under $FGADDON/Addons.
          addon_path="${fgfs_arg#*=}"
          if test -d "$addon_path"; then
            fgfs_args+=("--addon=$addon_path")
          elif test -d "$FGADDON/Addons/$addon_path"; then
            fgfs_args+=("--addon=$FGADDON/Addons/$addon_path")
          else
            echo "can't find requested addon in $addon_path or $FGADDON/$addon_path !"
          fi
          ;;
        *)
          # Unrecognized option: pass through to the fgfs binary.
          fgfs_args+=($fgfs_arg);;
      esac
    fi
  done
  unset fgfs_arg
  # "log" keyword: record an IGC trace named after the aircraft (or the
  # default aircraft from fgdata/defaults.xml), unless --igc already given.
  if [[ -n "$log_requested" && -z "${fgfs_args[(r)--igc=*]}" ]]; then
    fgfs_args+=(--igc=file,out,1,$(date +%Y%m%d-%H%M-${${${fgfs_args[(r)--aircraft=*]}#--aircraft=}:-$(sed -rn 's|^.+aircraft>(.+)</aircraft.+$|\1|p' $FGDIR/source/fgdata/defaults.xml)}.igc))
  fi

  ld_library_path

  # Radio toggle workaround: keep RF off until the nvidia module is
  # loaded, then restore wifi (runs in a background subshell).
  # NOTE(review): busy-waits on lsmod — confirm still needed.
  (
    nmcli radio all off
    while test $(lsmod | grep -c nvidia) -eq 0; do
      :
    done
    nmcli radio wifi on
  )&

  # Show the final command line ($HOME abbreviated), plus extra options
  # from ~/.fgfs/fgfsrc (comments and blank lines stripped).
  fgfsrc=$HOME/.fgfs/fgfsrc
  echo ${fgfs_args[@]//$HOME/\$HOME} $(test -r $fgfsrc && egrep -v '^\s*(#|$)' $fgfsrc | tr '\n' ' ')
# TODO: only launch through primusrun when actually needed; are there other solutions?
  primusrun $FGDIR/install/flightgear/bin/fgfs ${fgfs_args[@]}

  # Unmount TerraFS scenery if it was mounted for this run.
  if grep -q $HOME/.fgfs/terrafs.d /proc/mounts; then
    fusermount -u $HOME/.fgfs/terrafs.d
  fi
  unset fgfs_args
}
... | ... |
@@ -0,0 +1,71 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
# Root of the local source checkouts (override with $1) and the Jenkins
# job to query on build.flightgear.org.
source_path=${1:-"$HOME/scripts/flightgear/source"}
job=fgfs
|
5 |
+ |
|
6 |
# Pull the next TAG/VALUE pair from an XML stream on stdin.
# Parsing trick: with IFS='>' and read -d '<', each read consumes
# "TAG>VALUE" up to the next '<', so $TAG holds the tag (with attributes)
# and $VALUE the text that follows it.
# Returns non-zero once the stream is exhausted.
function xmlgetnext () {
	local IFS='>'
	read -d '<' TAG VALUE
	# by design, the first TAG/VALUE pair is empty
	# to avoid infinite loops at end of file parsing we return an error
	# the next time we find an empty TAG
	if test -z "$TAG"; then
		test ${xmlgetnext_firstentry:-1} -eq 1 && xmlgetnext_firstentry=0 || return 1;
	fi
	# process $TAG only if necessary:
	# printf %q quotes $TAG; if the quoted form starts with '$' the tag
	# contained newlines/special chars ($'...' quoting), so collapse the
	# embedded newlines and squeeze runs of spaces before using it.
	local _TAG=$(printf '%q' $TAG)
	if test ${_TAG:0:1} = '$'; then
		TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//')
	fi
}
|
21 |
+ |
|
22 |
# EXIT-trap cleanup: remove the temporary API-XML file.
# Hardened vs the original "rm $apixml": quoted, '--'-protected, and -f
# so the trap cannot itself fail when the script dies (set -e) before
# mktemp ran or after the file is already gone.
exit_trap () {
	if [ -n "${apixml:-}" ]; then
		rm -f -- "$apixml"
	fi
}
|
25 |
+ |
|
26 |
# Main: fetch the Jenkins job description, then the last successful
# build, and report its result/health score — unless the local git
# checkouts no longer match the revisions Jenkins built.
set -e
trap exit_trap EXIT

# NOTE(review): --dry-run gives a predictable (unreserved) name in
# /dev/shm — fine for a private box, racy on shared hosts; confirm.
apixml=$(mktemp --dry-run /dev/shm/XXXXXXXXXX)

wget -qO- --header 'Accept:application/xml' http://build.flightgear.org:8080/job/$job/api/xml > $apixml

# Maps repo name (simgear/flightgear) -> SHA1 Jenkins built.
declare -A code

while xmlgetnext; do
	# Maintain $property as the /-joined path of currently open tags:
	# push on open tags, pop on close tags, skip declarations and
	# self-closing tags.
	case "${TAG:0:1}" in
		''|'?'|'!')
			continue;;
		/)
			property=${property%/*};;
		*)
			if test "${TAG: -1}" != '/'; then
				property+=/${TAG%% *}
			fi;;
	esac

	case "$property" in
		/workflowJob/healthReport/score)
			score=$VALUE;;

		/workflowJob/lastSuccessfulBuild/url)
			# Append the build's own XML to the same file the loop is
			# reading from — the second document is parsed in the same
			# pass (fragile but deliberate).
			wget -qO- --header 'Accept:application/xml' ${VALUE}/api/xml >> $apixml;;

		/workflowRun/action/lastBuiltRevision/branch/SHA1)
			sha1=$VALUE;;

		/workflowRun/action/remoteUrl)
			# Only track the flightgear and simgear repositories.
			[[ ${VALUE##*/} = @(flight|sim)gear ]] && code[${VALUE##*/}]=$sha1;;

		/workflowRun/result)
			result=$VALUE
			for path in ${!code[@]}; do
				if test $(git -C "$source_path/$path" rev-parse HEAD) != ${code[$path]}; then
					echo "mismatch revision from jenkins info"
					exit
				fi
			done
			echo "result: $result, score $score"
			exit;;
	esac
done < $apixml
... | ... |
@@ -0,0 +1,4 @@ |
1 |
+OpenSceneGraph |
|
2 |
+simgear |
|
3 |
+flightgear |
|
4 |
+fgdata |
... | ... |
@@ -0,0 +1 @@ |
1 |
+jsbsim |
... | ... |
@@ -0,0 +1 @@ |
1 |
+mumble |
... | ... |
@@ -0,0 +1 @@ |
1 |
+OpenVSP |
... | ... |
@@ -0,0 +1,17 @@ |
1 |
# Per-component build hook, sourced by update_fg's "build" phase with
# $fgfs_build/$fgfs_source/$simultaneous/$cmake_options in scope:
# pre-builds the bundled OpenVSP Libraries tree, then points the main
# OpenVSP configure at it via cmake_options (only appended on success,
# thanks to the && chain).
mkdir -p $fgfs_build/OpenVSPlibs
cd $fgfs_build/OpenVSPlibs
cmake -DVSP_USE_SYSTEM_LIBXML2=true \
	-DVSP_USE_SYSTEM_FLTK=true \
	-DVSP_USE_SYSTEM_GLM=true \
	-DVSP_USE_SYSTEM_GLEW=true \
	-DVSP_USE_SYSTEM_CMINPACK=true \
	-DVSP_USE_SYSTEM_LIBIGES=false \
	-DVSP_USE_SYSTEM_EIGEN=false \
	-DVSP_USE_SYSTEM_CODEELI=false \
	-DVSP_USE_SYSTEM_CPPTEST=false \
	-DCMAKE_BUILD_TYPE=Release \
	$fgfs_source/OpenVSP/Libraries \
&& make -j$simultaneous \
&& cmake_options+=("-DVSP_LIBRARY_PATH=$fgfs_build/OpenVSPlibs") \
&& cmake_options+=("-DCMAKE_BUILD_TYPE=Release")
cd -
... | ... |
@@ -0,0 +1,7 @@ |
1 |
# Per-component build hook, sourced by update_fg's "build" phase:
# extra cmake switches for the flightgear component, pointing it at the
# in-tree fgdata and the simgear just installed by this run.
cmake_options+=("-DENABLE_COMPOSITOR=ON")
cmake_options+=("-DFG_DATA_DIR:PATH=$fgfs_source/fgdata")
cmake_options+=("-DTRANSLATIONS_SRC_DIR:PATH=$fgfs_source/fgdata/Translations")
cmake_options+=("-DSimGear_DIR=$fgfs_install/simgear/lib/cmake/SimGear")
#cmake_options+=("CFLAGS="--std=c++14"")

# Let cmake's FindOpenSceneGraph pick up the locally built OSG.
export OSG_DIR=$fgfs_install/OpenSceneGraph
... | ... |
@@ -0,0 +1,12 @@ |
1 |
# Per-component build hook, sourced by update_fg's "build" phase:
# builds mumble's bundled Poco into the mumble install prefix first,
# then configures mumble as a client-only build against it.
mkdir -p $fgfs_build/poco
cd $fgfs_build/poco
cmake -DCMAKE_INSTALL_PREFIX=$fgfs_install/mumble -j$simultaneous $fgfs_source/mumble/3rdparty/poco
make -j$simultaneous
make install

cmake_options+=("-DCMAKE_PREFIX_PATH=$fgfs_install/mumble")
cmake_options+=("-DCMAKE_INCLUDE_PATH=$fgfs_install/mumble/include")
cmake_options+=("-Doverlay-xcompile=OFF")
cmake_options+=("-Dserver=OFF")
cmake_options+=("-Dretracted-plugins=OFF")
cmake_options+=("-Dplugins=OFF")