... | ... |
@@ -10,17 +10,20 @@ declare -A data=( |
10 | 10 |
[/sim/type]=text |
11 | 11 |
[/sim/model/path]=text |
12 | 12 |
) |
13 |
-fgaddon_svn=https://svn.code.sf.net/p/flightgear/fgaddon/trunk/Aircraft |
|
14 |
-fgaddon_path=$HOME/.fgfs/flightgear-fgaddon/Aircraft |
|
13 |
+ |
|
14 |
+missing_data_check=( /sim/model/path ) |
|
15 |
+ |
|
15 | 16 |
database=${DB:-$0.db} |
17 |
+test -r "$0.conf" && source "$0.conf" |
|
18 |
+ |
|
16 | 19 |
#locale=fr |
17 | 20 |
|
18 |
-test -r "$0.conf" && source $0.conf && echo config red |
|
21 |
+tempid=$(mktemp --dry-run XXXXXXX) |
|
22 |
+temppath=/dev/shm |
|
19 | 23 |
|
20 |
-aircrafts=$(mktemp --dry-run /dev/shm/Aircraft-XXXXXXXXX) |
|
21 |
-aircraft=$(mktemp --dry-run /dev/shm/aircraft-XXXXXXX) |
|
22 |
-setxml=$(mktemp --dry-run /dev/shm/setxml-XXXXXXXX) |
|
23 |
-in_ram_database=$(mktemp --dry-run /dev/shm/XXXXXXX) |
|
24 |
+setxml=$temppath/setxml-$tempid |
|
25 |
+json_file=$temppath/github_json-$tempid |
|
26 |
+in_ram_database=$temppath/${database##*/}-$tempid |
|
24 | 27 |
|
25 | 28 |
function xmlgetnext () { |
26 | 29 |
local IFS='>' |
... | ... |
@@ -39,52 +42,54 @@ function xmlgetnext () { |
39 | 42 |
fi |
40 | 43 |
} |
41 | 44 |
|
42 |
-rm -f /dev/shm/sqlite_request |
|
45 |
+function json () { |
|
46 |
+ jq --raw-output "$1" < ${2:-${json_file:?}} |
|
47 |
+} |
|
48 |
+ |
|
49 |
+rm -f $temppath/sqlite_request |
|
43 | 50 |
function sqlite_request () { |
44 |
- local delimiter=';' |
|
45 |
- test ${1:0:1} == '.' && delimiter='' |
|
46 |
- echo "${1}${delimiter}" >> /dev/shm/sqlite_request |
|
47 |
- if ! sqlite3 "$in_ram_database" <<< "$1"; then |
|
48 |
- register_state |
|
49 |
- fi |
|
51 |
+ echo -e "## REQ $(( ++sqlite_request_count ))\n${1}\n" >> $temppath/sqlite_request |
|
52 |
+ sqlite3 "$in_ram_database" <<< "$1" |
|
50 | 53 |
} |
51 | 54 |
|
52 | 55 |
function xmlremovecomments () { |
53 | 56 |
sed -ri 's/<(!--|script>)/\n&/;s/(<\/script|--)>/&\n/' $setxml |
54 | 57 |
sed -ri '/<(script>|!--).*(<\/script|--)>/d;/<(script>|!--)/,/(<\/script|--)>/d' $setxml |
55 |
- sed -i 's/\xef\xbb\xbf//' $setxml # removes BOM |
|
58 |
+ sed -i 's/\xef\xbb\xbf//;s/\r//' $setxml # removes BOM and ^M |
|
56 | 59 |
} |
57 | 60 |
|
58 | 61 |
function trap_break () { |
59 | 62 |
trap '' INT |
60 | 63 |
echo "stop requested" |
61 |
- register_state |
|
62 | 64 |
} |
63 | 65 |
|
64 | 66 |
function trap_exit () { |
65 | 67 |
trapped_rc=$? |
66 | 68 |
trap '' INT |
67 |
- rm -f $aircrafts $aircraft $setxml |
|
69 |
+ |
|
70 |
+ if declare -f on_exit > /dev/null; then |
|
71 |
+ on_exit |
|
72 |
+ fi |
|
73 |
+ |
|
68 | 74 |
if test ! -e $in_ram_database; then |
69 | 75 |
exit |
70 | 76 |
fi |
71 |
- test $trapped_rc -ne 0 && register_state |
|
72 | 77 |
echo "updating installation status" |
73 | 78 |
for ac in $(sqlite_request 'select printf("%i:%s/%s", aircrafts.id, aircrafts.name, setxml.file) |
74 | 79 |
from aircrafts inner join setxml |
75 | 80 |
where aircrafts.id = setxml.variantof and setxml.installed != 0;'); do |
76 | 81 |
ac_path=${ac#*:} |
77 |
- if test ! -e $fgaddon_path/$ac_path-set.xml; then |
|
82 |
+ if test ! -e ${hangar[path]}/$ac_path-set.xml; then |
|
78 | 83 |
sqlite_request "update setxml set installed = 0 where file = '${ac_path#*/}' and variantof = ${ac%:*}" |
79 | 84 |
fi |
80 | 85 |
done |
81 |
- for ac in $fgaddon_path/*/*-set.xml; do |
|
82 |
- ac=${ac/$fgaddon_path} |
|
86 |
+ for ac in ${hangar[path]}/*/*-set.xml; do |
|
87 |
+ ac=${ac/${hangar[path]}} |
|
83 | 88 |
sx=${ac##*/} |
84 | 89 |
ac=${ac%/*} |
85 |
- if test -d $fgaddon_path/$ac/.svn; then |
|
90 |
+ if test -d ${hangar[path]}/$ac/.svn; then |
|
86 | 91 |
install_type=1 |
87 |
- elif test -d $fgaddon_path/$ac/.git; then |
|
92 |
+ elif test -d ${hangar[path]}/$ac/.git; then |
|
88 | 93 |
install_type=2 |
89 | 94 |
else |
90 | 95 |
install_type=3 |
... | ... |
@@ -96,116 +101,86 @@ function trap_exit () { |
96 | 101 |
where name = '${ac/\/}' and setxml.variantof = id |
97 | 102 |
)" |
98 | 103 |
done |
99 |
- local missing_setxml=$(sqlite_request "select printf(' - %s', name) |
|
100 |
- from aircrafts |
|
101 |
- where id not in (select variantof from setxml)") |
|
104 |
+ local missing_setxml=$(sqlite_request "select printf(' - %s (%s)', aircrafts.name, hangars.name) |
|
105 |
+ from aircrafts inner join hangars |
|
106 |
+ where hangars.id = aircrafts.hangar and aircrafts.id not in (select variantof from setxml)") |
|
102 | 107 |
if test -n "$missing_setxml"; then |
103 | 108 |
echo -e "missing setxml config for :\n$missing_setxml" |
104 | 109 |
fi |
105 |
- local missing_model=$(sqlite_request 'select count(setxml.file) |
|
106 |
- from aircrafts inner join setxml |
|
107 |
- where aircrafts.id = setxml.variantof and setxml.`/sim/model/path` = ""') |
|
108 |
- if test $missing_model -gt 0; then |
|
109 |
- echo "$missing_model aircrafts without /sim/model/path information" |
|
110 |
- if test $missing_model -le 10; then |
|
111 |
- echo "aircrafts without /sim/model/path information:" |
|
112 |
- sqlite_request 'select printf(" - %s/%s", aircrafts.name, setxml.file) |
|
113 |
- from aircrafts inner join setxml |
|
114 |
- where aircrafts.id = setxml.variantof and setxml.`/sim/model/path` = ""' |
|
110 |
+ |
|
111 |
+ for data_presence_check in ${missing_data_check[@]}; do |
|
112 |
+ if [[ -v data[$data_presence_check] ]]; then |
|
113 |
+ local missing_data=$(sqlite_request "select count(setxml.file) |
|
114 |
+ from aircrafts inner join setxml |
|
115 |
+ where aircrafts.id = setxml.variantof and setxml.\`$data_presence_check\` = ''") |
|
116 |
+ if test $missing_data -gt 0; then |
|
117 |
+ echo "$missing_data aircrafts without $data_presence_check information" |
|
118 |
+ if test $missing_data -le 10; then |
|
119 |
+ echo "aircrafts without $data_presence_check information:" |
|
120 |
+ sqlite_request "select printf(' - %s/%s (%s)', aircrafts.name, setxml.file, hangars.name) |
|
121 |
+ from aircrafts inner join setxml, hangars |
|
122 |
+ where |
|
123 |
+ aircrafts.id = setxml.variantof |
|
124 |
+ and |
|
125 |
+ aircrafts.hangar = hangars.id |
|
126 |
+ and |
|
127 |
+ setxml.\`$data_presence_check\` = ''" |
|
128 |
+ fi |
|
129 |
+ fi |
|
115 | 130 |
fi |
116 |
- fi |
|
131 |
+ done |
|
132 |
+ |
|
117 | 133 |
if test -r "$database" && md5sum $in_ram_database | sed "s,$in_ram_database,$database," | md5sum --status -c -; then |
118 |
- rm -f $in_ram_database |
|
119 | 134 |
echo "no changes in $database" |
120 | 135 |
elif test -w "$database"; then |
121 |
- sqlite_request "vacuum" |
|
122 |
- mv -f $in_ram_database "$database" |
|
136 |
+ rm -f "$database" |
|
137 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
123 | 138 |
echo "database $database updated" |
124 |
- elif ! test -e "$database"; then |
|
125 |
- mv $in_ram_database "$database" |
|
139 |
+ elif test ! -e "$database" -a -w ${database%/*}; then |
|
140 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
126 | 141 |
echo "database $database created" |
127 | 142 |
else |
128 |
- rm -f $in_ram_database |
|
129 | 143 |
echo "nothing can be done with $database !" |
130 | 144 |
fi |
131 |
-} |
|
132 |
- |
|
133 |
-function register_state () { |
|
134 |
- sqlite_request "drop table if exists recover_rev" |
|
135 |
- sqlite_request "create table recover_rev ( |
|
136 |
- revkey text, |
|
137 |
- revision integer, |
|
138 |
- revauthor text, |
|
139 |
- revdate integer |
|
140 |
- )" |
|
141 |
- for revkey in ${!revindex[@]}; do |
|
142 |
- sqlite_request "insert into recover_rev values ( |
|
143 |
- '$revkey', |
|
144 |
- ${revindex[$revkey]:-0}, |
|
145 |
- '${revauthor[$revkey]}', |
|
146 |
- ${revdate[$revkey]:-0} |
|
147 |
- )" |
|
148 |
- done |
|
149 |
- sqlite_request "drop table if exists recover_setxmlmodified" |
|
150 |
- sqlite_request "create table if not exists recover_setxmlmodified ( |
|
151 |
- sx text |
|
152 |
- )" |
|
153 |
- for sx in ${!setxmlmodified[@]}; do |
|
154 |
- sqlite_request "insert into recover_setxmlmodified values ( |
|
155 |
- '$sx' |
|
156 |
- )" |
|
157 |
- done |
|
158 |
- exit |
|
145 |
+ rm -f $temppath/*-$tempid |
|
159 | 146 |
} |
160 | 147 |
|
161 | 148 |
function update_database () { |
162 |
- dbupdate=$(sqlite_request "select revision from aircrafts where name is '${ac:1}'") |
|
163 |
- if test -z "$dbupdate"; then |
|
164 |
- sqlite_request "insert into aircrafts (name, revision, date, author) |
|
165 |
- values ('${ac:1}', ${revindex[$ac]}, ${revdate[$ac]}, '${revauthor[$ac]}')" |
|
166 |
- elif test $dbupdate -lt ${revindex[$ac]}; then |
|
167 |
- sqlite_request "update aircrafts set |
|
168 |
- revision = ${revindex[$ac]}, |
|
169 |
- author = '${revauthor[$ac]}', |
|
170 |
- date = ${revdate[$ac]} |
|
171 |
- where name is '${ac:1}'" |
|
172 |
- fi |
|
173 |
- id=$(sqlite_request "select id from aircrafts where name is '${ac:1}'") |
|
149 |
+ sqlite_request "insert into aircrafts (name, author, revision, date, hangar) |
|
150 |
+ select name, author, revision, date, hangar from recover_aircrafts |
|
151 |
+ where recover_aircrafts.name = '$ac' and recover_aircrafts.hangar = ${hangar[id]} |
|
152 |
+ on conflict (name, hangar) where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]} do |
|
153 |
+ update set |
|
154 |
+ author = (select author from recover_aircrafts where name = '$ac'), |
|
155 |
+ revision = (select revision from recover_aircrafts where name = '$ac'), |
|
156 |
+ date = (select date from recover_aircrafts where name = '$ac') |
|
157 |
+ where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]}" |
|
174 | 158 |
|
175 |
- echo "[ ${#revindex[@]} ] ${ac:1}" |
|
176 |
- unset revindex[$ac] |
|
159 |
+ id=$(sqlite_request "select id from aircrafts where name is '${ac}' and hangar = ${hangar[id]}") |
|
177 | 160 |
|
178 |
- for sx in ${!setxmlmodified[@]}; do |
|
179 |
- unset include include_rootpath |
|
180 |
- [[ "$sx" =~ ^"${ac:1}/" ]] || continue |
|
161 |
+ echo $(sqlite_request "select printf('[ %i/%i ] $ac', count(sx), count(distinct ac)) from recover_setxml") |
|
162 |
+ |
|
163 |
+ for sx in $(sqlite_request "select sx from recover_setxml where ac = '$ac'"); do |
|
181 | 164 |
for col in ${!data[@]}; do |
182 | 165 |
data[$col]= |
183 | 166 |
done |
184 |
- sx=${sx#*/} |
|
167 |
+ |
|
185 | 168 |
echo " -> $sx" |
186 |
- if ! svn cat $fgaddon_svn/${ac:1}/$sx-set.xml > $setxml; then |
|
187 |
- register_state |
|
188 |
- fi |
|
189 |
- xmlremovecomments |
|
190 |
- unset xmlgetnext_empty_tag property |
|
191 |
- while xmlgetnext; do |
|
192 |
- case "${TAG:0:1}" in |
|
193 |
- ''|'?'|'!') |
|
194 |
- continue;; |
|
195 |
- /) |
|
196 |
- property=${property%/*};; |
|
197 |
- *) |
|
198 |
- if test "${TAG: -1}" != '/'; then |
|
199 |
- property+=/${TAG%% *} |
|
200 |
- fi;; |
|
201 |
- esac |
|
169 |
+ getfromrepo ${ac}/$sx-set.xml > $setxml |
|
202 | 170 |
|
171 |
+ unset xmlgetnext_empty_tag property include include_rootpath ac_save |
|
172 |
+ while xmlgetnext; do |
|
203 | 173 |
if [[ "$TAG" =~ ^"PropertyList include=" ]]; then |
204 | 174 |
include_rootpath=${include%/*} |
205 | 175 |
test $include = $include_rootpath && unset include_rootpath |
206 |
- eval $(echo ${TAG#* }) |
|
207 |
- [[ "$include" =~ ^Aircraft/Generic/ ]] && unset include include_rootpath && continue |
|
208 |
- if [[ "$include" =~ ^'../' ]]; then |
|
176 |
+ |
|
177 |
+ eval $(echo ${TAG#* }) # include="..." |
|
178 |
+ |
|
179 |
+ if [[ "$include" =~ ^Aircraft/Generic/ ]]; then |
|
180 |
+ unset include include_rootpath |
|
181 |
+ continue |
|
182 |
+ |
|
183 |
+ elif [[ "$include" =~ ^'../' ]]; then |
|
209 | 184 |
if test -n "$include_rootpath"; then |
210 | 185 |
if [[ "$include_rootpath" =~ '/' ]]; then |
211 | 186 |
include_rootpath=${include_rootpath%/*} |
... | ... |
@@ -218,11 +193,28 @@ function update_database () { |
218 | 193 |
fi |
219 | 194 |
include=${include/\.\.\/} |
220 | 195 |
fi |
221 |
- if ! svn cat $fgaddon_svn/${ac:1}/${include_rootpath:+$include_rootpath/}$include >> $setxml; then |
|
222 |
- register_state |
|
223 |
- fi |
|
224 |
- xmlremovecomments |
|
196 |
+ getfromrepo ${ac}/${include_rootpath:+$include_rootpath/}$include >> $setxml |
|
225 | 197 |
fi |
198 |
+ done < $setxml |
|
199 |
+ |
|
200 |
+ test -n "$ac_save" && ac=$ac_save |
|
201 |
+ |
|
202 |
+# some aircrafts (mostly from the helijah's files architecture template) |
|
203 |
+# break because of infinite loop in middle of file |
|
204 |
+# I can't find the reason of this infinite loop |
|
205 |
+# this is the reason of this double-pass |
|
206 |
+ unset xmlgetnext_empty_tag property |
|
207 |
+ while xmlgetnext; do |
|
208 |
+ case "${TAG:0:1}" in |
|
209 |
+ ''|'?'|'!') |
|
210 |
+ continue;; |
|
211 |
+ /) |
|
212 |
+ property=${property%/*};; |
|
213 |
+ *) |
|
214 |
+ if test "${TAG: -1}" != '/'; then |
|
215 |
+ property+=/${TAG%% *} |
|
216 |
+ fi;; |
|
217 |
+ esac |
|
226 | 218 |
|
227 | 219 |
if [[ "$property" = /PropertyList@($data_pattern) ]]; then |
228 | 220 |
if test -z "${data[${property/\/PropertyList}]}"; then |
... | ... |
@@ -240,42 +232,95 @@ function update_database () { |
240 | 232 |
|
241 | 233 |
if eval "test -z \"$data_test_null\""; then |
242 | 234 |
echo "WARNING: no info found, skipping" |
243 |
- continue |
|
244 |
- fi |
|
245 |
- |
|
246 |
- known=$(sqlite_request "select variantof from setxml where file is '$sx'") |
|
247 |
- if test -n "$known"; then |
|
248 |
- for col in ${!data[@]}; do |
|
249 |
- dbvalue=$(sqlite_request "select \`$col\` |
|
250 |
- from setxml |
|
251 |
- where file is '$sx' and variantof = $known") |
|
252 |
- if test "$dbvalue" != "${data[$col]}" -a -n "${data[$col]}"; then |
|
253 |
- sqlite_request "update setxml |
|
254 |
- set \`$col\` = '${data[$col]//\'/\'\'}' |
|
255 |
- where file is '$sx' and variantof = $known" |
|
256 |
- fi |
|
257 |
- done |
|
235 |
+ mkdir -p $temppath/no-data-ac |
|
236 |
+ cp -f $setxml $temppath/no-data-ac/${ac}-${sx} |
|
258 | 237 |
else |
259 |
- values="'$sx', $id, " |
|
238 |
+ insert_values="'$sx', $id, " |
|
239 |
+ insert_col='file, variantof, ' |
|
240 |
+ update_values='' |
|
260 | 241 |
for col in ${!data[@]}; do |
261 |
- values+="'${data[$col]//\'/\'\'}', " |
|
242 |
+ insert_col+="\`$col\`, " |
|
243 |
+ insert_values+="'${data[$col]//\'/\'\'}', " |
|
244 |
+ if test -n "${data[$col]}"; then |
|
245 |
+ update_values+="\`$col\` = '${data[$col]//\'/\'\'}', " |
|
246 |
+ fi |
|
262 | 247 |
done |
263 |
- values+=0 |
|
264 |
- sqlite_request "insert into setxml values ($values)" |
|
248 |
+ sqlite_request "insert into setxml (${insert_col%,*}, installed) values (${insert_values%,*}, 0) |
|
249 |
+ on conflict (file, variantof) where file = '$sx' and variantof = $id do |
|
250 |
+ update set |
|
251 |
+ ${update_values%,*}, installed = 0 |
|
252 |
+ where |
|
253 |
+ file = '$sx' and variantof = $id" |
|
265 | 254 |
fi |
266 |
- test -n "$ac_save" && ac=$ac_save |
|
267 |
- unset setxmlmodified[${ac:1}/$sx] |
|
255 |
+ |
|
256 |
+ sqlite_request "delete from recover_setxml where ac = '$ac' and sx = '$sx'" |
|
257 |
+ done |
|
258 |
+} |
|
259 |
+ |
|
260 |
+function add_record () { |
|
261 |
+ ac_ass_array[$1]="$2" |
|
262 |
+} |
|
263 |
+ |
|
264 |
+function get_record () { |
|
265 |
+ if test -n "$1"; then |
|
266 |
+ echo "${ac_ass_array[$1]}" |
|
267 |
+ else |
|
268 |
+ for k in ${!ac_ass_array[@]}; do |
|
269 |
+ echo $k = ${ac_ass_array[$k]} |
|
270 |
+ done |
|
271 |
+ fi |
|
272 |
+} |
|
273 |
+ |
|
274 |
+function add_aircraft () { |
|
275 |
+ for key in name revision date author; do |
|
276 |
+ test -n "${ac_ass_array[$key]}" # exit if missing data |
|
277 |
+ done |
|
278 |
+ sqlite_request "insert into recover_aircrafts (name, revision, date, author, hangar) |
|
279 |
+ values ( |
|
280 |
+ '${ac_ass_array[name]}', |
|
281 |
+ ${ac_ass_array[revision]}, |
|
282 |
+ ${ac_ass_array[date]}, |
|
283 |
+ '${ac_ass_array[author]}', |
|
284 |
+ ${hangar[id]})" |
|
285 |
+ for key in name revision date author; do |
|
286 |
+ ac_ass_array[$key]='' |
|
268 | 287 |
done |
269 | 288 |
} |
270 | 289 |
|
290 |
+function add_setxml_for_aircraft () { |
|
291 |
+ sqlite_request "insert into recover_setxml values ('$1', '${2/%-set.xml}')" |
|
292 |
+} |
|
293 |
+ |
|
271 | 294 |
function apply_revision () { |
272 |
- for ac in "${!revindex[@]}"; do |
|
295 |
+ for ac in $(sqlite_request "select name from recover_aircrafts"); do |
|
296 |
+ # delete aircrafts that have been deleted from the repo |
|
297 |
+ sqlite_request "delete from setxml |
|
298 |
+ where (file, variantof) in ( |
|
299 |
+ select file, variantof from setxml |
|
300 |
+ inner join aircrafts |
|
301 |
+ where aircrafts.id = setxml.variantof |
|
302 |
+ and aircrafts.name = '$ac' |
|
303 |
+ and aircrafts.hangar = ${hangar[id]} |
|
304 |
+ and setxml.file not in ( |
|
305 |
+ select sx from recover_setxml where ac = '$ac' |
|
306 |
+ ) |
|
307 |
+ )" |
|
308 |
+ |
|
309 |
+ # delete aircrafts without setxml found |
|
310 |
+ sqlite_request "delete from recover_aircrafts |
|
311 |
+ where name not in (select distinct ac from recover_setxml)" |
|
312 |
+ |
|
273 | 313 |
update_database |
274 |
- if test -d $fgaddon_path/${ac:1}/.svn \ |
|
275 |
- && test "$(svn info --show-item=url $fgaddon_path/${ac:1})" != "$fgaddon_svn/${ac:1}" \ |
|
276 |
- || test -d $fgaddon_path/${ac:1} -a ! -d $fgaddon_path/${ac:1}/.svn; then |
|
277 |
- echo "INFO: local ${ac:1} installed out from repo" >&2 |
|
314 |
+ if test -d ${hangar[path]}/${ac}/.${hangar[type]} \ |
|
315 |
+ && \ |
|
316 |
+ case ${hangar[type]} in |
|
317 |
+ svn) test "$(svn info --show-item=url ${hangar[path]}/${ac})" != "${hangar[url]}/${ac}";; |
|
318 |
+ git) test "$(git -C ${hangar[path]}/${ac} config --get remote.origin.url)" != "${hangar[url]}/${ac}";; |
|
319 |
+ esac \ |
|
320 |
+ || test -d ${hangar[path]}/${ac} -a ! -d ${hangar[path]}/${ac}/.${hangar[type]}; then |
|
321 |
+ echo "INFO: local ${ac} installed out from repo" >&2 |
|
278 | 322 |
fi |
323 |
+ sqlite_request "delete from recover_aircrafts where name = '$ac'" |
|
279 | 324 |
done |
280 | 325 |
} |
281 | 326 |
|
... | ... |
@@ -284,13 +329,14 @@ trap trap_exit EXIT |
284 | 329 |
|
285 | 330 |
stty -echoctl |
286 | 331 |
|
287 |
-declare -A revindex revauthor revdate setxmlmodified revpath |
|
332 |
+declare -A hangar |
|
288 | 333 |
data_pattern=$(printf "%s|" ${!data[@]}) |
289 | 334 |
data_pattern=${data_pattern:0:-1} |
290 | 335 |
data_test_null=$(printf '${data[%s]}' ${!data[@]}) |
291 | 336 |
|
292 | 337 |
if test -e $database; then |
293 | 338 |
cp $database $in_ram_database |
339 |
+ |
|
294 | 340 |
sql_cols=$(sqlite_request "pragma table_info(setxml)" | awk -F'|' '{printf("%s %s ", $2, $3)}') |
295 | 341 |
script_cols="file text variantof integer " |
296 | 342 |
for col in ${!data[@]}; do |
... | ... |
@@ -301,28 +347,40 @@ if test -e $database; then |
301 | 347 |
echo "ALERT: datbase version mismatch !" |
302 | 348 |
exit 1 |
303 | 349 |
fi |
304 |
- if sqlite_request '.tables' | grep -q 'recover_' && test -z "$1"; then |
|
350 |
+ if sqlite_request '.tables' | grep -q 'recover_'; then |
|
305 | 351 |
echo "recovering from previous saved state" |
306 |
- eval $(sqlite_request "select printf('revindex[%s]=%u;revauthor[%s]=%s;revdate[%s]=%u;', |
|
307 |
- revkey, revision, |
|
308 |
- revkey, revauthor, |
|
309 |
- revkey, revdate) |
|
310 |
- from recover_rev") |
|
311 |
- eval $(sqlite_request "select printf('setxmlmodified[%s]=1;', sx) |
|
312 |
- from recover_setxmlmodified") |
|
313 |
- sqlite_request "drop table recover_rev" |
|
314 |
- sqlite_request "drop table recover_setxmlmodified" |
|
352 |
+ hangar[id]=$(sqlite_request "select hangar from recover_aircrafts limit 1") |
|
353 |
+ eval $(sqlite_request "select printf('hangar[name]=%s;hangar[url]=%s;hangar[type]=%s;hangar[source]=%s', |
|
354 |
+ name, url, type, source) |
|
355 |
+ from hangars |
|
356 |
+ where id = '${hangar[id]}'") |
|
357 |
+ source $(grep -l "^\s*hangar\[name\]=${hangar[name]}\s*$" ${0%*/}.d/*.hangar) |
|
358 |
+ eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}" |
|
315 | 359 |
apply_revision |
316 | 360 |
exit |
317 | 361 |
fi |
318 | 362 |
fi |
319 | 363 |
|
364 |
+sqlite_request "create table if not exists hangars ( |
|
365 |
+ id integer primary key, |
|
366 |
+ name text, |
|
367 |
+ source text, |
|
368 |
+ type text, |
|
369 |
+ url text, |
|
370 |
+ path text, |
|
371 |
+ active integer)" |
|
372 |
+ |
|
373 |
+sqlite_request 'create unique index if not exists "index_hangars" on hangars (url)' |
|
374 |
+ |
|
320 | 375 |
sqlite_request "create table if not exists aircrafts ( |
321 | 376 |
id integer primary key, |
322 | 377 |
name text, |
323 | 378 |
revision integer, |
324 | 379 |
date integer, |
325 |
- author text)" |
|
380 |
+ author text, |
|
381 |
+ hangar integer)" |
|
382 |
+ |
|
383 |
+sqlite_request 'create unique index if not exists "index_aircrafts" on aircrafts (name, hangar)' |
|
326 | 384 |
|
327 | 385 |
sqlite_request "create table if not exists setxml ( |
328 | 386 |
file text, |
... | ... |
@@ -330,178 +388,80 @@ sqlite_request "create table if not exists setxml ( |
330 | 388 |
$(for col in ${!data[@]}; do printf "'%s' %s, " $col ${data[$col]}; done) |
331 | 389 |
installed integer)" |
332 | 390 |
|
333 |
-latest_revision=$(( $(sqlite_request "select max(revision) from aircrafts") + 1 )) |
|
334 |
- |
|
335 |
-# for debugging purpose |
|
336 |
-if test -n "$2"; then |
|
337 |
- ac=_${1%/*} |
|
338 |
- revindex[$ac]=1 |
|
339 |
- revdate[$ac]=0 |
|
340 |
- revauthor[$ac]=foobar |
|
341 |
- setxmlmodified[${ac:1}/${1#*/}]=1 |
|
342 |
- set -x |
|
343 |
- update_database |
|
344 |
- set +x |
|
345 |
- exit |
|
346 |
-elif test -n "$1"; then |
|
347 |
- ac=_${1%/*} |
|
348 |
- eval $(sqlite_request "select printf('revindex[_%s]=%s;revdate[_%s]=%i;revauthor[_%s]=%s;', |
|
349 |
- name, revision, |
|
350 |
- name, date, |
|
351 |
- name, author) |
|
352 |
- from aircrafts |
|
353 |
- where name = '${ac:1}'") |
|
354 |
- setxmlmodified[${ac:1}/${1#*/}]=1 |
|
355 |
- if test -z "${revindex[$ac]}"; then |
|
356 |
- echo "aircraft ${ac:1} not found" |
|
357 |
- rm $in_ram_database |
|
358 |
- exit |
|
391 |
+sqlite_request 'create unique index if not exists "index_setxml" on setxml (file, variantof)' |
|
392 |
+ |
|
393 |
+for file in $(find ${0%*/}.d -type f -name "*.hangar"); do |
|
394 |
+ unset hangar error_message |
|
395 |
+ unset -f getfromrepo parse_repo_history |
|
396 |
+ declare -A hangar |
|
397 |
+ source $file |
|
398 |
+ |
|
399 |
+ test -n "${hangar[name]}" \ |
|
400 |
+ -a -n "${hangar[source]}" \ |
|
401 |
+ -a -n "${hangar[type]}" \ |
|
402 |
+ -a -n "${hangar[url]}" \ |
|
403 |
+ -a -n "${hangar[active]}" || \ |
|
404 |
+ error_message="${error_message:+$error_message, }missing hangar data" |
|
405 |
+ |
|
406 |
+ declare -f getfromrepo > /dev/null || \ |
|
407 |
+ error_message="${error_message:+$error_message, }missing getfromrepo function" |
|
408 |
+ |
|
409 |
+ declare -f parse_repo_history > /dev/null || \ |
|
410 |
+ error_message="${error_message:+$error_message, }missing parse_repo_history function" |
|
411 |
+ |
|
412 |
+ if test -n "$error_message"; then |
|
413 |
+ echo "file $file isn't a valid hangar ($error_message)" |
|
414 |
+ continue |
|
359 | 415 |
fi |
360 |
- update_database |
|
361 |
-######################################## |
|
362 |
-# TO KEEP SETXML uncomment below lines: |
|
363 |
-######################################## |
|
364 |
-# mkdir -p /dev/shm/aircrafts/${1%/*} |
|
365 |
-# ln -f $setxml /dev/shm/aircrafts/$1 |
|
366 |
-######################################## |
|
367 |
- exit |
|
368 |
-fi |
|
369 | 416 |
|
370 |
-echo "downloading FGADDON history from revision ${latest_revision:-0}" |
|
371 |
-svn log --revision ${latest_revision:-0}:HEAD --xml --verbose $fgaddon_svn > $aircrafts |
|
417 |
+ sqlite_request "insert into hangars (name, source, type, url, path, active) |
|
418 |
+ values ( |
|
419 |
+ '${hangar[name]}', '${hangar[source]}', '${hangar[type]}', |
|
420 |
+ '${hangar[url]}', '${hangar[path]}', ${hangar[active]}) |
|
421 |
+ on conflict (url) where url = '${hangar[url]}' do |
|
422 |
+ update set |
|
423 |
+ name = '${hangar[name]}', |
|
424 |
+ path = '${hangar[path]}', |
|
425 |
+ active = ${hangar[active]} |
|
426 |
+ where url = '${hangar[url]}'" |
|
427 |
+done |
|
428 |
+ |
|
429 |
+unset hangar |
|
430 |
+unset -f getfromrepo parse_repo_history |
|
431 |
+declare -A hangar ac_ass_array |
|
432 |
+for h_id in $(sqlite_request "select id from hangars where active = 1"); do |
|
433 |
+ |
|
434 |
+ sqlite_request 'create table if not exists recover_aircrafts ( |
|
435 |
+ name text, |
|
436 |
+ revision integer, |
|
437 |
+ date integer, |
|
438 |
+ author text, |
|
439 |
+ hangar integer)' |
|
372 | 440 |
|
373 |
-total=$(grep -c '<logentry' $aircrafts) |
|
374 |
-progress=0 |
|
441 |
+ sqlite_request 'create table if not exists recover_setxml ( |
|
442 |
+ ac text, |
|
443 |
+ sx text)' |
|
375 | 444 |
|
376 |
-echo parsing history |
|
445 |
+ eval $(sqlite_request "select printf('hangar[id]=%i;hangar[source]=%s;', id, source) |
|
446 |
+ from hangars |
|
447 |
+ where id = '${h_id}'") |
|
448 |
+ |
|
449 |
+ source $(grep -l "^\s*hangar\[source\]=${hangar[source]}\s*$" ${0%*/}.d/*.hangar) |
|
450 |
+ |
|
451 |
+ eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}" |
|
452 |
+ |
|
453 |
+ echo -e "=${hangar[name]//?/=}=\n ${hangar[name]} \n=${hangar[name]//?/=}=" |
|
454 |
+ |
|
455 |
+ latest_revision=$(( $(sqlite_request "select max(revision) |
|
456 |
+ from aircrafts inner join hangars |
|
457 |
+ where hangars.id = aircrafts.hangar and hangars.name = '${hangar[name]}'") + 1 )) |
|
458 |
+ |
|
459 |
+ parse_repo_history |
|
460 |
+ |
|
461 |
+ if declare -f on_exit > /dev/null; then |
|
462 |
+ on_exit |
|
463 |
+ fi |
|
464 |
+ sqlite_request "drop table recover_aircrafts" |
|
465 |
+ sqlite_request "drop table recover_setxml" |
|
466 |
+done |
|
377 | 467 |
|
378 |
-while xmlgetnext; do |
|
379 |
- case "$TAG" in |
|
380 |
- 'logentry revision='*) |
|
381 |
- eval $(echo ${TAG#* }) |
|
382 |
- for action in ${!revpath[@]}; do |
|
383 |
- unset revpath[$action] |
|
384 |
- done |
|
385 |
- ;; |
|
386 |
- 'author') |
|
387 |
- revauthor=${VALUE//\'/\'\'} |
|
388 |
- ;; |
|
389 |
- 'date') |
|
390 |
- revdate=$(date +%s -d "$VALUE") |
|
391 |
- ;; |
|
392 |
- 'path '*) |
|
393 |
- TAG=${TAG#* } |
|
394 |
- TAG=${TAG// /;} |
|
395 |
- TAG=${TAG//-/_} |
|
396 |
- eval $(echo ${TAG// /;}) |
|
397 |
- path=(${VALUE//\// }) |
|
398 |
- if test $kind = 'file' -a ${#path[@]} -gt 3; then |
|
399 |
- revpath[$action]+="$VALUE " |
|
400 |
- elif test $kind = 'dir' -a ${#path[@]} -eq 3 -a $action = 'D'; then |
|
401 |
- unset revindex[_${path[2]}] revauthor[_${path[2]}] revdate[_${path[2]}] |
|
402 |
- for sx in ${!setxmlmodified[@]}; do |
|
403 |
- [[ "$sx" =~ "${path[2]}/" ]] && unset setxmlmodified[$sx] |
|
404 |
- done |
|
405 |
- sqlite_request "delete from aircrafts where name = '${path[2]}'" |
|
406 |
- fi |
|
407 |
- ;; |
|
408 |
- '/logentry') |
|
409 |
- for item in ${revpath[D]}; do |
|
410 |
- path=(${item//\// }) |
|
411 |
- if [[ "${path[3]}" =~ "-set.xml" ]]; then |
|
412 |
- unset setxmlmodified[${path[2]}/${path[3]/-set.xml}] |
|
413 |
- sqlite_request "delete from setxml where file = '${path[3]/-set.xml}'" |
|
414 |
- fi |
|
415 |
- done |
|
416 |
- for action in A M R; do |
|
417 |
- for item in ${revpath[$action]}; do |
|
418 |
- path=(${item//\// }) |
|
419 |
- test -z "${path[2]}" && continue # avoid empty |
|
420 |
- revindex[_${path[2]}]=$revision |
|
421 |
- revauthor[_${path[2]}]=$revauthor |
|
422 |
- revdate[_${path[2]}]=$revdate |
|
423 |
- [[ "${path[3]}" =~ "-set.xml" ]] && setxmlmodified[${path[2]}/${path[3]/-set.xml}]=1 |
|
424 |
- done |
|
425 |
- done |
|
426 |
- newprogress=$((++logentry * 100 / $total)) |
|
427 |
- if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then |
|
428 |
- progress=$newprogress |
|
429 |
- echo "$progress% (${#revindex[@]})" |
|
430 |
- fi |
|
431 |
- ;; |
|
432 |
- '/log') |
|
433 |
- apply_revision |
|
434 |
- break |
|
435 |
- ;; |
|
436 |
- esac |
|
437 |
-done < $aircrafts |
|
438 |
- |
|
439 |
-######################################################################## |
|
440 |
-# some aircrafts (mostly from the helijah's files architecture template) |
|
441 |
-# break because of infinite loop in middle of file |
|
442 |
-# I can't find the reason of this infinite loop |
|
443 |
-# these are the steps (some may be scripted) |
|
444 |
-# sorry about inconvenience... |
|
445 |
-# STEPS TO FOLLOW |
|
446 |
-# 1 - the following lines may be copied in a separate file (e.g /dev/shm/foo) |
|
447 |
-# 2 - uncomment by removing the FIRST column only and save the file |
|
448 |
-# 3 - uncomment the lines dedicated to save the setxml content in this script |
|
449 |
-# 4 - find the empty /sim/model/path: |
|
450 |
-# sqlite3 /your/database <<< 'select printf("%s/%s", aircrafts.name, setxml.file) |
|
451 |
-# from aircrafts inner join setxml |
|
452 |
-# where aircrafts.id = setxml.variantof and setxml.`/sim/model/path` = ""' |
|
453 |
-# 5 - play $ DB=/your/database ./fgaddon aicrafts_name/setxml_file |
|
454 |
-# 6 - play $ /dev/shm/foo aicrafts_name/setxml_file |
|
455 |
-# 7 - play $ sqlite3 /your/database <<< "update setxml set `/sim/model/path` = '<the path found>' where file = 'setxml_file'" |
|
456 |
-# |
|
457 |
-# exemple of one-line CLI: |
|
458 |
-# for i in $(sqlite3 .fgfs/flightgear-fgaddon/fgaddon.db <<< 'select printf("%s/%s", aircrafts.name, setxml.file) from aircrafts inner join setxml where aircrafts.id = setxml.variantof and setxml.`/sim/model/path` = ""'); do DB=.fgfs/flightgear-fgaddon/fgaddon.db .fgfs/fgaddon $i; sim_model_path=$(/dev/shm/foo $i | awk '/^\/sim\/model\/path/{print $NF}'); test -n "$sim_model_path" && sqlite3 .fgfs/flightgear-fgaddon/fgaddon.db <<< "update setxml set \`/sim/model/path\` = '$sim_model_path' where file = '${i#*/}'"; done |
|
459 |
- |
|
460 |
-##!/bin/bash |
|
461 |
-# |
|
462 |
-#declare -A data=( |
|
463 |
-# [/sim/model/path]=text |
|
464 |
-#) |
|
465 |
-#data_pattern=$(printf "%s|" ${!data[@]}) |
|
466 |
-#data_pattern=${data_pattern:0:-1} |
|
467 |
-#for col in ${!data[@]}; do |
|
468 |
-# data[$col]= |
|
469 |
-#done |
|
470 |
-# |
|
471 |
-#function xmlgetnext () { |
|
472 |
-# local IFS='>' |
|
473 |
-# read -d '<' TAG VALUE |
|
474 |
-# # by design, the first TAG/VALUE pair is empty |
|
475 |
-# # to avoid infinite loops at end of file parsing we return an error |
|
476 |
-# # the next time we find an empty TAG |
|
477 |
-# if test -z "$TAG"; then |
|
478 |
-# test ${xmlgetnext_empty_tag:-0} -gt 0 && return 1 |
|
479 |
-# xmlgetnext_empty_tag=$(( xmlgetnext_empty_tag + 1 )) |
|
480 |
-# fi |
|
481 |
-# # process $TAG only if necessary |
|
482 |
-# local _TAG=$(printf '%q' $TAG) |
|
483 |
-# if test ${_TAG:0:1} = '$'; then |
|
484 |
-# TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//') |
|
485 |
-# fi |
|
486 |
-#} |
|
487 |
-# |
|
488 |
-#while xmlgetnext; do |
|
489 |
-# case "${TAG:0:1}" in |
|
490 |
-# ''|'?'|'!') |
|
491 |
-# continue;; |
|
492 |
-# /) |
|
493 |
-# property=${property%/*};; |
|
494 |
-# *) |
|
495 |
-# if test "${TAG: -1}" != '/'; then |
|
496 |
-# property+=/${TAG%% *} |
|
497 |
-# fi;; |
|
498 |
-# esac |
|
499 |
-# |
|
500 |
-# if [[ "$property" = /PropertyList@($data_pattern) ]]; then |
|
501 |
-# if test -z "${data[${property/\/PropertyList}]}"; then |
|
502 |
-# eval "data[${property/\/PropertyList}]=\"${VALUE//\"/\\\"}\"" |
|
503 |
-# data[${property/\/PropertyList}]=$(tr '\n' ' ' <<< ${data[${property/\/PropertyList}]} | sed -r 's/^\s*//;s/\s+/ /g;s/\s*$//') |
|
504 |
-# echo "${property/\/PropertyList} : ${data[${property/\/PropertyList}]}" |
|
505 |
-# fi |
|
506 |
-# fi |
|
507 |
-#done < /dev/shm/aircrafts/$1 |
... | ... |
@@ -0,0 +1,78 @@ |
1 |
+Files with names ending in .hangar are sourced (bash syntax).
|
2 |
+Please have a look at the two provided files to see how it is done.
|
3 |
+ |
|
4 |
+Each file must contain:
|
5 |
+- variable `hangar` definition as an associative array with at least the |
|
6 |
+ following keys: |
|
7 |
+ name: the human identifier of the hangar |
|
8 |
+ url: the url of the remote repository (this is the unique identifier for
|
9 |
+ each hangar, can't be the same in more than one hangar) |
|
10 |
+ type: the type of repo (git, svn, ...) at the moment, only git and svn |
|
11 |
+ repos are known and verified by installation status routine |
|
12 |
+ path: the local path in which the remote repo (or part of it) is cloned |
|
13 |
+ active: is the hangar active (1) or not (0) |
|
14 |
+ |
|
15 |
+- function `parse_repo_history` which describes how to get the hangar content |
|
16 |
+ (initial import and updates) |
|
17 |
+- function `getfromrepo` which describes how to get a file from the remote repo
|
18 |
+- optional function `on_exit` to describe what to do when exiting the hangar |
|
19 |
+ |
|
20 |
+Some functions are provided by the main script to ease the database management: |
|
21 |
+- add_record <key> <value> |
|
22 |
+ this function records the key with the value; these
|
23 |
+ keys are intended to be information for aircraft. |
|
24 |
+ Mandatory keys are: |
|
25 |
+ name : the name of the aircraft |
|
26 |
+ revision : the revision from the repo |
|
27 |
+ date : date of the last update |
|
28 |
+ author : author of the commit |
|
29 |
+ |
|
30 |
+- get_record [key] |
|
31 |
+ returns the value recorded for the key if no key is |
|
32 |
+ provided, prints all the recorded keys in the form: |
|
33 |
+ key1 = value1 |
|
34 |
+ key2 = value2 |
|
35 |
+ ... (maybe useful for debugging) |
|
36 |
+ |
|
37 |
+- add_aircraft |
|
38 |
+ adds aircraft in the database in a buffer table, ready to be |
|
39 |
+ integrated into the main aircrafts table by using the `apply_revision` function.
|
40 |
+ If one or more of the mandatory keys as described in `add_record` is or are
|
41 |
+ missing, the function may exit with an error and the whole script exits
|
42 |
+ |
|
43 |
+- add_setxml_for_aircraft <aircraft> <-set.xml file> |
|
44 |
+ add in special buffer table the -set.xml entry for aircraft |
|
45 |
+ the trailing "-set.xml" is removed if found |
|
46 |
+ |
|
47 |
+- xmlgetnext |
|
48 |
+ in a while loop to read the XML content of a file, export $TAG |
|
49 |
+ (formatted) and $VALUE (not formatted). By design the first couple TAG/VALUE is
|
50 |
+ always empty; some files have an unidentified issue that makes the while

51
+ loop enter an infinite loop. To avoid this, please use the following syntax:
|
52 |
+ |
|
53 |
+ unset xmlgetnext_empty_tag |
|
54 |
+ while xmlgetnext; do |
|
55 |
+ # PUT HERE YOUR STUFF |
|
56 |
+ done < /your/xml/file |
|
57 |
+ |
|
58 |
+- sqlite_request <SQLite request> |
|
59 |
+ perform the request on database (actually in |
|
60 |
+ a copied database which will be dumped into the original at the end of |
|
61 |
+ the script). |
|
62 |
+ Don't touch the original database, and always use this |
|
63 |
+ wrapper, unless you will lose your changes at the end. |
|
64 |
+ Moreover this wrapper write in a file /dev/shm/sqlite_request all the |
|
65 |
+ requests, so it is useful to debug, or just watch what is doing |
|
66 |
+ |
|
67 |
+- apply_revision |
|
68 |
+ use the buffered tables to feed the main tables with all the |
|
69 |
+ information it will find by parsing the new or updated aircrafts config files |
|
70 |
+ |
|
71 |
+Some variables are available |
|
72 |
+- $latest_revision the revision to starts the history remote retrieving, |
|
73 |
+ defaults to 1 |
|
74 |
+ |
|
75 |
+- $tempid a unique identifier to create temporary files (useful to get all the
|
76 |
+ files generated by the script ending with the same ID) |
|
77 |
+ |
|
78 |
+Enjoy adding your preferred hangar :) |
... | ... |
@@ -0,0 +1,127 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft |
|
4 |
+ |
|
5 |
+hangar[name]=FGADDON |
|
6 |
+hangar[source]=sourceforge |
|
7 |
+hangar[type]=svn |
|
8 |
+hangar[url]=https://svn.code.sf.net/p/flightgear/fgaddon/trunk/Aircraft |
|
9 |
+hangar[active]=1 |
|
10 |
+ |
|
# Fetch one file from the remote FGAddon repository, writing it on stdout.
# $1 = path relative to the repository root (e.g. "SomeAircraft/some-set.xml")
# Returns svn's exit status.
function getfromrepo () {
    # Quote the URL: names containing shell-special characters must not be
    # word-split or glob-expanded.
    svn cat "${hangar[url]}/$1"
}
|
14 |
+ |
|
# Hangar cleanup hook, called by the main script when leaving this hangar:
# remove the temporary files created by parse_repo_history.
function on_exit () {
    local f
    # Skip empty variables: parse_repo_history may never have run, and a
    # quoted empty operand would make rm complain.
    for f in "$aircrafts" "$aircraft"; do
        test -n "$f" && rm -f "$f"
    done
    return 0
}
|
18 |
+ |
|
# Import the FGAddon repository history into the database buffer tables.
#
# Driven by $latest_revision (supplied by the main script, defaults to 1):
#   * 1  : initial import -- list every aircraft directory of the repo
#   * >1 : incremental update -- replay `svn log` from that revision
# Records are pushed with add_record/add_setxml_for_aircraft/add_aircraft
# and finally integrated into the main tables with apply_revision.
# Fix vs original: user-visible message typo "revisison" -> "revision".
function parse_repo_history () {
    # Register every *-set.xml file found in one aircraft directory.
    function getaircraftinfo () { # $1 = aircraft
        svn list --xml --depth files ${hangar[url]}/$1 > $aircraft
        unset xmlgetnext_empty_tag
        while xmlgetnext; do
            # <name>foo-set.xml</name> entries are the aircraft variants
            if test "$TAG" = 'name' && test "${VALUE/%-set.xml}" != "$VALUE"; then
                add_setxml_for_aircraft $1 ${VALUE/%-set.xml}
            fi
        done < $aircraft
    }

    # temporary files, removed by this hangar's on_exit
    aircrafts=$temppath/Aircraft-$tempid
    aircraft=$temppath/aircraft-$tempid

    if test $latest_revision -eq 1; then
        # initial import: one <entry> per aircraft directory
        echo getting repository list
        if ! svn list --xml --depth immediates ${hangar[url]} > $aircrafts; then
            echo "error while retrieving list"
            exit
        fi
        total=$(grep -c '<entry' $aircrafts)
        is_ac=0
    else
        # incremental update: one <logentry> per commit since $latest_revision
        if test ${latest_revision:-0} -gt $(svn info --show-item revision ${hangar[url]}); then
            echo "already latest revision"
            return
        fi
        echo "downloading history from revision ${latest_revision:-0}"
        if ! svn log --revision ${latest_revision:-0}:HEAD --xml --verbose ${hangar[url]} > $aircrafts; then
            echo "error while retrieving history"
            exit
        fi
        total=$(grep -c '<logentry' $aircrafts)
    fi

    progress=0

    echo parsing repository

    # $is_ac tracks whether the element being parsed belongs to an aircraft
    unset xmlgetnext_empty_tag
    while xmlgetnext; do

        if test $latest_revision -eq 1; then
            if test "$TAG" = 'entry kind="dir"'; then
                is_ac=1
                continue
            elif test $is_ac -eq 0 -a "$TAG" != '/list'; then
                continue
            fi
        else
            if test "${TAG%% *}" = 'logentry'; then
                is_ac=1
            elif test ${is_ac:-0} -eq 0 -a "$TAG" != '/log'; then
                continue
            fi
        fi

        case "$TAG" in
            'name')
                add_record name $VALUE
                ;;
            'logentry revision='*|'commit revision='*)
                add_record revision ${TAG#*=}
                ;;
            'author')
                # double the single quotes for SQL string literals
                add_record author ${VALUE//\'/\'\'}
                ;;
            'date')
                add_record date $(date +%s -d "$VALUE")
                ;;
            'path '*)
                # turn the XML attributes of <path ...> into shell
                # variables ($kind, $action, ...) via eval
                TAG=${TAG#* }
                TAG=${TAG// /;}
                TAG=${TAG//-/_}
                eval $(echo ${TAG// /;})
                path=(${VALUE//\// })
                # a deleted aircraft directory (trunk/Aircraft/<name>):
                # purge it and its -set.xml entries from the database
                if test $kind = 'dir' -a ${#path[@]} -eq 3 -a $action = 'D'; then
                    sqlite_request "delete from setxml
                                    where variantof in (
                                        select id from aircrafts
                                        where name = '${path[2]}'
                                        and hangar = ${hangar[id]}
                                    )"
                    sqlite_request "delete from aircrafts
                                    where name = '${path[2]}'
                                    and hangar = ${hangar[id]}"
                    is_ac=0
                    continue
                fi
                is_ac=1
                add_record name ${path[2]}
                ;;
            '/logentry'|'/entry')
                # end of an aircraft element: collect its -set.xml files,
                # push the buffered record, then report progress
                getaircraftinfo $(get_record name)
                add_aircraft
                newprogress=$((++entry * 100 / $total))
                if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then
                    progress=$newprogress
                    echo "$progress% ($(sqlite_request 'select count(name) from recover_aircrafts'))"
                fi
                is_ac=0
                ;;
            '/list'|'/log')
                # end of document: merge the buffer tables into the database
                apply_revision
                break
                ;;
        esac
    done < $aircrafts
}
... | ... |
@@ -0,0 +1,103 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft |
|
4 |
+ |
|
5 |
+hangar[name]=FGMEMBERS |
|
6 |
+hangar[source]=github |
|
7 |
+hangar[type]=git |
|
8 |
+hangar[url]=https://api.github.com/orgs/FGMEMBERS |
|
9 |
+hangar[active]=1 |
|
10 |
+ |
|
# Fetch one file from a FGMEMBERS repository, writing it on stdout.
# $1 = "<repository>/<path inside the repository>"
function getfromrepo () {
    # ${1%%/*} = repository name, ${1#*/} = file path inside the repo.
    # --fail: on HTTP errors (e.g. 404) emit nothing and return non-zero
    # instead of saving the error page body as if it were file content.
    curl -s --fail "https://raw.githubusercontent.com/FGMEMBERS/${1%%/*}/master/${1#*/}"
}
|
14 |
+ |
|
# Hangar cleanup hook, called by the main script when leaving this hangar:
# remove this hangar's temporary files and drop the variables so another
# hangar cannot accidentally reuse them.
function on_exit () {
    local f
    # Skip empty variables: parse_repo_history may never have run, and a
    # quoted empty operand would make rm complain.
    for f in "$gh_curl_content" "$json_file" "$contents"; do
        test -n "$f" && rm -f "$f"
    done
    unset contents gh_curl_content
    return 0
}
|
19 |
+ |
|
# Import the FGMEMBERS github organisation into the database buffer tables.
#
# The github REST API is paginated (100 repos per page) and rate-limited;
# github_curl transparently honours the X-Ratelimit-* headers.  Repositories
# updated after $latest_revision are collected into $json_file, then each one
# is scanned for *-set.xml files.  Records are pushed with
# add_record/add_setxml_for_aircraft/add_aircraft and integrated with
# apply_revision.
# Fixes vs original: ${hangar[i]} (unset key, produced invalid SQL) is now
# ${hangar[id]} as in the FGADDON hangar; $sx is reset before each repo scan
# (it previously kept the value from the previous iteration).
function parse_repo_history () {
    # Wrapper around curl for the github API.
    # - no argument: succeed iff an auth token is configured
    # - $1 = URL: print the response body, waiting for the rate-limit window
    #   to reopen when the API quota is exhausted
    # NOTE(review): "githup_token" is spelled this way consistently here and
    # in user configs -- renaming it would break existing configurations.
    function github_curl () {
        test $# -eq 0 && return $(test -n "$githup_token")
        curl ${githup_token:+-u $githup_token} -si $1 > $gh_curl_content
        # extract the remaining quota and reset time from the headers
        eval $(sed -rn '1,/^\s*$/{s/^X-Ratelimit-Remaining:\s*([0-9]+).*$/remaining=\1/p;s/^X-Ratelimit-Reset:\s*([0-9]+).*$/reset=\1/p}' $gh_curl_content)
        if test ${remaining:-1} -eq 0; then
            ((reset = reset + 10)) # just to be prudent
            echo "github API limit: waiting $(date +%H:%M -d@$reset) to continue" >&2
            if [[ $1 =~ '/contents/'$ ]]; then
                # save the work done so far before the long sleep
                echo "process already found data so far" >&2
                apply_revision >&2
            fi
            sleep $(( $reset - $(date +%s) ))
            curl ${githup_token:+-u $githup_token} -s $1 # not sure that was really fetched
        else
            # skip the headers, print the body only
            sed -n '/^\s*$/,$p' $gh_curl_content
        fi
    }

    gh_curl_content=$temppath/github-$tempid
    contents=$temppath/contents-$tempid
    github_orga_repos='https://api.github.com/orgs/FGMEMBERS/repos?sort=updated&type=all&per_page=100&page=_page_'
    page=1

    # build a JSON array of the repositories updated since $latest_revision
    echo '[' > ${json_file}_recent

    while github_curl "${github_orga_repos/_page_/$((page++))}" > $json_file; do
        jq_length=$(json 'length')
        test $jq_length -eq 0 && break

        for ((i = 0; i < $jq_length; i++)); do
            if test $(date +%s -d $(json ".[$i].updated_at")) -gt $latest_revision; then
                json ".[$i]" >> ${json_file}_recent
                echo ',' >> ${json_file}_recent
            else
                # repos are sorted by update date: nothing newer follows
                break 2
            fi
        done
    done

    sed -i '${/^,/d}' ${json_file}_recent   # drop the trailing comma
    echo ']' >> ${json_file}_recent

    mv -f ${json_file}_recent $json_file
    jq_length=$(json 'length')
    local progress=0
    local repo

    if test $latest_revision -eq 1; then
        # authenticated API calls get a much higher quota
        if github_curl; then
            max_requests_per_hour=5000
        else
            max_requests_per_hour=60
        fi
        echo "the initial import may take more than $(($jq_length / $max_requests_per_hour)) hours to perform"
    fi

    for ((i = 0; i < $jq_length; i++)); do
        local repo=$(json ".[$i].name")
        add_record name $repo

        github_curl "https://api.github.com/repos/FGMEMBERS/${repo}/contents/" > $contents
        # reset $sx: when the repo contains no -set.xml file the loop body
        # never runs and $sx would keep the previous repository's value,
        # wrongly marking this repository as an aircraft
        sx=
        for sx in $(json '.[] | select(.type == "file") | .path | capture("(?<setxml>.+)-set.xml") | .setxml' $contents); do
            add_setxml_for_aircraft $repo $sx
        done

        if test -n "$sx"; then
            add_record revision $(date +%s -d $(json ".[$i].updated_at"))
            add_record date $(date +%s -d $(json ".[$i].updated_at"))
            add_record author FGMEMBERS
            add_aircraft
        else
            # not an aircraft repo: drop a stale database entry if any
            sqlite_request "delete from aircrafts where name = '$repo' and hangar = ${hangar[id]}"
        fi

        newprogress=$((i * 100 / $jq_length))
        if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then
            progress=$newprogress
            echo "$progress% ($(sqlite_request 'select count(name) from recover_aircrafts'))"
        fi
    done

    apply_revision
}