Merge branch 'master' of https://seb.lautre.net/git...
.../seb/scripts
... | ... |
@@ -1,6 +1,12 @@ |
1 | 1 |
#!/bin/bash |
2 | 2 |
|
3 |
-for tool in sqlite3 getopt md5sum mktemp; do |
|
3 |
+set -e |
|
4 |
+ |
|
5 |
+# on n'autorise qu'une seule exécution à la fois |
|
6 |
+process_token=$(mktemp --dry-run /dev/shm/XXXXXXXXXXXXXXXX) |
|
7 |
+token_file="$process_token.${0##*/}" |
|
8 |
+ |
|
9 |
+for tool in sqlite3 getopt mktemp w3m jq; do |
|
4 | 10 |
which $tool > /dev/null 2>&1 || { |
5 | 11 |
echo missing tool $tool |
6 | 12 |
exit 1 |
... | ... |
@@ -10,145 +16,202 @@ done |
10 | 16 |
IFS_=$IFS |
11 | 17 |
|
12 | 18 |
function sqlite_request () { |
13 |
- sqlite3 "$in_ram_database" <<< "$1" |
|
19 |
+ sqlite3 ${2:+-cmd} ${2:+".mode $2"} "$in_ram_database" <<< "$1" |
|
14 | 20 |
} |
15 | 21 |
|
16 | 22 |
function create_database () { |
17 |
- if test ${#groupes[@]} -eq 0; then |
|
18 |
- echo "unable to find groupes in $config_file" >&2 |
|
19 |
- exit 1 |
|
20 |
- fi |
|
21 |
- |
|
22 |
- sqlite_request "create table if not exists votes (id integer primary key, nom text)" |
|
23 |
- sqlite_request "create table if not exists url (id integer primary key autoincrement, url text)" |
|
24 |
- sqlite_request "create table if not exists députés (id integer primary key autoincrement, nom text, groupe integer)" |
|
25 |
- sqlite_request "create table if not exists groupes (id integer primary key autoincrement, nom text, nom_court text)" |
|
26 |
- sqlite_request "create table if not exists scrutins (num integer primary key, date text not null, intitulé text non null, adoption boolean, url integer)" |
|
27 |
- sqlite_request "create table if not exists dépouillement (député integer not null, scrutin integer not null, groupe integer not null, vote integer not null)" |
|
23 |
+ sqlite_request "create table if not exists dossiers (id integer primary key, titre text, url text)" |
|
24 |
+ sqlite_request "create table if not exists votes (id integer primary key, nom text)" |
|
25 |
+ sqlite_request "create table if not exists députés (id integer primary key, nom text, groupe integer, date text)" |
|
26 |
+ sqlite_request "create table if not exists groupes (id integer primary key, nom text unique, nom_court text)" |
|
27 |
+ sqlite_request "create table if not exists scrutins (num integer primary key, séance text, date text not null, intitulé text non null, adoption boolean, dossier integer, mise_au_point text)" |
|
28 |
+ sqlite_request "create table if not exists dépouillements (scrutin integer not null, député integer not null, vote integer not null)" |
|
29 |
+ sqlite_request "create unique index if not exists 'index_députés' on députés (nom, groupe)" |
|
30 |
+ sqlite_request "create unique index if not exists 'index_dossiers' on dossiers (titre, url)" |
|
31 |
+ sqlite_request "create unique index if not exists 'index_dépouillements' on dépouillements (député, scrutin)" |
|
28 | 32 |
|
29 |
- v_id=0 |
|
30 | 33 |
for v in Pour Contre Abstention Non-votant; do |
31 |
- if test -z $(sqlite_request "select nom from votes where id is $v_id"); then |
|
32 |
- sqlite_request "insert into votes values ($v_id, '$v')" |
|
33 |
- else |
|
34 |
- test -z $(sqlite_request "select nom from votes where id is $v_id and nom is '$v'") \ |
|
35 |
- && sqlite_request "update votes set nom = '$v' where id is $v_id)" |
|
36 |
- fi |
|
37 |
- let v_id++ |
|
38 |
- done |
|
39 |
- unset v_id v |
|
40 |
- |
|
41 |
- for g in ${!groupes[@]}; do |
|
42 |
- test -z $(sqlite_request "select id from groupes where nom is '${groupes[$g]}' and nom_court is '$g'") \ |
|
43 |
- && sqlite_request "insert into groupes (nom, nom_court) values ('${groupes[$g]}', '$g')" |
|
34 |
+ sqlite_request "insert or ignore into votes (nom) values ('$v')" |
|
44 | 35 |
done |
45 |
- unset g groupes |
|
46 |
- |
|
47 |
- test -z $(sqlite_request "select id from url where id = 0") \ |
|
48 |
- && sqlite_request "insert into url values (0, '')" |
|
49 | 36 |
} |
50 | 37 |
|
51 | 38 |
function update_database () { |
52 | 39 |
test "$no_db_update" = $true_flag && return |
53 | 40 |
tempfile="/dev/shm/scrutin.$$" |
54 | 41 |
progress=0 |
42 |
+ for r in "${!acronymes[@]}"; do |
|
43 |
+ sqlite_request "update groupes set nom_court = \"${acronymes[$r]}\" where nom = \"$r\"" |
|
44 |
+ done |
|
45 |
+ sqlite_request "create table if not exists dossier_par_scrutin (scrutin integer, url text)" |
|
46 |
+ echo "récupération des dossiers" |
|
47 |
+ wget -qO- "https://www.assemblee-nationale.fr/dyn/$mandature/dossiers" \ |
|
48 |
+ | sed -rn 's/<p class="m-0"><a title="Accéder au dossier législatif" href="([^"]+)">([^<]+)<.+$/\1 \2/p' \ |
|
49 |
+ | sed -r "s/^[[:space:]]*//; s/'/'/g" \ |
|
50 |
+ | awk -v dq='"' '{ |
|
51 |
+ printf("insert or ignore into dossiers (titre, url) values (%s, %s);\n", dq gensub($1 " ", "", "1", $0) dq, dq "https://www.assemblee-nationale.fr" $1 dq) |
|
52 |
+ }' > $tempfile |
|
53 |
+ sqlite3 "$in_ram_database" < $tempfile |
|
55 | 54 |
first_=$first |
56 |
- first=$(sqlite_request "select count(num) from scrutins") |
|
55 |
+ first=$(sqlite_request "select max(num) from scrutins") |
|
57 | 56 |
if test ${first:-0} -lt $last; then |
58 | 57 |
echo "récupération des scrutins n°$((${first:-0}+1)) à n°$last dans "$database" (à conserver autant que possible)" >&2 |
59 | 58 |
|
60 |
- url_database=/dev/shm/url_database |
|
61 |
- : > "$url_database" |
|
62 | 59 |
test $((last % 100)) -ne 0 && last_offset=0 |
63 | 60 |
IFS=$' \t\n' |
64 | 61 |
for offset in $(seq $((last - 100)) -100 ${first:-0} ) $last_offset; do |
65 |
- wget -qO- "http://www2.assemblee-nationale.fr/scrutins/liste/(offset)/$offset/(legislature)/15/(type)/TOUS/(idDossier)/TOUS" \ |
|
66 |
- | awk ' |
|
62 |
+ wget -qO- "http://www2.assemblee-nationale.fr/scrutins/liste/(offset)/$offset/(legislature)/$mandature/(type)/TOUS/(idDossier)/TOUS" \ |
|
63 |
+ | awk -v dq='"' ' |
|
64 |
+ BEGIN { |
|
65 |
+ } |
|
67 | 66 |
/<td class="denom">/ { |
68 |
- scrutin = gensub(/^.+denom.>([[:digit:]]+).*<.td./,"\\1","1",$0) |
|
67 |
+ scrutin = gensub(/^.+denom.>([[:digit:]]+)\\*?<.td./,"\\1","1",$0) |
|
69 | 68 |
} |
70 |
- /<td class="desc">.+dossier<.a/ { |
|
71 |
- a[scrutin] = gensub(/^.+.<a href="(.+)">dossier<.a>.*$/,"\\1","1",$0) |
|
69 |
+ /<td class="desc">/ { |
|
70 |
+ if (match($0, ">dossier<") > 0) |
|
71 |
+ dossier[scrutin] = gensub(/^.+.<a href="([^"]+)">dossier<.a>.*$/,"\\1","1",$0) |
|
72 | 72 |
} |
73 | 73 |
END { |
74 |
- for (i in a) |
|
75 |
- print gensub("*","","1",i) "|" a[i] |
|
76 |
- }' >> "$url_database" |
|
74 |
+ for (i in dossier) { |
|
75 |
+ printf("insert into dossier_par_scrutin (scrutin, url) values (%i, %s);\n", i, dq dossier[i] dq) |
|
76 |
+ } |
|
77 |
+ }' > $tempfile |
|
78 |
+ sqlite3 "$in_ram_database" < $tempfile |
|
77 | 79 |
done |
78 |
- sort -u "$url_database" > "${url_database}.sorted" |
|
79 |
- mv -f "${url_database}.sorted" "$url_database" |
|
80 | 80 |
|
81 |
- IFS=$'\n' |
|
81 |
+ |
|
82 |
+# IFS=$'\n' |
|
82 | 83 |
begin=$(date +%s) |
83 | 84 |
for scrutin in $(seq $((${first:-0}+1)) $last); do |
84 |
- wget -qO- "http://www2.assemblee-nationale.fr/scrutins/detail/(legislature)/15/(num)/$scrutin" \ |
|
85 |
- | sed -r '0,/< *div class="titre-bandeau-bleu +to-print" *>/d; /< *script +type="text\/javascript" *>/,$d' > $tempfile |
|
86 |
- |
|
87 |
- unset title date adoption url id_url |
|
88 |
- |
|
89 |
- title=$(sed -rn '/<h1 class="">Analyse du scrutin n° '$scrutin'/n; s,^.*<h3 class="president-title">(.+).</h3>,\1,p' $tempfile \ |
|
90 |
- | sed "s/;//g; s/[ \t][ \t]+/ /g; s/^Scrutin public sur *//; s/^l[ae']s* *//") |
|
91 |
- date=$(sed -rn 's,^.*<h1 class="">Analyse du scrutin n° '$scrutin'<br/>(.+) </h1>,\1,p' $tempfile) |
|
92 |
- adoption=$(sed -rn 's,^.*<p class="annonce"><span class="annoncevote">(.+).</span></p>.*$,\1,p' $tempfile) |
|
93 |
- test -n "$title" -a -n "$date" -a -n "$adoption" || { |
|
94 |
- echo "erreur dans la récupération du scrutin $scrutin" |
|
95 |
- exit 1 |
|
96 |
- } |
|
97 |
- grep -q 'e a a' <<< "$adoption" && adoption=1 || adoption=0 |
|
98 |
- |
|
99 |
- url=$(awk -F'|' "/^$scrutin\|/{print \$2}" "$url_database") |
|
100 |
- id_url=$(sqlite_request "select id from url where url is '$url'") |
|
101 |
- if test -z "$id_url"; then |
|
102 |
- sqlite_request "insert into url (url) values ('$url')" |
|
103 |
- id_url=$(sqlite_request "select id from url where url is '$url'") |
|
104 |
- fi |
|
85 |
+ w3m -cols 512 -dump "http://www2.assemblee-nationale.fr/scrutins/detail/(legislature)/$mandature/(num)/$scrutin" \ |
|
86 |
+ | sed -n '/^Analyse du scrutin n° /,/^Votes des groupes/{/^Navigation/,/^ • Non inscrits/d;/^[[:space:]]*$/d;p}' \ |
|
87 |
+ | awk -v sq="'" -v dq='"' ' |
|
88 |
+ BEGIN { adoption = -1; map = 0 } |
|
89 |
+ /^Analyse du scrutin/ { scrutin = $NF } |
|
90 |
+ /séance du [0-3][0-9]\/[01][0-9]\/(19|20)[0-9]+/ { date = $NF; seance = $1 } |
|
91 |
+ /^Scrutin public sur / { titre = gensub("^Scrutin public sur l[ae" sq "]s? ?", "", "1") } |
|
92 |
+ /^L.Assemblée .+ adopté/ { adoption = NF == 3 } |
|
93 |
+ /^Nombre de votants :/ { votants = $NF } |
|
94 |
+ /^Nombre de suffrages exprimés :/ { exprimes = $NF } |
|
95 |
+ /^Majorité absolue :/ { majo_absolue = $NF } |
|
96 |
+ /^Pour l.adoption :/ { pour = $NF } |
|
97 |
+ /^Contre :/ { contre = $NF } |
|
98 |
+ /^Groupe / { groupe = gensub("^Groupe (.+) \\([1-9].+$", "\\1", "1") |
|
99 |
+ groupe = gensub("^(la|les|le|l" sq "|du|des|de|de la|d" sq ") ", "", "1", groupe) |
|
100 |
+ } |
|
101 |
+ /^Non inscrits/ { groupe = "Non inscrits" } |
|
102 |
+ /^(Pour|Abstention|Contre):/ { position = gensub(":", "", "1", $1) } |
|
103 |
+ /^Non-votants?:/ { |
|
104 |
+ position = gensub("s?:", "", "1", $1) |
|
105 |
+ nvl = "" |
|
106 |
+ while ($1 != "Groupe" || $0 != "Contenus annexes") { |
|
107 |
+ getline |
|
108 |
+ if ($1 == "Groupe" || $0 == "Contenus annexes") |
|
109 |
+ break |
|
110 |
+ nvl = nvl $0 |
|
111 |
+ } |
|
112 |
+ f = split(nvl, nv, "(, | et )") |
|
113 |
+ for (i=1; i<=f; i++) { |
|
114 |
+ votes[groupe][position][gensub("(^ +|M\\. |Mme |Mlle | \\(.+)", "", "g", nv[i])]++ |
|
115 |
+ } |
|
116 |
+ groupe = gensub("^Groupe (.+) \\([1-9].+$", "\\1", "1") |
|
117 |
+ } |
|
118 |
+ /^ • / { votes[groupe][position][gensub("^[^A-Z]*", "", "1")]++ } |
|
119 |
+ /^Mises au point/,/^Votes des groupes/ { if ($1 != "(Sous") mises_au_point[map++] = $0 } |
|
120 |
+ END { |
|
121 |
+ if (adoption < 0) |
|
122 |
+ adoption = pour >= majo_absolue |
|
123 |
+ |
|
124 |
+ for (i=1; i<map-1; i++) |
|
125 |
+ mise_au_point = sprintf("%s[%s]", mise_au_point, mises_au_point[i]) |
|
126 |
+ |
|
127 |
+ printf("insert into scrutins (num, séance, date, intitulé, adoption, mise_au_point) values (%i, %s, %s, %s, %i, %s);\n", |
|
128 |
+ scrutin, |
|
129 |
+ sq seance sq, |
|
130 |
+ sq date sq, |
|
131 |
+ dq gensub(dq, dq dq, "g", titre) dq, |
|
132 |
+ adoption, |
|
133 |
+ dq gensub(dq, dq dq, "g", mise_au_point) dq, |
|
134 |
+ scrutin) |
|
135 |
+ printf("update scrutins set dossier = ( select id from dossiers inner join dossier_par_scrutin where dossiers.url = dossier_par_scrutin.url and dossier_par_scrutin.scrutin = %i) where num = %i;\n", |
|
136 |
+ scrutin, |
|
137 |
+ scrutin) |
|
138 |
+ for (groupe in votes) { |
|
139 |
+ printf("insert or ignore into groupes (nom) values (%s);\n", dq groupe dq) |
|
140 |
+ for (position in votes[groupe]) { |
|
141 |
+ for (nom in votes[groupe][position]) { |
|
142 |
+ if (nom !~ " \\(.+\\) *$") |
|
143 |
+ printf("insert or ignore into députés (nom, groupe, date) select %s, id, %s from groupes where nom = %s;\n", |
|
144 |
+ dq nom dq, |
|
145 |
+ dq date dq, |
|
146 |
+ dq groupe dq) |
|
147 |
+ printf("insert or ignore into dépouillements (scrutin, député, vote) select %i, députés.id, votes.id from députés inner join votes where députés.nom = %s and votes.nom = %s;\n", |
|
148 |
+ scrutin, |
|
149 |
+ dq nom dq, |
|
150 |
+ dq position dq) |
|
151 |
+ } |
|
152 |
+ } |
|
153 |
+ } |
|
154 |
+ } |
|
155 |
+ ' > $tempfile |
|
156 |
+ sqlite3 "$in_ram_database" < $tempfile |
|
105 | 157 |
|
106 |
- sqlite_request "insert into scrutins values ($scrutin, '$date', \"${title//\"}\", $adoption, ${id_url:-0})" |
|
107 |
- |
|
108 |
- for v in $(sqlite_request "select * from votes"); do |
|
109 |
- for g in $(sqlite_request "select id,nom from groupes"); do |
|
110 |
- for d in $(sed -rn '/<p class="nomgroupe">'${g#*|}' <span class="block topmargin">/,/<div class="TTgroupe topmargin-lg">/p' $tempfile \ |
|
111 |
- | sed -rn '/<p class="typevote">'${v#*|}':/,/<.div>/p' \ |
|
112 |
- | sed 's,</li>,\n,g' \ |
|
113 |
- | sed -rn '/<p class="typevote">/d; s,^\s*<li>\s*,,; s, , ,g; s/^\s*//; s/M(me|\.) //; s/ \(.*$//; s,<b>,,; s,</b>,,p'); do |
|
114 |
- d_id=$(sqlite_request "select id from députés where nom is \"$d\" and groupe is ${g%|*}") |
|
115 |
- if test -z "$d_id"; then |
|
116 |
- sqlite_request "insert into députés (nom, groupe) values (\"$d\", ${g%|*})" |
|
117 |
- d_id=$(sqlite_request "select id from députés where nom is \"$d\" and groupe is ${g%|*}") |
|
118 |
- fi |
|
119 |
- sqlite_request "insert into dépouillement values ($d_id, $scrutin, ${g%|*}, ${v%|*})" |
|
120 |
- done |
|
121 |
- done |
|
122 |
- done |
|
123 | 158 |
|
124 |
- if test $(( ($scrutin - $first) * 100 / ( $last - $first ) )) -ne $progress; then |
|
125 |
- progress=$(( ($scrutin - $first) * 100 / ( $last - $first ) )) |
|
159 |
+ if test $(( ($scrutin - ${first:-0}) * 100 / ( $last - ${first:-0} ) )) -ne ${progress:-0}; then |
|
160 |
+ progress=$(( ($scrutin - ${first:-0}) * 100 / ( $last - ${first:-0} ) )) |
|
126 | 161 |
if test $(($progress % ${update_progress:-1})) -eq 0; then |
127 | 162 |
now=$(date +%s) |
128 | 163 |
delta=$(( $now - $begin )) |
129 |
-# scrutin = first+1 à la première itération |
|
130 |
- echo $progress%, ETA: $(date +%H:%M:%S -d "$(($delta * ($last - $scrutin) / ($scrutin - $first) )) seconds") |
|
164 |
+# scrutin = {first:-0}+1 à la première itération |
|
165 |
+ printf "\r%d%%, ETA %s" $progress $(date +%H:%M:%S -d "$(($delta * ($last - $scrutin) / ($scrutin - ${first:-0}) )) seconds") |
|
131 | 166 |
fi |
132 | 167 |
fi |
133 | 168 |
done |
134 |
- rm -f "$url_database" "$tempfile" |
|
169 |
+ sqlite_request 'drop table dossier_par_scrutin' |
|
170 |
+ |
|
171 |
+ echo -e "\r\033[KTerminé: $(($scrutin - ${first:-0} - 1)) scrutins ajoutés" |
|
172 |
+ rm -f "$tempfile" |
|
135 | 173 |
fi |
136 | 174 |
first=$first_ |
137 | 175 |
} |
138 | 176 |
|
139 | 177 |
function write_comparaison () { |
140 |
- |
|
141 |
- result="comparaisons ${groupe[0]} avec ${groupe_ref:-GDR}${dossier:+ - ${dossier}}" |
|
178 |
+ result="scrutins ($(sum <<< "${groupe[@]}" | cut -b1-5))${dossier:+ - ${dossier}}" |
|
179 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
180 |
+ result="scrutins" |
|
181 |
+ fi |
|
142 | 182 |
content="/dev/shm/$result/content.xml" |
143 |
- id_cols="Scrutin Date Titre Adoption Panurgisme${nom:+ Participation Loyauté}" |
|
144 |
- typevotes=$(sqlite_request "select nom from votes") |
|
145 |
- nb_cols=$(( $(wc -w <<< $id_cols) + $(wc -w <<< $typevotes) * ${#groupe[@]} )) |
|
183 |
+ id_cols=(Scrutin Date Séance Titre Adoption Dossier) |
|
184 |
+ eval $(sqlite_request 'select printf("typevotes[%i]=%s;", id, nom) from votes') |
|
185 |
+ nb_cols=$(( ${#id_cols[@]} + ${#typevotes[@]} * ${#groupe[@]} )) |
|
146 | 186 |
last_col=$(awk -v n=$nb_cols 'BEGIN{printf("%c%c", n < 27 ? "" : int(n/26) + 64, (n % 26) + (n % 26 == 0 ? 26 : 0) + 64)}' | tr -d '\0') |
187 |
+ colors=($(awk -v n=${#groupe[@]} -v from=${from_color:-2A0636} -v to=${to_color:-D09B8A} ' |
|
188 |
+ function rgbL (p) { |
|
189 |
+ r = rgb_from[1] + p * (rgb_to[1] - rgb_from[1]) |
|
190 |
+ g = rgb_from[2] + p * (rgb_to[2] - rgb_from[2]) |
|
191 |
+ b = rgb_from[3] + p * (rgb_to[3] - rgb_from[3]) |
|
192 |
+ L = r * 0.299 + g * 0.587 + b * 0.114 |
|
193 |
+ printf("%02x%02x%02x:%s\n", int(r), int(g), int(b), L > 185 ? "000000" : "ffffff") |
|
194 |
+ } |
|
195 |
+ BEGIN { |
|
196 |
+ for (i = split(gensub("(..)(..)(..)", "\\1,\\2,\\3", "1", from), rgb_from, ","); i > 0; i--) |
|
197 |
+ rgb_from[i] = strtonum(sprintf("%d", strtonum("0x" rgb_from[i]))) |
|
198 |
+ for (i = split(gensub("(..)(..)(..)", "\\1,\\2,\\3", "1", to), rgb_to, ","); i > 0; i--) |
|
199 |
+ rgb_to[i] = strtonum(sprintf("%d", strtonum("0x" rgb_to[i]))) |
|
200 |
+ |
|
201 |
+ print "pour_bash_array_qui_commence_a_index_0" |
|
202 |
+ rgbL(0) |
|
203 |
+ for (i = 1; i < n-1; i++) { |
|
204 |
+ rgbL(i/n) |
|
205 |
+ } |
|
206 |
+ if (n > 1) rgbL(1) |
|
207 |
+ } |
|
208 |
+ ')) |
|
147 | 209 |
function write_cell () { |
148 | 210 |
case $1 in |
149 | 211 |
url) |
150 | 212 |
cell='<table:table-cell office:value-type="string" calcext:value-type="string">' |
151 |
- cell+="<text:p><text:a xlink:href=\"$2\" xlink:type=\"simple\">$3</text:a></text:p>";; |
|
213 |
+ cell+="<text:p><text:a xlink:href=$2 xlink:type=\"simple\">$3</text:a></text:p>" |
|
214 |
+ ;; |
|
152 | 215 |
texte) |
153 | 216 |
cell='<table:table-cell office:value-type="string" calcext:value-type="string">' |
154 | 217 |
cell+="<text:p>$2</text:p>" |
... | ... |
@@ -164,7 +227,9 @@ function write_comparaison () { |
164 | 227 |
echo $cell >> "$content" |
165 | 228 |
} |
166 | 229 |
|
167 |
- echo "génération du fichier $result" |
|
230 |
+ if test -z "$envoi_par_mail"; then |
|
231 |
+ echo "génération du fichier $result" |
|
232 |
+ fi |
|
168 | 233 |
|
169 | 234 |
mkdir -p "/dev/shm/$result/META-INF" |
170 | 235 |
|
... | ... |
@@ -207,36 +272,42 @@ EOcontent |
207 | 272 |
<style:style style:name="ta1" style:family="table" style:master-page-name="Default"> |
208 | 273 |
<style:table-properties table:display="true" style:writing-mode="lr-tb"/> |
209 | 274 |
</style:style> |
210 |
- <style:style style:name="ce1" style:family="table-cell" style:parent-style-name="Default"> |
|
211 |
- <style:table-cell-properties fo:background-color="#cccccc"/> |
|
212 |
- </style:style> |
|
275 |
+EOcontent |
|
276 |
+ |
|
277 |
+ for i in $(seq ${#groupe[@]}); do |
|
278 |
+ cat >> "$content" << EOcontent |
|
279 |
+ <style:style style:name="ce$i" style:family="table-cell" style:parent-style-name="Default"> |
|
280 |
+ <style:table-cell-properties fo:wrap-option="wrap" style:vertical-align="middle" fo:background-color="#${colors[$i]%:*}"/> |
|
281 |
+ <style:text-properties fo:hyphenate="false" fo:color="#${colors[$i]}"/> |
|
282 |
+ </style:style> |
|
283 |
+EOcontent |
|
284 |
+ done |
|
285 |
+ |
|
286 |
+ cat >> "$content" << EOcontent |
|
213 | 287 |
</office:automatic-styles> |
214 | 288 |
<office:body> |
215 | 289 |
<office:spreadsheet> |
216 | 290 |
<table:calculation-settings table:automatic-find-labels="false"/> |
217 | 291 |
<table:table table:name="$result" table:style-name="ta1"> |
218 | 292 |
<office:forms form:automatic-focus="false" form:apply-design-mode="false"/> |
219 |
- <table:table-column table:style-name="co1" table:number-columns-repeated="$(wc -w <<< $id_cols)" table:default-cell-style-name="Default"/> |
|
293 |
+ <table:table-column table:style-name="co1" table:number-columns-repeated="${#id_cols[@]}" table:default-cell-style-name="Default"/> |
|
220 | 294 |
EOcontent |
221 | 295 |
|
222 |
- for i in $(seq $(wc -w <<< $typevotes)); do |
|
223 |
- cat >> "$content" << EOcontent |
|
224 |
- <table:table-column table:style-name="co1" table:default-cell-style-name="ce1"/> |
|
225 |
-EOcontent |
|
226 |
- for (( g = 1; g < ${#groupe[@]}; g++ )); do |
|
296 |
+ for i in $(seq ${#typevotes[@]}); do |
|
297 |
+ for g in $(seq ${#groupe[@]}); do |
|
227 | 298 |
cat >> "$content" << EOcontent |
228 |
- <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/> |
|
299 |
+ <table:table-column table:style-name="co1" table:default-cell-style-name="ce$g"/> |
|
229 | 300 |
EOcontent |
230 | 301 |
done |
231 | 302 |
done |
232 | 303 |
echo '<table:table-row table:style-name="ro1">' >> "$content" |
233 | 304 |
|
234 | 305 |
IFS=$IFS_ |
235 |
- for colonne in $id_cols; do |
|
306 |
+ for colonne in ${id_cols[@]}; do |
|
236 | 307 |
write_cell texte $colonne |
237 | 308 |
done |
238 | 309 |
|
239 |
- for typevote in $typevotes; do |
|
310 |
+ for typevote in ${typevotes[@]}; do |
|
240 | 311 |
for g in "${groupe[@]}"; do |
241 | 312 |
write_cell texte "$typevote - $g" |
242 | 313 |
done |
... | ... |
@@ -249,84 +320,59 @@ EOcontent |
249 | 320 |
line=1 |
250 | 321 |
test -z "$seq" && qty=$(( $last - $first )) |
251 | 322 |
IFS=$'\n' |
323 |
+ scrutin_base_url="https://www2.assemblee-nationale.fr/scrutins/detail/(legislature)/$mandature/(num)/" |
|
252 | 324 |
for scrutin in $(eval ${seq:-seq $first $last}); do |
253 | 325 |
|
254 |
- data=$(sqlite_request "select date,intitulé,adoption,url.url from scrutins inner join url on scrutins.url = url.id where num is $scrutin") |
|
255 |
- date=$(cut -d'|' -sf 1 <<< $data) |
|
256 |
- title=$(cut -d'|' -sf 2 <<< $data) |
|
257 |
- adoption=$(cut -d'|' -sf 3 <<< $data) |
|
258 |
- url=$(cut -d'|' -sf 4 <<< $data) |
|
326 |
+ data=$(sqlite_request "select date,séance,intitulé,adoption,dossiers.url,dossiers.titre from scrutins left join dossiers on scrutins.dossier = dossiers.id where num is $scrutin" json) |
|
327 |
+ date=$(jq -r '.[].date' <<< $data) |
|
328 |
+ seance=$(jq -r '.[]."séance"' <<< $data) |
|
329 |
+ title=$(jq -r '.[]."intitulé" | @html' <<< $data) |
|
330 |
+ adoption=$(jq '.[].adoption' <<< $data) |
|
331 |
+ dossier_url=$(jq '.[].url' <<< $data) |
|
332 |
+ dossier_texte=$(jq -r '.[].titre | @html' <<< $data) |
|
259 | 333 |
test $adoption -eq 1 && adoption='oui' || adoption='non' |
260 | 334 |
|
261 | 335 |
echo '<table:table-row table:style-name="ro1">' >> "$content" |
262 | 336 |
|
263 |
- if test -n "$url"; then |
|
264 |
- write_cell url "$url" $scrutin |
|
265 |
- else |
|
266 |
- write_cell nombre $scrutin |
|
267 |
- fi |
|
337 |
+ write_cell url "\"$scrutin_base_url$scrutin\"" $scrutin |
|
268 | 338 |
write_cell texte "$date" |
269 |
- write_cell texte "${title//\'/'}" |
|
339 |
+ write_cell texte "$seance" |
|
340 |
+ write_cell texte "$title" |
|
270 | 341 |
write_cell texte "$adoption" |
342 |
+ write_cell url "${dossier_url/#null/\"\"}" "${dossier_texte/#null}" |
|
271 | 343 |
|
272 |
- for typevote in $(seq 0 $(( $(wc -w <<< $typevotes) - 1 ))); do |
|
273 |
- vote_cible[$typevote]=$(sqlite_request "select |
|
274 |
- count(député) |
|
275 |
- from |
|
276 |
- dépouillement |
|
277 |
- where |
|
278 |
- scrutin is $scrutin |
|
279 |
- and |
|
280 |
- vote is $typevote |
|
281 |
- and |
|
282 |
- groupe is ${groupe_id[0]} ${nom:+ and député is ${nom%|*}}") |
|
283 |
- done |
|
284 |
- if test \( ${vote_cible[0]} -gt ${vote_cible[1]} -a $adoption = oui \) \ |
|
285 |
- -o \( ${vote_cible[1]} -gt ${vote_cible[0]} -a $adoption = non \); then |
|
286 |
- panurge=1 |
|
287 |
- else |
|
288 |
- panurge=0 |
|
289 |
- fi |
|
290 |
- write_cell nombre $panurge |
|
291 |
- |
|
292 |
- if test -n "$nom"; then |
|
293 |
- for typevote in 0 1; do |
|
294 |
- votes_g0[$typevote]=$(sqlite_request "select |
|
344 |
+ unset votes |
|
345 |
+ for typevote in $(seq ${#typevotes[@]}); do |
|
346 |
+ for (( g = 0; g < ${#groupe[@]}; g++ )); do |
|
347 |
+ votes[${#votes[@]}]=$(sqlite_request "select |
|
295 | 348 |
count(député) |
296 | 349 |
from |
297 |
- dépouillement |
|
350 |
+ dépouillements |
|
351 |
+ inner join |
|
352 |
+ députés, groupes |
|
353 |
+ on |
|
354 |
+ députés.groupe = groupes.id and dépouillements.député = députés.id |
|
298 | 355 |
where |
299 | 356 |
scrutin is $scrutin |
300 | 357 |
and |
301 | 358 |
vote is $typevote |
302 | 359 |
and |
303 |
- groupe is ${groupe_id[0]}") |
|
360 |
+ ${id_groupe[$g]%:*}.nom = '${groupe[$g]//\'/\'\'}'") |
|
304 | 361 |
done |
305 |
- participation=$(( vote_cible[0] + vote_cible[1] + vote_cible[2] + vote_cible[3] )) |
|
306 |
- if test $(( (${votes_g0[0]} - ${votes_g0[1]}) * (${vote_cible[0]} - ${vote_cible[1]}) )) -gt 0; then |
|
307 |
- loyaute=1 |
|
308 |
- else |
|
309 |
- loyaute=0 |
|
310 |
- fi |
|
311 |
- write_cell nombre $participation |
|
312 |
- write_cell nombre $loyaute |
|
313 |
- fi |
|
314 |
- |
|
315 |
- for typevote in $(seq 0 $(( $(wc -w <<< $typevotes) - 1 ))); do |
|
316 |
- write_cell nombre ${vote_cible[$typevote]} |
|
317 |
- for (( g = 1; g < ${#groupe_id[@]}; g++ )); do |
|
318 |
- votes=$(sqlite_request "select |
|
319 |
- count(député) |
|
320 |
- from |
|
321 |
- dépouillement |
|
322 |
- where |
|
323 |
- scrutin is $scrutin |
|
324 |
- and |
|
325 |
- vote is $typevote |
|
326 |
- and |
|
327 |
- groupe is ${groupe_id[$g]}") |
|
328 |
- write_cell nombre $votes |
|
362 |
+ done |
|
363 |
+ for ((j = 0; j < ${#groupe[@]}; j++)); do |
|
364 |
+ presence=1 # `let presence+=0` sort en erreur si variable est unset ou égale à 0 |
|
365 |
+ for ((i = $j; i < ${#votes[@]}; i += ${#groupe[@]})); do |
|
366 |
+ let presence+=${votes[$i]} |
|
329 | 367 |
done |
368 |
+ if test $presence -eq 1; then |
|
369 |
+ for ((i = $j; i < ${#votes[@]}; i += ${#groupe[@]})); do |
|
370 |
+ votes[$i]=-1 |
|
371 |
+ done |
|
372 |
+ fi |
|
373 |
+ done |
|
374 |
+ for ((i = 0; i < ${#votes[@]}; i ++)); do |
|
375 |
+ write_cell nombre ${votes[$i]} |
|
330 | 376 |
done |
331 | 377 |
echo '</table:table-row>' >> "$content" |
332 | 378 |
|
... | ... |
@@ -335,14 +381,13 @@ EOcontent |
335 | 381 |
if test $(( $progress % ${generation_progress:-5} )) -eq 0; then |
336 | 382 |
now=$(date +%s) |
337 | 383 |
delta=$(( $now - $begin )) |
338 |
- echo $progress%, ETA: $(date +%H:%M:%S -d "$(( $delta * (${qty:-$last} - $line) / $line )) seconds") |
|
384 |
+ printf "\r%d%%, ETA %s" $progress $(date +%H:%M:%S -d "$(( $delta * (${qty:-$last} - $line) / $line )) seconds") |
|
339 | 385 |
fi |
340 | 386 |
fi |
341 | 387 |
|
342 | 388 |
let line++ |
343 | 389 |
|
344 | 390 |
done |
345 |
- echo |
|
346 | 391 |
|
347 | 392 |
cat >> "$content" << EOcontent |
348 | 393 |
</table:table> |
... | ... |
@@ -357,98 +402,155 @@ EOcontent |
357 | 402 |
|
358 | 403 |
( cd "/dev/shm/$result" && zip -r ../"$result" * > /dev/null 2>&1 && cd .. && rm -fr "$result" ) |
359 | 404 |
|
360 |
- mv -f "/dev/shm/$result.zip" "$result.ods" |
|
405 |
+ mv -f "/dev/shm/$result.zip" "${destination_path:+$destination_path/}$result.ods" |
|
361 | 406 |
|
362 |
- echo "$result.ods" |
|
407 |
+ if test -z "$envoi_par_mail"; then |
|
408 |
+ echo -e "\r\033[KTerminé : ${destination_path:+$destination_path/}$result.ods" |
|
409 |
+ fi |
|
363 | 410 |
} |
364 | 411 |
|
365 | 412 |
function save_database () { |
413 |
+ rm -f "$token_file" |
|
366 | 414 |
test -n "$result" -a -d "/dev/shm/$result" && rm -fr "/dev/shm/$result" |
367 | 415 |
test -n "$database" -a -n "$in_ram_database" || return |
368 |
- test -r "$in_ram_database" || return |
|
369 |
- if test -r "$database" && md5sum $in_ram_database | sed "s,$in_ram_database,$database," | md5sum --status -c -; then |
|
370 |
- rm -f $in_ram_database |
|
416 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
417 |
+ if test -n "$mailconfig_file" && test -r "$mailconfig_file"; then |
|
418 |
+ source "$mailconfig_file" |
|
419 |
+ elif test -r "/usr/local/etc/${0##*/}.mail.conf"; then |
|
420 |
+ source "/usr/local/etc/${0##*/}.mail.conf" |
|
421 |
+ fi |
|
422 |
+ stat -Lc "(date de mise à jour de la base: %x)" $database |
|
423 |
+ cat > $process_token.headers << EOC |
|
424 |
+From: ${from_mail:?} |
|
425 |
+To: $destinataire |
|
426 |
+Subject: les scrutins demandés |
|
427 |
+EOC |
|
428 |
+ curl_opt=( |
|
429 |
+ --url smtp://${smtp_address:?}:${smtp_port:?} |
|
430 |
+ --mail-rcpt $destinataire |
|
431 |
+ -H @$process_token.headers |
|
432 |
+ -F "=(;type=multipart/alternative" |
|
433 |
+ -F "=<$process_token.txt;encoder=quoted-printable" |
|
434 |
+ -F "=<$process_token.html;encoder=quoted-printable" |
|
435 |
+ -F "=)" |
|
436 |
+ ) |
|
437 |
+ if test -r "${destination_path:+$destination_path/}$result.ods"; then |
|
438 |
+ curl_opt[${#curl_opt[@]}]="-F" |
|
439 |
+ curl_opt[${#curl_opt[@]}]="=@${destination_path:+$destination_path/}$result.ods;encoder=base64" |
|
440 |
+ fi |
|
441 |
+ exec 1>&- |
|
442 |
+ aha -f $process_token.mail -t "envoi automatisé" > $process_token.html |
|
443 |
+ w3m -dump $process_token.html > $process_token.txt |
|
444 |
+ curl ${curl_opt[@]} |
|
445 |
+ rm -f "${destination_path:+$destination_path/}$result.ods" $process_token* |
|
446 |
+ elif test -r "$database" && sqldiff=$(sqldiff $in_ram_database $database) && test -z "$sqldiff"; then |
|
447 |
+ echo "pas de modification" |
|
371 | 448 |
elif test -w "$database"; then |
372 |
- mv -f $in_ram_database "$database" |
|
373 |
- elif ! test -e "$database"; then |
|
374 |
- mv $in_ram_database "$database" |
|
449 |
+ rm -f "$database" |
|
450 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
451 |
+ echo "base de données $database mise à jour" |
|
452 |
+ elif test ! -e "$database" -a -w ${database%/*}; then |
|
453 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
454 |
+ echo "base de données $database créée" |
|
375 | 455 |
else |
376 |
- rm -f $in_ram_database |
|
456 |
+ echo "je ne peux rien faire avec $database !" |
|
377 | 457 |
fi |
458 |
+ rm -f "$in_ram_database" "$tempfile" |
|
378 | 459 |
} |
379 | 460 |
|
380 | 461 |
function dernier_scrutin_public () { |
381 |
- wget -qO- 'http://www2.assemblee-nationale.fr/scrutins/liste/(legislature)/15/(type)/TOUS/(idDossier)/TOUS' \ |
|
382 |
- | sed -rn 's,^.*<td class="denom">(.+)</td>.*$,\1,p' \ |
|
462 |
+ wget -qO- "http://www2.assemblee-nationale.fr/scrutins/liste/(legislature)/$mandature/(type)/TOUS/(idDossier)/TOUS" \ |
|
463 |
+ | sed -rn 's/^.*<td class="denom">([0-9]+)[^0-9].*$/\1/p' \ |
|
383 | 464 |
| head -1 |
384 | 465 |
} |
385 | 466 |
|
386 | 467 |
trap save_database EXIT |
387 | 468 |
|
388 |
-true_flag=$(mktemp --dry-run XXXXX) |
|
469 |
+test -z "$database" && database="${0}.db" |
|
389 | 470 |
|
390 |
-OPTS=$( getopt -l no-db-update,\ |
|
391 |
- db-update-only,\ |
|
392 |
- cible:,\ |
|
393 |
- ref:,\ |
|
394 |
- député:,\ |
|
395 |
- premier-scrutin:,\ |
|
396 |
- dernier-scrutin:,\ |
|
397 |
- période:,\ |
|
398 |
- liste-dossiers,\ |
|
399 |
- liste-députés,\ |
|
400 |
- dossiers,\ |
|
401 |
- dossier:,\ |
|
402 |
- conf:,\ |
|
403 |
- database:,\ |
|
404 |
- progrès-génération:\ |
|
405 |
- progrès-update:,\ |
|
406 |
- help \ |
|
407 |
- -- "$@" ) |
|
408 |
- |
|
409 |
-eval set --$OPTS |
|
471 |
+declare -A acronymes |
|
472 |
+if test -n "$config_file"; then |
|
473 |
+ source "$config_file" |
|
474 |
+else |
|
475 |
+ config_file="${0}.conf" |
|
476 |
+ if test -r "$config_file"; then |
|
477 |
+ source "$config_file" |
|
478 |
+ fi |
|
479 |
+fi |
|
480 |
+ |
|
481 |
+true_flag=$(mktemp --dry-run XXXXX) |
|
482 |
+echo "$0 $@" > $token_file |
|
410 | 483 |
|
411 | 484 |
while [[ $# -gt 0 ]]; do |
412 | 485 |
case "$1" in |
413 | 486 |
"--no-db-update") |
414 | 487 |
#|ne met pas à jour la base de données |
488 |
+ if test ${db_update_only:-OK} = $true_flag; then |
|
489 |
+ echo "option incompatible avec --db-update-only" |
|
490 |
+ exit 1 |
|
491 |
+ fi |
|
415 | 492 |
no_db_update=$true_flag;; |
416 | 493 |
"--db-update-only") |
417 | 494 |
#|ne génère pas de fichier de résultat |
495 |
+ if test ${no_db_update:-OK} = $true_flag; then |
|
496 |
+ echo "option incompatible avec --no-db-update" |
|
497 |
+ exit 1 |
|
498 |
+ fi |
|
418 | 499 |
db_update_only=$true_flag;; |
419 |
- "--cible") |
|
420 |
-#<nom court du groupe>|génère un comparatif pour ce groupe. Par défaut LREM |
|
421 |
- groupe[0]="${2^^}" |
|
500 |
+ "--cible"|"-c") |
|
501 |
+#<nom court du groupe>|ajoute les scrutins de ce groupe, de ce ou cette députée, les colonnes seront dans l'ordre |
|
502 |
+ _groupe[${#_groupe[@]}]="${2//\'/\'\'}" |
|
422 | 503 |
shift;; |
423 |
- "--ref") |
|
424 |
-#<nom court du groupe ou des groupes>|compare avec ce ou ces groupes. Si plusieurs groupes, ils sont séparés par une virgule, sans espace. Par défaut GDR |
|
425 |
- groupe_ref="${2^^}" |
|
504 |
+ "--couleurs") |
|
505 |
+#<nombre hexadécimal>:<nombre hexadécimal>|colore les colonnes en dégradé entre les deux couleurs comprises |
|
506 |
+ if grep -iq '[^0-9A-F:]' <<< ${2:-ERROR}; then |
|
507 |
+ echo "$1 ${2:-ERROR}: format attendu <nombre>:<nombre>" |
|
508 |
+ exit 1 |
|
509 |
+ elif egrep -iq '[0-9A-F]{6}:[0-9A-F]{6}' <<< ${2:-ERROR}; then |
|
510 |
+ from_color=${2%:*} |
|
511 |
+ to_color=${2#*:} |
|
512 |
+ else |
|
513 |
+ echo erreur $2: couleur RGB au format hexadécimal demandé |
|
514 |
+ fi |
|
426 | 515 |
shift;; |
427 |
- "--député") |
|
428 |
-#<nom>|filtre la cible sur un-e député-e sur le groupe cible (par défaut LREM). <nom> est insensible à la casse. Tout ou partie du nom ou du prénom peut être donné, espace compris. Caractère % utilisé comme caractère joker. Si aucune correspondance n'est trouvée avec un-e député-é, sortie en erreur. Si plusieurs député-e-s correspondent la liste est affichée et sortie en erreur. |
|
429 |
- depute=$true_flag |
|
430 |
- nom="$2" |
|
516 |
+ "--mandature") |
|
517 |
+ mandature="$2" |
|
518 |
+ ;; |
|
519 |
+ "--scrutin") |
|
520 |
+#<nombre>[:<nombre>]|commence la génération du résultat pour le scrutin <nombre>, ou entre les deux nombres donnés |
|
521 |
+ if grep -q '[^0-9:]' <<< ${2:-ERROR}; then |
|
522 |
+ echo "$1 ${2:-ERROR}: format attendu <nombre>[:<nombre>]" |
|
523 |
+ exit 1 |
|
524 |
+ elif egrep -q '[1-9][0-9]*(:[1-9][0-9]*)?' <<< ${2:-ERROR}; then |
|
525 |
+ first=${2%:*} |
|
526 |
+ last=${2#*:} |
|
527 |
+ if test $first -gt $last; then |
|
528 |
+ last+=:$first |
|
529 |
+ first=${last%:*} |
|
530 |
+ last=${last#*:} |
|
531 |
+ fi |
|
532 |
+ else |
|
533 |
+ echo "$1 ${2:-ERROR}: <nombre> ne doit pas commencer par 0" |
|
534 |
+ exit 1 |
|
535 |
+ fi |
|
431 | 536 |
shift;; |
432 | 537 |
"--premier-scrutin") |
433 | 538 |
#<numéro>|commence la génération du résultat à partir du scrutin <numéro> |
434 |
- no_db_update=$true_flag |
|
435 | 539 |
first="$2" |
436 | 540 |
shift;; |
437 | 541 |
"--dernier-scrutin") |
438 | 542 |
#<numéro>|termine la génération du résultat au scrutin <numéro> |
439 |
- no_db_update=$true_flag |
|
440 | 543 |
last="$2" |
441 | 544 |
shift;; |
442 | 545 |
"--période") |
443 | 546 |
#<jj/mm/aaaa:JJ/MM/AAAA>|génère un résultat pour les scrutins allant de jj/mm/aaaa à JJ/MM/AAAA |
444 | 547 |
periode=$true_flag |
445 |
- no_db_update=$true_flag |
|
446 | 548 |
periode_value="$2" |
447 | 549 |
shift;; |
448 | 550 |
"--liste-députés-du-groupe") |
449 |
-#<nom court du groupe>|liste les député-e-s du groupe <nom court du groupe> sur la mandature |
|
551 |
+#<groupe>|liste les député·e·s du groupe <groupe> |
|
450 | 552 |
liste_deputes=$true_flag |
451 |
- liste_deputes_value="${2^^}" |
|
553 |
+ liste_deputes_value="${2}" |
|
452 | 554 |
shift;; |
453 | 555 |
"--liste-députés") |
454 | 556 |
#|liste tou-te-s les député-e-s de la mandature |
... | ... |
@@ -472,12 +574,31 @@ while [[ $# -gt 0 ]]; do |
472 | 574 |
} |
473 | 575 |
config_file="$2" |
474 | 576 |
shift;; |
577 |
+ "--mailconf") |
|
578 |
+#<fichier>|indique le chemin vers le fichier de configuration. Par défaut "{_}.conf" |
|
579 |
+ test -r "$2" || { |
|
580 |
+ echo "config introuvable $2" >&2 |
|
581 |
+ options_error=$true_flag |
|
582 |
+ } |
|
583 |
+ mailconfig_file="$2" |
|
584 |
+ shift;; |
|
585 |
+ "--dest") |
|
586 |
+#<répertoire>|génère le fichier dans le répertoire spécifié. Par défaut $PWD |
|
587 |
+ if test -n "$2" && test -d "$2" -a -r "$2"; then |
|
588 |
+ destination_path="$2" |
|
589 |
+ shift |
|
590 |
+ else |
|
591 |
+ echo "$2 n'est pas un répertoire ou n'est pas autorisé en écriture" >&2 |
|
592 |
+ exit 1 |
|
593 |
+ fi;; |
|
475 | 594 |
"--database") |
476 | 595 |
#<fichier>|indique le chemin vers la base de données SQLite3 contenant les informations. Par défaut "{_}.db" |
477 |
- test -r "$2" && file -b "$2" | grep -q '^SQLite 3.x database' || { |
|
596 |
+ if test -r "$2" && file -Lb "$2" | grep -q '^SQLite 3.x database'; then |
|
597 |
+ : |
|
598 |
+ else |
|
478 | 599 |
echo "erreur sur option database: fichier '$2' introuvable ou pas une base SQLite 3" >&2 |
479 | 600 |
options_error=$true_flag |
480 |
- } |
|
601 |
+ fi |
|
481 | 602 |
database="$2" |
482 | 603 |
shift;; |
483 | 604 |
"--progrès-génération") |
... | ... |
@@ -488,10 +609,18 @@ while [[ $# -gt 0 ]]; do |
488 | 609 |
#<chiffre>|affiche de la progression de la mise à jour de la base de données tous les <chiffre>%. Par défaut 1 |
489 | 610 |
update_progress="$2" |
490 | 611 |
shift;; |
612 |
+ "--mail") |
|
613 |
+ envoi_par_mail=$true_flag |
|
614 |
+ destinataire="$2" |
|
615 |
+ no_db_update=$true_flag |
|
616 |
+ destination_path=/dev/shm |
|
617 |
+ generation_progress=1000 |
|
618 |
+ exec > $process_token.mail 2>&1 |
|
619 |
+ shift;; |
|
491 | 620 |
"--help") |
492 | 621 |
#|affiche cette aide et quitte |
493 | 622 |
echo "$0 [options]" |
494 |
- echo "génère un classeur ODS pour comparer les scrutins publics de la 15ème mandature à l'Assemblée Nationale" |
|
623 |
+ echo "génère un classeur ODS pour comparer les scrutins publics de la 16ème mandature à l'Assemblée Nationale" |
|
495 | 624 |
echo |
496 | 625 |
sed -rn '/^ *"--.+"\)/N; s/^ *"(--.+)"\)\n#(.+)$/\1|\2/p' "$0" \ |
497 | 626 |
| awk -F'|' -v marge=' ' -v prog="$0" '{ |
... | ... |
@@ -504,47 +633,65 @@ done |
504 | 633 |
|
505 | 634 |
test "$options_error" = $true_flag && exit 1 |
506 | 635 |
|
507 |
-test -z "$database" && database="${0}.db" |
|
508 |
- |
|
509 |
-declare -A groupes |
|
510 |
-if test -n "$config_file"; then |
|
511 |
- source "$config_file" |
|
512 |
-else |
|
513 |
- config_file="${0}.conf" |
|
514 |
- if test -r "$config_file"; then |
|
515 |
- source "$config_file" |
|
636 |
+while true; do |
|
637 |
+ if ls -1rt /dev/shm/*."${0##*/}" | head -1 | grep -q "^$token_file$"; then |
|
638 |
+ # c'est notre tour |
|
639 |
+ break |
|
640 |
+ else |
|
641 |
+ sleep 5 |
|
516 | 642 |
fi |
517 |
-fi |
|
518 |
- |
|
519 |
-IFS=',' groupe=(${groupe[0]:-LREM} ${groupe_ref:-GDR}) |
|
643 |
+done |
|
520 | 644 |
|
521 |
-in_ram_database=$(mktemp --dry-run /dev/shm/XXXXXXXXXXXX) |
|
645 |
+in_ram_database=$process_token.db |
|
522 | 646 |
if test -r "$database"; then |
523 | 647 |
cp "$database" "$in_ram_database" |
524 | 648 |
else |
525 | 649 |
create_database |
526 | 650 |
fi |
527 | 651 |
|
528 |
-for (( g = 0; g < ${#groupe[@]}; g++ )); do |
|
529 |
- groupe_id[$g]=$(sqlite_request "select id from groupes where nom_court is '${groupe[$g]}'") |
|
530 |
- if test -z "${groupe_id[$g]}"; then |
|
531 |
- echo "groupe ${groupe[$g]} inconnu" >&2 |
|
532 |
- exit 1 |
|
533 |
- fi |
|
534 |
-done |
|
535 |
- |
|
536 | 652 |
if test "$periode" = $true_flag; then |
537 |
- first=$(sqlite_request "select num from scrutins where date like '% du ${periode_value%:*}' order by num asc" | head -1) |
|
538 |
- last=$(sqlite_request "select num from scrutins where date like '% du ${periode_value#*:}' order by num asc" | tail -1) |
|
539 |
- test -z "$first" && echo "date de début inconnue: ${periode_value#*:}" >&2 && rm -f $in_ram_database && exit 1 |
|
540 |
- test -z "$last" && echo "date de fin inconnue: ${periode_value%:*}" >&2 && rm -f $in_ram_database && exit 1 |
|
653 |
    # Map a user-supplied date onto an actual scrutin date from the database.
    #   $1: date, "jj/mm/aaaa"-style (1- or 2-digit day and month, 2- or
    #       4-digit year tolerated — see norm_date inside the awk program)
    #   $2: "first" or "last" — pick the start or the end of the period
    # Prints the first database date >= $1 ("first") or the last one <= $1
    # ("last"); falls back to the last known date when nothing matches.
    function get_date () {
        sqlite_request "select distinct(date) from scrutins order by num asc" | awk -v d="$1" -v comp=$2 '
        function norm_date (date) {
            split(date, a, "/")
            return sprintf("%s%s%s",
                length(a[3]) == 4 ? a[3] : length(a[3]) == 2 ? "20" a[3] : strftime("%Y", systime()),
                length(a[2]) == 2 ? a[2] : "0" a[2],
                length(a[1]) == 2 ? a[1] : "0" a[1])
        }
        function output (date) {
            print date
            found = 1
            exit
        }
        BEGIN { d = norm_date(d) }
        {
            s = norm_date($1)
            if (NR == 1 && s > d && comp == "first") output($1)
            if (s >= d && comp == "first") output($1)
            if (s == d && comp == "last") output($1)
            if (s > d && comp == "last") output(previous)
            previous = $1
        }
        END {
            if (!found) print previous
        }'
    }
|
680 |
+ first=$(sqlite_request "select min(num) from scrutins where date = '$(get_date ${periode_value%:*} first)'") |
|
681 |
+ last=$(sqlite_request "select max(num) from scrutins where date = '$(get_date ${periode_value#*:} last)'") |
|
682 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
683 |
+ texte_periode="du $(get_date ${periode_value%:*} first) (scrutin n°$first) au $(get_date ${periode_value#*:} last) (scrutin n°$last)" |
|
684 |
+ fi |
|
541 | 685 |
elif test "$dossier" != $true_flag; then |
542 | 686 |
test -z "$last" && last=$(dernier_scrutin_public) |
543 | 687 |
test -z "$first" && first=1 |
544 | 688 |
fi |
545 | 689 |
|
546 | 690 |
if test "$liste_dossiers" = $true_flag; then |
547 |
- sqlite_request "select printf('%s - %s', id, url) from url" | sed 's,https*://.*/dossiers/,,; s/_/ /g; s/.asp$//' |
|
691 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
692 |
+ echo "Voici la liste des dossiers actuellement à l'étude" |
|
693 |
+ fi |
|
694 |
+ sqlite_request "select printf('• %s (%s)', titre, url) from dossiers" |
|
548 | 695 |
exit |
549 | 696 |
fi |
550 | 697 |
|
... | ... |
@@ -557,49 +704,77 @@ fi |
557 | 704 |
|
558 | 705 |
if test "$liste_deputes" = $true_flag; then |
559 | 706 |
if test -n "$liste_deputes_value"; then |
560 |
- sqlite_request "select printf('%s - %s', députés.nom, groupes.nom_court) from députés inner join groupes on groupes.id = députés.groupe where groupes.nom_court is '$liste_deputes_value'" |
|
707 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
708 |
+ echo "Voici la liste des député·e·s du groupe dont le nom correspond au critère $liste_deputes_value" |
|
709 |
+ fi |
|
710 |
+ sqlite_request "select printf('%s - %s%s', |
|
711 |
+ députés.nom, |
|
712 |
+ groupes.nom, |
|
713 |
+ iif(groupes.nom_court is not null, ' [' || groupes.nom_court || ']', '')) |
|
714 |
+ from députés |
|
715 |
+ inner join groupes on groupes.id = députés.groupe |
|
716 |
+ where |
|
717 |
+ groupes.nom like '%$liste_deputes_value%' |
|
718 |
+ or |
|
719 |
+ groupes.nom_court = '$liste_deputes_value'" |
|
561 | 720 |
else |
562 |
- sqlite_request "select printf('%s - %s', députés.nom, groupes.nom_court) from députés inner join groupes on groupes.id = députés.groupe order by groupes.nom_court asc" |
|
563 |
- fi |
|
564 |
- exit |
|
565 |
-fi |
|
566 |
- |
|
567 |
-if test "$depute" = $true_flag; then |
|
568 |
- if test -n "$nom"; then |
|
569 |
- match=$(sqlite_request "select count(députés.id) from députés inner join groupes on groupes.id = députés.groupe where députés.nom like '%$nom%' and groupes.nom_court is '$groupe' collate nocase") |
|
570 |
- if test $match -ne 1; then |
|
571 |
- if test $match -eq 0; then |
|
572 |
- echo "pas de député correspondant dans le groupe $groupe" |
|
573 |
- else |
|
574 |
- echo "plusieurs députés correspondent:" |
|
575 |
- sqlite_request "select députés.nom from députés inner join groupes on groupes.id = députés.groupe where députés.nom like '%$nom%' and groupes.nom_court is '$groupe' collate nocase" |
|
576 |
- fi |
|
577 |
- exit 1 |
|
578 |
- else |
|
579 |
- nom=$(sqlite_request "select députés.id,députés.nom from députés inner join groupes on groupes.id = députés.groupe where députés.nom like '%$nom%' and groupes.nom_court is '$groupe' collate nocase") |
|
580 |
- groupe[0]="${nom#*|} (${groupe[0]})" |
|
721 |
+ if test "$envoi_par_mail" = $true_flag; then |
|
722 |
+ echo "Voici la liste des député·e·s" |
|
581 | 723 |
fi |
724 |
+ sqlite_request "select printf('%s - %s%s', |
|
725 |
+ députés.nom, |
|
726 |
+ groupes.nom, |
|
727 |
+ iif(groupes.nom_court is not null, ' [' || groupes.nom_court || ']', '')) |
|
728 |
+ from députés |
|
729 |
+ inner join groupes on groupes.id = députés.groupe |
|
730 |
+ order by groupes.nom asc" |
|
582 | 731 |
fi |
732 |
+ exit |
|
583 | 733 |
fi |
584 | 734 |
|
585 |
-if test "$dossier" = $true_flag; then |
|
586 |
- last=$(dernier_scrutin_public) |
|
587 |
- if test -z "$dossier_value"; then |
|
588 |
- IFS=$'\n' |
|
589 |
- select dossier in $(sqlite_request "select url from url" | sed 's,^.*/dossiers/,,; s/_/ /g; s/.asp$//'); do |
|
590 |
- if test -n "$dossier"; then |
|
591 |
- seq="sqlite_request \"select num from scrutins inner join url on url.id = scrutins.url where url.url like '%/dossiers/${dossier// /_}%' order by num asc\"" |
|
592 |
- qty=$(sqlite_request "select count(num) from scrutins inner join url on url.id = scrutins.url where url.url like '%/dossiers/${dossier// /_}%' order by num asc") |
|
593 |
- break |
|
594 |
- fi |
|
595 |
- done |
|
596 |
- IFS=$IFS_ |
|
735 |
+for (( g = 0; g < ${#_groupe[@]}; g++ )); do |
|
736 |
+ # on vérifie si c'est un ou une député |
|
737 |
+ depute_count=$(sqlite_request "select count(distinct nom) from députés where nom like '%${_groupe[$g]}%'") |
|
738 |
+ groupe_count=$(sqlite_request "select count(distinct nom) from groupes where nom like \"%${_groupe[$g]}%\" or nom_court is '${_groupe[$g]}'") |
|
739 |
+ if test $depute_count -eq 1 -a $groupe_count -ne 1; then |
|
740 |
+ groupe[$g]=$(sqlite_request "select distinct nom from députés where nom like '%${_groupe[$g]}%'") |
|
741 |
+ id_groupe[$g]=députés:$(sqlite_request "select group_concat(id) from députés where nom is '${groupe[$g]//\'/\'\'}'") |
|
742 |
+ elif test $groupe_count -eq 1 -a $depute_count -ne 1; then |
|
743 |
+ groupe[$g]=$(sqlite_request "select distinct nom from groupes where nom like \"%${_groupe[$g]}%\" or nom_court is '${_groupe[$g]}'") |
|
744 |
+ id_groupe[$g]=groupes:$(sqlite_request "select id from groupes where nom is '${groupe[$g]//\'/\'\'}'") |
|
745 |
+ elif test $groupe_count -eq 1 -a $depute_count -eq 1; then |
|
746 |
+ echo "dénomination ambigüe pour « ${_groupe[$g]} »" |
|
747 |
+ sqlite_request "select printf('député·e: %s', distinct nom) from députés where nom like '%${_groupe[$g]}%'" | grep --color=always -i "${_groupe[$g]}" |
|
748 |
+ sqlite_request "select printf('groupe : %s', distinct nom) from groupes where nom like \"%${_groupe[$g]}%\" or nom_court is '${_groupe[$g]}'" | grep --color=always -i "${_groupe[$g]}" |
|
749 |
+ echo |
|
750 |
+ elif test $depute_count -gt 1; then |
|
751 |
+ echo "plusieurs député·e·s trouvé·e·s correspondant à « ${_groupe[$g]} »" |
|
752 |
+ sqlite_request "select distinct nom from députés where nom like '%${_groupe[$g]}%'" | grep --color=always -i "${_groupe[$g]}" |
|
753 |
+ echo |
|
754 |
+ elif test $groupe_count -gt 1; then |
|
755 |
+ echo "plusieurs groupes trouvés correspondant à « ${_groupe[$g]} »" |
|
756 |
+ sqlite_request "select distinct nom from groupes where nom like \"%${_groupe[$g]}%\" or nom_court is '${_groupe[$g]}'" | grep --color=always -i "${_groupe[$g]}" |
|
757 |
+ echo |
|
597 | 758 |
else |
598 |
- seq="sqlite_request \"select num from scrutins inner join url on url.id = scrutins.url where url.id is $dossier_value order by num asc\"" |
|
599 |
- qty=$(sqlite_request "select count(num) from scrutins inner join url on url.id = scrutins.url where url.id is $dossier_value order by num asc") |
|
600 |
- dossier=$(sqlite_request "select url from url where id is $dossier_value" | sed 's,^.*/dossiers/,,; s/_/ /g; s/.asp$//') |
|
759 |
+ echo "aucun·e député·e ou groupe ne correspond au critère « ${_groupe[$g]} »" |
|
760 |
+ echo |
|
601 | 761 |
fi |
762 |
+done |
|
763 |
+ |
|
764 |
+if test -s $process_token.mail; then |
|
765 |
+ exit 1 |
|
602 | 766 |
fi |
603 | 767 |
|
604 | 768 |
update_database |
605 | 769 |
write_comparaison |
770 |
+ |
|
771 |
+if test "$envoi_par_mail" = $true_flag; then |
|
772 |
+ echo Vous pourrez trouver en pièce-jointe les résultats demandés avec ces critères: |
|
773 |
+ if test ${#groupe[@]} -gt 0; then |
|
774 |
+ echo "votes des groupes et député·e·s suivant·e·s:" |
|
775 |
+ printf " • %s\n" "${groupe[@]}" |
|
776 |
+ fi |
|
777 |
+ if test "$periode" = $true_flag; then |
|
778 |
+ echo sur la période allant $texte_periode |
|
779 |
+ fi |
|
780 |
+fi |
... | ... |
@@ -1,10 +1,13 @@ |
1 |
-groupes[LREM]='Groupe La République en Marche' |
|
2 |
-groupes[LR]='Groupe Les Républicains' |
|
3 |
-groupes[MODEM]='Groupe du Mouvement Démocrate et apparentés' |
|
4 |
-groupes[PS]='Groupe Socialistes et apparentés' |
|
5 |
-groupes[UDI]='Groupe UDI, Agir et Indépendants' |
|
6 |
-groupes[NG]='Groupe Nouvelle Gauche' |
|
7 |
-groupes[FI]='Groupe La France insoumise' |
|
8 |
-groupes[GDR]='Groupe de la Gauche démocrate et républicaine' |
|
9 |
-groupes[LT]='Groupe Libertés et Territoires' |
|
10 |
-groupes[NI]='Non inscrits' |
|
1 |
+acronymes["Démocrate (MoDem et Indépendants)"]=DEM |
|
2 |
+acronymes["Gauche démocrate et républicaine - NUPES"]=GDR |
|
3 |
+acronymes["Horizons et apparentés"]=HOR |
|
4 |
+acronymes["La France insoumise - Nouvelle Union Populaire écologique et sociale"]=LFI |
|
5 |
+acronymes["Les Républicains"]=LR |
|
6 |
+acronymes["Libertés, Indépendants, Outre-mer et Territoires"]=LIOMT |
|
7 |
+acronymes["Non inscrits"]=NI |
|
8 |
+acronymes["Rassemblement National"]=RN |
|
9 |
+acronymes["Renaissance"]=REN |
|
10 |
+acronymes["Socialistes et apparentés (membre de l'intergroupe NUPES)"]=SOCS |
|
11 |
+acronymes["Écologiste - NUPES"]=ECOLO |
|
12 |
+ |
|
13 |
+mandature=16 |
... | ... |
@@ -0,0 +1,328 @@ |
1 |
+#!/usr/bin/env python3 |
|
2 |
+# basé sur l'idée de Shivam Aggarwal sur https://shivama205.medium.com/audio-signals-comparison-23e431ed2207 |
|
3 |
+# WTFL |
|
4 |
+ |
|
5 |
+import argparse |
|
6 |
+import subprocess |
|
7 |
+import numpy |
|
8 |
+import os |
|
9 |
+import sys |
|
10 |
+import time |
|
11 |
+import multiprocessing |
|
12 |
+ |
|
13 |
def initialize():
    """Parse the command line and collect the audio files to compare.

    Returns:
        (source_files, args): the de-duplicated list of file paths gathered
        from every --source-file argument (directories are walked
        recursively), and the parsed argparse namespace.
    """
    defaults = {
        'sample_time' : 500,   # seconds to sample audio file for fingerprint calculation
        'span' : 150,          # number of points to scan cross correlation over
        'step' : 1,            # step size (in points) of cross correlation
        'min_overlap' : 20,    # minimum number of points that must overlap in cross correlation
                               # exception is raised if this cannot be met
        'threshold' : 80,      # %
        'processor' : os.cpu_count(),
        'separator' : ';'
    }

    def check_nproc(arg):
        # argparse type-checker: integer in [1, cpu_count]
        try:
            n = int(arg)
        except ValueError:
            raise argparse.ArgumentTypeError("il faut un nombre entier")
        if n < 1 or n > os.cpu_count():
            raise argparse.ArgumentTypeError("{} n'est pas compris entre 1 et {:d}".format(n, os.cpu_count()))
        return n

    def check_threshold(arg):
        # argparse type-checker: float in [0, 100]
        try:
            n = float(arg)
        except ValueError:
            raise argparse.ArgumentTypeError("il faut un nombre")
        if n < 0 or n > 100:
            raise argparse.ArgumentTypeError("{} n'est pas compris entre 0 et 100 inclus".format(n))
        return n

    def parse_input_files(input_file, source_files):
        # Flatten the nested lists produced by action='append' + nargs='+',
        # keep plain files, walk directories recursively.  source_files is
        # used as an ordered set (dict keys) to drop duplicates.
        if isinstance(input_file, list):
            for f in input_file:
                parse_input_files(f, source_files)
        else:
            if os.path.isfile(input_file):
                source_files[input_file] = 1
            elif os.path.isdir(input_file):
                for root, dirs, files in os.walk(input_file):
                    for f in files:
                        parse_input_files(os.path.join(root, f), source_files)

    parser = argparse.ArgumentParser(__file__)
    # Fix: the original short option strings carried a trailing space
    # ("-i ", "-t ", "-p ") and only matched through argparse's abbreviation
    # mechanism; they are now the exact short options.
    parser.add_argument("-i", "--source-file",
        action = 'append',
        nargs = '+',
        help = "répertoire ou fichier"
    )
    parser.add_argument("-t", "--threshold",
        type = check_threshold,
        default = defaults['threshold'],
        help = "seuil en pourcentage sous lequel il est considéré qu'il n'y a pas de corrélation (défaut: %(default)d)"
    )
    parser.add_argument("-p", "--processor",
        type = check_nproc,
        default = defaults['processor'],
        help = "le nombre de processus parallèles lancés (défaut: %(default)d)"
    )
    parser.add_argument("--sample-time",
        type = int,
        default = defaults['sample_time'],
        help = "seconds to sample audio file for fpcalc (défaut: %(default)d)"
    )
    parser.add_argument("--span",
        type = int,
        default = defaults['span'],
        help = "finesse en points pour scanner la corrélation (défaut: %(default)d)"
    )
    parser.add_argument("--step",
        type = int,
        default = defaults['step'],
        help = "valeur du pas en points de corrélation (défaut: %(default)d)"
    )
    parser.add_argument("--min-overlap",
        type = int,
        default = defaults['min_overlap'],
        help = "nombre minimal de points de correspondance (défaut %(default)d)"
    )
    parser.add_argument("--separator",
        type = str,
        default = defaults['separator'],
        help = "séparateur des champs de résultat (défaut '%(default)s')"
    )

    args = parser.parse_args()

    # NOTE(review): args.source_file is None when -i is never given, which
    # makes the loop below raise TypeError — behaviour kept as original.
    source_files = {}
    for f in args.source_file:
        parse_input_files(f, source_files)

    return list(source_files.keys()), args
|
104 |
+ |
|
105 |
def prime(i, primes):
    """Incremental primality test.

    `primes` must already contain every prime found below `i`.  If one of
    them (other than `i` itself) divides `i`, the number is composite and
    False is returned; otherwise `i` is added to `primes` and returned
    (truthy).
    """
    for known in primes:
        if i != known and i % known == 0:
            return False
    primes.add(i)
    return i
|
111 |
+ |
|
112 |
def nPrimes(n):
    """Build the set of the first n primes (n >= 1).

    Candidates are tested in ascending order with prime(), which trial-
    divides by every prime discovered so far and records new ones.
    """
    primes = {2}
    found = 0
    candidate = 2
    while True:
        if prime(candidate, primes):
            found += 1
            if found == n:
                return primes
        candidate += 1
|
121 |
+ |
|
122 |
def getPrimes(n, ids):
    """Split a pair id back into its two prime factors.

    `n` is expected to be the product of two primes from `ids` (pair ids
    are built that way in the main program).  Returns (a, b) with a taken
    from `ids`, or (0, 0) when no element of `ids` divides n.

    Bug fix: the original computed b = int(n / i) through float division,
    which silently loses precision for products beyond 2**53; floor
    division is exact for integers of any size.
    """
    for candidate in ids:
        if n % candidate == 0:
            return candidate, n // candidate
    return 0, 0
|
131 |
+ |
|
132 |
# calculate fingerprint
def calculate_fingerprints(filename):
    """Run Chromaprint's fpcalc on `filename` and return the raw fingerprint.

    Returns the text following 'FINGERPRINT=' in fpcalc's output (a comma-
    separated list of integers).  When fpcalc fails the returned text is not
    a fingerprint; callers catch the resulting parse error (see
    mp_calculate_fingerprints).
    """
    # Security fix: the original interpolated the file name into a shell
    # command via subprocess.getoutput(), so names containing quotes, $(...)
    # or backticks were interpreted by the shell.  An argument vector
    # bypasses the shell entirely; stderr is merged and the single trailing
    # newline stripped to match getoutput()'s behaviour.
    result = subprocess.run(
        ['fpcalc', '-raw', '-length', str(args.sample_time), filename],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        universal_newlines=True)
    fpcalc_out = result.stdout
    if fpcalc_out.endswith('\n'):
        fpcalc_out = fpcalc_out[:-1]
    fingerprint_index = fpcalc_out.find('FINGERPRINT=') + 12

    return fpcalc_out[fingerprint_index:]
|
138 |
+ |
|
139 |
# returns correlation between lists
def correlation(listx, listy):
    """Similarity of two Chromaprint fingerprints, as a float in [0, 1].

    Each pair of 32-bit words contributes the number of identical bits
    (32 - popcount(x ^ y)); the mean over all compared words is scaled back
    down to [0, 1].  The longer list is truncated to the shorter one.

    Raises:
        Exception: if either list is empty.
    """
    if len(listx) == 0 or len(listy) == 0:
        # Error checking in main program should prevent us from ever being
        # able to get here.
        raise Exception('Empty lists cannot be correlated.')

    size = min(len(listx), len(listy))
    matching_bits = 0
    for x, y in zip(listx[:size], listy[:size]):
        matching_bits += 32 - bin(x ^ y).count("1")

    return matching_bits / float(size) / 32
|
157 |
+ |
|
158 |
# return cross correlation, with listy offset from listx
def cross_correlation(listx, listy, offset):
    """correlation() of the two fingerprints with listy shifted by offset.

    A positive offset drops the first `offset` words of listx, a negative
    one drops them from listy; both lists are then trimmed to equal length.
    Returns None when fewer than args.min_overlap words remain.
    """
    if offset > 0:
        listx = listx[offset:]
        listy = listy[:len(listx)]
    elif offset < 0:
        listy = listy[-offset:]
        listx = listx[:len(listy)]
    if min(len(listx), len(listy)) < args.min_overlap:
        # Error checking in main program should prevent us from ever being
        # able to get here.
        return None

    return correlation(listx, listy)
|
173 |
+ |
|
174 |
# cross correlate listx and listy with offsets from -span to span
def compare(listx, listy, span, step):
    """Cross-correlate the two fingerprints for every offset in -span..span.

    Returns the list of correlation values, one per tested offset (None
    entries mark offsets whose overlap was below args.min_overlap).

    Raises:
        Exception: when span exceeds the shorter fingerprint.
    """
    # Cleanup: the original evaluated min(len(list(listx)), len(list(listy)))
    # twice, copying both lists each time; the shorter length is computed
    # once instead (behaviour unchanged for the list inputs used here).
    sample_size = min(len(listx), len(listy))
    if span > sample_size:
        # Error checking in main program should prevent us from ever being
        # able to get here.
        raise Exception('span >= sample size: %i >= %i\n'
                        % (span, sample_size)
                        + 'Reduce span, reduce crop or increase sample_time.')
    corr_xy = []
    for offset in numpy.arange(-span, span + 1, step):
        corr_xy.append(cross_correlation(listx, listy, offset))
    return corr_xy
|
186 |
+ |
|
187 |
def get_max_corr(corr, source, target):
    """Pick the strongest correlation out of compare()'s result list.

    Returns (correlation, offset) when the best value reaches
    args.threshold percent, None otherwise.  `source` and `target` are
    unused but kept for interface compatibility.

    Bug fix: compare() yields None for offsets whose overlap was too small,
    and max() over a list mixing None with floats raises TypeError in
    Python 3; None entries are now skipped (and None is returned when every
    entry is None).
    """
    best_index = None
    best_value = None
    for index, value in enumerate(corr):
        if value is None:
            continue
        # strict '>' keeps the first occurrence of the maximum, matching
        # the original corr.index(max(corr))
        if best_value is None or value > best_value:
            best_value = value
            best_index = index
    if best_index is None:
        return None
    max_corr_offset = -args.span + best_index * args.step
# report matches
    if best_value * 100 >= args.threshold:
        return best_value, max_corr_offset
|
193 |
+ |
|
194 |
def correlate(source, target):
    """Full pipeline for one pair of fingerprints.

    Scans every offset with compare() and returns the best match as
    (correlation, offset), or None when nothing reaches the threshold.
    """
    scores = compare(source, target, args.span, args.step)
    return get_max_corr(scores, source, target)
|
197 |
+ |
|
198 |
def get_tests_nbr(n):
    """Number of unordered pairs among n items: n * (n - 1) / 2.

    The original expression n*n - n*(n+1)/2 computed the same value but
    through float division; floor division keeps the count an exact int.
    """
    return n * (n - 1) // 2
|
200 |
+ |
|
201 |
def get_ETA(start, total, done):
    """Linear extrapolation of the finish time, as a human-readable string.

    `start` is the epoch timestamp when the work began; `done` out of
    `total` units are complete (`done` must be > 0).
    """
    reference = time.time()
    remaining = (reference - start) / done * (total - done)
    return time.ctime(reference + remaining)
|
204 |
+ |
|
205 |
def eprint(*args, **kwargs):
    # print() twin that writes to stderr instead of stdout.
    print(*args, **kwargs, file=sys.stderr)
|
207 |
+ |
|
208 |
def mp_calculate_fingerprints(key):
    """Pool worker: compute the fingerprint for ziques[key].

    On success ziques[key] gains a 'fingerprint' entry (list of ints); on
    failure the path is appended to the shared `erreurs` list and the entry
    is removed from `ziques` so the comparison phase skips it.

    Bug fix: the original bare `except:` also swallowed KeyboardInterrupt
    and SystemExit, making the pool hard to interrupt; it now catches
    Exception only.  The dead trailing `pass` was dropped.
    """
    try:
        ziques[key] = {
            'fingerprint': list(map(int, calculate_fingerprints(ziques[key]['path']).split(','))),
            'path': ziques[key]['path']
        }
    except Exception:
        # fpcalc failed or produced unparsable output
        erreurs.append(ziques[key]['path'])
        del ziques[key]
|
218 |
+ |
|
219 |
def mp_correlate(key):
    """Pool worker: correlate the pair stored in comparaison[key].

    correlate() returns None when the best correlation is under the
    threshold; unpacking None then raises TypeError, which lands in the
    handler below — deleting the entry is therefore the *normal* way
    non-matching pairs are discarded, not merely an error path.

    Bug fix: `except:` → `except Exception:` so KeyboardInterrupt and
    SystemExit are no longer swallowed; the dead trailing `pass` was
    dropped.
    """
    try:
        c, o = correlate(
            ziques[comparaison[key]['a']]['fingerprint'],
            ziques[comparaison[key]['b']]['fingerprint'])
        comparaison[key] = {
            'a': comparaison[key]['a'],
            'b': comparaison[key]['b'],
            'correlation': c,
            'offset': o
        }
    except Exception:
        del comparaison[key]
|
233 |
+ |
|
234 |
+ |
|
235 |
if __name__ == "__main__":
    # NOTE(review): `global` at module level is a no-op; kept as original.
    global args
    source_files, args= initialize()

    if len(source_files) < 2:
        print("au moins deux fichiers sont nécessaires")
        sys.exit()

    # Each file gets a distinct prime as id; a pair of files is then keyed
    # by the product of their ids (see getPrimes), which is symmetric and
    # collision-free.
    ids = list(nPrimes(len(source_files)))
    total_ids = len(ids)

    # Manager-backed containers are shared with the pool workers
    # (mp_calculate_fingerprints / mp_correlate mutate them).
    manager = multiprocessing.Manager()
    ziques = manager.dict()        # id -> {'path', 'fingerprint'}
    comparaison = manager.dict()   # pair id -> pair / result
    erreurs = manager.list()       # paths whose fingerprint failed
    pool = multiprocessing.Pool(args.processor)

    for f in range(len(source_files)):
        ziques[ids[f]] = { 'path': source_files[f] }

    del source_files

    # Phase 1: fingerprint every file, with a same-line progress display.
    nb_erreurs = len(erreurs)
    start = time.time()
    for i, _ in enumerate(pool.imap_unordered(mp_calculate_fingerprints, ziques.keys()), 1):
        nb_erreurs = len(erreurs)
        print('calcul des empreintes{:s}: {:.1f}% (ETA {:s})'.format(
            ("", " (" + str(nb_erreurs) + " erreur{})".format(("", "s")[nb_erreurs > 1]))[nb_erreurs > 0],
            i / total_ids * 100,
            get_ETA(start, total_ids, i)),
            end='\r')
    sys.stdout.write("\033[K") # clear line
    print('calcul des empreintes terminé ({:d} fichiers traités{:s})'.format(
        len(ziques),
        ("", " et " + str(nb_erreurs) + " erreur{}".format(("", "s")[nb_erreurs > 1]))[nb_erreurs > 0]))

    if len(erreurs):
        print("Fichier{} en erreur:".format(("", "s")[len(erreurs) > 1]))
        for k in erreurs:
            print(k)
        print()

    erreurs[:] = [] # empty the shared error list
    nb_erreurs = len(erreurs)
    nb_tests = get_tests_nbr(len(ziques))
    done = 0

    # Phase 2: enumerate every unordered pair exactly once, keyed by the
    # product of the two prime ids.
    start = time.time()
    for a in ziques.keys():
        for b in ziques.keys():
            id_correl = a * b
            if a == b or id_correl in comparaison:
                continue
            comparaison[id_correl] = {
                'a': a,
                'b': b
            }
            done += 1
            print("construction liste: {:.1f}% (ETA {:s})".format(
                done / nb_tests * 100,
                get_ETA(start, nb_tests, done)),
                end='\r')
    sys.stdout.write("\033[K") # clear line

    tests_nbr = len(comparaison)

    # Phase 3: correlate every pair.  Workers delete below-threshold
    # entries, so len(comparaison) shrinks while i grows; `found` is the
    # running number of retained correlations.
    start = time.time()
    for i, _ in enumerate(pool.imap_unordered(mp_correlate, comparaison.keys()), 1):
        found = len(comparaison) + i - tests_nbr
        print('{:s} corrélation{pluriel:s} trouvée{pluriel:s}: {:.1f}% (ETA {:s}){:s}'.format(
            ("aucune", str(found))[found > 0],
            i / tests_nbr * 100,
            get_ETA(start, tests_nbr, i),
            ' ',
            pluriel = ("", "s")[found > 1]),
            end='\r')

    sys.stdout.write("\033[K") # clear line
    print('comparaison terminée:\n{0:d} comparaison{pluriel1} effectuée{pluriel1}\n{1} corrélation{pluriel2} trouvée{pluriel2} (seuil {2}%)'.format(
        tests_nbr,
        len(comparaison),
        args.threshold,
        pluriel1=("", "s")[tests_nbr > 1],
        pluriel2=("", "s")[len(comparaison) > 1],
        ))

    # Final report: one args.separator-separated line per retained pair.
    for k in comparaison.keys():
        print("{:s}{sep}{:s}{sep}{:.2f}%{sep}{:d}".format(
            ziques[comparaison[k]['a']]['path'],
            ziques[comparaison[k]['b']]['path'],
            comparaison[k]['correlation'] * 100,
            comparaison[k]['offset'],
            sep = args.separator
        ))
... | ... |
@@ -0,0 +1,519 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+set -e |
|
4 |
+ |
|
5 |
+declare -A datatypes=( |
|
6 |
+ [/sim/description]=text |
|
7 |
+ [/sim/long-description]=text |
|
8 |
+ [/sim/author]=text |
|
9 |
+ [/sim/flight-model]=text |
|
10 |
+ [/sim/type]=text |
|
11 |
+ [/sim/model/path]=text |
|
12 |
+ [/sim/rating/FDM]="integer DEFAULT 0" |
|
13 |
+ [/sim/rating/systems]="integer DEFAULT 0" |
|
14 |
+ [/sim/rating/cockpit]="integer DEFAULT 0" |
|
15 |
+ [/sim/rating/model]="integer DEFAULT 0" |
|
16 |
+) |
|
17 |
+ |
|
18 |
+missing_data_check=( /sim/model/path ) |
|
19 |
+ |
|
20 |
+database=${DB:-$0.db} |
|
21 |
+test -r "$0.conf" && source "$0.conf" |
|
22 |
+ |
|
23 |
+#locale=fr |
|
24 |
+ |
|
25 |
+tempid=$(mktemp --dry-run XXXXXXX) |
|
26 |
+temppath=/dev/shm |
|
27 |
+ |
|
28 |
+setxml=$temppath/setxml-$tempid |
|
29 |
+json_file=$temppath/github_json-$tempid |
|
30 |
+in_ram_database=$temppath/${database##*/}-$tempid |
|
31 |
+ |
|
32 |
+function xmlgetnext () { |
|
33 |
+ local IFS='>' |
|
34 |
+ read -d '<' TAG VALUE |
|
35 |
+ # by design, the first TAG/VALUE pair is empty |
|
36 |
+ # to avoid infinite loops at end of file parsing we return an error |
|
37 |
+ # the next time we find an empty TAG |
|
38 |
+ if test -z "$TAG"; then |
|
39 |
+ test ${xmlgetnext_empty_tag:-0} -gt 0 && return 1 |
|
40 |
+ xmlgetnext_empty_tag=$(( xmlgetnext_empty_tag + 1 )) |
|
41 |
+ fi |
|
42 |
+ # process $TAG only if necessary |
|
43 |
+ local _TAG=$(printf '%q' $TAG) |
|
44 |
+ if test ${_TAG:0:1} = '$'; then |
|
45 |
+ TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//') |
|
46 |
+ fi |
|
47 |
+} |
|
48 |
+ |
|
49 |
+function json () { |
|
50 |
+ jq --raw-output "$1" < ${2:-${json_file:?}} |
|
51 |
+} |
|
52 |
+ |
|
53 |
+rm -f $temppath/sqlite_request |
|
54 |
+function sqlite_request () { |
|
55 |
+ echo -e "## REQ $(( ++sqlite_request_count ))\n${1}\n" >> $temppath/sqlite_request |
|
56 |
+ sqlite3 "$in_ram_database" <<< "$1" |
|
57 |
+} |
|
58 |
+ |
|
59 |
+function xmlremovecomments () { |
|
60 |
+ sed -ri 's/<(!--|script>)/\n&/;s/(<\/script|--)>/&\n/' $setxml |
|
61 |
+ sed -ri '/<(script>|!--).*(<\/script|--)>/d;/<(script>|!--)/,/(<\/script|--)>/d' $setxml |
|
62 |
+ sed -i 's/\xef\xbb\xbf//;s/\r//' $setxml # removes BOM and ^M |
|
63 |
+} |
|
64 |
+ |
|
65 |
+function trap_break () { |
|
66 |
+ trap '' INT |
|
67 |
+ echo "stop requested" |
|
68 |
+} |
|
69 |
+ |
|
70 |
+function trap_exit () { |
|
71 |
+ trapped_rc=$? |
|
72 |
+ trap '' INT |
|
73 |
+ |
|
74 |
+ if declare -f on_exit > /dev/null; then |
|
75 |
+ on_exit |
|
76 |
+ fi |
|
77 |
+ |
|
78 |
+ if test ! -e $in_ram_database; then |
|
79 |
+ exit |
|
80 |
+ fi |
|
81 |
+ echo "updating installation status" |
|
82 |
+ for ac in $(sqlite_request 'select printf("%i:%s/%s", aircrafts.id, aircrafts.name, setxml.file) |
|
83 |
+ from aircrafts inner join setxml |
|
84 |
+ where aircrafts.id = setxml.variantof and setxml.installed != 0;'); do |
|
85 |
+ ac_path=${ac#*:} |
|
86 |
+ if test ! -e ${hangar[path]}/$ac_path-set.xml; then |
|
87 |
+ sqlite_request "update setxml set installed = 0 where file = '${ac_path#*/}' and variantof = ${ac%:*}" |
|
88 |
+ fi |
|
89 |
+ done |
|
90 |
+ for ac in ${hangar[path]}/*/*-set.xml; do |
|
91 |
+ ac=${ac/${hangar[path]}} |
|
92 |
+ sx=${ac##*/} |
|
93 |
+ ac=${ac%/*} |
|
94 |
+ if test -d ${hangar[path]}/$ac/.svn; then |
|
95 |
+ install_type=1 |
|
96 |
+ elif test -d ${hangar[path]}/$ac/.git; then |
|
97 |
+ install_type=2 |
|
98 |
+ else |
|
99 |
+ install_type=3 |
|
100 |
+ fi |
|
101 |
+ sqlite_request "update setxml set installed = $install_type |
|
102 |
+ where exists ( |
|
103 |
+ select 1 |
|
104 |
+ from aircrafts |
|
105 |
+ where name = '${ac/\/}' and setxml.variantof = id |
|
106 |
+ )" |
|
107 |
+ done |
|
108 |
+ local missing_setxml=$(sqlite_request "select printf(' - %s (%s)', aircrafts.name, hangars.name) |
|
109 |
+ from aircrafts inner join hangars |
|
110 |
+ where hangars.id = aircrafts.hangar and aircrafts.id not in (select variantof from setxml)") |
|
111 |
+ if test -n "$missing_setxml"; then |
|
112 |
+ echo -e "missing setxml config for :\n$missing_setxml" |
|
113 |
+ fi |
|
114 |
+ |
|
115 |
+ for data_presence_check in ${missing_data_check[@]}; do |
|
116 |
+ if [[ -v datatypes[$data_presence_check] ]]; then |
|
117 |
+ local missing_data=$(sqlite_request "select count(setxml.file) |
|
118 |
+ from aircrafts inner join setxml |
|
119 |
+ where aircrafts.id = setxml.variantof and setxml.\`$data_presence_check\` = ''") |
|
120 |
+ if test $missing_data -gt 0; then |
|
121 |
+ echo "$missing_data aircrafts without $data_presence_check information" |
|
122 |
+ if test $missing_data -le 10; then |
|
123 |
+ echo "aircrafts without $data_presence_check information:" |
|
124 |
+ sqlite_request "select printf(' - %s/%s (%s)', aircrafts.name, setxml.file, hangars.name) |
|
125 |
+ from aircrafts inner join setxml, hangars |
|
126 |
+ where |
|
127 |
+ aircrafts.id = setxml.variantof |
|
128 |
+ and |
|
129 |
+ aircrafts.hangar = hangars.id |
|
130 |
+ and |
|
131 |
+ setxml.\`$data_presence_check\` = ''" |
|
132 |
+ fi |
|
133 |
+ fi |
|
134 |
+ fi |
|
135 |
+ done |
|
136 |
+ |
|
137 |
+ if test -r "$database" && sqldiff=$(sqldiff $in_ram_database $database) && test -z "$sqldiff"; then |
|
138 |
+ echo "no changes in $database" |
|
139 |
+ elif test -w "$database"; then |
|
140 |
+ rm -f "$database" |
|
141 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
142 |
+ echo "database $database updated" |
|
143 |
+ elif test ! -e "$database" -a -w ${database%/*}; then |
|
144 |
+ sqlite_request '.dump' | sqlite3 "$database" |
|
145 |
+ echo "database $database created" |
|
146 |
+ else |
|
147 |
+ echo "nothing can be done with $database !" |
|
148 |
+ fi |
|
149 |
+ find $temppath -type f -name "*-$tempid" -delete |
|
150 |
+} |
|
151 |
+ |
|
152 |
+function update_database () { |
|
153 |
+ sqlite_request "insert into aircrafts (name, author, revision, date, hangar) |
|
154 |
+ select name, author, revision, date, hangar from recover_aircrafts |
|
155 |
+ where recover_aircrafts.name = '$ac' and recover_aircrafts.hangar = ${hangar[id]} |
|
156 |
+ on conflict (name, hangar) where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]} do |
|
157 |
+ update set |
|
158 |
+ author = (select author from recover_aircrafts where name = '$ac'), |
|
159 |
+ revision = (select revision from recover_aircrafts where name = '$ac'), |
|
160 |
+ date = (select date from recover_aircrafts where name = '$ac') |
|
161 |
+ where aircrafts.name = '$ac' and aircrafts.hangar = ${hangar[id]}" |
|
162 |
+ |
|
163 |
+ id=$(sqlite_request "select id from aircrafts where name is '${ac}' and hangar = ${hangar[id]}") |
|
164 |
+ |
|
165 |
+ echo $(sqlite_request "select printf('[ %i/%i ] $ac', count(sx), count(distinct ac)) from recover_setxml") |
|
166 |
+ |
|
167 |
+ for sx in $(sqlite_request "select distinct sx from recover_setxml where ac = '$ac'"); do |
|
168 |
+ unset data |
|
169 |
+ declare -A data |
|
170 |
+ |
|
171 |
+ printf " -> $sx" |
|
172 |
+ getfromrepo ${ac}/$sx-set.xml > $setxml |
|
173 |
+ |
|
174 |
+ unset xmlgetnext_empty_tag property include include_rootpath ac_save |
|
175 |
+ while xmlgetnext; do |
|
176 |
+ if [[ "$TAG" =~ ^"PropertyList include=" ]]; then |
|
177 |
+ include_rootpath=${include%/*} |
|
178 |
+ test $include = $include_rootpath && unset include_rootpath |
|
179 |
+ |
|
180 |
+ eval $(echo ${TAG#* }) # include="..." |
|
181 |
+ |
|
182 |
+ if [[ "$include" =~ ^Aircraft/Generic/ ]]; then |
|
183 |
+ unset include include_rootpath |
|
184 |
+ continue |
|
185 |
+ |
|
186 |
+ elif [[ "$include" =~ ^'../' ]]; then |
|
187 |
+ if test -n "$include_rootpath"; then |
|
188 |
+ if [[ "$include_rootpath" =~ '/' ]]; then |
|
189 |
+ include_rootpath=${include_rootpath%/*} |
|
190 |
+ else |
|
191 |
+ unset include_rootpath |
|
192 |
+ fi |
|
193 |
+ else |
|
194 |
+ ac_save=$ac |
|
195 |
+ unset ac |
|
196 |
+ fi |
|
197 |
+ include=${include/\.\.\/} |
|
198 |
+ fi |
|
199 |
+ getfromrepo ${ac}/${include_rootpath:+$include_rootpath/}$include >> $setxml |
|
200 |
+ fi |
|
201 |
+ done < $setxml |
|
202 |
+ |
|
203 |
+ test -n "$ac_save" && ac=$ac_save |
|
204 |
+ |
|
205 |
+# some aircrafts (mostly from the helijah's files architecture template) |
|
206 |
+# break because of infinite loop in middle of file |
|
207 |
+# I can't find the reason of this infinite loop |
|
208 |
+# this is the reason of this double-pass |
|
209 |
+ unset xmlgetnext_empty_tag property |
|
210 |
+ while xmlgetnext; do |
|
211 |
+ case "${TAG:0:1}" in |
|
212 |
+ ''|'?'|'!') |
|
213 |
+ continue;; |
|
214 |
+ /) |
|
215 |
+ property=${property%/*};; |
|
216 |
+ *) |
|
217 |
+ if test "${TAG: -1}" != '/'; then |
|
218 |
+ property+=/${TAG%% *} |
|
219 |
+ fi;; |
|
220 |
+ esac |
|
221 |
+ |
|
222 |
+ if [[ "$property" = /PropertyList@($data_pattern) ]]; then |
|
223 |
+ if test -z "${data[${property/\/PropertyList}]}"; then |
|
224 |
+ eval "data[${property/\/PropertyList}]=\"${VALUE//\"/\\\"}\"" |
|
225 |
+ data[${property/\/PropertyList}]=$(tr '\n' ' ' <<< ${data[${property/\/PropertyList}]} | sed -r 's/^\s*//;s/\s+/ /g;s/\s*$//') |
|
226 |
+ fi |
|
227 |
+ fi |
|
228 |
+ |
|
229 |
+ # continue parsing (while loop) until everything's found |
|
230 |
+ for col in ${!datatypes[@]}; do |
|
231 |
+ test -z "${data[$col]}" && continue 2 |
|
232 |
+ done |
|
233 |
+ break # everything's found |
|
234 |
+ done < $setxml |
|
235 |
+ |
|
236 |
+ if eval "test -z \"$data_test_null\""; then |
|
237 |
+ printf "\nWARNING: no info found, skipping\n" |
|
238 |
+ mkdir -p $temppath/no-data-ac |
|
239 |
+ cp -f $setxml $temppath/no-data-ac/${ac}-${sx} |
|
240 |
+ else |
|
241 |
+ insert_values="'$sx', $id, " |
|
242 |
+ insert_col='file, variantof, ' |
|
243 |
+ update_values='' |
|
244 |
+ for col in ${!data[@]}; do |
|
245 |
+ if test ${datatypes[$col]%% *} = 'text'; then |
|
246 |
+ single_quote="'" |
|
247 |
+ elif [[ ${datatypes[$col]%% *} = 'integer' && "${data[$col]// }" = +([0-9]) ]]; then |
|
248 |
+ single_quote="" |
|
249 |
+ else |
|
250 |
+ unset datatypes[$col] |
|
251 |
+ continue |
|
252 |
+ fi |
|
253 |
+ insert_col+="\`$col\`, " |
|
254 |
+ insert_values+="$single_quote${data[$col]//\'/\'\'}$single_quote, " |
|
255 |
+ if test -n "${data[$col]}"; then |
|
256 |
+ update_values+="\`$col\` = $single_quote${data[$col]//\'/\'\'}$single_quote, " |
|
257 |
+ fi |
|
258 |
+ done |
|
259 |
+ local flag_new= |
|
260 |
+ local flag_status= |
|
261 |
+ if test $(sqlite_request "select count(file) from setxml where file = '$sx' and variantof = $id") -eq 0; then |
|
262 |
+ flag_new="NEW" |
|
263 |
+ fi |
|
264 |
+ for criteria in FDM model systems cockpit; do |
|
265 |
+ if test ${data[/sim/rating/$criteria]:-0} -ge 4; then |
|
266 |
+ flag_status+='*' |
|
267 |
+ fi |
|
268 |
+ done |
|
269 |
+ if test -n "$flag_new" -o -n "$flag_status"; then |
|
270 |
+ printf " (${flag_new:+$flag_new }$flag_status)" |
|
271 |
+ fi |
|
272 |
+ printf "\n" |
|
273 |
+ sqlite_request "insert into setxml (${insert_col%,*}, installed) values (${insert_values%,*}, 0) |
|
274 |
+ on conflict (file, variantof) where file = '$sx' and variantof = $id do |
|
275 |
+ update set |
|
276 |
+ ${update_values%,*}, installed = 0 |
|
277 |
+ where |
|
278 |
+ file = '$sx' and variantof = $id" |
|
279 |
+ fi |
|
280 |
+ |
|
281 |
+ sqlite_request "delete from recover_setxml where ac = '$ac' and sx = '$sx'" |
|
282 |
+ done |
|
283 |
+} |
|
284 |
+ |
|
285 |
+function add_record () { |
|
286 |
+ ac_ass_array[$1]="$2" |
|
287 |
+} |
|
288 |
+ |
|
289 |
+function get_record () { |
|
290 |
+ if test -n "$1"; then |
|
291 |
+ echo "${ac_ass_array[$1]}" |
|
292 |
+ else |
|
293 |
+ for k in ${!ac_ass_array[@]}; do |
|
294 |
+ echo $k = ${ac_ass_array[$k]} |
|
295 |
+ done |
|
296 |
+ fi |
|
297 |
+} |
|
298 |
+ |
|
299 |
+function add_aircraft () { |
|
300 |
+ for key in name revision date author; do |
|
301 |
+ test -n "${ac_ass_array[$key]}" # exit if missing data (with the help of "set -e") |
|
302 |
+ done |
|
303 |
+ local new_revision=$(sqlite_request "select revision from recover_aircrafts |
|
304 |
+ where name = '${ac_ass_array[name]}'") |
|
305 |
+ if test -z "${new_revision}"; then |
|
306 |
+ sqlite_request "insert into recover_aircrafts (name, revision, date, author, hangar) |
|
307 |
+ values ( |
|
308 |
+ '${ac_ass_array[name]}', |
|
309 |
+ ${ac_ass_array[revision]}, |
|
310 |
+ ${ac_ass_array[date]}, |
|
311 |
+ '${ac_ass_array[author]}', |
|
312 |
+ ${hangar[id]})" |
|
313 |
+ elif test ${new_revision} -lt ${ac_ass_array[revision]//\"}; then |
|
314 |
+ sqlite_request "update recover_aircrafts |
|
315 |
+ set |
|
316 |
+ revision = ${ac_ass_array[revision]}, |
|
317 |
+ date = ${ac_ass_array[date]}, |
|
318 |
+ author = '${ac_ass_array[author]}', |
|
319 |
+ hangar = ${hangar[id]} |
|
320 |
+ where name = '${ac_ass_array[name]}'" |
|
321 |
+ fi |
|
322 |
+ for key in name revision date author; do |
|
323 |
+ ac_ass_array[$key]='' |
|
324 |
+ done |
|
325 |
+} |
|
326 |
+ |
|
327 |
+function add_setxml_for_aircraft () { |
|
328 |
+ sqlite_request "insert into recover_setxml values ('$1', '${2/%-set.xml}') |
|
329 |
+ on conflict (ac, sx) where ac = '$1' and sx = '${2/%-set.xml}' |
|
330 |
+ do nothing" |
|
331 |
+} |
|
332 |
+ |
|
333 |
+function apply_revision () { |
|
334 |
+ for ac in $(sqlite_request "select name from recover_aircrafts"); do |
|
335 |
+ # delete aircrafts that have been deleted from the repo |
|
336 |
+ sqlite_request "delete from setxml |
|
337 |
+ where (file, variantof) in ( |
|
338 |
+ select file, variantof from setxml |
|
339 |
+ inner join aircrafts |
|
340 |
+ where aircrafts.id = setxml.variantof |
|
341 |
+ and aircrafts.name = '$ac' |
|
342 |
+ and aircrafts.hangar = ${hangar[id]} |
|
343 |
+ and setxml.file not in ( |
|
344 |
+ select sx from recover_setxml where ac = '$ac' |
|
345 |
+ ) |
|
346 |
+ )" |
|
347 |
+ |
|
348 |
+ # delete aircrafts without setxml found |
|
349 |
+ sqlite_request "delete from recover_aircrafts |
|
350 |
+ where name not in (select distinct ac from recover_setxml)" |
|
351 |
+ |
|
352 |
+ update_database |
|
353 |
+ if test -d ${hangar[path]}/${ac}/.${hangar[type]} \ |
|
354 |
+ && \ |
|
355 |
+ case ${hangar[type]} in |
|
356 |
+ svn) test "$(svn info --show-item=url ${hangar[path]}/${ac})" != "${hangar[url]}/${ac}";; |
|
357 |
+ git) test "$(git -C ${hangar[path]}/${ac} config --get remote.origin.url)" != "${hangar[url]}/${ac}.git";; |
|
358 |
+ esac \ |
|
359 |
+ || test -d ${hangar[path]}/${ac} -a ! -d ${hangar[path]}/${ac}/.${hangar[type]}; then |
|
360 |
+ echo "INFO: local ${ac} installed out from repo" >&2 |
|
361 |
+ fi |
|
362 |
+ sqlite_request "delete from recover_aircrafts where name = '$ac'" |
|
363 |
+ done |
|
364 |
+} |
|
365 |
+ |
|
366 |
+trap trap_break INT |
|
367 |
+trap trap_exit EXIT |
|
368 |
+ |
|
369 |
+stty -echoctl |
|
370 |
+ |
|
371 |
+declare -A hangar |
|
372 |
+data_pattern=$(printf "%s|" ${!datatypes[@]}) |
|
373 |
+data_pattern=${data_pattern:0:-1} |
|
374 |
+data_test_null=$(printf '${data[%s]}' ${!datatypes[@]}) |
|
375 |
+ |
|
376 |
+if test -e $database; then |
|
377 |
+ cp $database $in_ram_database |
|
378 |
+ |
|
379 |
+ sql_cols=$(sqlite_request "pragma table_info(setxml)" \ |
|
380 |
+ | cut --delimiter="|" --fields=2,3 --output-delimiter=" " \ |
|
381 |
+ | sort \ |
|
382 |
+ | md5sum) |
|
383 |
+ datatypes[file]=text |
|
384 |
+ datatypes[variantof]=integer |
|
385 |
+ datatypes[installed]=integer |
|
386 |
+ script_cols=$(for col in ${!datatypes[@]}; do echo "$col ${datatypes["$col"]%% *}"; done \ |
|
387 |
+ | sort \ |
|
388 |
+ | md5sum) |
|
389 |
+ unset datatypes[file] datatypes[variantof] datatypes[installed] |
|
390 |
+ if test "$sql_cols" != "$script_cols"; then |
|
391 |
+ echo "ALERT: datbase version mismatch !" |
|
392 |
+ exit 1 |
|
393 |
+ fi |
|
394 |
+ if sqlite_request '.tables' | grep -q 'recover_'; then |
|
395 |
+ hangar[id]=$(sqlite_request "select hangar from recover_aircrafts limit 1") |
|
396 |
+ if test -n "${hangar[id]}"; then |
|
397 |
+ echo "recovering from previous saved state" |
|
398 |
+ eval $(sqlite_request "select printf('hangar[name]=%s;hangar[url]=%s;hangar[type]=%s;hangar[source]=%s', |
|
399 |
+ name, url, type, source) |
|
400 |
+ from hangars |
|
401 |
+ where id = '${hangar[id]}'") |
|
402 |
+ source $(grep -l "^\s*hangar\[name\]=${hangar[name]}\s*$" ${0%*/}.d/*.hangar) |
|
403 |
+ eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}" |
|
404 |
+ apply_revision |
|
405 |
+ exit |
|
406 |
+ else |
|
407 |
+ sqlite_request 'drop table recover_aircrafts' |
|
408 |
+ sqlite_request 'drop table recover_setxml' |
|
409 |
+ fi |
|
410 |
+ fi |
|
411 |
+fi |
|
412 |
+ |
|
413 |
+sqlite_request "create table if not exists hangars ( |
|
414 |
+ id integer primary key, |
|
415 |
+ name text, |
|
416 |
+ source text, |
|
417 |
+ type text, |
|
418 |
+ url text, |
|
419 |
+ path text, |
|
420 |
+ active integer)" |
|
421 |
+ |
|
422 |
+sqlite_request 'create unique index if not exists "index_hangars" on hangars (url)' |
|
423 |
+ |
|
424 |
+sqlite_request "create table if not exists aircrafts ( |
|
425 |
+ id integer primary key, |
|
426 |
+ name text, |
|
427 |
+ revision integer, |
|
428 |
+ date integer, |
|
429 |
+ author text, |
|
430 |
+ hangar integer)" |
|
431 |
+ |
|
432 |
+sqlite_request 'create unique index if not exists "index_aircrafts" on aircrafts (name, hangar)' |
|
433 |
+ |
|
434 |
+sqlite_request "create table if not exists setxml ( |
|
435 |
+ file text, |
|
436 |
+ variantof integer, |
|
437 |
+ $(for col in ${!datatypes[@]}; do printf "'%s' %s, " $col "${datatypes[$col]}"; done) |
|
438 |
+ installed integer)" |
|
439 |
+ |
|
440 |
+sqlite_request 'create unique index if not exists "index_setxml" on setxml (file, variantof)' |
|
441 |
+ |
|
442 |
+for file in $(find ${0%*/}.d -type f -name "*.hangar"); do |
|
443 |
+ unset hangar error_message |
|
444 |
+ unset -f getfromrepo parse_repo_history |
|
445 |
+ declare -A hangar |
|
446 |
+ source $file |
|
447 |
+ |
|
448 |
+ test -n "${hangar[name]}" \ |
|
449 |
+ -a -n "${hangar[source]}" \ |
|
450 |
+ -a -n "${hangar[type]}" \ |
|
451 |
+ -a -n "${hangar[url]}" \ |
|
452 |
+ -a -n "${hangar[active]}" || \ |
|
453 |
+ error_message="${error_message:+$error_message, }missing hangar data" |
|
454 |
+ |
|
455 |
+ declare -f getfromrepo > /dev/null || \ |
|
456 |
+ error_message="${error_message:+$error_message, }missing getfromrepo function" |
|
457 |
+ |
|
458 |
+ declare -f parse_repo_history > /dev/null || \ |
|
459 |
+ error_message="${error_message:+$error_message, }missing parse_repo_history function" |
|
460 |
+ |
|
461 |
+ if test -n "$error_message"; then |
|
462 |
+ echo "file $file isn't a valid hangar ($error_message)" |
|
463 |
+ continue |
|
464 |
+ fi |
|
465 |
+ |
|
466 |
+ sqlite_request "insert into hangars (name, source, type, url, path, active) |
|
467 |
+ values ( |
|
468 |
+ '${hangar[name]}', '${hangar[source]}', '${hangar[type]}', |
|
469 |
+ '${hangar[url]}', '${hangar[path]}', ${hangar[active]}) |
|
470 |
+ on conflict (url) where url = '${hangar[url]}' do |
|
471 |
+ update set |
|
472 |
+ name = '${hangar[name]}', |
|
473 |
+ path = '${hangar[path]}', |
|
474 |
+ active = ${hangar[active]} |
|
475 |
+ where url = '${hangar[url]}'" |
|
476 |
+done |
|
477 |
+ |
|
478 |
+unset hangar |
|
479 |
+unset -f getfromrepo parse_repo_history |
|
480 |
+declare -A hangar ac_ass_array |
|
481 |
+for h_id in $(sqlite_request "select id from hangars where active = 1"); do |
|
482 |
+ |
|
483 |
+ sqlite_request 'create table if not exists recover_aircrafts ( |
|
484 |
+ name text, |
|
485 |
+ revision integer, |
|
486 |
+ date integer, |
|
487 |
+ author text, |
|
488 |
+ hangar integer)' |
|
489 |
+ |
|
490 |
+ sqlite_request 'create table if not exists recover_setxml ( |
|
491 |
+ ac text, |
|
492 |
+ sx text)' |
|
493 |
+ |
|
494 |
+ sqlite_request 'create unique index if not exists "index_recover_setxml" on recover_setxml (ac, sx)' |
|
495 |
+ |
|
496 |
+ eval $(sqlite_request "select printf('hangar[id]=%i;hangar[source]=%s;', id, source) |
|
497 |
+ from hangars |
|
498 |
+ where id = '${h_id}'") |
|
499 |
+ |
|
500 |
+ source $(grep -l "^\s*hangar\[source\]=${hangar[source]}\s*$" ${0%*/}.d/*.hangar) |
|
501 |
+ |
|
502 |
+ eval "getfromrepo () {$(declare -f getfromrepo | sed '1,2d;$d'); xmlremovecomments;}" |
|
503 |
+ |
|
504 |
+ echo -e "=${hangar[name]//?/=}=\n ${hangar[name]} \n=${hangar[name]//?/=}=" |
|
505 |
+ |
|
506 |
+ latest_revision=$(( $(sqlite_request "select max(revision) |
|
507 |
+ from aircrafts inner join hangars |
|
508 |
+ where hangars.id = aircrafts.hangar and hangars.name = '${hangar[name]}'") + 1 )) |
|
509 |
+ |
|
510 |
+ parse_repo_history |
|
511 |
+ |
|
512 |
+ if declare -f on_exit > /dev/null; then |
|
513 |
+ on_exit |
|
514 |
+ fi |
|
515 |
+ sqlite_request "drop index 'index_recover_setxml'" |
|
516 |
+ sqlite_request "drop table recover_aircrafts" |
|
517 |
+ sqlite_request "drop table recover_setxml" |
|
518 |
+done |
|
519 |
+ |
... | ... |
@@ -0,0 +1,78 @@ |
1 |
+Files with names ending in .hangar are sourced (bash syntax). |
|
2 |
+Please have a look at the two provided files to check how it is done |
|
3 |
+ |
|
4 |
+Each file must contain: |
|
5 |
+- variable `hangar` definition as an associative array with at least the |
|
6 |
+ following keys: |
|
7 |
+ name: the human identifier of the hangar |
|
8 |
+  url: the url of the remote repository (this is the unique identifier for |
|
9 |
+ each hangar, can't be the same in more than one hangar) |
|
10 |
+  type: the type of repo (git, svn, ...); at the moment, only git and svn |
|
11 |
+ repos are known and verified by installation status routine |
|
12 |
+ path: the local path in which the remote repo (or part of it) is cloned |
|
13 |
+ active: is the hangar active (1) or not (0) |
|
14 |
+ |
|
15 |
+- function `parse_repo_history` which describes how to get the hangar content |
|
16 |
+ (initial import and updates) |
|
17 |
+- function `getfromrepo` which describes how to get a single file from the repo |
|
18 |
+- optional function `on_exit` to describe what to do when exiting the hangar |
|
19 |
+ |
|
20 |
+Some functions are provided by the main script to ease the database management: |
|
21 |
+- add_record <key> <value> |
|
22 |
+  this function records the key with the value; these |
|
23 |
+ keys are intended to be information for aircraft. |
|
24 |
+ Mandatory keys are: |
|
25 |
+ name : the name of the aircraft |
|
26 |
+ revision : the revision from the repo |
|
27 |
+ date : date of the last update |
|
28 |
+ author : author of the commit |
|
29 |
+ |
|
30 |
+- get_record [key] |
|
31 |
+  returns the value recorded for the key. If no key is |
|
32 |
+ provided, prints all the recorded keys in the form: |
|
33 |
+ key1 = value1 |
|
34 |
+ key2 = value2 |
|
35 |
+ ... (maybe useful for debugging) |
|
36 |
+ |
|
37 |
+- add_aircraft |
|
38 |
+ adds aircraft in the database in a buffer table, ready to be |
|
39 |
+ integrated into the main aircrafts table by using `apply_revision` function. |
|
40 |
+  If one or more of the mandatory keys as described in `add_record` is or are |
|
41 |
+  missing, the function may exit as an error and the whole script exits |
|
42 |
+ |
|
43 |
+- add_setxml_for_aircraft <aircraft> <-set.xml file> |
|
44 |
+ add in special buffer table the -set.xml entry for aircraft |
|
45 |
+ the trailing "-set.xml" is removed if found |
|
46 |
+ |
|
47 |
+- xmlgetnext |
|
48 |
+ in a while loop to read the XML content of a file, export $TAG |
|
49 |
+ (formatted) and $VALUE (not formatted) By design the first couple TAG/VALUE is |
|
50 |
+  always empty; some files have an unidentified issue that traps the while |
|
51 |
+ loop in an infinite loop. To avoid this, please use the following syntax: |
|
52 |
+ |
|
53 |
+ unset xmlgetnext_empty_tag |
|
54 |
+ while xmlgetnext; do |
|
55 |
+ # PUT HERE YOUR STUFF |
|
56 |
+ done < /your/xml/file |
|
57 |
+ |
|
58 |
+- sqlite_request <SQLite request> |
|
59 |
+ perform the request on database (actually in |
|
60 |
+ a copied database which will be dumped into the original at the end of |
|
61 |
+ the script). |
|
62 |
+ Don't touch the original database, and always use this |
|
63 |
+ wrapper, unless you will lose your changes at the end. |
|
64 |
+  Moreover this wrapper writes to the file /dev/shm/sqlite_request all the |
|
65 |
+ requests, so it is useful to debug, or just watch what is doing |
|
66 |
+ |
|
67 |
+- apply_revision |
|
68 |
+ use the buffered tables to feed the main tables with all the |
|
69 |
+ information it will find by parsing the new or updated aircrafts config files |
|
70 |
+ |
|
71 |
+Some variables are available |
|
72 |
+- $latest_revision the revision at which to start retrieving the remote history, |
|
73 |
+ defaults to 1 |
|
74 |
+ |
|
75 |
+- $tempid a single identifier to create temporary files (useful to get all the |
|
76 |
+ files generated by the script ending with the same ID) |
|
77 |
+ |
|
78 |
+Enjoy adding your preferred hangar :) |
... | ... |
@@ -0,0 +1,128 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft |
|
4 |
+ |
|
5 |
+hangar[name]=FGADDON |
|
6 |
+hangar[source]=sourceforge |
|
7 |
+hangar[type]=svn |
|
8 |
+hangar[url]=https://svn.code.sf.net/p/flightgear/fgaddon/trunk/Aircraft |
|
9 |
+hangar[active]=1 |
|
10 |
+ |
|
11 |
+function getfromrepo () { |
|
12 |
+ svn cat ${hangar[url]}/$1 |
|
13 |
+} |
|
14 |
+ |
|
15 |
+function on_exit () { |
|
16 |
+ rm -f $aircrafts $aircraft |
|
17 |
+} |
|
18 |
+ |
|
19 |
+function parse_repo_history () { |
|
20 |
+ function getaircraftinfo () { # $1 = aircraft |
|
21 |
+ svn list --xml --depth files ${hangar[url]}/$1 > $aircraft |
|
22 |
+ unset xmlgetnext_empty_tag |
|
23 |
+ while xmlgetnext; do |
|
24 |
+ if test "$TAG" = 'name' && test "${VALUE/%-set.xml}" != "$VALUE"; then |
|
25 |
+ add_setxml_for_aircraft $1 ${VALUE/%-set.xml} |
|
26 |
+ fi |
|
27 |
+ done < $aircraft |
|
28 |
+ } |
|
29 |
+ |
|
30 |
+ aircrafts=$temppath/Aircraft-$tempid |
|
31 |
+ aircraft=$temppath/aircraft-$tempid |
|
32 |
+ |
|
33 |
+ if test $latest_revision -eq 1; then |
|
34 |
+ echo getting repository list |
|
35 |
+ if ! svn list --xml --depth immediates ${hangar[url]} > $aircrafts; then |
|
36 |
+ echo "error while retrieving list" |
|
37 |
+ exit |
|
38 |
+ fi |
|
39 |
+ total=$(grep -c '<entry' $aircrafts) |
|
40 |
+ is_ac=0 |
|
41 |
+ else |
|
42 |
+ if test ${latest_revision:-0} -gt $(svn info --show-item revision ${hangar[url]}); then |
|
43 |
+ echo "already latest revisison" |
|
44 |
+ return |
|
45 |
+ fi |
|
46 |
+ echo "downloading history from revision ${latest_revision:-0}" |
|
47 |
+ if ! svn log --revision ${latest_revision:-0}:HEAD --xml --verbose ${hangar[url]} > $aircrafts; then |
|
48 |
+ echo "error while retrieving history" |
|
49 |
+ exit |
|
50 |
+ fi |
|
51 |
+ total=$(grep -c '<logentry' $aircrafts) |
|
52 |
+ fi |
|
53 |
+ |
|
54 |
+ progress=0 |
|
55 |
+ |
|
56 |
+ echo parsing repository |
|
57 |
+ |
|
58 |
+ unset xmlgetnext_empty_tag |
|
59 |
+ while xmlgetnext; do |
|
60 |
+ |
|
61 |
+ if test $latest_revision -eq 1; then |
|
62 |
+ if test "$TAG" = 'entry kind="dir"'; then |
|
63 |
+ is_ac=1 |
|
64 |
+ continue |
|
65 |
+ elif test $is_ac -eq 0 -a "$TAG" != '/list'; then |
|
66 |
+ continue |
|
67 |
+ fi |
|
68 |
+ else |
|
69 |
+ if test "${TAG%% *}" = 'logentry'; then |
|
70 |
+ is_ac=1 |
|
71 |
+ elif test ${is_ac:-0} -eq 0 -a "$TAG" != '/log'; then |
|
72 |
+ continue |
|
73 |
+ fi |
|
74 |
+ fi |
|
75 |
+ |
|
76 |
+ case "$TAG" in |
|
77 |
+ 'name') |
|
78 |
+ add_record name $VALUE |
|
79 |
+ ;; |
|
80 |
+ 'logentry revision='*|'commit revision='*) |
|
81 |
+ add_record revision ${TAG#*=} |
|
82 |
+ ;; |
|
83 |
+ 'author') |
|
84 |
+ add_record author ${VALUE//\'/\'\'} |
|
85 |
+ ;; |
|
86 |
+ 'date') |
|
87 |
+ add_record date $(date +%s -d "$VALUE") |
|
88 |
+ ;; |
|
89 |
+ 'path '*) |
|
90 |
+ TAG=${TAG#* } |
|
91 |
+ TAG=${TAG// /;} |
|
92 |
+ TAG=${TAG//-/_} |
|
93 |
+ eval $(echo ${TAG// /;}) |
|
94 |
+ path=(${VALUE//\// }) |
|
95 |
+ if test $kind = 'dir' -a ${#path[@]} -eq 3 -a $action = 'D'; then |
|
96 |
+ sqlite_request "delete from setxml |
|
97 |
+ where variantof in ( |
|
98 |
+ select id from aircrafts |
|
99 |
+ where name = '${path[2]}' |
|
100 |
+ and hangar = ${hangar[id]} |
|
101 |
+ )" |
|
102 |
+ sqlite_request "delete from aircrafts |
|
103 |
+ where name = '${path[2]}' |
|
104 |
+ and hangar = ${hangar[id]}" |
|
105 |
+ is_ac=0 |
|
106 |
+ continue |
|
107 |
+ fi |
|
108 |
+ is_ac=1 |
|
109 |
+ add_record name ${path[2]} |
|
110 |
+ ;; |
|
111 |
+ '/logentry'|'/entry') |
|
112 |
+ getaircraftinfo $(get_record name) |
|
113 |
+ add_aircraft |
|
114 |
+ newprogress=$((++entry * 100 / $total)) |
|
115 |
+ if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then |
|
116 |
+ progress=$newprogress |
|
117 |
+ printf "\r%d%% (%d)" $progress $(sqlite_request 'select count(name) from recover_aircrafts') |
|
118 |
+ fi |
|
119 |
+ is_ac=0 |
|
120 |
+ ;; |
|
121 |
+ '/list'|'/log') |
|
122 |
+ printf "\r\033[K" |
|
123 |
+ apply_revision |
|
124 |
+ break |
|
125 |
+ ;; |
|
126 |
+ esac |
|
127 |
+ done < $aircrafts |
|
128 |
+} |
... | ... |
@@ -0,0 +1,104 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft |
|
4 |
+ |
|
5 |
+hangar[name]=FGMEMBERS |
|
6 |
+hangar[source]=github |
|
7 |
+hangar[type]=git |
|
8 |
+hangar[url]=https://github.com/FGMEMBERS |
|
9 |
+hangar[active]=1 |
|
10 |
+ |
|
11 |
+function getfromrepo () { |
|
12 |
+ curl -s https://raw.githubusercontent.com/FGMEMBERS/${1%%/*}/master/${1#*/} |
|
13 |
+} |
|
14 |
+ |
|
15 |
+function on_exit () { |
|
16 |
+ rm -f $gh_curl_content $json_file $contents |
|
17 |
+ unset contents gh_curl_content |
|
18 |
+} |
|
19 |
+ |
|
20 |
+function parse_repo_history () { |
|
21 |
+ function github_curl () { |
|
22 |
+ test $# -eq 0 && return $(test -n "$githup_token") |
|
23 |
+ curl ${githup_token:+-u $githup_token} -si $1 > $gh_curl_content |
|
24 |
+ eval $(sed -rn '1,/^\s*$/{s/^X-Ratelimit-Remaining:\s*([0-9]+).*$/remaining=\1/ip;s/^X-Ratelimit-Reset:\s*([0-9]+).*$/reset=\1/ip}' $gh_curl_content) |
|
25 |
+ if test ${remaining:-1} -eq 0; then |
|
26 |
+ ((reset = reset + 10)) # just to be prudent |
|
27 |
+ echo "github API limit: waiting $(date +%H:%M -d@$reset) to continue" >&2 |
|
28 |
+ if [[ $1 =~ '/contents/'$ ]]; then |
|
29 |
+ echo "process already found data so far" >&2 |
|
30 |
+ apply_revision >&2 |
|
31 |
+ fi |
|
32 |
+ sleep $(( $reset - $(date +%s) )) |
|
33 |
+ curl ${githup_token:+-u $githup_token} -s $1 # not sure that was really fetched |
|
34 |
+ else |
|
35 |
+ sed -n '/^\s*$/,$p' $gh_curl_content |
|
36 |
+ fi |
|
37 |
+ } |
|
38 |
+ |
|
39 |
+ gh_curl_content=$temppath/github-$tempid |
|
40 |
+ contents=$temppath/contents-$tempid |
|
41 |
+ github_orga_repos='https://api.github.com/orgs/FGMEMBERS/repos?sort=updated&type=all&per_page=100&page=_page_' |
|
42 |
+ page=1 |
|
43 |
+ |
|
44 |
+ echo '[' > ${json_file}_recent |
|
45 |
+ |
|
46 |
+ while github_curl "${github_orga_repos/_page_/$((page++))}" > $json_file; do |
|
47 |
+ jq_length=$(json 'length') |
|
48 |
+ test $jq_length -eq 0 && break |
|
49 |
+ |
|
50 |
+ for ((i = 0; i < $jq_length; i++)); do |
|
51 |
+ if test $(date +%s -d $(json ".[$i].updated_at")) -gt $latest_revision; then |
|
52 |
+ json ".[$i]" >> ${json_file}_recent |
|
53 |
+ echo ',' >> ${json_file}_recent |
|
54 |
+ else |
|
55 |
+ break 2 |
|
56 |
+ fi |
|
57 |
+ done |
|
58 |
+ done |
|
59 |
+ |
|
60 |
+ sed -i '${/^,/d}' ${json_file}_recent |
|
61 |
+ echo ']' >> ${json_file}_recent |
|
62 |
+ |
|
63 |
+ mv -f ${json_file}_recent $json_file |
|
64 |
+ jq_length=$(json 'length') |
|
65 |
+ local progress=0 |
|
66 |
+ local repo |
|
67 |
+ |
|
68 |
+ if test $latest_revision -eq 1; then |
|
69 |
+ if github_curl; then |
|
70 |
+ max_requests_per_hour=5000 |
|
71 |
+ else |
|
72 |
+ max_requests_per_hour=60 |
|
73 |
+ fi |
|
74 |
+ echo "the initial import may take more than $(($jq_length / $max_requests_per_hour)) hours to perform" |
|
75 |
+ fi |
|
76 |
+ |
|
77 |
+ for ((i = 0; i < $jq_length; i++)); do |
|
78 |
+ local repo=$(json ".[$i].name") |
|
79 |
+ add_record name $repo |
|
80 |
+ |
|
81 |
+ github_curl "https://api.github.com/repos/FGMEMBERS/${repo}/contents/" > $contents |
|
82 |
+ for sx in $(json '.[] | select(.type == "file") | .path | capture("(?<setxml>.+)-set.xml") | .setxml' $contents); do |
|
83 |
+ add_setxml_for_aircraft $repo $sx |
|
84 |
+ done |
|
85 |
+ |
|
86 |
+ if test -n "$sx"; then |
|
87 |
+ add_record revision $(date +%s -d $(json ".[$i].updated_at")) |
|
88 |
+ add_record date $(date +%s -d $(json ".[$i].updated_at")) |
|
89 |
+ add_record author ${hangar[name]} |
|
90 |
+ add_aircraft |
|
91 |
+ else |
|
92 |
+ sqlite_request "delete from aircrafts where name = '$repo' and hangar = ${hangar[i]}" |
|
93 |
+ fi |
|
94 |
+ |
|
95 |
+ newprogress=$((i * 100 / $jq_length)) |
|
96 |
+ if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then |
|
97 |
+ progress=$newprogress |
|
98 |
+ printf "\r%d%% (%d)" $progress $(sqlite_request 'select count(name) from recover_aircrafts') |
|
99 |
+ fi |
|
100 |
+ done |
|
101 |
+ printf "\r\033[K" |
|
102 |
+ |
|
103 |
+ apply_revision |
|
104 |
+} |
... | ... |
@@ -0,0 +1,69 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
# Root of the FlightGear source checkouts; overridable via first argument.
source_path="${1:-$HOME/scripts/flightgear/source}"
|
4 |
+ |
|
5 |
function xmlgetnext () {
  # Read the next TAG/VALUE pair from the XML stream on stdin.
  # Tokens are delimited by '<'; with IFS='>' the chunk "tag>text"
  # splits into TAG and VALUE (both left as globals for the caller).
  local IFS='>'
  # -r keeps backslashes in the stream intact
  read -r -d '<' TAG VALUE
  # by design, the first TAG/VALUE pair is empty
  # to avoid infinite loops at end of file parsing we return an error
  # the next time we find an empty TAG
  if test -z "$TAG"; then
    test "${xmlgetnext_firstentry:-1}" -eq 1 && xmlgetnext_firstentry=0 || return 1;
  fi
  # process $TAG only if necessary: printf %q quotes a string containing
  # newlines as $'...', which flags tags whose attributes span several lines
  local _TAG
  _TAG=$(printf '%q' "$TAG")
  if test "${_TAG:0:1}" = '$'; then
    # flatten a multi-line tag: newlines -> spaces, collapse runs of
    # spaces, strip trailing blanks
    TAG=$(tr '\n' ' ' <<< "$TAG" | sed 's/  */ /g; s/ *$//')
  fi
}
|
20 |
+ |
|
21 |
exit_trap () {
  # Cleanup handler: remove the temporary feed file, if any.
  # Guarded so it is safe when $apixml was never assigned, and -f keeps
  # it quiet when the file was never actually created.
  # NOTE(review): this script neither sets $apixml nor installs the trap;
  # the helper appears copied from the jenkins-status script — confirm.
  if test -n "${apixml:-}"; then
    rm -f -- "$apixml"
  fi
}
|
24 |
+ |
|
25 |
function showRSS () {
  # Print a report for the SourceForge tracker named $1 (e.g. simgear,
  # flightgear, fgdata): an underlined upper-case heading, then one line
  # per commit-feed entry.  The entry matching the local checkout's HEAD
  # is prefixed with '*'.
  # Heading: upper-cased name, then the name with every alnum replaced
  # by '-' as an underline.
  echo -e "${1^^}\n${1//[[:alnum:]]/-}"
  # sha1 of the local checkout, compared against feed entries below
  revparse=$(git -C "$source_path/$1" rev-parse HEAD)
  wget -qO- https://sourceforge.net/p/flightgear/$1/feed | while xmlgetnext; do
    # maintain $property as the '/'-joined path of currently open tags
    case "${TAG:0:1}" in
      ''|'?'|'!')
        # empty token, XML declaration or comment/doctype: skip
        continue;;
      /)
        # closing tag: pop one level
        property=${property%/*};;
      *)
        # opening tag (ignore self-closing '<tag/>'): push tag name,
        # attributes stripped by ${TAG%% *}
        if test "${TAG: -1}" != '/'; then
          property+=/${TAG%% *}
        fi;;
    esac

    case "$property" in
      '/rss/channel/item/title')
        title=$VALUE
        ;;
      '/rss/channel/item/link')
        # feed links look like .../ci/<sha1>/ — strip the URL prefix,
        # keeping the trailing '/' (hence the "$revparse/" comparison below)
        sha1=${VALUE/https:\/\/sourceforge.net\/p\/flightgear\/${1}\/ci\/}
        ;;
      '/rss/channel/item/pubDate')
        # "%e %B" => blank-padded day + full month name
        pubDate=$(date +'%e %B' -d "$VALUE")
        ;;
    esac

    # end of an <item>: emit the collected entry
    if test "$TAG" = "/item"; then
      if test $revparse/ = $sha1; then
        title="* ${title}"
      else
        title=" ${title}"
      fi
      # ${pubDate/# } drops the leading pad space of %e
      echo "$title (${pubDate/# })"
    fi
  done
}
|
62 |
+ |
|
63 |
# Report the three FlightGear component feeds, separated by blank lines.
showRSS simgear
echo
showRSS flightgear
echo
showRSS fgdata
... | ... |
@@ -0,0 +1,677 @@ |
1 |
+#!/bin/zsh |
|
2 |
+ |
|
3 |
+#FGDIR=$HOME/scripts/flightgear |
|
4 |
+#FGADDON=$HOME/.fgfs/flightgear-fgaddon |
|
5 |
+function fgfs () { |
|
6 |
+ local fgfs_source=$FGDIR/source |
|
7 |
+ local fgfs_build=$FGDIR/build |
|
8 |
+ local fgfs_install=$FGDIR/install |
|
9 |
function update_fg () {
  # Dispatch one maintenance action named by $1:
  #   fgaddon/check/rss : run the matching helper script from ~/.fgfs
  #   data              : update hangar checkouts under $FGADDON
  #   source            : update each component listed in $fgfs_source/.$2
  #   build             : cmake/make/install each component of set $2
  # $2 names a component set (a dot-file listing directories), used by
  # the source and build actions.
  # The data/source actions expect the caller to have populated the
  # associative array control_system_data (vcs-name -> update command).
  case $1 in
    fgaddon)
      test -r $HOME/.fgfs/fgaddon && DB="$FGADDON/fgaddon.db" bash $HOME/.fgfs/fgaddon
      ;;
    check)
      test -r $HOME/.fgfs/jenkins-status && bash $HOME/.fgfs/jenkins-status
      ;;
    rss)
      test -r $HOME/.fgfs/fgcoderss && bash $HOME/.fgfs/fgcoderss
      ;;
    data)
      # ${(kv)...} iterates keys and values pairwise: for each known VCS,
      # find its checkout marker dirs (.git/.svn) and run the update
      # command from within each checkout (-execdir)
      for control_system update_command in ${(kv)control_system_data}; do
        find $FGADDON \
          -maxdepth 3 \
          -mindepth 1 \
          -type d \
          -name .${control_system} \
          -printf "\n[ %h ]\n" \
          -execdir ${control_system} ${update_command} \;
      done
      ;;
    source)
      # same as 'data' but over the source checkouts of the selected set
      for component in $(<$fgfs_source/.$2); do
        for control_system update_command in ${(kv)control_system_data}; do
          find $fgfs_source/$component \
            -maxdepth 1 \
            -type d \
            -name .${control_system} \
            -printf "\n[ %h ]\n" \
            -execdir ${control_system} ${update_command} \;
        done
      done
      ;;
    build)
      local simultaneous=$(nproc)
      # components already installed in this run; used to seed cmake
      # search paths so later components find earlier ones
      local previously_installed=()
      mkdir -p $fgfs_install
      for component in $(<$fgfs_source/.$2); do
        # resolve a human-readable branch name for the banner below
        if test -d $fgfs_source/$component/.git; then
          local branch=$(git -C $fgfs_source/$component name-rev --name-only --no-undefined --always HEAD)
        elif test -d $fgfs_source/${component}/.svn; then
          # second path element of the repo-relative URL, e.g. "trunk"
          local branch=${${(s:/:)$(svn info --show-item relative-url $fgfs_source/$component)}[2]}
        fi

# TODO: handle components that do not use cmake
        if test -r $fgfs_source/$component/CMakeLists.txt; then
          local cmake_options=("-DCMAKE_BUILD_TYPE=Release" "-DCMAKE_INSTALL_PREFIX=$fgfs_install/$component")
          # optional per-component extra cmake options
          test -e $fgfs_source/${component}.specific && source $fgfs_source/${component}.specific
          if test ${#previously_installed[@]} -gt 0; then
            # ${^array} distributes the prefix over every element
            cmake_options+=(${(j. .)${:--DCMAKE_PREFIX_PATH=$fgfs_install/${^previously_installed}}})
            cmake_options+=(${(j. .)${:--DCMAKE_INCLUDE_PATH=$fgfs_install/${^previously_installed}/include}})
          fi
          cmake_options+=("-j$simultaneous")

          # banner framed with '*' lines padded to the title's width
          title="*** ${component:u}${branch:+ [$branch]} ***"
          printf "\n%s\n%s\n%s\n" "${(l:${#title}::*:)}" "$title" "${(l:${#title}::*:)}"
          mkdir -p $fgfs_build/$component
          cd $fgfs_build/$component

          # echo the command first, with $HOME abbreviated for readability
          echo cmake ${cmake_options[@]//$HOME/\$HOME} ${fgfs_source/#$HOME/\$HOME}/$component

          cmake ${cmake_options[@]} $fgfs_source/$component > /dev/null \
            && make -j$simultaneous > /dev/null \
            && {
              make install | grep --color=always -v '^-- Up-to-date:'
            } \
            || {
              echo "erreur construction $component"
              cd $FGDIR
              return
            }

          cd - > /dev/null
        fi

        previously_installed+=($component)
      done
      unset component
      ;;
  esac
}
|
91 |
function ld_library_path () {
  # Prepend each built component's lib/ directory to LD_LIBRARY_PATH
  # (deduplicated), then export it so the locally built fgfs binary
  # finds its freshly installed shared libraries.
  local component lib
  for component in $(<$fgfs_source/.flightgear); do
    if test -d $fgfs_install/$component/lib; then
      lib="$fgfs_install/$component/lib"
      # add only if not already present, matching a whole ':'-separated
      # entry (grep -E replaces the deprecated egrep)
      grep -E -q "(^|:)$lib(:|$)" <<< "${LD_LIBRARY_PATH}" || LD_LIBRARY_PATH="${lib}${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
    fi
  done
  export LD_LIBRARY_PATH
}
|
100 |
function check_fgaddon () {
  # Ensure the aircraft database and the sqlite3 tool are both available.
  # Returns 1 with a hint for the user when either is missing.
  if ! test -r $FGADDON/fgaddon.db; then
    echo "aircraft DB not found at $FGADDON/fgaddon.db"
    echo "please run $0 --update-fgaddon"
    return 1
  fi
  if ! command -v sqlite3 > /dev/null; then
    # the previous 'which sqlite3' printed nothing useful once
    # 'command -v' had already failed; tell the user explicitly
    echo "sqlite3 not found, please install it" >&2
    return 1
  fi
}
|
111 |
+ local aircrafts="$FGADDON/Aircraft" |
|
112 |
+ local fgfs_args=("--fg-root=$FGDIR/source/fgdata") |
|
113 |
+ local aircraft= |
|
114 |
+ local airport= |
|
115 |
+ if [[ -o BASH_REMATCH ]]; then |
|
116 |
+ local bash_rematch_set=1 |
|
117 |
+ fi |
|
118 |
+ |
|
119 |
+ for fgfs_arg in $@; do |
|
120 |
+ if test ${fgfs_arg#--} = ${fgfs_arg}; then |
|
121 |
+############ APPAREIL DANS FGADDON ? |
|
122 |
+ if test -n "$(find $aircrafts -maxdepth 2 -type f -name ${fgfs_arg}-set.xml -print -quit)"; then |
|
123 |
+ fgfs_args+=("--aircraft=$fgfs_arg") |
|
124 |
+ fgfs_args+=("--fg-aircraft=$FGADDON/Aircraft") |
|
125 |
+ |
|
126 |
+############ APPAREIL DANS FGDIR/source/fgdata ? |
|
127 |
+ elif official_aircraft=$(find "$fgfs_source/fgdata/Aircraft" -maxdepth 2 -type f -name ${fgfs_arg}-set.xml -printf "%h" -quit) && test -n "$official_aircraft"; then |
|
128 |
+ fgfs_args+=("--aircraft=$fgfs_arg") |
|
129 |
+ fgfs_args+=("--aircraft-dir=$official_aircraft") |
|
130 |
+ unset official_aircraft |
|
131 |
+ |
|
132 |
+############ APPAREIL DISPONIBLE DANS UN HANGAR CONNU ? |
|
133 |
+ elif which sqlite3 > /dev/null 2>&1 \ |
|
134 |
+ && test -r $FGADDON/fgaddon.db \ |
|
135 |
+ && test $(sqlite3 $FGADDON/fgaddon.db <<< "select count(file) from setxml where file is '${fgfs_arg}'") -gt 0; then |
|
136 |
+ local PS3='which aircraft ? ' |
|
137 |
+ local IFS=$'\n' |
|
138 |
+ select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]', |
|
139 |
+ aircrafts.name, |
|
140 |
+ hangars.name, |
|
141 |
+ date(aircrafts.date, 'unixepoch')) |
|
142 |
+ from aircrafts |
|
143 |
+ inner join setxml, hangars |
|
144 |
+ where |
|
145 |
+ aircrafts.hangar = hangars.id |
|
146 |
+ and |
|
147 |
+ setxml.variantof = aircrafts.id |
|
148 |
+ and |
|
149 |
+ setxml.file = '${fgfs_arg}' |
|
150 |
+ order by aircrafts.date desc"); do |
|
151 |
+ test -z "$ac" && continue |
|
152 |
+ read -q "REPLY?download ${${(@s/ /)ac}[1]} ? (y/N) " |
|
153 |
+ if test -n "$REPLY" && test ${REPLY:l} = "y"; then |
|
154 |
+ declare -A hangar |
|
155 |
+ eval $(sqlite3 $FGADDON/fgaddon.db <<< "select printf('hangar[type]=%s;hangar[url]=%s;', type, url) |
|
156 |
+ from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'") |
|
157 |
+ case ${hangar[type]} in |
|
158 |
+ git) |
|
159 |
+ git -C $FGADDON/Aircraft clone ${hangar[url]}/${${(@s/ /)ac}[1]}.git;; |
|
160 |
+ svn) |
|
161 |
+ svn checkout ${hangar[url]}/${${(@s/ /)ac}[1]} $FGADDON/Aircraft/${${(@s/ /)ac}[1]};; |
|
162 |
+ esac |
|
163 |
+ fgfs_args+=("--aircraft=$fgfs_arg") |
|
164 |
+ fgfs_args+=("--fg-aircraft=$FGADDON/Aircraft") |
|
165 |
+ unset -v hangar |
|
166 |
+ else |
|
167 |
+ echo "falling back to default" |
|
168 |
+ fi |
|
169 |
+ break |
|
170 |
+ done |
|
171 |
+ |
|
172 |
+############ SERVEUR MULTIPLAY ? |
|
173 |
+ elif set -o BASH_REMATCH && [[ $fgfs_arg =~ "^mp([0-9]+)$" ]]; then |
|
174 |
+ fgfs_args+=("--multiplay=out,10,mpserver${BASH_REMATCH[2]}.flightgear.org,5000") |
|
175 |
+ if test -n "$(ls -rt1 $HOME/.fgfs/autosave_*.xml | tail -1)"; then |
|
176 |
+ callsign=$(sed -rn 's/^.*<callsign.*>(.+)<.callsign>.*$/\1/p' $(ls -rt1 $HOME/.fgfs/autosave_*.xml | tail -1)) |
|
177 |
+ fi |
|
178 |
+ if test -n "$callsign"; then |
|
179 |
+ fgfs_args+=("--callsign=$callsign") |
|
180 |
+ fi |
|
181 |
+ test -z "$bash_rematch_set" && set +o BASH_REMATCH |
|
182 |
+ |
|
183 |
+############ DEMANDE DE TRACE DE VOL ? |
|
184 |
+ elif [[ $fgfs_arg == "log" ]]; then |
|
185 |
+ # option construite plus tard avec nom de l'appareil |
|
186 |
+ local log_requested=1 |
|
187 |
+ |
|
188 |
+############ AEROPORT ? |
|
189 |
+ else |
|
190 |
+ local candidate_airport=${fgfs_arg%:*}; [[ $candidate_airport == $fgfs_arg ]] && candidate_airport=${fgfs_arg%+*} |
|
191 |
+ local candidate_parking=${fgfs_arg#*:}; [[ $candidate_parking == $fgfs_arg ]] && unset candidate_parking |
|
192 |
+ local candidate_runway=${fgfs_arg#*+}; [[ $candidate_runway == $fgfs_arg ]] && unset candidate_runway |
|
193 |
+ local terrafs=${candidate_airport#-} |
|
194 |
+ |
|
195 |
+ if [[ $terrafs != $candidate_airport && -x $HOME/.fgfs/terrafs && -d $HOME/.fgfs/terrafs.d ]]; then |
|
196 |
+ candidate_airport=$terrafs |
|
197 |
+ $HOME/.fgfs/terrafs $HOME/.fgfs/terrafs.d |
|
198 |
+ fgfs_args+=(--fg-scenery=$HOME/.fgfs/terrafs.d) |
|
199 |
+ fgfs_args+=(--disable-terrasync) |
|
200 |
+ local scenes_dir=$HOME/.fgfs/terrafs.d |
|
201 |
+ else |
|
202 |
+ fgfs_args+=(--terrasync-dir=$HOME/.fgfs/TerraSync) |
|
203 |
+ fgfs_args+=(--enable-terrasync) |
|
204 |
+ local scenes_dir=$HOME/.fgfs/TerraSync |
|
205 |
+ fi |
|
206 |
+ |
|
207 |
+ local airport_data="$scenes_dir/Airports/$candidate_airport[1]:u/$candidate_airport[2]:u/$candidate_airport[3]:u/${candidate_airport:u}" |
|
208 |
+ if ! test -r "${airport_data}.threshold.xml"; then |
|
209 |
+ echo "airport ${candidate_airport:u} not found !" |
|
210 |
+ fi |
|
211 |
+ |
|
212 |
+ if test -n "$candidate_parking" && test "$candidate_parking" = '?'; then |
|
213 |
+ if test -r "${airport_data}.groundnet.xml"; then |
|
214 |
+ echo "Parkings ${candidate_airport:u}:" |
|
215 |
+ sed -rn "/<parkingList/,/parkingList>/s/^.* name=\"([^\"]+).*$/\1/p" "${airport_data}.groundnet.xml" |
|
216 |
+ else |
|
217 |
+ echo "no information for parkings available on ${candidate_airport:u}" |
|
218 |
+ fi |
|
219 |
+ return |
|
220 |
+ elif test -n "$candidate_runway" && test "$candidate_runway" = '?'; then |
|
221 |
+ if test -r "${airport_data}.threshold.xml"; then |
|
222 |
+ echo "Runways ${candidate_airport:u}:" |
|
223 |
+ sed -rn 's|^.*<rwy>(.+)</rwy>.*$|\1|p' "${airport_data}.threshold.xml" |
|
224 |
+ else |
|
225 |
+ echo "no information for runways available on ${candidate_airport:u}" |
|
226 |
+ fi |
|
227 |
+ return |
|
228 |
+ fi |
|
229 |
+ |
|
230 |
+ if test -r "${airport_data}.threshold.xml"; then |
|
231 |
+ fgfs_args+=(--airport=$candidate_airport) |
|
232 |
+ if [[ -a "${airport_data}.groundnet.xml" && -n "$candidate_parking" ]]; then |
|
233 |
+ if sed -rn "/<parkingList/,/parkingList>/p" "${airport_data}.groundnet.xml" | grep -q "name=\"${candidate_parking}\""; then |
|
234 |
+ fgfs_args+=("--parkpos='$candidate_parking'") |
|
235 |
+ else |
|
236 |
+ echo "$candidate_parking isn't a valid parking position" |
|
237 |
+ fi |
|
238 |
+ elif test -n "$candidate_runway"; then |
|
239 |
+ if grep -q "<rwy>${candidate_runway}</rwy>" "${airport_data}.threshold.xml"; then |
|
240 |
+ fgfs_args+=("--runway=$candidate_runway") |
|
241 |
+ else |
|
242 |
+ echo "$candidate_runway isn't a valid runway" |
|
243 |
+ fi |
|
244 |
+ fi |
|
245 |
+ fi |
|
246 |
+ fi |
|
247 |
+ |
|
248 |
+######## AUTRE OPTION |
|
249 |
+ else |
|
250 |
+ case $fgfs_arg in |
|
251 |
+ --update(-data|-source|-build|)) |
|
252 |
+ echo 10 met à jour les data, les sources, ou compile. Prend en option le projet > /dev/null |
|
253 |
+ if test ! -r $fgfs_source/.${2:-flightgear}; then |
|
254 |
+ echo "${2:+unknown set $2\n}usage: --update|--update-data|--update-source|--update-build <set>" |
|
255 |
+ echo "available sets :" ${$(find $fgfs_source -maxdepth 1 -type f -name ".*" -printf "%f ")//#.} |
|
256 |
+ return 1 |
|
257 |
+ fi |
|
258 |
+ for component in $(<$fgfs_source/.${2:-flightgear}); do |
|
259 |
+ if ! test -d $fgfs_source/$component; then |
|
260 |
+ echo component $component not found |
|
261 |
+ return 1 |
|
262 |
+ fi |
|
263 |
+ done |
|
264 |
+ typeset -A control_system_data=( |
|
265 |
+ git pull |
|
266 |
+ svn up |
|
267 |
+ ) |
|
268 |
+ for up in ${${=${fgfs_arg#--update}:-data source build}#-}; do |
|
269 |
+ update_fg $up ${2:-flightgear} |
|
270 |
+ done |
|
271 |
+ unset control_system_data control_system update_command up |
|
272 |
+ return |
|
273 |
+ ;; |
|
274 |
+ --update-(fgaddon|check|rss)) |
|
275 |
+ echo 11 utilise les scripts externe fgaddon, check ou rss > /dev/null |
|
276 |
+ update_fg ${fgfs_arg#--update-} |
|
277 |
+ return |
|
278 |
+ ;; |
|
279 |
+ --help) |
|
280 |
+ echo 1 affiche cette aide et quitte > /dev/null |
|
281 |
+ ( |
|
282 |
+ cat << EOH |
|
283 |
+VARIABLES IMPORTANTES: |
|
284 |
+FGDIR = ${FGDIR:-/!\\ ATTENTION FGDIR VIDE /!\\} |
|
285 |
+FGADDON = ${FGADDON:-/!\\ ATTENTION FGADDON VIDE /!\\} |
|
286 |
+ |
|
287 |
+OPTIONS SPÉCIFIQUES AU SCRIPT: |
|
288 |
+les options commençant par "--" sont: |
|
289 |
+$(declare -f $0 \ |
|
290 |
+ | sed -rn "s|^\s*\((--.+)\) echo ([0-9]+) \"?(.+)\"? > /dev/null$|\1:\2:\3|p" \ |
|
291 |
+ | sed 's/"$//' \ |
|
292 |
+ | awk -F':' -v dq='"' ' |
|
293 |
+ BEGIN { |
|
294 |
+ maxlength = 0 |
|
295 |
+ } |
|
296 |
+ { |
|
297 |
+ order[$1] = strtonum($2) |
|
298 |
+ option[$1] = $3 |
|
299 |
+ if (length($1) > maxlength) maxlength = length($1) |
|
300 |
+ } |
|
301 |
+ END { |
|
302 |
+ PROCINFO["sorted_in"] = "@val_num_asc" |
|
303 |
+ for (o in order) |
|
304 |
+ printf("%s%s%*s%s\n", order[o] % 10 == 0 ? "\n" : "", o, maxlength - length(o) + 1, " ", option[o]) |
|
305 |
+ }') |
|
306 |
+ |
|
307 |
+les options ne commençant pas par "--" peuvent être |
|
308 |
+* log: crée un log au format igc et enregistré dans le fichier au nom comprenant date, heure et nom de l'appareil, |
|
309 |
+ avec une fréquence de 1Hz |
|
310 |
+* mp[0-9]+: un serveur de multiplayer sur lequel se connecter, le callsign sera aussi configuré avec celui trouvé dans autosave, |
|
311 |
+ par exemple mp01 pour une connexion sur mpserver01.flightgear.org:5000 |
|
312 |
+* un appareil: c172p, si l'appareil n'est pas installé mais présent dans la base de données, il sera proposé de l'installer |
|
313 |
+* un aéroport: |
|
314 |
+ * sans emplacement précisé: lfbd |
|
315 |
+ * avec une piste: lfbd+09, si la piste n'est pas trouvée sortie en erreur |
|
316 |
+ il est possible de lister les pistes disponible avec lfbt+\\? (notez l'échappement \\ pour éviter une |
|
317 |
+ interprétation du caractère \`?' par le shell: |
|
318 |
+ $ fgfs ksfo+\\? |
|
319 |
+ Runways KSFO: |
|
320 |
+ 01L |
|
321 |
+ 19R |
|
322 |
+ 01R |
|
323 |
+ 19L |
|
324 |
+ 10L |
|
325 |
+ 28R |
|
326 |
+ 10R |
|
327 |
+ 28L |
|
328 |
+ * avec un emplacement de parking: lfbt:parking_1, si le parking n'est pas trouvé un message l'indique et il est ignoré |
|
329 |
+ comme avec les pistes il est possible de lister les emplacements disponibles: |
|
330 |
+ $ fgfs biis:\\? |
|
331 |
+ Parkings BIIS: |
|
332 |
+ Gate |
|
333 |
+ Ramp_Start_1 |
|
334 |
+ Ramp_Start_2 |
|
335 |
+ |
|
336 |
+Au lancement, la ligne de commande utilisée est affichée (dans l'exemple ci-dessous les options montrées mais son stipulées dans les paramètres proviennent de fgfsrc: |
|
337 |
+$ fgfs ercoupe biis |
|
338 |
+--fg-root=\$HOME/scripts/flightgear/source/fgdata --aircraft=ercoupe --fg-aircraft=\$HOME/.fgfs/flightgear-fgaddon/Aircraft --terrasync-dir=\$HOME/.fgfs/TerraSync --enable-terrasync --airport=biis --addon=\$HOME/.fgfs/flightgear-fgaddon/Addons/SpokenATC |
|
339 |
+... |
|
340 |
+ |
|
341 |
+OPTIONS SPÉCIFIQUES À FG: |
|
342 |
+$(ld_library_path; $FGDIR/install/flightgear/bin/fgfs $fgfs_arg --verbose 2>/dev/null | sed '1d; /: fgfs \[/,+1d') |
|
343 |
+EOH |
|
344 |
+ ) | pager |
|
345 |
+ return |
|
346 |
+ ;; |
|
347 |
+ --show-aircraft) |
|
348 |
+ echo 32 liste les appareils (wrapper pour la même option de FG) > /dev/null |
|
349 |
+ ld_library_path |
|
350 |
+ $FGDIR/install/flightgear/bin/fgfs --show-aircraft --fg-aircraft=$FGADDON/Aircraft 2>/dev/null | pager |
|
351 |
+ return |
|
352 |
+ ;; |
|
353 |
+ --info) |
|
354 |
+ echo 33 "donne des infos sur l'appareil en paramètre" > /dev/null |
|
355 |
+ check_fgaddon || return 1 |
|
356 |
+ local IFS=$'\n' |
|
357 |
+ function _info () { |
|
358 |
+ local _info=$(sqlite3 $FGADDON/fgaddon.db <<< "select \`$1\` |
|
359 |
+ from setxml |
|
360 |
+ inner join aircrafts, hangars |
|
361 |
+ where |
|
362 |
+ aircrafts.hangar = hangars.id |
|
363 |
+ and |
|
364 |
+ setxml.variantof = aircrafts.id |
|
365 |
+ and |
|
366 |
+ setxml.file = '$file' |
|
367 |
+ and |
|
368 |
+ hangars.name = '${${(@s/ /)ac}[2]:1:-1}'") |
|
369 |
+ if test -n "$_info"; then |
|
370 |
+ printf "%s: %s\n" "$2" "$_info" |
|
371 |
+ fi |
|
372 |
+ } |
|
373 |
+ local ac_list=($(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]', |
|
374 |
+ aircrafts.name, |
|
375 |
+ hangars.name, |
|
376 |
+ date(aircrafts.date, 'unixepoch')) |
|
377 |
+ from aircrafts |
|
378 |
+ inner join setxml, hangars |
|
379 |
+ where |
|
380 |
+ aircrafts.hangar = hangars.id |
|
381 |
+ and |
|
382 |
+ setxml.variantof = aircrafts.id |
|
383 |
+ and |
|
384 |
+ setxml.file = '$2' |
|
385 |
+ order by aircrafts.date desc")) |
|
386 |
+ if test ${#ac_list[@]} -gt 1; then |
|
387 |
+ local PS3='which aircraft ? ' |
|
388 |
+ select ac in ${ac_list[@]}; do |
|
389 |
+ test -z "$ac" && continue |
|
390 |
+ break |
|
391 |
+ done |
|
392 |
+ elif test ${#ac_list[@]} -eq 1; then |
|
393 |
+ ac=${ac_list[1]} |
|
394 |
+ else |
|
395 |
+ return |
|
396 |
+ fi |
|
397 |
+ local file=$2 |
|
398 |
+ _info /sim/description "Short description" |
|
399 |
+ _info /sim/long-description "Long description" |
|
400 |
+ _info /sim/author "Author(s)" |
|
401 |
+ _info /sim/flight-model "Flight model" |
|
402 |
+ _info /sim/type "Type" |
|
403 |
+ echo Ratings |
|
404 |
+ for r in FDM systems cockpit model; do |
|
405 |
+ _info /sim/rating/$r " $r" |
|
406 |
+ done |
|
407 |
+ return |
|
408 |
+ ;; |
|
409 |
+ --show-thumbnail) |
|
410 |
+ echo 32 "affiche la vignette de l'appareil en paramètre (cf. fgaddon)" > /dev/null |
|
411 |
+ check_fgaddon || return 1 |
|
412 |
+ local PS3='which aircraft ? ' |
|
413 |
+ local IFS=$'\n' |
|
414 |
+ local ac_list=($(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]', |
|
415 |
+ aircrafts.name, |
|
416 |
+ hangars.name, |
|
417 |
+ date(aircrafts.date, 'unixepoch')) |
|
418 |
+ from aircrafts |
|
419 |
+ inner join setxml, hangars |
|
420 |
+ where |
|
421 |
+ aircrafts.hangar = hangars.id |
|
422 |
+ and |
|
423 |
+ setxml.variantof = aircrafts.id |
|
424 |
+ and ( |
|
425 |
+ setxml.file like '%$2%' |
|
426 |
+ or |
|
427 |
+ aircrafts.name like '%$2%' |
|
428 |
+ ) |
|
429 |
+ order by aircrafts.date desc")) |
|
430 |
+ if test ${#ac_list[@]} -gt 1; then |
|
431 |
+ local PS3='which aircraft ? ' |
|
432 |
+ select ac in ${ac_list[@]}; do |
|
433 |
+ test -z "$ac" && continue |
|
434 |
+ break |
|
435 |
+ done |
|
436 |
+ elif test ${#ac_list[@]} -eq 1; then |
|
437 |
+ ac=${ac_list[1]} |
|
438 |
+ else |
|
439 |
+ return |
|
440 |
+ fi |
|
441 |
+ local url=$(sqlite3 $FGADDON/fgaddon.db <<< "select printf('%s/${${(@s/ /)ac}[1]}', url) |
|
442 |
+ from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'") |
|
443 |
+ if test ${${(@s/ /)ac}[2]:1:-1} = 'FGMEMBERS'; then |
|
444 |
+ url="https://raw.githubusercontent.com/FGMEMBERS/${${(@s/ /)ac}[1]}/master" |
|
445 |
+ fi |
|
446 |
+ if wget --quiet --spider "$url/thumbnail.jpg"; then |
|
447 |
+ curl -s "$url/thumbnail.jpg" \ |
|
448 |
+ | convert - -resize '200%' -normalize -sharpen '0.0x1.0' - \ |
|
449 |
+ | display |
|
450 |
+ else |
|
451 |
+ echo "can't find or get thumbnail for ${${(@s/ /)ac}[1]} (${${(@s/ /)ac}[2]:1:-1}'s hangar)" |
|
452 |
+ fi |
|
453 |
+ return |
|
454 |
+ ;; |
|
455 |
+ --search) |
|
456 |
+ echo 30 "cherche un appareil dans la base de données par le nom" > /dev/null |
|
457 |
+ check_fgaddon || return 1 |
|
458 |
+ sqlite3 $FGADDON/fgaddon.db <<< "select printf('[%s, %s] %s (%s): %s ', |
|
459 |
+ hangars.name, |
|
460 |
+ date(aircrafts.date, 'unixepoch'), |
|
461 |
+ setxml.file, |
|
462 |
+ setxml.\`/sim/flight-model\`, |
|
463 |
+ setxml.\`/sim/description\`) |
|
464 |
+ from setxml |
|
465 |
+ inner join aircrafts, hangars |
|
466 |
+ where |
|
467 |
+ aircrafts.hangar = hangars.id |
|
468 |
+ and |
|
469 |
+ setxml.variantof = aircrafts.id |
|
470 |
+ and ( |
|
471 |
+ setxml.file like '%$2%' |
|
472 |
+ or |
|
473 |
+ aircrafts.name like '%$2%' |
|
474 |
+ ) |
|
475 |
+ order by aircrafts.date desc" |
|
476 |
+ return |
|
477 |
+ ;; |
|
478 |
+ --search-rating) |
|
479 |
+ echo 31 "cherche un appareil dans la base de données par l'évaluation" > /dev/null |
|
480 |
+ check_fgaddon || return 1 |
|
481 |
+ if test -z $(sqlite3 $FGADDON/fgaddon.db <<< "select 1 from pragma_table_info('setxml') |
|
482 |
+ where name == '/sim/rating/${2:-void}'"); then |
|
483 |
+ echo "rating ${2:-void} non trouvé, ratings disponibles:" |
|
484 |
+ sqlite3 $FGADDON/fgaddon.db <<< "select name from pragma_table_info('setxml') |
|
485 |
+ where name like '/sim/rating/%'" | sed 's,/sim/rating/, - ,' |
|
486 |
+ return |
|
487 |
+ fi |
|
488 |
+ sqlite3 $FGADDON/fgaddon.db <<< "select printf('[%s, %s] %s (%s): %s ', |
|
489 |
+ hangars.name, |
|
490 |
+ date(aircrafts.date, 'unixepoch'), |
|
491 |
+ setxml.file, |
|
492 |
+ setxml.\`/sim/flight-model\`, |
|
493 |
+ setxml.\`/sim/description\`) |
|
494 |
+ from setxml |
|
495 |
+ inner join aircrafts, hangars |
|
496 |
+ where |
|
497 |
+ aircrafts.hangar = hangars.id |
|
498 |
+ and |
|
499 |
+ setxml.variantof = aircrafts.id |
|
500 |
+ and |
|
501 |
+ setxml.\`/sim/rating/$2\` >= ${3:-4} |
|
502 |
+ order by aircrafts.date desc" |
|
503 |
+ return |
|
504 |
+ ;; |
|
505 |
+ --mp-list) |
|
506 |
+ echo 40 renvoie la liste des appareils visibles sur multiplayer depuis mpserver01 > /dev/null |
|
507 |
+ local mplist=$(mktemp --dry-run /dev/shm/XXXXXXXXX) |
|
508 |
+ declare -A installed_model fgaddon_model unknown_model |
|
509 |
+ local ac_name= |
|
510 |
+ local MPSERVER=${MPSERVER:-mpserver01.flightgear.org} |
|
511 |
+ local MPPORT=${MPPORT:-5001} |
|
512 |
+ echo data from $MPSERVER:$MPPORT |
|
513 |
+ telnet $MPSERVER $MPPORT 2>/dev/null > $mplist |
|
514 |
+ grep 'pilot(s) online' $mplist |
|
515 |
+ echo |
|
516 |
+ local IFS=$'\n' |
|
517 |
+ for model in $(awk '/@/{a[$NF]++}END{for (i in a) printf("%s (%i)\n", i, a[i])}' $mplist); do |
|
518 |
+ if test -r $FGADDON/${model% *} \ |
|
519 |
+ -o -r $FGDIR/source/fgdata/${model% *}; then |
|
520 |
+ |
|
521 |
+ ((++installed_model[${${(s:/:)model}[2]} ${model#* }])) |
|
522 |
+ |
|
523 |
+ elif test -r $FGDIR/source/fgdata/AI/${model% *}; then |
|
524 |
+ |
|
525 |
+ ((++installed_model[*${${(s:/:)model}[2]} ${model#* }])) |
|
526 |
+ |
|
527 |
+ elif test -n "$(command -v sqlite3)" -a -r $FGADDON/fgaddon.db; then |
|
528 |
+ ac_name=$(sqlite3 $FGADDON/fgaddon.db <<< 'select printf("%s/%s", aircrafts.name, setxml.file) |
|
529 |
+ from aircrafts |
|
530 |
+ inner join setxml |
|
531 |
+ where aircrafts.id = setxml.variantof |
|
532 |
+ and setxml.`/sim/model/path` = "'${model% *}'" |
|
533 |
+ limit 1') |
|
534 |
+ if test -n "$ac_name"; then |
|
535 |
+ ((++fgaddon_model[${ac_name} ${model#* }])) |
|
536 |
+ else |
|
537 |
+ ((++unknown_model[${model}])) |
|
538 |
+ fi |
|
539 |
+ |
|
540 |
+ else |
|
541 |
+ ((++unknown_model[${model}])) |
|
542 |
+ fi |
|
543 |
+ done |
|
544 |
+ if test ${#installed_model[@]} -gt 0; then |
|
545 |
+ echo "${(j:\n:)${(Ok)installed_model[@]}}" > $mplist |
|
546 |
+ echo -e "${#installed_model[@]} models installed (*AI model only):\n$(column -c$(tput cols) $mplist)\n" |
|
547 |
+ fi |
|
548 |
+ if test ${#fgaddon_model[@]} -gt 0; then |
|
549 |
+ echo "${(j:\n:)${(Ok)fgaddon_model[@]}}" > $mplist |
|
550 |
+ echo -e "${#fgaddon_model[@]} models available in FGADDON:\n$(column -c$(tput cols) $mplist)\n" |
|
551 |
+ fi |
|
552 |
+ if test ${#unknown_model[@]} -gt 0; then |
|
553 |
+ echo "${(j:\n:)${(Ok)unknown_model[@]}}" > $mplist |
|
554 |
+ echo -e "${#unknown_model[@]} unknown models:\n$(column -c$(tput cols) $mplist)" |
|
555 |
+ fi |
|
556 |
+ unset installed_model unknown_model fgaddon_model |
|
557 |
+ rm $mplist |
|
558 |
+ return |
|
559 |
+ ;; |
|
560 |
+ --install) |
|
561 |
+ echo 35 "installe l'appareil donné en paramètre" > /dev/null |
|
562 |
+ check_fgaddon || return 1 |
|
563 |
+ local PS3='which aircraft ? ' |
|
564 |
+ local IFS=$'\n' |
|
565 |
+ select ac in $(sqlite3 $FGADDON/fgaddon.db <<< "select distinct printf('%s [%s, %s]', |
|
566 |
+ aircrafts.name, |
|
567 |
+ hangars.name, |
|
568 |
+ date(aircrafts.date, 'unixepoch')) |
|
569 |
+ from aircrafts |
|
570 |
+ inner join setxml, hangars |
|
571 |
+ where |
|
572 |
+ aircrafts.hangar = hangars.id |
|
573 |
+ and |
|
574 |
+ setxml.variantof = aircrafts.id |
|
575 |
+ and ( |
|
576 |
+ setxml.file like '%$2%' |
|
577 |
+ or |
|
578 |
+ aircrafts.name like '%$2%' |
|
579 |
+ ) |
|
580 |
+ order by aircrafts.date desc"); do |
|
581 |
+ test -z "$ac" && continue |
|
582 |
+ done |
|
583 |
+ test -z "$ac" && return |
|
584 |
+ declare -A hangar |
|
585 |
+ eval $(sqlite3 $FGADDON/fgaddon.db <<< "select printf('hangar[type]=%s;hangar[url]=%s;', type, url) |
|
586 |
+ from hangars where name = '${${(@s/ /)ac}[2]:1:-1}'") |
|
587 |
+ case ${hangar[type]} in |
|
588 |
+ git) |
|
589 |
+ git -C $FGADDON/Aircraft clone ${hangar[url]}/${${(@s/ /)ac}[1]}.git;; |
|
590 |
+ svn) |
|
591 |
+ svn checkout ${hangar[url]}/${${(@s/ /)ac}[1]} $FGADDON/Aircraft/${${(@s/ /)ac}[1]};; |
|
592 |
+ esac |
|
593 |
+ unset hangar |
|
594 |
+ return |
|
595 |
+ ;; |
|
596 |
+ --mumble) |
|
597 |
+ echo 90 lance FG avec mumble > /dev/null |
|
598 |
+ if pgrep -u $USER mumble > /dev/null; then |
|
599 |
+ echo "there is already a mumble instance launched" |
|
600 |
+ else |
|
601 |
+ mumble_desktop_entry=($(if test -n "$XDG_DATA_DIRS"; then find "${(s/:/)XDG_DATA_DIRS}" -type f,l -iname "*mumble*desktop"; fi)) |
|
602 |
+ if test ${#mumble_desktop_entry[@]} -gt 0; then |
|
603 |
+ echo lauching ${mumble_desktop_entry[1]} |
|
604 |
+ nohup gtk-launch ${mumble_desktop_entry[1]##*/} > /dev/null 2>&1 & |
|
605 |
+ else |
|
606 |
+ echo "no desktop entry found for mumble (XDG_DATA_DIRS=$XDG_DATA_DIRS)" |
|
607 |
+ return 1 |
|
608 |
+ fi |
|
609 |
+ fi |
|
610 |
+ ;& # on continue avec l'addon fgcom |
|
611 |
+ --fgcom) |
|
612 |
+ echo 91 lance FG avec fgcom-mumble > /dev/null |
|
613 |
+ if test -r $fgfs_source/fgcom-mumble/client/fgfs-addon/addon-metadata.xml; then |
|
614 |
+ fgfs_args+=("--addon=$fgfs_source/fgcom-mumble/client/fgfs-addon") |
|
615 |
+ else |
|
616 |
+ echo "can't find addon fgcom-mumble" |
|
617 |
+ return 1 |
|
618 |
+ fi |
|
619 |
+ ;; |
|
620 |
+ --map|--lnm) |
|
621 |
+ echo 92 lance LittleNavMap en parallèle ainsi que la liaison avec FG > /dev/null |
|
622 |
+ if ! pgrep -u $USER -f "python3 ./fgconnect.py" > /dev/null; then |
|
623 |
+ if test -d $FGADDON/Addons/littlenavmap -a -x $fgfs_source/fgconnect/fgconnect.py; then |
|
624 |
+ ( cd $fgfs_source/fgconnect; python3 ./fgconnect.py -s )& |
|
625 |
+ else |
|
626 |
+ echo "can't find FGconnect or littlenavmap addon" |
|
627 |
+ return 1 |
|
628 |
+ fi |
|
629 |
+ else |
|
630 |
+ echo "FGconnect already lauched" |
|
631 |
+ fi |
|
632 |
+ fgfs_args+=("--addon=$FGADDON/Addons/littlenavmap") |
|
633 |
+ ;; |
|
634 |
+ --addon=*) |
|
635 |
+ echo 99 lance FG avec un addon spécifique installé > /dev/null |
|
636 |
+ addon_path="${fgfs_arg#*=}" |
|
637 |
+ if test -d "$addon_path"; then |
|
638 |
+ fgfs_args+=("--addon=$addon_path") |
|
639 |
+ elif test -d "$FGADDON/Addons/$addon_path"; then |
|
640 |
+ fgfs_args+=("--addon=$FGADDON/Addons/$addon_path") |
|
641 |
+ else |
|
642 |
+ echo "can't find requested addon in $addon_path or $FGADDON/$addon_path !" |
|
643 |
+ fi |
|
644 |
+ ;; |
|
645 |
+ *) |
|
646 |
+ completion_source="$FGDIR/install/flightgear/share/zsh/site-functions/_fgfs" |
|
647 |
+ if test -r "$completion_source"; then |
|
648 |
+ if egrep --quiet "^\s*'${fgfs_arg%=*}=?\[" "$completion_source"; then |
|
649 |
+ fgfs_args+=($fgfs_arg) |
|
650 |
+ else |
|
651 |
+ echo unknown option: ${fgfs_arg%=*} |
|
652 |
+ return 1 |
|
653 |
+ fi |
|
654 |
+ else |
|
655 |
+ fgfs_args+=($fgfs_arg) |
|
656 |
+ fi |
|
657 |
+ ;; |
|
658 |
+ esac |
|
659 |
+ fi |
|
660 |
+ done |
|
661 |
+ unset fgfs_arg |
|
662 |
+ if [[ -n "$log_requested" && -z "${fgfs_args[(r)--igc=*]}" ]]; then |
|
663 |
+ fgfs_args+=(--igc=file,out,1,$(date +%Y%m%d-%H%M-${${${fgfs_args[(r)--aircraft=*]}#--aircraft=}:-$(sed -rn 's|^.+aircraft>(.+)</aircraft.+$|\1|p' $FGDIR/source/fgdata/defaults.xml)}.igc)) |
|
664 |
+ fi |
|
665 |
+ |
|
666 |
+ ld_library_path |
|
667 |
+ |
|
668 |
+ fgfsrc=$HOME/.fgfs/fgfsrc |
|
669 |
+ echo ${fgfs_args[@]//$HOME/\$HOME} $(test -r $fgfsrc && sed -r "/^\s*(#|$)/d;s|$HOME|\$HOME|" $fgfsrc | tr '\n' ' ') |
|
670 |
+# TODO: ne lancer avec primusrun que si c'est nécesaire, d'autres solution existent ? |
|
671 |
+ primusrun $FGDIR/install/flightgear/bin/fgfs ${fgfs_args[@]} |
|
672 |
+ |
|
673 |
+ if grep -q $HOME/.fgfs/terrafs.d /proc/mounts; then |
|
674 |
+ fusermount -u $HOME/.fgfs/terrafs.d |
|
675 |
+ fi |
|
676 |
+ unset fgfs_args |
|
677 |
+} |
... | ... |
@@ -0,0 +1,71 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+source_path=${1:-"$HOME/scripts/flightgear/source"} |
|
4 |
+job=fgfs |
|
5 |
+ |
|
6 |
+function xmlgetnext () { |
|
7 |
+ local IFS='>' |
|
8 |
+ read -d '<' TAG VALUE |
|
9 |
+ # by design, the first TAG/VALUE pair is empty |
|
10 |
+ # to avoid infinite loops at end of file parsing we return an error |
|
11 |
+ # the next time we find an empty TAG |
|
12 |
+ if test -z "$TAG"; then |
|
13 |
+ test ${xmlgetnext_firstentry:-1} -eq 1 && xmlgetnext_firstentry=0 || return 1; |
|
14 |
+ fi |
|
15 |
+ # process $TAG only if necessary |
|
16 |
+ local _TAG=$(printf '%q' $TAG) |
|
17 |
+ if test ${_TAG:0:1} = '$'; then |
|
18 |
+ TAG=$(tr '\n' ' ' <<< $TAG | sed 's/ */ /g; s/ *$//') |
|
19 |
+ fi |
|
20 |
+} |
|
21 |
+ |
|
22 |
+exit_trap () { |
|
23 |
+ rm $apixml |
|
24 |
+} |
|
25 |
+ |
|
26 |
+set -e |
|
27 |
+trap exit_trap EXIT |
|
28 |
+ |
|
29 |
+apixml=$(mktemp --dry-run /dev/shm/XXXXXXXXXX) |
|
30 |
+ |
|
31 |
+wget -qO- --header 'Accept:application/xml' http://build.flightgear.org:8080/job/$job/api/xml > $apixml |
|
32 |
+ |
|
33 |
+declare -A code |
|
34 |
+ |
|
35 |
+while xmlgetnext; do |
|
36 |
+ case "${TAG:0:1}" in |
|
37 |
+ ''|'?'|'!') |
|
38 |
+ continue;; |
|
39 |
+ /) |
|
40 |
+ property=${property%/*};; |
|
41 |
+ *) |
|
42 |
+ if test "${TAG: -1}" != '/'; then |
|
43 |
+ property+=/${TAG%% *} |
|
44 |
+ fi;; |
|
45 |
+ esac |
|
46 |
+ |
|
47 |
+ case "$property" in |
|
48 |
+ /workflowJob/healthReport/score) |
|
49 |
+ score=$VALUE;; |
|
50 |
+ |
|
51 |
+ /workflowJob/lastSuccessfulBuild/url) |
|
52 |
+ wget -qO- --header 'Accept:application/xml' ${VALUE}/api/xml >> $apixml;; |
|
53 |
+ |
|
54 |
+ /workflowRun/action/lastBuiltRevision/branch/SHA1) |
|
55 |
+ sha1=$VALUE;; |
|
56 |
+ |
|
57 |
+ /workflowRun/action/remoteUrl) |
|
58 |
+ [[ ${VALUE##*/} = @(flight|sim)gear ]] && code[${VALUE##*/}]=$sha1;; |
|
59 |
+ |
|
60 |
+ /workflowRun/result) |
|
61 |
+ result=$VALUE |
|
62 |
+ for path in ${!code[@]}; do |
|
63 |
+ if test $(git -C "$source_path/$path" rev-parse HEAD) != ${code[$path]}; then |
|
64 |
+ echo "mismatch revision from jenkins info" |
|
65 |
+ exit |
|
66 |
+ fi |
|
67 |
+ done |
|
68 |
+ echo "result: $result, score $score" |
|
69 |
+ exit;; |
|
70 |
+ esac |
|
71 |
+done < $apixml |
... | ... |
@@ -0,0 +1,4 @@ |
1 |
+OpenSceneGraph |
|
2 |
+simgear |
|
3 |
+flightgear |
|
4 |
+fgdata |
... | ... |
@@ -0,0 +1 @@ |
1 |
+jsbsim |
... | ... |
@@ -0,0 +1 @@ |
1 |
+OpenVSP |
... | ... |
@@ -0,0 +1,17 @@ |
1 |
+mkdir -p $fgfs_build/OpenVSPlibs |
|
2 |
+cd $fgfs_build/OpenVSPlibs |
|
3 |
+cmake -DVSP_USE_SYSTEM_LIBXML2=true \ |
|
4 |
+ -DVSP_USE_SYSTEM_FLTK=true \ |
|
5 |
+ -DVSP_USE_SYSTEM_GLM=true \ |
|
6 |
+ -DVSP_USE_SYSTEM_GLEW=true \ |
|
7 |
+ -DVSP_USE_SYSTEM_CMINPACK=true \ |
|
8 |
+ -DVSP_USE_SYSTEM_LIBIGES=false \ |
|
9 |
+ -DVSP_USE_SYSTEM_EIGEN=false \ |
|
10 |
+ -DVSP_USE_SYSTEM_CODEELI=false \ |
|
11 |
+ -DVSP_USE_SYSTEM_CPPTEST=false \ |
|
12 |
+ -DCMAKE_BUILD_TYPE=Release \ |
|
13 |
+ $fgfs_source/OpenVSP/Libraries \ |
|
14 |
+&& make -j$simultaneous \ |
|
15 |
+&& cmake_options+=("-DVSP_LIBRARY_PATH=$fgfs_build/OpenVSPlibs") \ |
|
16 |
+&& cmake_options+=("-DCMAKE_BUILD_TYPE=Release") |
|
17 |
+cd - |
... | ... |
@@ -0,0 +1,9 @@ |
1 |
+cmake_options+=("-DENABLE_COMPOSITOR=ON") |
|
2 |
+cmake_options+=("-DFG_DATA_DIR:PATH=$fgfs_source/fgdata") |
|
3 |
+cmake_options+=("-DTRANSLATIONS_SRC_DIR:PATH=$fgfs_source/fgdata/Translations") |
|
4 |
+cmake_options+=("-DSimGear_DIR=$fgfs_install/simgear/lib/cmake/SimGear") |
|
5 |
+#cmake_options+=("CFLAGS="--std=c++14"") |
|
6 |
+cmake_options+=("-DCMAKE_CXX_COMPILER=g++-9") |
|
7 |
+cmake_options+=("-DCMAKE_CC_COMPILER=gcc-9") |
|
8 |
+ |
|
9 |
+export OSG_DIR=$fgfs_install/OpenSceneGraph |
... | ... |
@@ -0,0 +1,4 @@ |
1 |
+#cmake_options+=("CFLAGS="--std=c++14"") |
|
2 |
+cmake_options+=("-DCMAKE_CXX_COMPILER=g++-9") |
|
3 |
+cmake_options+=("-DCMAKE_CC_COMPILER=gcc-9") |
|
4 |
+ |
... | ... |
@@ -25,6 +25,26 @@ function core_alim () { |
25 | 25 |
test $volt_fmt -lt 12000 -o $volt_fmt -gt 15000 && echo "$volt" |
26 | 26 |
} |
27 | 27 |
|
28 |
+function clean_swap () { |
|
29 |
+ _turn 2 || return |
|
30 |
+ echo 1 > /proc/sys/vm/drop_caches |
|
31 |
+ if LANG=C free -m | awk ' |
|
32 |
+ /^Swap:/ { |
|
33 |
+ used = $3 |
|
34 |
+ } |
|
35 |
+ END { |
|
36 |
+ bash_exit_code = !(used > 31) |
|
37 |
+ exit bash_exit_code |
|
38 |
+ } |
|
39 |
+ '; then |
|
40 |
+ systemctl stop dphys-swapfile \ |
|
41 |
+ && systemctl stop zram \ |
|
42 |
+ && systemctl start zram \ |
|
43 |
+ && systemctl start dphys-swapfile \ |
|
44 |
+ || echo 'erreur' |
|
45 |
+ fi |
|
46 |
+} |
|
47 |
+ |
|
28 | 48 |
function mem_pressure () { |
29 | 49 |
local meminfo=($(</proc/meminfo)) |
30 | 50 |
local level=(danger severe warning) |
... | ... |
@@ -21,7 +21,7 @@ function sys_state () { |
21 | 21 |
echo "$SystemState mode ($NFailedUnits units failed):" |
22 | 22 |
echo "$(systemctl --failed --no-legend | awk ' |
23 | 23 |
{ |
24 |
- split($1, units, ".") |
|
24 |
+ split(gensub(/^[^[:alnum:]]*([^ ]+).*$/, "\\1", "1"), units, ".") |
|
25 | 25 |
failed[units[2]][units[1]]=1 |
26 | 26 |
numfailed[units[2]]++ |
27 | 27 |
} |
... | ... |
@@ -99,3 +99,8 @@ function upgrades () { |
99 | 99 |
local status=$(cut -f3 -d' ' <<< $lastline) |
100 | 100 |
test $(($lastentry + 86400)) -lt $now && echo "pas de mise à jour depuis le ${lastline%% *}" |
101 | 101 |
} |
102 |
+ |
|
103 |
+function chrony () { |
|
104 |
+ local synced=$(chronyc sources | grep -c '^\^\*') |
|
105 |
+ test $synced -ne 1 && echo "NTP pas synchro" |
|
106 |
+} |
... | ... |
@@ -70,8 +70,8 @@ function core_temp () { |
70 | 70 |
local min=60 |
71 | 71 |
local level=("warning" "severe" "danger!") |
72 | 72 |
local step=$(( $(</sys/class/thermal/thermal_zone0/temp) - (min * 1000) )) |
73 |
- if test $step -gt 1; then |
|
74 |
-# if test $step -gt 5000; then # en été il fait plus chaud, c'est normal que ça monte à 60° régulièrement |
|
73 |
+# if test $step -gt 1; then |
|
74 |
+ if test $step -gt 5000; then # en été il fait plus chaud, c'est normal que ça monte à 60° régulièrement |
|
75 | 75 |
step=$((step / 10000)) |
76 | 76 |
test $step -le 2 || step=2 |
77 | 77 |
echo "${level[step]}: température > $((min + step * 10))°" |
... | ... |
@@ -87,15 +87,14 @@ function core_alim () { |
87 | 87 |
|
88 | 88 |
function clean_swap () { |
89 | 89 |
_turn 2 || return |
90 |
- if free -m | awk ' |
|
91 |
- /^Mem:/ { |
|
92 |
- free = $4 |
|
93 |
- } |
|
90 |
+ echo 1 > /proc/sys/vm/drop_caches |
|
91 |
+ if LANG=C free -m | awk ' |
|
94 | 92 |
/^Swap:/ { |
95 | 93 |
used = $3 |
96 | 94 |
} |
97 | 95 |
END { |
98 |
- exit !(used > 31 && free - used > 0) |
|
96 |
+ bash_exit_code = !(used > 62) |
|
97 |
+ exit bash_exit_code |
|
99 | 98 |
} |
100 | 99 |
'; then |
101 | 100 |
systemctl stop dphys-swapfile \ |
... | ... |
@@ -0,0 +1,34 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+# horrible pansement pour permettre à bumblebee de fonctionner |
|
4 |
+# ref: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=963980 |
|
5 |
+ |
|
6 |
+# PATH=/usr/local/bin:/usr/bin:/bin:/usr/games:/usr/sbin:/usr/sbin |
|
7 |
+# $0 : /usr/local/bin/primusrun |
|
8 |
+ |
|
9 |
+if dpkg -l primus > /dev/null 2>&1; then |
|
10 |
+ eval "$(egrep '^\s*(export +)?[A-Za-z_]+=' /usr/bin/primusrun)" |
|
11 |
+# autres variables (cf. /usr/bin/primusrun): |
|
12 |
+# export PRIMUS_SYNC=${PRIMUS_SYNC:-0} |
|
13 |
+# export PRIMUS_VERBOSE=${PRIMUS_VERBOSE:-1} |
|
14 |
+# export PRIMUS_UPLOAD=${PRIMUS_UPLOAD:-0} |
|
15 |
+# export PRIMUS_SLEEP=${PRIMUS_SLEEP:-90} |
|
16 |
+# export PRIMUS_DISPLAY=${PRIMUS_DISPLAY:-:8} |
|
17 |
+# export PRIMUS_libGLa=${PRIMUS_libGLa:-'/usr/$LIB/nvidia/libGL.so.1'} |
|
18 |
+# export PRIMUS_libGLd=${PRIMUS_libGLd:-'/usr/$LIB/libGL.so.1'} |
|
19 |
+ ( |
|
20 |
+ path=/proc/driver/nvidia |
|
21 |
+ if ! test -e $path; then |
|
22 |
+ IFS=':' read -a radio <<< $(LANG=C nmcli -t radio) |
|
23 |
+ nmcli radio wifi off |
|
24 |
+ while ! test -e $path; do |
|
25 |
+ continue |
|
26 |
+ done |
|
27 |
+ test ${radio[1]} = 'enabled' && nmcli radio wifi on |
|
28 |
+ fi |
|
29 |
+ )& |
|
30 |
+ exec "$@" |
|
31 |
+else |
|
32 |
+ echo "primus isn't installed" |
|
33 |
+ exit 1 |
|
34 |
+fi |
... | ... |
@@ -13,7 +13,7 @@ for i in $(seq 4); do |
13 | 13 |
snmpget -v 1 -c public $printer \ |
14 | 14 |
${prtMarkerColorantValue}.$i \ |
15 | 15 |
${prtMarkerSuppliesLevel}.$i \ |
16 |
- ${prtMarkerSuppliesMaxCapacity}.$i | awk ' |
|
16 |
+ ${prtMarkerSuppliesMaxCapacity}.$i | awk -v dq='"' ' |
|
17 | 17 |
BEGIN{ |
18 | 18 |
i=0 |
19 | 19 |
} |
... | ... |
@@ -22,7 +22,7 @@ for i in $(seq 4); do |
22 | 22 |
i++ |
23 | 23 |
} |
24 | 24 |
END{ |
25 |
- printf("%s: %d%%\n",a[0],a[1]/a[2]*100) |
|
25 |
+ printf("%s: %d%%\n",gensub(dq, "", "g", a[0]),a[1]/a[2]*100) |
|
26 | 26 |
}' |
27 | 27 |
done |
28 | 28 |
echo |
... | ... |
@@ -0,0 +1,207 @@ |
1 |
+#!/bin/bash |
|
2 |
+ |
|
3 |
+# script pour renommer des personnages ou lieux dans un document Manuskript |
|
4 |
+# ./renomme <document manuskript> <ancien nom> <nouveau nom> [ins=poids] [del=poids] [rep=poids] |
|
5 |
+# |
|
6 |
+# Le document d'origine est sauvegardé (voir variable $backup) |
|
7 |
+# |
|
8 |
+# Si <nouveau nom> est "check", ou "prox" ou "leven", une étude de proximité est alors effectuée |
|
9 |
+# sur l'algorithme Levenshtein. Les paramètres de poids sont des nombres entiers, et permettent |
|
10 |
+# de pondérer l'ajout (ins=), la suppression (del=) et le remplacement (rep=) de caractère. |
|
11 |
+# par défaut chacun des trois paramètres est égal à 1. |
|
12 |
+# Il n'y a pas d'ordre obligatoire pour le paramétrage, et si un paramétrage est effectué plusieurs fois |
|
13 |
+# c'est le plus à gauche qui prend la priorité. Il n'y a pas de backup effectué pour l'opération de vérification de la proximité |
|
14 |
+# crédit algo: https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance |
|
15 |
+ |
|
16 |
+set -e |
|
17 |
+ |
|
18 |
+manuscrit=${1:?} |
|
19 |
+ancien=${2:?} |
|
20 |
+nouveau=${3:?} |
|
21 |
+ |
|
22 |
+if test "${manuscrit:0:1}" != '/'; then |
|
23 |
+ manuscrit="$PWD/$manuscrit" |
|
24 |
+fi |
|
25 |
+test -r "$manuscrit" || exit 1 |
|
26 |
+test $(file --brief --mime-type --dereference "$manuscrit") == 'application/zip' || exit 2 |
|
27 |
+ |
|
28 |
+backup="${manuscrit/%.msk} (avant renommage de «${ancien}» en «${nouveau}»).msk" |
|
29 |
+ |
|
30 |
+function trap_exit () { |
|
31 |
+ rm -fr $temp |
|
32 |
+ cd - > /dev/null |
|
33 |
+} |
|
34 |
+ |
|
35 |
+function determinant () { |
|
36 |
+ eval "local nom=\"\$$1\"" |
|
37 |
+ if [[ ${nom:0:1} == @(A|E|H|I|O|U) \ |
|
38 |
+ || ${nom:0:1} == @(Â|Ê|H|Î|Ô|Û) \ |
|
39 |
+ || ${nom:0:1} == @(Ä|Ë|H|Ï|Ö|Ü) \ |
|
40 |
+ || ${nom:0:1} == @(À|È|H|Ì|Ò|Ù) \ |
|
41 |
+ || ${nom:0:1} == @(Á|É|H|Í|Ó|Ú) \ |
|
42 |
+ ]]; then |
|
43 |
+ eval "determinant_$1=\"d'\"" |
|
44 |
+ eval "determinant_$1_formatted=\"d-\"" |
|
45 |
+ else |
|
46 |
+ eval "determinant_$1=\"de \"" |
|
47 |
+ eval "determinant_$1_formatted=\"de-\"" |
|
48 |
+ fi |
|
49 |
+} |
|
50 |
+ |
|
51 |
+function format () { |
|
52 |
+ eval "$1_formatted=\$(tr --complement --squeeze-repeats 'A-Za-z-_\n' - <<< \"\${$1// /_}\")" |
|
53 |
+} |
|
54 |
+ |
|
55 |
+function renomme_fichiers () { |
|
56 |
+ for char in $(find characters -type f -regex "characters/[0-9]+-$1.txt"); do |
|
57 |
+ local new_char=$(sed "s/$1/$2/" <<< $char) |
|
58 |
+ echo "character: $char -> $new_char" |
|
59 |
+ mv $char $new_char |
|
60 |
+ break |
|
61 |
+ done |
|
62 |
+ |
|
63 |
+ for chapter in $(find outline -type d -regex "outline/.*$1.*"); do |
|
64 |
+ local new_chapter=$(sed "s/$1/$2/g" <<< $chapter) |
|
65 |
+ echo "chapter: $chapter -> $new_chapter" |
|
66 |
+ mv $chapter $new_chapter |
|
67 |
+ done |
|
68 |
+ |
|
69 |
+ for part in $(find outline -type f -regex "outline/[^/]*$1[^/]*.md"); do |
|
70 |
+ local new_part=$(sed "s/$1/$2/g" <<< $part) |
|
71 |
+ echo "part: $part -> $new_part" |
|
72 |
+ mv $part $new_part |
|
73 |
+ done |
|
74 |
+} |
|
75 |
+ |
|
76 |
+trap trap_exit EXIT |
|
77 |
+ |
|
78 |
+temp=$(mktemp --directory /dev/shm/XXXXXXXXX) |
|
79 |
+cd $temp |
|
80 |
+ |
|
81 |
+if [[ $nouveau = @(check|prox|leven) ]]; then |
|
82 |
+ unzip -qq "$manuscrit" |
|
83 |
+ for param in $(seq 4 $#); do |
|
84 |
+ for dst in ins del rep max; do |
|
85 |
+ eval "if test -n '\$$param' && [[ \"\$$param\" =~ $dst=[0-9]+ ]]; then cost_$dst=\${$param#*=}; fi" |
|
86 |
+ done |
|
87 |
+ done |
|
88 |
+ for dst in ins del rep; do |
|
89 |
+ eval "cost_$dst=\${cost_$dst:-1}" |
|
90 |
+ done |
|
91 |
+ cost_max=${cost_max:-$(($cost_ins + $cost_del + $cost_rep))} |
|
92 |
+ if test $cost_max -ge ${#ancien}; then |
|
93 |
+ cost_max=$(( ${#ancien} - 1 )) |
|
94 |
+ fi |
|
95 |
+ |
|
96 |
+ echo paramètres d\'approximation |
|
97 |
+ echo "caractère manquant (del=): $cost_del" |
|
98 |
+ echo "caractère inséré (ins=): $cost_ins" |
|
99 |
+ echo "caractère remplacé (rep=): $cost_rep" |
|
100 |
+ echo "distance maximale (max=): $cost_max" |
|
101 |
+ for f in $(find . -type f); do |
|
102 |
+ let wc+=$(wc -w < $f) |
|
103 |
+ done |
|
104 |
+ awk -v ancien=$ancien -v wc=$wc -v cost_ins=$cost_ins -v cost_del=$cost_del -v cost_rep=$cost_rep -v cost_max=$cost_max ' |
|
105 |
+ BEGIN { |
|
106 |
+ RS="[[:punct:]]" |
|
107 |
+ progress_mod = 10 |
|
108 |
+ actual_progress = 0 |
|
109 |
+ pct_progress = 0 |
|
110 |
+ progress = 0 |
|
111 |
+ found_words = 0 |
|
112 |
+ |
|
113 |
+ cost_tot = cost_ins + cost_del + cost_rep |
|
114 |
+ cost_tot = cost_tot > cost_max ? cost_max : cost_tot |
|
115 |
+ |
|
116 |
+ str1_len = length(ancien) |
|
117 |
+ for (i=1; i<=str1_len; i++) |
|
118 |
+ str1_substr[i]=substr(ancien, i, 1) |
|
119 |
+ } |
|
120 |
+ function levenshtein(str2) { |
|
121 |
+ str2_len = length(str2) |
|
122 |
+ if(str2_len == 0) return str1_len * cost_del |
|
123 |
+ for(j = 1; j <= str2_len; j++) |
|
124 |
+ str2_substr[j]=substr(str2, j, 1) |
|
125 |
+ matrix[0, 0] = 0 |
|
126 |
+ for(i = 1; i <= str1_len; i++) { |
|
127 |
+ matrix[i, 0] = i * cost_del |
|
128 |
+ for(j = 1; j <= str2_len; j++) { |
|
129 |
+ matrix[0, j] = j * cost_ins |
|
130 |
+ x = matrix[i - 1, j] + cost_del |
|
131 |
+ y = matrix[i, j - 1] + cost_ins |
|
132 |
+ z = matrix[i - 1, j - 1] + (str1_substr[i] == str2_substr[j] ? 0 : cost_rep) |
|
133 |
+ x = x < y ? x : y |
|
134 |
+ matrix[i, j] = x < z ? x : z |
|
135 |
+ } |
|
136 |
+ } |
|
137 |
+ return matrix[str1_len, str2_len] |
|
138 |
+ } |
|
139 |
+ { |
|
140 |
+ for (word=1; word<=NF; word++) { |
|
141 |
+ progress++ |
|
142 |
+ lvstn = levenshtein(gensub("[[:punct:]]","","g",$word)) |
|
143 |
+ if (lvstn <= cost_tot && lvstn > 0) { |
|
144 |
+ key = sprintf("%s (%d)", $word, lvstn) |
|
145 |
+ approx_possibles[key]++ |
|
146 |
+ found_words++ |
|
147 |
+ } |
|
148 |
+ pct_progress=int(progress / wc * 100) |
|
149 |
+ if (actual_progress < pct_progress && pct_progress % progress_mod == 0) { |
|
150 |
+ actual_progress = pct_progress |
|
151 |
+ printf("%i%\n", actual_progress) |
|
152 |
+ } |
|
153 |
+ } |
|
154 |
+ } |
|
155 |
+ END { |
|
156 |
+ if (found_words > 0) { |
|
157 |
+ pluriel = found_words > 1 ? "s" : "" |
|
158 |
+ printf("mot%s proche%s de «%s» (distance) [occurences]\n", pluriel, pluriel, ancien) |
|
159 |
+ for (i in approx_possibles) |
|
160 |
+ printf("- %s [%i]\n", i, approx_possibles[i]) |
|
161 |
+ } |
|
162 |
+ else { |
|
163 |
+ print "aucun mot proche et différent de «" ancien "» trouvé" |
|
164 |
+ } |
|
165 |
+ } |
|
166 |
+ ' $(find . -type f) |
|
167 |
+ exit |
|
168 |
+fi |
|
169 |
+ |
|
170 |
+mv --backup=numbered "$manuscrit" "$backup" |
|
171 |
+ |
|
172 |
+unzip -qq "$backup" |
|
173 |
+ |
|
174 |
+for version in ancien nouveau; do |
|
175 |
+ format $version |
|
176 |
+ determinant $version |
|
177 |
+done |
|
178 |
+ |
|
179 |
+declare -A remplacement |
|
180 |
+remplacement=( |
|
181 |
+ [$ancien]="$nouveau" |
|
182 |
+ [$ancien_formatted]="$nouveau_formatted" |
|
183 |
+ [$determinant_ancien$ancien]="$determinant_nouveau$nouveau" |
|
184 |
+ [$determinant_ancien_formatted$ancien_formatted]="$determinant_nouveau_formatted$nouveau_formatted" |
|
185 |
+) |
|
186 |
+ |
|
187 |
+renomme_fichiers "$ancien_formatted" "$nouveau_formatted" |
|
188 |
+renomme_fichiers "$determinant_ancien_formatted$ancien_formatted" "$determinant_nouveau_formatted$nouveau_formatted" |
|
189 |
+ |
|
190 |
+egrep --word-regexp --only-matching --recursive --regexp="($determinant_ancien|$determinant_ancien_formatted)\?($ancien|$ancien_formatted)" . \ |
|
191 |
+| awk -v name="$1" -F ':' ' |
|
192 |
+ { |
|
193 |
+ if ($NF > 0) { |
|
194 |
+ file[$1]++ |
|
195 |
+ nb++ |
|
196 |
+ } |
|
197 |
+ } |
|
198 |
+ END { |
|
199 |
+ printf("remplacement de %i occurences pour %s dans %i fichiers\n", nb, name, asort(file)) |
|
200 |
+ }' |
|
201 |
+ |
|
202 |
+for regexp in "${!remplacement[@]}"; do |
|
203 |
+ egrep --word-regexp --files-with-matches --recursive --regexp="$regexp" . \ |
|
204 |
+ | xargs --no-run-if-empty sed --regexp-extended --in-place "s/(\W|^)$regexp(\W|$)/\1${remplacement[$regexp]}\2/g" |
|
205 |
+done |
|
206 |
+ |
|
207 |
+zip --recurse-paths --no-dir-entries -qq "${manuscrit}" * |
... | ... |
@@ -1,12 +1,19 @@ |
1 | 1 |
#!/bin/zsh |
2 | 2 |
# bash compatible |
3 | 3 |
|
4 |
-for c in nvidia-current vboxdrv; do |
|
4 |
+declare -A mod |
|
5 |
+mod[nv]=nvidia-current |
|
6 |
+mod[vbox]=vboxdrv |
|
7 |
+mod[default]="${mod[nv]} ${mod[vbox]}" |
|
8 |
+test -n "$1" && test -z ${mod[$1]} && mod[$1]=${mod[default]} |
|
9 |
+ |
|
10 |
+for c in ${mod[${1:-default}]}; do |
|
5 | 11 |
for modfile in $(dirname $(modinfo -n $c))/*.ko; do |
6 | 12 |
sudo /usr/src/linux-headers-$(uname -r)/scripts/sign-file \ |
7 | 13 |
sha256 \ |
8 | 14 |
/root/module-signing/MOK.priv \ |
9 | 15 |
/root/module-signing/MOK.der \ |
10 | 16 |
"$modfile" |
17 |
+ echo ${modfile##*/} |
|
11 | 18 |
done |
12 | 19 |
done |
... | ... |
@@ -2,20 +2,36 @@ |
2 | 2 |
|
3 | 3 |
thumbnailsconf=${1:-${0##*/}.conf} |
4 | 4 |
test -r "$thumbnailsconf" || exit 1 |
5 |
+shift |
|
5 | 6 |
|
6 |
-function set_imgsize () { |
|
7 |
- imgsize[${#imgsize[@]}]=$1 |
|
8 |
- imgsize[${#imgsize[@]}]=$2 |
|
9 |
- imgsize[${#imgsize[@]}]=$3 |
|
10 |
-} |
|
7 |
+declare -A imgsize width height |
|
11 | 8 |
|
12 |
-#set_imgsize 'custom' 1656 1242 |
|
13 |
-set_imgsize 'xxlarge' 1224 918 |
|
14 |
-set_imgsize 'xlarge' 1008 756 |
|
15 |
-set_imgsize 'large' 792 594 |
|
16 |
-set_imgsize 'medium' 576 432 |
|
17 |
-set_imgsize 'small' 432 324 |
|
18 |
-set_imgsize 'xsmall' 240 240 |
|
9 |
+imgsize[xxlarge]="1656 1242" |
|
10 |
+imgsize[xlarge]=" 1224 918" |
|
11 |
+imgsize[large]=" 1008 756" |
|
12 |
+imgsize[medium]=" 792 594" |
|
13 |
+imgsize[small]=" 576 432" |
|
14 |
+imgsize[xsmall]=" 432 324" |
|
15 |
+imgsize[2small]=" 240 180" |
|
16 |
+ |
|
17 |
+if test $# -gt 0; then |
|
18 |
+ filter=$(echo $@ | sed 's/ /|/g') |
|
19 |
+ for size in ${!imgsize[@]}; do |
|
20 |
+ if ! [[ $size =~ ^(${filter})$ ]]; then |
|
21 |
+ unset imgsize[$size] |
|
22 |
+ else |
|
23 |
+ geometry=(${imgsize[$size]}) |
|
24 |
+ width[$size]=${geometry[0]} |
|
25 |
+ height[$size]=${geometry[1]} |
|
26 |
+ fi |
|
27 |
+ done |
|
28 |
+fi |
|
29 |
+ |
|
30 |
+for size in ${!imgsize[@]}; do |
|
31 |
+ geometry=(${imgsize[$size]}) |
|
32 |
+ width[$size]=${geometry[0]} |
|
33 |
+ height[$size]=${geometry[1]} |
|
34 |
+done |
|
19 | 35 |
|
20 | 36 |
if which readconf > /dev/null; then |
21 | 37 |
eval $(readconf --conf "$thumbnailsconf" --section=mysql --section=path --case-sensitive) |
... | ... |
@@ -31,12 +47,12 @@ if ! which convert > /dev/null; then |
31 | 47 |
exit 1 |
32 | 48 |
fi |
33 | 49 |
|
34 |
-sqlcmd="select path from ${dbprefix}images \ |
|
35 |
- where id in ( \ |
|
36 |
- select distinct image_id from ${dbprefix}image_category \ |
|
37 |
- where category_id in ( \ |
|
38 |
- select id from ${dbprefix}categories where dir is NULL \ |
|
39 |
- ) \ |
|
50 |
+sqlcmd="select path from ${dbprefix}images |
|
51 |
+ where id in ( |
|
52 |
+ select distinct image_id from ${dbprefix}image_category |
|
53 |
+ where category_id in ( |
|
54 |
+ select id from ${dbprefix}categories where dir is ${IMPORT:+not} NULL |
|
55 |
+ ) |
|
40 | 56 |
)" |
41 | 57 |
|
42 | 58 |
IFS=$'\n' |
... | ... |
@@ -48,21 +64,24 @@ for file in $(mysql -N -u $dbuser -p${dbpasswd} $dbname <<< $sqlcmd); do |
48 | 64 |
test $fnExt = 'mp4' && continue |
49 | 65 |
cmd="sudo -u www-data convert \"$site/$file\" -write mpr:image +delete " |
50 | 66 |
count=0 |
51 |
- for ((i=0; i<${#imgsize[@]}; i+=3)); do |
|
52 |
- if ! test -e "${destDir}/${fnNoExt}-${imgsize[i]:0:2}.$fnExt"; then |
|
53 |
- format+="${imgsize[i]} " |
|
67 |
+ for size in ${!imgsize[@]}; do |
|
68 |
+ if ! test -e "${destDir}/${fnNoExt}-${size:0:2}.$fnExt" -a -z "$FORCE"; then |
|
69 |
+ format+="$size " |
|
54 | 70 |
cmd+="mpr:image -filter Lanczos -auto-orient -strip -quality 95 -interlace line -sampling-factor 4:2:2 " |
55 |
- cmd+="-resize ${imgsize[i+1]}x${imgsize[i+2]} -write \"${destDir}/${fnNoExt}-${imgsize[i]:0:2}.$fnExt\" +delete " |
|
71 |
+ cmd+="-resize ${width[$size]}x${height[$size]} -write \"${destDir}/${fnNoExt}-${size:0:2}.$fnExt\" +delete " |
|
56 | 72 |
let count++ |
57 | 73 |
fi |
58 | 74 |
done |
59 | 75 |
if test -n "$format"; then |
60 | 76 |
echo "$site/$file : $format" |
77 |
+ if test -n "$LIST"; then |
|
78 |
+ continue |
|
79 |
+ fi |
|
61 | 80 |
echo -n "please wait... " |
62 | 81 |
cmd=$(sed -r 's/^(.+) -write (.*) \+delete $/\1 \2/' <<< $cmd) |
63 | 82 |
eval $(echo -e nice -20 $cmd) |
64 |
- for ((i=0; i<${#imgsize[@]}; i+=3)); do |
|
65 |
- test -s "${destDir}/${fnNoExt}-${imgsize[i]:0:2}.$fnExt" || echo "missing extension ${imgsize[i]:0:2}.$fnExt" |
|
83 |
+ for size in ${!imgsize[@]}; do |
|
84 |
+ test -s "${destDir}/${fnNoExt}-${size:0:2}.$fnExt" || echo "missing extension ${size:0:2}.$fnExt" |
|
66 | 85 |
done |
67 | 86 |
echo "done, sleeping..." |
68 | 87 |
sleep $(( 2 + $count )) |