#!/bin/bash
# Hangar descriptor for the FGMEMBERS aircraft collection.
# NOTE(review): these keyed assignments presume `declare -A hangar` was run
# beforehand (presumably in the script sourcing this file); with a plain
# indexed array every key would collapse to index 0 -- confirm.
hangar[path]=$HOME/.fgfs/flightgear-fgaddon/Aircraft  # local checkout location
hangar[name]=FGMEMBERS                                # hangar name recorded in the DB
hangar[source]=github                                 # hosting service
hangar[type]=git                                      # VCS used by this hangar
hangar[url]=https://github.com/FGMEMBERS              # organisation URL
hangar[active]=1                                      # enabled flag
# Fetch one file from an FGMEMBERS repository via raw.githubusercontent.com.
# Arguments:
#   $1 - "<repo>/<path/inside/repo>": the text before the first '/' is the
#        repository name, the remainder is the file path on branch master.
# Outputs: the raw file content on stdout (curl -s: silent, no error text).
# Fix: quote the URL so an argument containing spaces or glob characters
# cannot be word-split or expanded (SC2086).
function getfromrepo () {
curl -s "https://raw.githubusercontent.com/FGMEMBERS/${1%%/*}/master/${1#*/}"
}
# Cleanup handler: remove the run's temporary files and drop the variables
# that point at them.  Safe when the files are already gone (rm -f).
# Fixes: quote the expansions so paths with spaces are removed correctly,
# and use '--' so a path can never be parsed as an rm option (SC2086).
# NOTE(review): json_file is removed but deliberately left set, matching the
# original -- confirm whether callers still read the variable afterwards.
function on_exit () {
rm -f -- "$gh_curl_content" "$json_file" "$contents"
unset contents gh_curl_content
}
# Harvest the FGMEMBERS GitHub organisation and refresh the local aircraft
# database.  Walks the org's repositories (most recently updated first),
# keeps the ones updated since $latest_revision, then inspects each kept
# repository's root directory for "*-set.xml" files to register aircraft.
# Relies on helpers/globals defined elsewhere in the project:
#   json, add_record, add_setxml_for_aircraft, add_aircraft, sqlite_request,
#   apply_revision, progress_granularity, temppath, tempid, json_file,
#   latest_revision, githup_token (sic).
function parse_repo_history () {
# curl wrapper for the GitHub API.
#  - no argument: returns 0 iff an auth token is configured ($? of the
#    command substitution propagates through the bare `return`); used below
#    to pick the applicable rate limit.
#  - with a URL: fetches it (authenticated when githup_token is set) and
#    honours GitHub's rate-limit headers: on quota exhaustion it sleeps
#    until the advertised reset time, then re-fetches.
function github_curl () {
test $# -eq 0 && return $(test -n "$githup_token")
curl ${githup_token:+-u $githup_token} -si $1 > $gh_curl_content
# Pull the remaining-quota count and reset epoch out of the response
# headers (case-insensitive; headers end at the first blank line).
eval $(sed -rn '1,/^\s*$/{s/^X-Ratelimit-Remaining:\s*([0-9]+).*$/remaining=\1/ip;s/^X-Ratelimit-Reset:\s*([0-9]+).*$/reset=\1/ip}' $gh_curl_content)
if test ${remaining:-1} -eq 0; then
((reset = reset + 10)) # just to be prudent
echo "github API limit: waiting $(date +%H:%M -d@$reset) to continue" >&2
# If we were blocked while listing a repo's /contents/, commit the data
# gathered so far before going to sleep.
if [[ $1 =~ '/contents/'$ ]]; then
echo "process already found data so far" >&2
apply_revision >&2
fi
sleep $(( $reset - $(date +%s) ))
curl ${githup_token:+-u $githup_token} -s $1 # not sure that was really fetched
else
# Quota left: emit the body only (strip the -i headers: everything from
# the first blank line onward).
sed -n '/^\s*$/,$p' $gh_curl_content
fi
}
# Temporary work files for this run.
gh_curl_content=$temppath/github-$tempid
contents=$temppath/contents-$tempid
# Org listing sorted by update time; _page_ is substituted per request.
github_orga_repos='https://api.github.com/orgs/FGMEMBERS/repos?sort=updated&type=all&per_page=100&page=_page_'
page=1
# Build ${json_file}_recent: a JSON array of every repo object whose
# updated_at is newer than $latest_revision.  Pages arrive newest-first,
# so the first older entry ends the whole scan (break 2).
echo '[' > ${json_file}_recent
while github_curl "${github_orga_repos/_page_/$((page++))}" > $json_file; do
jq_length=$(json 'length')
test $jq_length -eq 0 && break
for ((i = 0; i < $jq_length; i++)); do
if test $(date +%s -d $(json ".[$i].updated_at")) -gt $latest_revision; then
json ".[$i]" >> ${json_file}_recent
echo ',' >> ${json_file}_recent
else
break 2
fi
done
done
# Remove the trailing separator (a last line that is just ',') and close
# the JSON array, then make it the working file.
sed -i '${/^,/d}' ${json_file}_recent
echo ']' >> ${json_file}_recent
mv -f ${json_file}_recent $json_file
jq_length=$(json 'length')
local progress=0
local repo
# Initial import (latest_revision == 1): estimate duration from the API
# rate limit -- 5000 requests/hour authenticated, 60 anonymous.
if test $latest_revision -eq 1; then
if github_curl; then
max_requests_per_hour=5000
else
max_requests_per_hour=60
fi
echo "the initial import may take more than $(($jq_length / $max_requests_per_hour)) hours to perform"
fi
# Examine each recent repository's root for *-set.xml files.
for ((i = 0; i < $jq_length; i++)); do
local repo=$(json ".[$i].name")
add_record name $repo
github_curl "https://api.github.com/repos/FGMEMBERS/${repo}/contents/" > $contents
# Every "<name>-set.xml" at the repo root marks one flyable variant.
for sx in $(json '.[] | select(.type == "file") | .path | capture("(?<setxml>.+)-set.xml") | .setxml' $contents); do
add_setxml_for_aircraft $repo $sx
done
# NOTE(review): $sx carries over from the previous repository's loop, so
# a repo with no -set.xml file can still take the "aircraft" branch when
# an earlier iteration left $sx non-empty -- confirm this is intended.
if test -n "$sx"; then
add_record revision $(date +%s -d $(json ".[$i].updated_at"))
add_record date $(date +%s -d $(json ".[$i].updated_at"))
add_record author ${hangar[name]}
add_aircraft
else
# NOTE(review): ${hangar[i]} looks like a typo for '${hangar[name]}'
# (and is unquoted inside the SQL) -- confirm against the schema.
sqlite_request "delete from aircrafts where name = '$repo' and hangar = ${hangar[i]}"
fi
# Progress display, refreshed every ${progress_granularity:-1} percent.
newprogress=$((i * 100 / $jq_length))
if test $(( $newprogress - $progress )) -ge ${progress_granularity:-1}; then
progress=$newprogress
printf "\r%d%% (%d)" $progress $(sqlite_request 'select count(name) from recover_aircrafts')
fi
done
printf "\r\033[K" # clear the progress line
apply_revision
}