#!/bin/bash
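# Crawls the logs published on profile.gigaset.net, extracts firmware URLs,
# and pushes the resulting url list plus an HTML directory tree to the
# danog/gigaclone repository (urllist on master, tree.html on gh-pages).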
dir="$PWD"
git config --global user.name "${GIT_NAME}"
git config --global user.email "${GIT_EMAIL}"
git config --global push.default simple
tmp="$dir/tmp"
mkdir "$tmp"
cd "$tmp"
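# Download the latest log archive from profile.gigaset.net.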
wget http://profile.gigaset.net/logs.tgz
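# Only rebuild the url list when the archive differs from the one recorded
# in $dir/.md5sum (the checksum is refreshed further down).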
[ "$(md5sum $tmp/logs.tgz)" != "$(cat $dir/.md5sum)" ] && {
tar -xzf logs.tgz
cd log
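# Turn each log file into a list of URLs on profile.gigaset.net.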
for f in ./*;do {
echo "Working... for $f"
sed -i '/200\|206\|302\|301/!d;s/\sHTTP.*//g;s/.*\s/http:\/\/profile\.gigaset\.net/g;/http:\/\/profile\.gigaset\.net\"\"/d;s/?.*//g;s/\.net\/device/\.net\/chagall/g;s/^\.$//g' "$f"
echo "Remove duplicates for $f"
awk '!seen[$0]++' "$f" >> "$tmp/tmp"
};done
echo "Remove all duplicates"
awk '!seen[$0]++' "$tmp/tmp" "$dir/urllist" > "$tmp/final"
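# Scan every .bin file referenced in the list for further embedded URLs.
# The stray echoes below are just a playful progress indicator on the console.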
echo wh
for f in $(grep "\.bin" "$tmp/final");do {
echo -n "e" >&2
file=$(wget -qO- "$f" | strings)
echo "$file" | sed '/http:\/\//!d;/profile.gigaset.net\|update.gigaset.net/!d;s/.*http:\/\//http:\/\//g;s/update\.gigaset/profile\.gigaset/g'
baseurl="$(echo "$f" | sed 's/[^/]*$//')"
url=$(echo "$file" | sed '/http/d;/\.bin/!d')
for e in $url;do echo "$baseurl$e";done
}; done >> "$tmp/final"
awk '!seen[$0]++' "$tmp/final" > "$tmp/finall"
mv "$tmp/finall" "$tmp/final"
echo "Turn down for wh"
for dl in $(cat "$tmp/final"); do {
echo -n "a" >&2
wget -q "$dl" || continue
curl -w "%{url_effective}\n" -L -s -I -S "$dl" -o /dev/null
};done | grep -v "HTTP" | awk '!seen[$0]++' > "$tmp/finall"
mv "$tmp/finall" "$dir/urllist"
}
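# Mirror the URL hierarchy as empty directories so `tree` can render it as
# a browsable HTML page.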
mkdir "$tmp/test"
cd "$tmp/test"
for f in $(sed 's/http:\/\/profile.gigaset.net\///g;s/^\/*//g' "$dir/urllist");do mkdir -p "$f"; echo -n "a";done
echo t
tree -T "profile.gigaset.net" -C -H http://profile.gigaset.net -o "$tmp/tree.html"
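# Commit the refreshed url list and archive checksum to master.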
git clone "https://${GH_TOKEN}@github.com/danog/gigaclone.git" "$tmp/git"
cd "$tmp/git"
cp "$dir/urllist" .
md5sum "$tmp/logs.tgz" > .md5sum
git add -A
git commit -m "Updated url list"
git push origin master &>/dev/null
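# Publish the rendered directory tree on the gh-pages branch.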
cd "$tmp/git"
git checkout gh-pages
cp "$tmp/tree.html" .
git add -A
git commit -m "Updated dir tree"
git push origin gh-pages &>/dev/null
cd "$dir"
echo "Clean up."
rm -fr "$tmp"