#!/bin/bash
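
# mkurllist: rebuild the list of firmware URLs hosted on profile.gigaset.net
# and publish it to the danog/gigaclone repository: the deduplicated url
# list goes to the master branch, a browsable HTML tree to gh-pages.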
dir="$PWD"
tmp="$dir/tmp"
rm -rf "$tmp"
mkdir -p "$tmp"
cd "$tmp"
if [ "$1" == "treegen" ]; then
shift
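# Strip the base directory prefix to get the URL path of this directory.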
a=$(echo "$2" | sed "s|$1||g")
cd "$2"
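# Render a one-level HTML listing with tree(1) and rewrite directory links
# to point at the hosted copy of the tree on daniil.it/gigaclone.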
tree=$(tree -a -T "profile.gigaset.net" -C -H http://profile.gigaset.net"$a" -L 1 | sed \
	-e 's/net\/\.\//net\//g' \
	-e 's/class=\"DIR\" href=\"http\:\/\/profile\.gigaset\.net/class=\"DIR\" href=\"http\:\/\/daniil\.it\/gigaclone\/tree/g' \
	-e 's/<\/a>/<\/a><br>/g' \
	-e 's/<\/a><br><br>/<\/a><br>/g')
echo "$tree" >index.html
cd "$OLDPWD"
exit
fi
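# extractlog: turn the server access logs (logs.tgz) into a list of URLs
# and merge them into the url list.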
extractlog() {
tar -xzf "$tmp/logs.tgz"
cd log
for extractfile in ./*;do
echo "Working... for $extractfile"
sed -i '/200\|206\|302\|301/!d;s/\sHTTP.*//g;s/.*\s/http:\/\/profile\.gigaset\.net/g;/http:\/\/profile\.gigaset\.net\"\"/d;s/?.*//g;s/\.net\/device/\.net\/chagall/g;s/^\.$//g' "$extractfile"
echo "Remove duplicates for $extractfile"
awk '!seen[$0]++' "$extractfile" >>"$tmp/tmp"
done
md5sum "$tmp/logs.tgz" >"$dir/.md5sum"
rm "$tmp/logs.tgz"
rdupes "$tmp/tmp"
}
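
# rdupes: merge the given files into $dir/urllist without duplicates, then
# normalize the entries and drop every URL the server no longer answers.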
rdupes() {
if [ "$1" != "" ];then
echo "Remove all duplicates for $*"
awk '!seen[$0]++' "$@" "$dir/urllist" >"$dir/final"
mv "$dir/final" "$dir/urllist"
rm "$@"
fi
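# Collapse duplicate and trailing slashes, then repair the http:// prefix
# that the first substitution just mangled.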
sed -i 's/\/\//\//g;s/\/$//g;s/http:\//http:\/\//g;s/http:\/\/\//http:\/\//g' "$dir/urllist"
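# Probe each URL, following redirects, and keep the effective URL of every
# entry that still resolves.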
while read -r dl; do
a=$(curl -w "%{url_effective}\n" -L -f -s -I -S "$dl" -o /dev/null)
[ $? = "0" ] && echo "$a" >> "$dir/final"
done < "$dir/urllist"
mv "$dir/final" "$dir/urllist"
awk '!seen[$0]++' "$dir/urllist" >"$dir/final"
mv "$dir/final" "$dir/urllist"
}
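# extractbin: download every listed .bin firmware image below 50 MiB and
# scan it for further firmware URLs.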
extractbin() {
echo "Extracting urls..."
cd "$tmp"
grep "\.bin" "$dir/urllist" | while read -r currenturl;do
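# Ask the server for the Content-Length and skip files of unknown size or
# of 50 MiB (52428800 bytes) and above.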
size=$(wget -S --spider "$currenturl" 2>&1 | sed '/Length/!d;s/Length\: //g;s/\s.*//g;s/\s//g;s/[^0-9]*//g' | tr -d "\n")
if [ -n "$size" ] && [ "$size" -lt 52428800 ]; then
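# Pull printable strings out of the image and keep absolute URLs pointing
# at profile/update.gigaset.net (update.* is rewritten to profile.*).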
file=$(wget -qO- "$currenturl" | strings)
echo "$file" | sed '/http:\/\//!d;/profile.gigaset.net\|update.gigaset.net/!d;s/.*http:\/\//http:\/\//g;s/update\.gigaset/profile\.gigaset/g' >>"$tmp/tmp"
baseurl=$(dirname "$currenturl")/
url=$(echo "$file" | sed '/http/d;/\.bin/!d')
for e in $url;do echo "$baseurl$e" >>"$tmp/tmp";done
fi
done
rdupes "$tmp/tmp"
}
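
# treegen: rebuild the tree/ directory: one empty placeholder file per known
# URL, an index.html per directory (generated by re-invoking this script),
# and finally the placeholders are removed again.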
treegen() {
echo "Creating tree..."
cd "$dir"
rm -rf tree
mkdir tree
cd tree
sed 's/http:\/\/profile.gigaset.net\///g;s/^\/*//g' "$dir/urllist" | while read -r f;do
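# URL-decode %XX escapes (gawk-specific: -n treats "0x.." as hex, -i ord
# provides chr(), and RT holds the %XX just matched as record separator).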
f=$(echo "$f" | awk -niord '{printf RT?$0chr("0x"substr(RT,2)):$0}' RS=%..)
mkdir -p "$(dirname "$f")"&>/dev/null
touch "$f"
done
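# Generate an index.html for every directory via the recursive treegen
# mode handled at the top of this script.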
find "$PWD" -type d -exec bash "$dir/mkurllist" treegen "$PWD" "{}" \;
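# Drop the placeholders, keeping only the generated index.html listings.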
find . -type f -not -iname "index.html" -exec rm "{}" \;
}
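# Main: everything below runs with its output logged to gigaclone.log.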
{
wget http://profile.gigaset.net/logs.tgz -qO "$tmp/logs.tgz"
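# Only re-parse the logs when logs.tgz changed since the last run.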
[ "$(md5sum $tmp/logs.tgz)" != "$(cat $dir/.md5sum)" ] && {
extractlog
extractbin
}
cd "$dir"
sort urllist > final
mv final urllist
treegen
cd "$dir"
rm -rf "$tmp"/
mkdir "$tmp"
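# Publish: push the refreshed url list to master from a fresh clone
# (authenticated through $GH_TOKEN).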
git clone "https://$GH_TOKEN@github.com/danog/gigaclone.git" "$tmp/git"
cd "$tmp/git"
rm -rf tree
cp -a "$dir"/* .
rm -rf tmp gigaclone.log
git add -A
git commit -m "Updated url list"
git push origin master &>/dev/null
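# Publish the regenerated HTML tree on the gh-pages branch.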
git checkout gh-pages
rm -rf tree
cp -a "$dir/tree" .
git add -A
git commit -m "Updated dir tree"
git push origin gh-pages &>/dev/null
cd "$dir"
echo "Cleaning up."
rm -rf "$tmp"
} &> "$dir/gigaclone.log"