#!/bin/bash
# Work from the directory the script was started in; use a scratch directory for downloads.
dir="$PWD"
tmp="$dir/tmp"

rm -rf "$tmp"
mkdir -p "$tmp"
cd "$tmp"

# The whole run is executed in one block whose output is captured in gigaclone.log (see the last line).
{
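
# extractlog: unpack the downloaded logs.tgz and convert the access-log entries it contains
# into profile.gigaset.net URLs, collected in $tmp/tmp.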
extractlog() {
    tar -xzf logs.tgz
    cd log

    for f in ./*;do {
        echo "Working... for $f"
        # Keep only 200/206/301/302 hits, strip the "HTTP/x.x ..." tail, rewrite the logged path
        # into a full URL, drop query strings and map /device/ paths to /chagall/.
        sed -i '/200\|206\|302\|301/!d;s/\sHTTP.*//g;s/.*\s/http:\/\/profile\.gigaset\.net/g;/http:\/\/profile\.gigaset\.net\"\"/d;s/?.*//g;s/\.net\/device/\.net\/chagall/g;s/^\.$//g' "$f"
        echo "Remove duplicates for $f"
        awk '!seen[$0]++' "$f" >> "$tmp/tmp"
    };done

    # Remember the checksum of the processed archive, then hand the collected URLs to rdupes.
    md5sum "$tmp/logs.tgz" > "$dir/.md5sum"
    rm "$tmp/logs.tgz"
    rdupes "$tmp/tmp"
}
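
# rdupes: merge the file(s) passed as arguments into $dir/urllist, drop duplicates, and keep
# only URLs that curl can still reach, stored in the canonical form reported by url_effective.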
rdupes() {
    [ "$1" != "" ] && {
        echo "Remove all duplicates for $*"
        # Collapse duplicate slashes in the path (leaving the "//" of the scheme alone) and strip trailing slashes.
        sed -i 's|\([^:]\)//|\1/|g;s|/$||g' "$dir/urllist"
        awk '!seen[$0]++' "$@" "$dir/urllist" > "$dir/final"

        mv "$dir/final" "$dir/urllist"
        rm "$@"
    }

    # Revalidate every URL; only reachable ones make it back into the list.
    for dl in $(cat "$dir/urllist"); do {
        a=$(curl -w "%{url_effective}\n" -L -f -s -I -S "$dl" -o /dev/null)
        [ $? = "0" ] && echo "$a" >> "$dir/final"
    };done

    mv "$dir/final" "$dir/urllist"
    awk '!seen[$0]++' "$dir/urllist" > "$dir/final"
    mv "$dir/final" "$dir/urllist"
}
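
# extractbin: download every .bin file referenced in urllist (skipping anything of 50 MiB or more)
# and harvest additional URLs embedded in it, both absolute and relative to the file's directory.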
extractbin() {
    echo "Extracting urls..."
    cd "$tmp"

    for f in $(grep "\.bin" "$dir/urllist");do {
        # Check the advertised Content-Length first; files without one simply fail the test and are skipped.
        [ $(wget -S --spider "$f" 2>&1 | sed '/Length/!d;s/Length\: //g;s/\s.*//g;s/\s//g;s/[^0-9]*//g' | tr -d "\n") -lt 52428800 ] && {
            file=$(wget -qO- "$f" | strings)
            # Absolute URLs found in the file (update.gigaset.net is rewritten to profile.gigaset.net).
            echo "$file" | sed '/http:\/\//!d;/profile.gigaset.net\|update.gigaset.net/!d;s/.*http:\/\//http:\/\//g;s/update\.gigaset/profile\.gigaset/g' >> "$tmp/tmp"
            # Relative .bin references are resolved against the directory of the current file.
            baseurl="$(dirname "$f")"/
            url=$(echo "$file" | sed '/http/d;/\.bin/!d')
            for e in $url;do echo "$baseurl$e" >> "$tmp/tmp";done
        }
    }; done

    rdupes "$tmp/tmp"
}
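
# treegen: recreate the URL list as an empty file/directory tree under $dir/tree and generate an
# HTML index per directory with tree(1); this is what ends up on the gh-pages branch.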
treegen() {
    echo "Creating tree..."
    cd "$dir"
    rm -rf tree
    mkdir tree
    cd tree

    # Create an empty placeholder file for every known URL.
    for f in $(sed 's/http:\/\/profile.gigaset.net\///g;s/^\/*//g' "$dir/urllist");do {
        mkdir -p "$(dirname "$f")" &>/dev/null
        touch "$f"
    };done

    # Build an index.html per directory; directory links are rewritten to point at the
    # daniil.it/gigaclone/tree copy of the listing.
    for f in $(find . -type d);do
        a=$(echo "$f" | sed 's/\.\///g')
        cd "$f"
        index="$(tree -a -T "profile.gigaset.net" -C -H "http://profile.gigaset.net/$a" -L 1 | sed 's/net\/\.\//net\//g;s/class=\"DIR\" href=\"http\:\/\/profile\.gigaset\.net/class=\"DIR\" href=\"http\:\/\/daniil\.it\/gigaclone\/tree/g')"
        echo "$index" > index.html
        cd "$OLDPWD"
    done

    # Keep only the generated index pages; the placeholder files are removed again.
    find . -not -iname "index.html" -type f -a -not -name "." -exec rm {} \;
}
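
# Main flow: fetch the current access logs and reprocess them only when their checksum differs
# from the one recorded during the previous run.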
wget http://profile.gigaset.net/logs.tgz -qO logs.tgz

[ "$(md5sum "$tmp/logs.tgz")" != "$(cat "$dir/.md5sum")" ] && {
    extractlog
    extractbin
}
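
# Passing "check" as the first argument forces a rescan of the .bin files even if the logs are unchanged.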
[ "$1" = 'check' ] && { extractbin; }
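
# Sort the resulting url list, then rebuild the browsable directory tree.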
cd "$dir"
sort urllist > final
mv final urllist

treegen
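
# Reset the scratch directory before cloning the repository for publishing.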
cd "$dir"
rm -rf "$tmp"
mkdir "$tmp"
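
# Publish the updated url list to the master branch; GH_TOKEN must be provided in the environment.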
git clone https://$GH_TOKEN@github.com/danog/gigaclone.git "$tmp/git"
cd "$tmp/git"
rm -rf tree
cp -a "$dir"/* .
rm -rf tmp gigaclone.log
git add -A
git commit -m "Updated url list"
git push origin master &>/dev/null
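
# Publish the regenerated directory tree on the gh-pages branch.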
git checkout gh-pages
rm -rf tree
cp -a "$dir/tree" .
git add -A
git commit -m "Updated dir tree"
git push origin gh-pages &>/dev/null

cd "$dir"
echo "Clean up."
rm -rf "$tmp"

} &> "$dir/gigaclone.log"