--- get_book.sh	2004/06/02 19:18:21	1.10
+++ get_book.sh	2004/07/21 13:58:35	1.12
@@ -1,7 +1,10 @@
 #!/bin/sh
 
 # proxy settings (same as in firebird)
-fping proxy && export http_proxy=http://proxy:8080
+fping -q proxy && export http_proxy=http://proxy:8080
+if [ -z "$http_proxy" ] ; then
+  fping -q proxy.lan && export http_proxy=http://proxy.lan:8080
+fi
 # user agent (same as in firebird)
 ua="Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040506 Firefox/0.8"
 cookie_file="cookie.txt"
@@ -9,6 +12,9 @@
 
 # wait between pages
 export wait=120
+if [ ! -z "$http_proxy" ] ; then
+  echo "Using proxy $http_proxy"
+fi
 
 if [ -z "$1" ] ; then
   echo "Usage: $0 ISBN"
@@ -28,7 +34,7 @@
   grep -l 'This is only a preview of the full book' * | xargs -i rm {}
 fi
 
-isbn=`echo $1 | sed 's/-//g'`
+isbn=`echo $1 | sed 's/-//g' | tr '[a-z]' '[A-Z]'`
 
 function mirror() {
 
@@ -41,7 +47,7 @@
     return
   fi
 
-  cookie=" "
+  cookie=""
   if echo $url | grep '/index' >/dev/null ; then
     echo -n "no login (index) "
   elif echo $url | grep 'mode=toc' >/dev/null ; then
@@ -67,8 +73,8 @@
   fi
 
   wget -q -p -nH -nc -k -t 1 -U "$ua" --cookies=off --header="$cookie" $url
+  perl -e '$t=rand($ENV{wait} || 120);print "sleep for $t sec.\n"; sleep($t);'
 
-  perl -e '$t=rand($ENV{wait} || 30);print "sleep for $t sec.\n"; sleep($t);'
 }
 
 function geturl() {
@@ -79,16 +85,16 @@
     -e 's/#.*$//' \
     -e 's/\&srchText=//' \
     -e 's/open=false/open=true/' | \
-  grep '&s=1&b=1&f=1&t=1&c=1&u=1&r=&o=1' | \
-  grep $2 | \
-  grep -v "$2/[0-9][0-9][0-9][0-9][0-9][0-9][0-9]" | \
+  grep '&s=1&b=1&f=1&t=1&c=1&u=1&r=&o=1&n=1&d=1&p=1&a=0' | \
+  grep -i "xmlid=[0-9A-Za-z\-][0-9A-Za-z\-]*" | \
+  grep -vi "xmlid=[0-9A-Za-z\-]*/[0-9][0-9][0-9][0-9][0-9][0-9][0-9]" | \
   sort -u >> in
 }
 
 function uniqurl() {
   mv in in.tmp
-  grep -v 'view=[A-Z]' in.tmp | sort -u > in
-  grep 'view=[A-Z].*/index' in.tmp | sort -u >> in
+  grep -v 'view=[A-Z]' in.tmp | grep 'a=0$' | sort -u > in
+  grep 'view=[A-Z].*/index' in.tmp | grep 'a=0$' | sort -u >> in
   links=`wc -l in | cut -d" " -f1`
   echo "found $links unique links"
 }
@@ -122,11 +128,13 @@
 }
 
 echo -n > in
-mirror "http://safari.oreilly.com/?XmlId=$isbn"
+mirror "http://safari.oreilly.com/$isbn"
 echo
 
-echo "extract URLs from first page..."
-geturl "index.html?XmlId=$isbn" $isbn
+file=`ls *index.html?XmlId=*`
+isbn=`echo $file | cut -d= -f2`
+echo "extract URLs from first page $file... [$isbn]"
+geturl $file $isbn
 uniqurl
 
 mirror_in