/[safari]/get_book.sh
This is a repository of my old source code, which isn't updated any more. Go to git.rot13.org for current projects!
ViewVC logotype

Diff of /get_book.sh

Parent Directory | Revision Log | View Patch

revision 1.1 by dpavlin, Sun Dec 14 19:11:30 2003 UTC revision 1.9 by dpavlin, Wed Jun 2 16:14:53 2004 UTC
# Line 1  Line 1 
1  #!/bin/sh  #!/bin/sh
2    
3  #export http_proxy=http://proxy.pliva.hr:8080  # proxy settings (same as in firebird)
4    fping proxy && export http_proxy=http://proxy:8080
5    # user agent (same as in firebird)
6    ua="Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040506 Firefox/0.8"
7    cookie_file="cookie.txt"
8    
9    # wait between pages
10    export wait=120
11    
12    
13    if [ -z "$1" ] ; then
14            echo "Usage: $0 ISBN"
15            exit 1
16    fi
17    
18    if [ -e orig ] ; then
19            echo "orig directory found. Resume download? [Y/n]"
20            read ans
21            if [ "$ans" = "n" ] ; then
22                    exit 1;
23            fi
24            mv orig/* .
25            rm -Rf orig
26    
27            grep -l 'promo.asp' * | xargs -i rm {}
28            grep -l 'This is only a preview of the full book' * | xargs -i rm {}
29    fi
30    
31  #isbn="0-201-41975-0"  isbn=`echo $1 | sed 's/-//g'`
 isbn="0-672-32240-4"  
32    
33  wait=10  function mirror() {
34    
35  isbn2=`echo $isbn | sed 's/-//g'`          url="$1"
36    
37  function mirror() {          file=`echo $url | sed -e s,http://[^?]*?,index.html?, -e s,#.*$,, -e s,/,%2F,g`
38          wget -p -nH -nc -k \          if [ -e "$file" ] ; then
39                  --random-wait --wait=$wait -t 0 \  #               echo "skip $url"
40                  --load-cookies=/home/dpavlin/.phoenix/default/g6b45nt6.slt/cookies.txt \                  echo -n "."
41                  -U "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.5) Gecko/20031206 Firebird/0.7" \                  return
42                  $1          fi
43    
44            cookie=""
45            if echo $url | grep '/index' >/dev/null ; then
46                    echo -n "no login (index) "
47            elif echo $url | grep 'mode=toc' >/dev/null ; then
48                    echo -n "no login (toc) "
49            else
50                    if [ ! -e $cookie_file ] ; then
51                            echo "cookies file $cookie_file doesn't exits! Please create it."
52                            echo "It should be in format:"
53                            echo "Cookie: Site=UICode=&Portal=oreilly&GUID=..."
54                            exit 1
55                    fi
56                    read cookie < $cookie_file
57                    if [ -z "$cookie" ] ; then
58                            echo "Empty cookie file $cookie_file !"
59                            exit 1
60                    fi
61            fi
62    
63            if [ -z "$cookie" ] ; then
64                    echo "$url [no cookie]"
65            else
66                    echo "$url [with cookie]"
67            fi
68    
69            wget -q -p -nH -nc -k -t 1 -U "$ua" --cookies=off --header="$cookie" $url
70    
71  #       -D safari.oreilly.com \          perl -e '$t=rand($ENV{wait} || 30);print "sleep for $t sec.\n"; sleep($t);'
 #       -A 0-201-41975-0 \  
72  }  }
73    
74  function geturl() {  function geturl() {
75          hindent -s $1 | grep $2 | grep -i href | grep mode=[st][eo]c | \          hindent -s $1 | grep -i href | grep mode=[st][eo]c | \
76          sed -e 's/^.*<a.*href="//i' \          sed -e 's/^.*<a.*href="//i' \
77                  -e 's/".*//' -e 's/amp;//g' \                  -e 's/".*//' -e 's/amp;//g' \
78                  -e 's,^[^\?]*\?,http://safari.oreilly.com/,' \                  -e 's,^[^\?]*\?,http://safari.oreilly.com/,' \
79                  -e 's/#$//' \                  -e 's/#.*$//' \
80                  -e 's/srchText=//' | \                  -e 's/\&srchText=//' \
81                  grep -v open=false | \                  -e 's/open=false/open=true/' | \
                 grep -v 'view=[A-Z].*%2F[^i]' | \  
                 grep -v 'view=[A-Z].*/[^i]' | \  
82                  grep '&s=1&b=1&f=1&t=1&c=1&u=1&r=&o=1' | \                  grep '&s=1&b=1&f=1&t=1&c=1&u=1&r=&o=1' | \
83                    grep $2 | \
84                    grep -v "$2/[0-9][0-9][0-9][0-9][0-9][0-9][0-9]" | \
85                  sort -u >> in                  sort -u >> in
86  }  }
87    
88  echo > in  function uniqurl() {
89  #mirror "http://safari.oreilly.com/?XmlId=$isbn"          mv in in.tmp
90            grep -v 'view=[A-Z]' in.tmp | sort -u > in
91            grep 'view=[A-Z].*/index' in.tmp | sort -u >> in
92            links=`wc -l in | cut -d" " -f1`
93            echo "found $links unique links"
94    }
95    
96    function mirror_in() {
97            cat in | while read url ; do
98                    mirror "$url"
99                    #sleep $wait
100    
101                    if grep 'promo.asp' `ls -t index.html* | head -3` >/dev/null ; then
102                            echo "WARNING: safari seems to logout you as user. Aborting."
103                            exit 1
104                    fi
105    
106                    if grep -i '>Account locked<' `ls -t index.html* | head -3` >/dev/null ; then
107                            echo "WARNING: your safari account is locked. Aborting."
108                            exit 1
109                    fi
110    
111                    if grep -i 'session disabled' `ls -t index.html* | head -3` >/dev/null ; then
112                            echo "WARNING: your safari session is disabled. Aborting."
113                            exit 1
114                    fi
115    
116                    if grep -i 'This is only a preview of the full book' `ls -t index.html* | head -3` >/dev/null ; then
117                            echo "WARNING: you didn't add this book to your bookshelf!"
118                            exit 1
119                    fi
120            done
121            echo
122    }
123    
124    echo -n > in
125    mirror "http://safari.oreilly.com/?XmlId=$isbn"
126    echo
127    
128  echo "extract URLs from first page..."  echo "extract URLs from first page..."
129  geturl "index.html?XmlId=$isbn" $isbn  geturl "index.html?XmlId=$isbn" $isbn
130    uniqurl
131    
132  mirror "-i in"  mirror_in
133    
134  echo -n "extracting URLs [1]"  echo -n "extracting URLs [1]"
135  ls index.html* | while read file ; do  ls index.html* | while read file ; do
# Line 49  ls index.html* | while read file ; do Line 138  ls index.html* | while read file ; do
138  done  done
139  echo  echo
140    
141  sort -u in > in2  uniqurl
142    
143  mirror "-i in2"  mirror_in
144    
145  echo > in  echo -n > in
146  echo -n "extracting URLs [2]"  echo -n "extracting URLs [2]"
147  ls index.html* | while read file ; do  ls index.html* | while read file ; do
148          echo -n "."          echo -n "."
149          geturl $file $isbn          geturl $file $isbn
150  done  done
151    
152  sort -u in > in2  uniqurl
153    
154    mirror_in
155    
156    # convert links in html
157    bn=`basename $0`
158    dir=`echo $0 | sed "s/$bn$//"`
159    ls index.html* | xargs -i $dir/filter.pl {}
160    mkdir orig
161    mv index.html* orig/
162    
 mirror "-i in2"  

Legend:
Removed from v.1.1  
changed lines
  Added in v.1.9

  ViewVC Help
Powered by ViewVC 1.1.26