! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
continuations calendar prettyprint dlists deques locals
present ;
IN: spider

TUPLE: spider base count max-count sleep max-depth initial-links
filters spidered todo nonmatching quiet currently-spidering ;

TUPLE: spider-result url depth headers fetch-time parsed-html
links processing-time timestamp ;

TUPLE: todo-url url depth ;

: <todo-url> ( url depth -- todo-url )
    todo-url boa ;

TUPLE: unique-deque assoc deque ;

: <unique-deque> ( -- unique-deque )
    H{ } clone <dlist> unique-deque boa ;

: url-exists? ( url unique-deque -- ? )
    [ url>> ] [ assoc>> ] bi* key? ;

: push-url ( url depth unique-deque -- )
    [ <todo-url> ] dip 2dup url-exists? [
        2drop
    ] [
        [ [ [ t ] dip url>> ] [ assoc>> ] bi* set-at ]
        [ deque>> push-back ] 2bi
    ] if ;

: pop-url ( unique-deque -- todo-url ) deque>> pop-front ;

: peek-url ( unique-deque -- todo-url ) deque>> peek-front ;

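! A quick sketch of the deduplicating queue (the URL below is
! illustrative, not from this vocabulary): pushing the same URL a
! second time is a no-op, so each page is queued at most once.
!
!     "http://example.com/" >url 0 <unique-deque> [ push-url ] keep
!     [ "http://example.com/" >url 0 ] dip [ push-url ] keep ! dropped: already seen
!     pop-url .
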
: <spider> ( base -- spider )
    >url
    spider new
        over >>base
        over >>currently-spidering
        swap 0 <unique-deque> [ push-url ] keep >>todo
        <unique-deque> >>nonmatching
        0 >>max-depth
        0 >>count
        1/0. >>max-count
        H{ } clone >>spidered ;

<PRIVATE

: apply-filters ( links spider -- links' )
    filters>> [ '[ [ _ 1&& ] filter ] call( seq -- seq' ) ] when* ;

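! Filters are quotations applied to each link URL and combined with
! 1&&: a link survives only if every filter yields true. A hedged
! sketch (the ".html" filter is an assumption, not part of this file):
!
!     "http://example.com" <spider>
!         { [ present ".html" tail? ] } >>filters
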
: push-links ( links level unique-deque -- )
    '[ _ _ push-url ] each ;

: add-todo ( links level spider -- )
    todo>> push-links ;

: add-nonmatching ( links level spider -- )
    nonmatching>> push-links ;

: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> prune ] bi*
    [ host>> = ] with partition ;

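! add-spidered (below) bumps the page count, records the result under
! its URL in the spidered table, then splits the result's links by
! host: same-host links pass through the filters and are queued one
! level deeper, while off-host links are parked in nonmatching.
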
: add-spidered ( spider spider-result -- )
    [ [ 1+ ] change-count ] dip
    2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
    [ filter-base-links ] 2keep
    depth>> 1+ swap
    [ add-nonmatching ]
    [ tuck [ apply-filters ] 2dip add-todo ] 2bi ;

: url-absolute? ( url -- ? )
    present "http://" head? ;

: normalize-hrefs ( links spider -- links' )
    currently-spidering>> present swap
    [ dup url-absolute? [ derive-url ] [ url-append-path >url ] if ] with map ;

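! Note that url-absolute? only recognizes the "http://" prefix, so an
! https:// link would take the relative branch here. Roughly, with
! currently-spidering at http://example.com/ (illustrative values),
! "about.html" becomes http://example.com/about.html, while
! "http://other.example/x" is derived against the base and kept as-is.
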
: print-spidering ( url depth -- )
    "depth: " write number>string write
    ", spidering: " write . yield ;

:: new-spidered-result ( spider url depth -- spider-result )
    f url spider spidered>> set-at
    [ url http-get ] benchmark :> fetch-time :> html :> headers
    [
        html parse-html [ ] [ find-all-links spider normalize-hrefs ] bi
    ] benchmark :> processing-time :> links :> parsed-html
    url depth headers fetch-time parsed-html links processing-time
    now spider-result boa ;

:: spider-page ( spider url depth -- )
    spider quiet>> [ url depth print-spidering ] unless
    spider url depth new-spidered-result :> spidered-result
    spider quiet>> [ spidered-result describe ] unless
    spider spidered-result add-spidered ;

\ spider-page ERROR add-error-logging

: spider-sleep ( spider -- )
    sleep>> [ sleep ] when* ;

:: queue-initial-links ( spider -- spider )
    spider initial-links>> spider normalize-hrefs 0 spider add-todo spider ;

: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi < ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;

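! Crawling continues only while all three tests above hold: the todo
! queue is non-empty, the next URL's depth is below max-depth, and the
! page count is below max-count; 1&& stops at the first test to fail.
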
: setup-next-url ( spider -- spider url depth )
    dup todo>> peek-url url>> present >>currently-spidering
    dup todo>> pop-url [ url>> ] [ depth>> ] bi ;

: spider-next-page ( spider -- )
    setup-next-url spider-page ;

PRIVATE>

: run-spider-loop ( spider -- )
    dup spider-page? [
        [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
    ] [ drop ] if ;

: run-spider ( spider -- spider )
    "spider" [
        queue-initial-links [ run-spider-loop ] keep
    ] with-logging ;

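! A hedged end-to-end sketch (URL and settings are illustrative, not
! defaults of this vocabulary): crawl example.com two levels deep, at
! most 10 pages, pausing between fetches, without per-page output:
!
!     USING: spider calendar ;
!     "http://example.com" <spider>
!         2 >>max-depth
!         10 >>max-count
!         1 seconds >>sleep
!         t >>quiet
!     run-spider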