! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
continuations calendar prettyprint dlists deques locals
spider.unique-deque combinators ;
IN: spider
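
! Crawl configuration and state: counters and limits, an optional
! sleep between fetches, link filters, the set of pages already
! spidered, and two queues: todo (same-host links to fetch) and
! nonmatching (off-site links, recorded but never fetched).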
TUPLE: spider base count max-count sleep max-depth initial-links
filters spidered todo nonmatching quiet currently-spidering
#threads follow-robots? robots ;
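
! One fetched page: its URL and crawl depth, the response headers,
! how long the fetch and the parse took, the parsed HTML, the links
! found on the page, and the time it was fetched.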
TUPLE: spider-result url depth headers
fetched-in parsed-html links processed-in fetched-at ;
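
! Defaults leave the crawl inert: max-depth starts at 0, so set it
! (and optionally max-count, filters, sleep) before running.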
: <spider> ( base -- spider )
    >url spider new
        over >>base
        over >>currently-spidering
        swap 0 <unique-deque> [ push-url ] keep >>todo
        <unique-deque> >>nonmatching
        0 >>max-depth 0 >>count 1/0. >>max-count
        H{ } clone >>spidered ;
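
! When filters are set, keep only links that satisfy all of them.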
: apply-filters ( links spider -- links' )
    filters>> [ '[ [ _ 1&& ] filter ] call( seq -- seq' ) ] when* ;
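
! Queue every link at the given depth.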
: push-links ( links level unique-deque -- )
    '[ _ _ push-url ] each ;
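
! Matching links are queued for fetching; nonmatching (off-site)
! links are only recorded.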
: add-todo ( links level spider -- )
    todo>> push-links ;
: add-nonmatching ( links level spider -- )
    nonmatching>> push-links ;
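
! Partition a page's links (duplicates pruned) into those on the
! base host and those pointing elsewhere.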
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> prune ] bi*
    [ host>> = ] with partition ;
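
! Record a fetched page under its URL, then route its links one
! level deeper: off-site links to the nonmatching queue, on-site
! links through the filters onto the todo queue.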
: add-spidered ( spider spider-result -- )
    [ [ 1+ ] change-count ] dip
    2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
    [ filter-base-links ] 2keep
    depth>> 1+ swap
    [ add-nonmatching ]
    [ tuck [ apply-filters ] 2dip add-todo ] 2bi ;
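
! Resolve possibly-relative hrefs against the current base URL.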
: normalize-hrefs ( base links -- links' )
    [ derive-url ] with map ;
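
! Progress line, e.g. "depth: 2, spidering: URL".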
: print-spidering ( url depth -- )
    "depth: " write number>string write
    ", spidering: " write . yield ;
:: new-spidered-result ( spider url depth -- spider-result )
    f url spider spidered>> set-at
    [ url http-get ] benchmark :> fetched-in :> html :> headers
    [
        html parse-html
        spider currently-spidering>>
        over find-all-links normalize-hrefs
    ] benchmark :> processing-time :> links :> parsed-html
    url depth headers fetched-in parsed-html links processing-time
    now spider-result boa ;
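
! Spider a single URL: report progress unless quiet, fetch, and
! fold the result into the spider's state.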
:: spider-page ( spider url depth -- )
    spider quiet>> [ url depth print-spidering ] unless
    spider url depth new-spidered-result :> spidered-result
    spider quiet>> [ spidered-result describe ] unless
    spider spidered-result add-spidered ;
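
! Log errors raised while spidering a page at ERROR level.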
\ spider-page ERROR add-error-logging

: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
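
! Seed the todo queue with the initial links at depth 0.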
: queue-initial-links ( spider -- )
    [
        [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0
    ] keep add-todo ;
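
! Continue while the todo queue is non-empty and neither the depth
! limit nor the page-count limit has been reached.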
: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi < ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;
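
! Pop the next URL, remembering it as the page currently being
! spidered.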
: setup-next-url ( spider -- spider url depth )
    dup todo>> peek-url url>> >>currently-spidering
    dup todo>> pop-url [ url>> ] [ depth>> ] bi ;
: spider-next-page ( spider -- )
    setup-next-url spider-page ;
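
! Fetch pages until spider-page? says stop, sleeping between pages
! when a sleep interval is set.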
: run-spider-loop ( spider -- )
    dup spider-page? [
        [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
    ] [ drop ] if ;
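
! Main entry point: queue the initial links, then crawl. Returns
! the spider so callers can inspect spidered, todo and nonmatching.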
: run-spider ( spider -- spider )
    "spider" [
        dup queue-initial-links [ run-spider-loop ] keep
    ] with-logging ;
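
! Usage sketch (illustrative, not part of this vocabulary; the URL
! and limits are made up):
!
!     USING: spider ;
!     "https://concatenative.org" <spider>
!         2 >>max-depth
!         100 >>max-count
!         t >>quiet
!     run-spider spidered>> keys .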