! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
continuations calendar prettyprint dlists deques locals
spider.unique-deque combinators concurrency.semaphores ;
IN: spider
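
! A spider crawls outward from a base URL. count/max-count,
! max-depth, sleep, filters and #threads configure the crawl;
! spidered, todo and nonmatching hold its state.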
TUPLE: spider
    base
    { count integer initial: 0 }
    { max-count number initial: 1/0. }
    sleep
    { max-depth integer initial: 0 }
    initial-links filters spidered todo nonmatching
    quiet? currently-spidering
    { #threads integer initial: 1 }
    semaphore ;
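
! One spider-result is recorded per fetched page: fetch/parse
! timings, the parsed HTML, the extracted links and a timestamp.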
TUPLE: spider-result url depth headers
fetched-in parsed-html links processed-in fetched-at ;
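
! The base URL doubles as the first todo entry (at depth 0) and as
! the host that filter-base-links uses to keep the crawl on-site.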
: <spider> ( base -- spider )
    >url
    spider new
        over >>base
        over >>currently-spidering
        swap 0 <unique-deque> [ push-url ] keep >>todo
        <unique-deque> >>nonmatching
        H{ } clone >>spidered
        1 <semaphore> >>semaphore ;

: <spider-result> ( url depth -- spider-result )
    spider-result new
        swap >>depth
        swap >>url ;

<PRIVATE
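
! filters is an optional sequence of predicate quotations; a link
! is kept only if every filter matches (1&&). With no filters the
! links pass through unchanged.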
: apply-filters ( links spider -- links' )
    filters>> [
        '[ [ _ 1&& ] filter ] call( seq -- seq' )
    ] when* ;
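
! Queue helpers: the unique-deque skips URLs it has already seen,
! so each page is queued at most once.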
: push-links ( links level unique-deque -- )
    '[ _ _ push-url ] each ;

: add-todo ( links level spider -- )
    todo>> push-links ;

: add-nonmatching ( links level spider -- )
    nonmatching>> push-links ;
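
! Partition a page's links into those on the spider's own host and
! everything else.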
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> members ] bi*
    [ host>> = ] with partition ;
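
! Record a fetched page: bump the count, file the result under its
! URL, then queue same-host links (after filtering) one level
! deeper; off-host links land in nonmatching.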
:: add-spidered ( spider spider-result -- )
    spider [ 1 + ] change-count drop

    spider-result dup url>>
    spider spidered>> set-at

    spider spider-result filter-base-links :> ( matching nonmatching )
    spider-result depth>> 1 + :> depth

    nonmatching depth spider add-nonmatching

    matching spider apply-filters depth spider add-todo ;
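
! Resolve each href, relative or absolute, against the base URL
! with derive-url from the urls vocabulary.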
: normalize-hrefs ( base links -- links' )
    [ derive-url ] with map ;

: print-spidering ( spider-result -- )
    [ url>> ] [ depth>> ] bi
    "depth: " write number>string write
    ", spidering: " write . yield ;
:: fill-spidered-result ( spider spider-result -- )
    f spider-result url>> spider spidered>> set-at
    [ spider-result url>> http-get ] benchmark :> ( headers html fetched-in )
    [
        html parse-html
        spider currently-spidering>>
        over find-all-links normalize-hrefs
    ] benchmark :> ( parsed-html links processed-in )
    spider-result
        headers >>headers
        fetched-in >>fetched-in
        parsed-html >>parsed-html
        links >>links
        processed-in >>processed-in
        now >>fetched-at drop ;
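
! Process one page: announce it (unless quiet?), fetch and parse
! it, optionally describe the result, then record it.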
:: spider-page ( spider spider-result -- )
    spider quiet?>> [ spider-result print-spidering ] unless
    spider spider-result fill-spidered-result
    spider quiet?>> [ spider-result describe ] unless
    spider spider-result add-spidered ;
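
! Errors thrown by spider-page are logged at ERROR level, so one
! failed fetch does not abort the whole crawl.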
\ spider-page ERROR add-error-logging

: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
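
! Seed the todo queue with the initial links, resolved against the
! base URL, at depth 0.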
: queue-initial-links ( spider -- spider )
    [ [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0 ]
    [ add-todo ] [ ] tri ;
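
! Keep crawling while work remains queued, the next URL is within
! max-depth, and fewer than max-count pages have been fetched.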
: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi <= ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;
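
! Pop the next URL, note it as currently-spidering, and wrap it in
! a fresh spider-result.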
: setup-next-url ( spider -- spider spider-result )
    dup todo>> peek-url url>> >>currently-spidering
    dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;

: spider-next-page ( spider -- )
    setup-next-url spider-page ;

PRIVATE>
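
! Tail-recursive crawl loop: process a page, optionally sleep,
! repeat until spider-page? says stop.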
: run-spider-loop ( spider -- )
    dup spider-page? [
        [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
    ] [ drop ] if ;
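
! Entry point: queues the initial links and runs the crawl loop
! inside a "spider" logging scope, returning the spider for
! inspection.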
: run-spider ( spider -- spider )
    "spider" [
        queue-initial-links
        [ run-spider-loop ] keep
    ] with-logging ;
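
! Usage sketch (hypothetical URL; slot values are illustrative):
!     "http://example.com" <spider>
!         2 >>max-depth
!         100 >>max-count
!         t >>quiet?
!     run-spider
!     spidered>> ! hashtable of url -> spider-result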