! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
continuations calendar prettyprint dlists deques locals
spider.unique-deque combinators concurrency.semaphores ;
IN: spider
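
! Crawl state: the base URL, page-count and depth limits, an optional
! between-page sleep, link filters, fetched pages (spidered), the work
! queue (todo), and off-site links seen so far (nonmatching).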
TUPLE: spider base
{ count integer initial: 0 }
{ max-count number initial: 1/0. }
sleep
{ max-depth integer initial: 0 }
initial-links
filters
spidered
todo
nonmatching
quiet?
currently-spidering
{ #threads integer initial: 1 }
semaphore ;
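
! One result is recorded per fetched page: its URL and crawl depth, the
! HTTP response headers, the parsed HTML, the outgoing links, and the
! fetch/parse timings.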
TUPLE: spider-result url depth headers
fetched-in parsed-html links processed-in fetched-at ;
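
! Make a spider rooted at base; the base URL is also the first entry on
! the todo queue, at depth 0.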
: <spider> ( base -- spider )
    >url spider new
        over >>base
        over >>currently-spidering
        swap 0 <unique-deque> [ push-url ] keep >>todo
        <unique-deque> >>nonmatching
        H{ } clone >>spidered
        1 <semaphore> >>semaphore ;
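
! Allocate a spider-result for a URL found at the given depth; the
! remaining slots are filled in after the fetch.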
: <spider-result> ( url depth -- spider-result )
    spider-result new
        swap >>depth
        swap >>url ;
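
! Keep only the links passing every filter quotation; with no filters
! set, the links pass through unchanged.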
: apply-filters ( links spider -- links' )
    filters>> [
        '[ [ _ 1&& ] filter ] call( seq -- seq' )
    ] when* ;
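
! Queue each link at the given depth; the unique-deque skips URLs it
! has already seen.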
: push-links ( links level unique-deque -- )
    '[ _ _ push-url ] each ;
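
! Same-host links go on the todo queue; off-site links are only
! recorded in nonmatching, never fetched.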
: add-todo ( links level spider -- )
    todo>> push-links ;

: add-nonmatching ( links level spider -- )
    nonmatching>> push-links ;
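
! Split a page's links into same-host and other-host groups by comparing
! each link's host against the spider's base URL.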
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> members ] bi*
    [ host>> = ] with partition ;
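
! Record a finished page: bump the page count, store the result under
! its URL, then queue its links one level deeper.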
:: add-spidered ( spider spider-result -- )
    spider [ 1 + ] change-count drop

    spider-result dup url>>
    spider spidered>> set-at

    spider spider-result filter-base-links :> ( matching nonmatching )
    spider-result depth>> 1 + :> depth

    nonmatching depth spider add-nonmatching

    matching spider apply-filters depth spider add-todo ;
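
! Resolve relative hrefs against the current base URL.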
: normalize-hrefs ( base links -- links' )
    [ derive-url ] with map ;
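
! Progress line, e.g. "depth: 2, spidering: URL".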
: print-spidering ( spider-result -- )
    [ url>> ] [ depth>> ] bi
    "depth: " write number>string write
    ", spidering: " write . yield ;
: url-html? ( url -- ? )
    path>> file-extension { ".htm" ".html" f } member? ;
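
! Fetch and parse one page, timing both phases, and fill in the result
! slots. The URL is pre-registered in spidered so it is not re-queued.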
:: fill-spidered-result ( spider spider-result -- )
    spider-result url>> :> url
    f url spider spidered>> set-at
    [ url http-get ] benchmark :> ( headers html fetched-in )
    [
        html parse-html
        spider currently-spidering>>
        over find-all-links normalize-hrefs
    ] benchmark :> ( parsed-html links processed-in )
    spider-result
        headers >>headers
        fetched-in >>fetched-in
        parsed-html >>parsed-html
        links >>links
        processed-in >>processed-in
        now >>fetched-at drop ;
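
! Spider one page, printing progress and a result dump unless quiet?.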
:: spider-page ( spider spider-result -- )
    spider quiet?>> [ spider-result print-spidering ] unless
    spider spider-result fill-spidered-result
    spider quiet?>> [ spider-result describe ] unless
    spider spider-result add-spidered ;
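
! Log errors thrown by spider-page at ERROR level.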
\ spider-page ERROR add-error-logging
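
! Between pages, pause for the configured sleep duration, if any.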
: spider-sleep ( spider -- )
    sleep>> [ sleep ] when* ;
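
! Seed the todo queue with the initial links, normalized against the
! base URL, at depth 0.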
: queue-initial-links ( spider -- spider )
    [
        [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0
    ] keep [ todo>> push-links ] keep ;
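
! Keep crawling while the queue is non-empty and both the depth and
! page-count limits are unmet.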
: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi <= ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;
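
! Pop the next URL, note it as the page currently being spidered, and
! wrap it in a fresh spider-result.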
: setup-next-url ( spider -- spider spider-result )
    dup todo>> peek-url url>> >>currently-spidering
    dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;
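
! Take one URL off the queue and spider it.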
: spider-next-page ( spider -- )
    setup-next-url spider-page ;
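
! Tail-recursive crawl loop: page, sleep, repeat while spider-page?.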
: run-spider-loop ( spider -- )
    dup spider-page? [
        [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
    ] [ drop ] if ;
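
! Entry point: queue the initial links, then crawl under the "spider"
! log service.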
: run-spider ( spider -- spider )
    "spider" [
        queue-initial-links
        [ run-spider-loop ] keep
    ] with-logging ;
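
! A minimal usage sketch; the URL and the limits below are illustrative,
! not defaults:
!
!     USING: assocs prettyprint spider ;
!     "https://factorcode.org" <spider>
!         2 >>max-depth
!         100 >>max-count
!         t >>quiet?
!     run-spider
!     spidered>> assoc-size .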