! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors assocs calendar combinators.short-circuit
concurrency.semaphores deques html.parser html.parser.analyzer
http.client inspector io io.pathnames kernel logging math
math.parser prettyprint sequences sets spider.unique-deque
threads tools.time urls ;
IN: spider

! Crawl state: the base URL, counters and limits, the work queue,
! and the results gathered so far. Untyped slots are reconstructed
! from the accessors used below.
TUPLE: spider
    base
    { count integer initial: 0 }
    { max-count number initial: 1/0. }
    sleep
    { max-depth integer initial: 0 }
    initial-links
    filters
    spidered
    todo
    nonmatching
    quiet?
    currently-spidering
    { #threads integer initial: 1 }
    semaphore ;

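! One spider-result is recorded per fetched page: the URL, its crawl
! depth, the response headers, fetch/parse timings, the parsed HTML,
! and the set of links found on the page.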
TUPLE: spider-result url depth headers
fetched-in parsed-html links processed-in fetched-at ;

: <spider> ( base -- spider )
    >url
    spider new
        over >>base
        over >>currently-spidering
        swap 0 <unique-deque> [ push-url ] keep >>todo
        <unique-deque> >>nonmatching
        H{ } clone >>spidered
        1 <semaphore> >>semaphore ;
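! A usage sketch (hypothetical URL); any slot can be adjusted with
! its setter before the crawl starts:
!     "https://factorcode.org" <spider>
!         2 >>max-depth
!         100 >>max-count
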
: <spider-result> ( url depth -- spider-result )
    spider-result new
        swap >>depth
        swap >>url ;

: apply-filters ( links spider -- links' )
    filters>> [
        '[ [ _ 1&& ] filter ] call( seq -- seq' )
    ] when* ;
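! Each filter is a quotation with stack effect ( url -- ? ), and 1&&
! keeps only links for which every filter returns true. A sketch that
! restricts the crawl to HTML-looking URLs:
!     { [ url-html? ] } >>filters
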
: push-links ( links level unique-deque -- )
    '[ _ _ push-url ] each ;

: add-todo ( links level spider -- )
    todo>> push-links ;

: add-nonmatching ( links level spider -- )
    nonmatching>> push-links ;

! Split a page's links into those on the spider's own host and
! everything else.
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> members ] bi*
    [ host>> = ] with partition ;

! Record a finished page and queue its links one level deeper.
:: add-spidered ( spider spider-result -- )
    spider [ 1 + ] change-count drop

    spider-result dup url>>
    spider spidered>> set-at

    spider spider-result filter-base-links :> ( matching nonmatching )
    spider-result depth>> 1 + :> depth

    nonmatching depth spider add-nonmatching

    matching spider apply-filters depth spider add-todo ;

: normalize-hrefs ( base links -- links' )
    [ derive-url ] with map ;
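! derive-url resolves a link against a base URL, so relative hrefs
! become absolute. For example (hypothetical URLs):
!     URL" http://example.com/a/" URL" b.html" derive-url
!     ! => URL" http://example.com/a/b.html"
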
: print-spidering ( spider-result -- )
    [ url>> ] [ depth>> ] bi
    "depth: " write number>string write
    ", spidering: " write . yield ;

! file-extension yields the extension without its leading dot, or f
! when the path has none.
: url-html? ( url -- ? )
    path>> file-extension { "htm" "html" f } member? ;
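! f is included so extensionless paths such as "/" also count as
! HTML pages.
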
:: fill-spidered-result ( spider spider-result -- )
    spider-result url>> :> url
    f url spider spidered>> set-at
    ! benchmark runs the quotation and also outputs the elapsed
    ! time in nanoseconds.
    [ url http-get ] benchmark :> ( headers html fetched-in )
    [
        url url-html? [
            html parse-html
            spider currently-spidering>>
            over find-all-links normalize-hrefs
        ] [ f { } ] if
    ] benchmark :> ( parsed-html links processed-in )
    spider-result
        headers >>headers
        fetched-in >>fetched-in
        parsed-html >>parsed-html
        links >>links
        processed-in >>processed-in
        now >>fetched-at drop ;

:: spider-page ( spider spider-result -- )
    spider quiet?>> [ spider-result print-spidering ] unless
    spider spider-result fill-spidered-result
    spider quiet?>> [ spider-result describe ] unless
    spider spider-result add-spidered ;

\ spider-page ERROR add-error-logging
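! add-error-logging hooks spider-page into the logging framework so
! that errors thrown while fetching a page are logged at ERROR level
! in the "spider" log (see run-spider below).
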
: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;

: queue-initial-links ( spider -- spider )
    [ [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0 ]
    keep [ add-todo ] keep ;

: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi <= ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;
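! The crawl continues while the queue is non-empty, the next URL is
! within max-depth, and fewer than max-count pages have been fetched.
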
: setup-next-url ( spider -- spider spider-result )
    dup todo>> peek-url url>> >>currently-spidering
    dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;

: spider-next-page ( spider -- )
    setup-next-url spider-page ;

: run-spider-loop ( spider -- )
    dup spider-page? [
        [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
    ] [ drop ] if ;

: run-spider ( spider -- spider )
    "spider" [
        queue-initial-links
        [ run-spider-loop ] keep
    ] with-logging ;
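! End-to-end sketch (hypothetical URL; performs real HTTP requests
! when run):
!     USING: accessors assocs prettyprint spider ;
!     "https://factorcode.org" <spider>
!         1 >>max-depth
!         10 >>max-count
!         t >>quiet?
!     run-spider
!     spidered>> assoc-size .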