USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
-heaps math.parser inspector urls assoc-deques logging
-combinators.short-circuit continuations calendar prettyprint ;
+math.parser inspector urls logging combinators.short-circuit
+continuations calendar prettyprint dlists deques locals
+spider.unique-deque combinators concurrency.semaphores ;
IN: spider
-TUPLE: spider base count max-count sleep max-depth secure? agent timeout
-filters spidered todo nonmatching initial-links ;
+TUPLE: spider base count max-count sleep max-depth initial-links
+filters spidered todo nonmatching quiet currently-spidering
+#threads semaphore follow-robots? robots ;
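+! quiet suppresses progress output; currently-spidering is the URL used
+! as the base when resolving links; #threads/semaphore are meant to
+! bound concurrent fetches; follow-robots?/robots presumably reserve
+! robots.txt support.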
-TUPLE: spider-result url depth headers fetch-time parsed-html
-links processing-time ;
+TUPLE: spider-result url depth headers
+fetched-in parsed-html links processed-in fetched-at ;
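+! fetched-in and processed-in hold elapsed times measured by benchmark;
+! fetched-at is the wall-clock timestamp taken with now.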
: <spider> ( base -- spider )
    >url
    spider new
        over >>base
- swap 0 <unique-min-heap> [ heap-push ] keep >>todo
- <unique-min-heap> >>nonmatching
+ over >>currently-spidering
+ swap 0 <unique-deque> [ push-url ] keep >>todo
+ <unique-deque> >>nonmatching
        0 >>max-depth
        0 >>count
        1/0. >>max-count
- H{ } clone >>spidered ;
+ H{ } clone >>spidered
+ 1 [ >>#threads ] [ <semaphore> >>semaphore ] bi ;
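+
+! Example (hypothetical URL): crawl at most 100 pages, two levels deep:
+!
+!     "http://example.com" <spider>
+!         2 >>max-depth
+!         100 >>max-count
+!     run-spider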
+
+: <spider-result> ( url depth -- spider-result )
+ spider-result new
+ swap >>depth
+ swap >>url ;
<PRIVATE
: apply-filters ( links spider -- links' )
- filters>> [ '[ _ 1&& ] filter ] when* ;
+ filters>> [ '[ [ _ 1&& ] filter ] call( seq -- seq' ) ] when* ;
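+! filters holds user-supplied quotations, so the fried 1&& quotation is
+! invoked through call( to give the compiler a declared stack effect.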
+
+: push-links ( links level unique-deque -- )
+ '[ _ _ push-url ] each ;
: add-todo ( links level spider -- )
- tuck [ apply-filters ] 2dip
- tuck
- [ spidered>> keys diff ]
- [ todo>> ] 2bi* '[ _ _ heap-push ] each ;
+ todo>> push-links ;
: add-nonmatching ( links level spider -- )
- nonmatching>> '[ _ _ heap-push ] each ;
-
-: relative-url? ( url -- ? ) protocol>> not ;
+ nonmatching>> push-links ;
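+
+! The unique-deque refuses to enqueue a URL twice, so add-todo no longer
+! diffs incoming links against the spidered keys.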
-: filter-base ( spider spider-result -- base-links nonmatching-links )
+: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> prune ] bi*
    [ host>> = ] with partition ;
: add-spidered ( spider spider-result -- )
- [ [ 1+ ] change-count ] dip
+ [ [ 1 + ] change-count ] dip
    2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
- [ filter-base ] 2keep
- depth>> 1+ swap
+ [ filter-base-links ] 2keep
+ depth>> 1 + swap
    [ add-nonmatching ]
- [ add-todo ] 2bi ;
+ [ tuck [ apply-filters ] 2dip add-todo ] 2bi ;
-: print-spidering ( url depth -- )
+: normalize-hrefs ( base links -- links' )
+ [ derive-url ] with map ;
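+! Links from find-all-links are presumably url objects already, so each
+! one is simply resolved against the caller-supplied base.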
+
+: print-spidering ( spider-result -- )
+ [ url>> ] [ depth>> ] bi
"depth: " write number>string write
", spidering: " write . yield ;
-: normalize-hrefs ( links -- links' )
- [ >url ] map
- spider get base>> swap [ derive-url ] with map ;
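+! http-get leaves the response and the page body; benchmark pushes the
+! elapsed time on top, so :> binds fetched-in, then html, then headers.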
+:: fill-spidered-result ( spider spider-result -- )
+ f spider-result url>> spider spidered>> set-at
+ [ spider-result url>> http-get ] benchmark :> fetched-in :> html :> headers
+ [
+ html parse-html
+ spider currently-spidering>>
+ over find-all-links normalize-hrefs
+ ] benchmark :> processed-in :> links :> parsed-html
+ spider-result
+ headers >>headers
+ fetched-in >>fetched-in
+ parsed-html >>parsed-html
+ links >>links
+ processed-in >>processed-in
+ now >>fetched-at drop ;
+
+:: spider-page ( spider spider-result -- )
+ spider quiet>> [ spider-result print-spidering ] unless
+ spider spider-result fill-spidered-result
+ spider quiet>> [ spider-result describe ] unless
+ spider spider-result add-spidered ;
-: (spider-page) ( url depth -- spider-result )
- 2dup print-spidering
- f pick spider get spidered>> set-at
- over '[ _ http-get ] benchmark swap
- [ parse-html dup find-hrefs normalize-hrefs ] benchmark
- spider-result boa
- dup describe ;
+\ spider-page ERROR add-error-logging
-: spider-page ( url depth -- )
- (spider-page) spider get swap add-spidered ;
+: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
-\ spider-page ERROR add-error-logging
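+! Resolve the initial links against currently-spidering (the base URL
+! at startup) and enqueue them all at depth 0.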
+: queue-initial-links ( spider -- )
+ [
+ [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0
+ ] keep add-todo ;
+
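+! Keep crawling while the queue is non-empty and both the next URL's
+! depth and the running page count are below their limits.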
+: spider-page? ( spider -- ? )
+ {
+ [ todo>> deque>> deque-empty? not ]
+ [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi < ]
+ [ [ count>> ] [ max-count>> ] bi < ]
+ } 1&& ;
-: spider-sleep ( -- )
- spider get sleep>> [ sleep ] when* ;
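+! Record the head of the queue in currently-spidering, then pop it and
+! build a fresh spider-result from its url and depth.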
+: setup-next-url ( spider -- spider spider-result )
+ dup todo>> peek-url url>> >>currently-spidering
+ dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;
+
+: spider-next-page ( spider -- )
+ setup-next-url spider-page ;
PRIVATE>
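+! Recursive crawl loop: fetch the next page, sleep if configured, and
+! repeat until spider-page? says to stop.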
+: run-spider-loop ( spider -- )
+ dup spider-page? [
+ [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
+ ] [
+ drop
+ ] if ;
+
: run-spider ( spider -- spider )
"spider" [
- dup spider [
- [ todo>> ] [ max-depth>> ] bi
- '[
- _ <= spider get
- [ count>> ] [ max-count>> ] bi < and
- ] [ spider-page spider-sleep ] slurp-heap-when
- spider get
- ] with-variable
+ dup queue-initial-links [ run-spider-loop ] keep
    ] with-logging ;