http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
-continuations calendar prettyprint dlists deques locals ;
+continuations calendar prettyprint dlists deques locals
+spider.unique-deque combinators concurrency.semaphores ;
IN: spider
TUPLE: spider base count max-count sleep max-depth initial-links
-filters spidered todo nonmatching quiet ;
+filters spidered todo nonmatching quiet currently-spidering
+#threads semaphore follow-robots? robots ;
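The spider tuple gains five slots: currently-spidering records the URL whose page is being fetched, so relative links can resolve against it, while #threads, semaphore, follow-robots? and robots lay groundwork for concurrent fetching and robots.txt handling; nothing else in this patch reads them yet. A minimal sketch of the construction defaults (hypothetical host; building a spider does not touch the network):

    USING: accessors kernel prettyprint spider ;
    "http://example.com" <spider>
    [ #threads>> . ] [ max-count>> . ] bi    ! prints 1, then 1/0. (unbounded)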
-TUPLE: spider-result url depth headers fetch-time parsed-html
-links processing-time timestamp ;
-
-TUPLE: todo-url url depth ;
-
-: <todo-url> ( url depth -- todo-url )
- todo-url new
- swap >>depth
- swap >>url ;
-
-TUPLE: unique-deque assoc deque ;
-
-: <unique-deque> ( -- unique-deque )
- H{ } clone <dlist> unique-deque boa ;
-
-: push-url ( url depth unique-deque -- )
- [ <todo-url> ] dip
- [ [ [ t ] dip url>> ] [ assoc>> ] bi* set-at ]
- [ deque>> push-back ] 2bi ;
-
-: pop-url ( unique-deque -- todo-url ) deque>> pop-front ;
-
-: peek-url ( unique-deque -- todo-url ) deque>> peek-front ;
+TUPLE: spider-result url depth headers
+fetched-in parsed-html links processed-in fetched-at ;
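spider-result keeps the same data under clearer slot names: fetch-time becomes fetched-in, processing-time becomes processed-in, and timestamp becomes fetched-at. The todo-url and unique-deque machinery deleted above moves into the new spider.unique-deque vocabulary added to the USING: list. After a run, spidered>> maps each visited URL to its spider-result, so a crawl can be inspected like this (a sketch, assuming the hypothetical host is reachable):

    USING: accessors arrays assocs kernel prettyprint sequences spider ;
    "http://example.com" <spider> t >>quiet run-spider
    spidered>> values sift    ! sift drops the f placeholders left by failed fetches
    [ [ url>> ] [ fetched-in>> ] bi 2array . ] each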
: <spider> ( base -- spider )
>url
spider new
over >>base
+ over >>currently-spidering
swap 0 <unique-deque> [ push-url ] keep >>todo
<unique-deque> >>nonmatching
0 >>max-depth
0 >>count
1/0. >>max-count
- H{ } clone >>spidered ;
+ H{ } clone >>spidered
+ 1 [ >>#threads ] [ <semaphore> >>semaphore ] bi ;
+
+: <spider-result> ( url depth -- spider-result )
+ spider-result new
+ swap >>depth
+ swap >>url ;
<PRIVATE
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
    [ base>> host>> ] [ links>> prune ] bi*
    [ host>> = ] with partition ;
: add-spidered ( spider spider-result -- )
- [ [ 1+ ] change-count ] dip
+ [ [ 1 + ] change-count ] dip
2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
[ filter-base-links ] 2keep
- depth>> 1+ swap
+ depth>> 1 + swap
[ add-nonmatching ]
[ tuck [ apply-filters ] 2dip add-todo ] 2bi ;
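For orientation, add-spidered reads: bump count, record the result in spidered>> under its URL, split the page's links by host with filter-base-links, then queue everything at depth + 1: off-host links go to the nonmatching deque as-is, while on-host links pass through apply-filters before landing in todo.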
-: normalize-hrefs ( links spider -- links' )
- [ [ >url ] map ] dip
- base>> swap [ derive-url ] with map ;
+: normalize-hrefs ( base links -- links' )
+ [ derive-url ] with map ;
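normalize-hrefs now takes the base URL explicitly, the currently-spidering page, instead of reading the spider's base slot, and it drops the old [ >url ] map, apparently because find-all-links already produces URL objects. Resolution is plain derive-url from the urls vocabulary; a listener sketch (the word is private, hence spider.private, and the paths are made up) should print something like:

    USING: prettyprint sequences spider.private urls ;
    URL" http://example.com/docs/"
    { "intro.html" "/about" } [ >url ] map
    normalize-hrefs .
    ! { URL" http://example.com/docs/intro.html" URL" http://example.com/about" }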
-: print-spidering ( url depth -- )
+: print-spidering ( spider-result -- )
+ [ url>> ] [ depth>> ] bi
"depth: " write number>string write
", spidering: " write . yield ;
-:: new-spidered-result ( spider url depth -- spider-result )
- f url spider spidered>> set-at
- [ url http-get ] benchmark :> fetch-time :> html :> headers
+:: fill-spidered-result ( spider spider-result -- )
+ f spider-result url>> spider spidered>> set-at
+ [ spider-result url>> http-get ] benchmark :> fetched-in :> html :> headers
[
- html parse-html [ ] [ find-hrefs spider normalize-hrefs ] bi
- ] benchmark :> processing-time :> links :> parsed-html
- url depth headers fetch-time parsed-html links processing-time
- now spider-result boa ;
-
-:: spider-page ( spider url depth -- )
- spider quiet>> [ url depth print-spidering ] unless
- spider url depth new-spidered-result :> spidered-result
- spider quiet>> [ spidered-result describe ] unless
- spider spidered-result add-spidered ;
+ html parse-html
+ spider currently-spidering>>
+ over find-all-links normalize-hrefs
+ ] benchmark :> processed-in :> links :> parsed-html
+ spider-result
+ headers >>headers
+ fetched-in >>fetched-in
+ parsed-html >>parsed-html
+ links >>links
+ processed-in >>processed-in
+ now >>fetched-at drop ;
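fill-spidered-result leans on benchmark from tools.time, which calls a quotation and pushes the elapsed runtime on top of whatever the quotation itself left behind; the chained :> bindings then pop everything off in reverse order. A self-contained sketch of the same pattern (timed-sum is a made-up word):

    USING: io locals math prettyprint sequences tools.time ;
    :: timed-sum ( n -- )
        [ n iota sum ] benchmark :> elapsed :> total
        "sum: " write total . "elapsed: " write elapsed . ;

    ! 1000 timed-sum prints sum: 499500, then whatever runtime benchmark measured.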
+
+:: spider-page ( spider spider-result -- )
+ spider quiet>> [ spider-result print-spidering ] unless
+ spider spider-result fill-spidered-result
+ spider quiet>> [ spider-result describe ] unless
+ spider spider-result add-spidered ;
\ spider-page ERROR add-error-logging
-: spider-sleep ( spider -- )
- sleep>> [ sleep ] when* ;
+: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
-:: queue-initial-links ( spider -- spider )
- spider initial-links>> spider normalize-hrefs 0 spider add-todo spider ;
+: queue-initial-links ( spider -- )
+ [
+ [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0
+ ] keep add-todo ;
: spider-page? ( spider -- ? )
    {
        [ todo>> deque>> deque-empty? not ]
        [ [ count>> ] [ max-count>> ] bi < ]
    } 1&& ;
-: setup-next-url ( spider -- spider url depth )
- dup todo>> pop-url [ url>> ] [ depth>> ] bi ;
+: setup-next-url ( spider -- spider spider-result )
+ dup todo>> peek-url url>> >>currently-spidering
+ dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;
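Note the peek-before-pop: setup-next-url records the head of the queue in currently-spidering first, so fill-spidered-result resolves relative links against the page actually being fetched rather than against the crawl's starting base. The queue words come from the extracted vocabulary; assuming spider.unique-deque keeps the signatures shown in the deleted code above, it behaves like:

    USING: accessors prettyprint spider.unique-deque urls ;
    <unique-deque>
    URL" http://example.com/a" 0 pick push-url
    dup peek-url url>> .    ! URL" http://example.com/a"
    pop-url depth>> .       ! 0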
: spider-next-page ( spider -- )
setup-next-url spider-page ;
: run-spider-loop ( spider -- )
dup spider-page? [
- [ spider-next-page ] [ run-spider-loop ] bi
+ [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
] [
drop
] if ;
: run-spider ( spider -- spider )
"spider" [
- queue-initial-links [ run-spider-loop ] keep
+ dup queue-initial-links [ run-spider-loop ] keep
] with-logging ;
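End to end: run-spider seeds the todo queue from initial-links, loops until the queue empties or count>> reaches max-count, now sleeping between pages, and returns the spider for inspection. Because spider-page is wrapped by add-error-logging, a page that fails to fetch is logged under the "spider" service instead of aborting the crawl. A fuller configuration sketch against a hypothetical host:

    USING: accessors calendar prettyprint sequences spider urls ;
    "http://docs.example.com" <spider>
        { "/reference.html" } [ >url ] map >>initial-links
        100 >>max-count
        1 seconds >>sleep
        t >>quiet
    run-spider count>> .    ! number of pages fetched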