! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
-USING: accessors fry html.parser html.parser.analyzer
-http.client kernel tools.time sets assocs sequences
-concurrency.combinators io threads namespaces math multiline
-heaps math.parser inspector urls assoc-heaps logging
-combinators.short-circuit continuations calendar prettyprint ;
+USING: accessors assocs calendar combinators.short-circuit
+concurrency.semaphores deques html.parser html.parser.analyzer
+http.client inspector io io.pathnames kernel logging math
+math.parser prettyprint sequences sets spider.unique-deque
+threads tools.time urls ;
IN: spider
-TUPLE: spider base count max-count sleep max-depth initial-links
-filters spidered todo nonmatching quiet ;
-
-TUPLE: spider-result url depth headers fetch-time parsed-html
-links processing-time timestamp ;
+TUPLE: spider
+ base
+ { count integer initial: 0 }
+ { max-count number initial: 1/0. }
+ sleep
+ { max-depth integer initial: 0 }
+ initial-links
+ filters
+ spidered
+ todo
+ nonmatching
+ quiet?
+ currently-spidering
+ { #threads integer initial: 1 }
+ semaphore
+ follow-robots?
+ robots ;
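+! Slot notes: max-count defaults to 1/0. (no page limit) and max-depth to 0
+! (only the seed URLs are crawled). #threads, semaphore, follow-robots? and
+! robots are introduced here but not yet consulted by the words in this patch.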
+
+TUPLE: spider-result url depth headers
+fetched-in parsed-html links processed-in fetched-at ;
: <spider> ( base -- spider )
>url
spider new
over >>base
- swap 0 <unique-min-heap> [ heap-push ] keep >>todo
- <unique-min-heap> >>nonmatching
- 0 >>max-depth
- 0 >>count
- 1/0. >>max-count
- H{ } clone >>spidered ;
+ over >>currently-spidering
+ swap 0 <unique-deque> [ push-url ] keep >>todo
+ <unique-deque> >>nonmatching
+ H{ } clone >>spidered
+ 1 <semaphore> >>semaphore ;
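+! <spider> seeds the todo queue with the base URL at depth 0; crawl limits
+! and filters are configured afterwards through the setters
+! (>>max-depth, >>max-count, >>filters, >>quiet?, ...).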
+
+: <spider-result> ( url depth -- spider-result )
+ spider-result new
+ swap >>depth
+ swap >>url ; inline
<PRIVATE
: apply-filters ( links spider -- links' )
- filters>> [ '[ _ 1&& ] filter ] when* ;
+ filters>> [
+ '[ [ _ 1&& ] filter ] call( seq -- seq' )
+ ] when* ;
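+! filters, when set, is a sequence of quotations with effect ( url -- ? );
+! a link is queued only if every filter returns true for it.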
-: push-links ( links level assoc-heap -- )
- '[ _ _ heap-push ] each ;
+: push-links ( links level unique-deque -- )
+ '[ _ _ push-url ] each ;
: add-todo ( links level spider -- )
todo>> push-links ;
: add-nonmatching ( links level spider -- )
nonmatching>> push-links ;
-: filter-base ( spider spider-result -- base-links nonmatching-links )
- [ base>> host>> ] [ links>> prune ] bi*
+: filter-base-links ( spider spider-result -- base-links nonmatching-links )
+ [ base>> host>> ] [ links>> members ] bi*
[ host>> = ] with partition ;
-: add-spidered ( spider spider-result -- )
- [ [ 1+ ] change-count ] dip
- 2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
- [ filter-base ] 2keep
- depth>> 1+ swap
- [ add-nonmatching ]
- [ tuck [ apply-filters ] 2dip add-todo ] 2bi ;
-: normalize-hrefs ( links -- links' )
-    [ >url ] map
-    spider get base>> swap [ derive-url ] with map ;
-: print-spidering ( url depth -- )
+:: add-spidered ( spider spider-result -- )
+    spider [ 1 + ] change-count drop
+
+    spider-result dup url>>
+    spider spidered>> set-at
+
+    spider spider-result filter-base-links :> ( matching nonmatching )
+    spider-result depth>> 1 + :> depth
+
+    nonmatching depth spider add-nonmatching
+    matching spider apply-filters depth spider add-todo ;
+
+: normalize-hrefs ( base links -- links' )
+    [ derive-url ] with map ;
+
+: print-spidering ( spider-result -- )
+    [ url>> ] [ depth>> ] bi
"depth: " write number>string write
", spidering: " write . yield ;
-: (spider-page) ( url depth -- spider-result )
- f pick spider get spidered>> set-at
- over '[ _ http-get ] benchmark swap
- [ parse-html dup find-hrefs normalize-hrefs ] benchmark
- now spider-result boa ;
-
-: spider-page ( url depth -- )
- spider get quiet>> [ 2dup print-spidering ] unless
- (spider-page)
- spider get [ quiet>> [ dup describe ] unless ]
- [ swap add-spidered ] bi ;
+: url-html? ( url -- ? )
+    path>> file-extension { "htm" "html" f } member? ;
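+! file-extension strips the leading dot and f matches extension-less paths,
+! so e.g. /wiki/front-page and /index.html are parsed for links while
+! /logo.png is fetched but not parsed.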
+
+:: fill-spidered-result ( spider spider-result -- )
+ spider-result url>> :> url
+ f url spider spidered>> set-at
+ [ url http-get ] benchmark :> ( headers html fetched-in )
+ [
+ url url-html? [
+ html parse-html
+ spider currently-spidering>>
+ over find-all-links normalize-hrefs
+ ] [
+ f { }
+ ] if
+ ] benchmark :> ( parsed-html links processed-in )
+ spider-result
+ headers >>headers
+ fetched-in >>fetched-in
+ parsed-html >>parsed-html
+ links >>links
+ processed-in >>processed-in
+ now >>fetched-at drop ;
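+! The URL is entered into spidered as f before the fetch; add-spidered
+! replaces that placeholder with the finished spider-result afterwards.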
+
+:: spider-page ( spider spider-result -- )
+ spider quiet?>> [ spider-result print-spidering ] unless
+ spider spider-result fill-spidered-result
+ spider quiet?>> [ spider-result describe ] unless
+ spider spider-result add-spidered ;
\ spider-page ERROR add-error-logging
-: spider-sleep ( -- )
- spider get sleep>> [ sleep ] when* ;
+: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
: queue-initial-links ( spider -- spider )
- [ initial-links>> normalize-hrefs 0 ] keep
- [ add-todo ] keep ;
+ [ [ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0 ]
+ [ add-todo ]
+ [ ] tri ;
+
+: spider-page? ( spider -- ? )
+ {
+ [ todo>> deque>> deque-empty? not ]
+ [ [ todo>> peek-url depth>> ] [ max-depth>> ] bi <= ]
+ [ [ count>> ] [ max-count>> ] bi < ]
+ } 1&& ;
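+! The crawl continues while the todo deque is non-empty, the next queued
+! URL is within max-depth, and fewer than max-count pages have been counted.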
-: slurp-heap-while ( heap quot1 quot2: ( value key -- ) -- )
- pick heap-empty? [ 3drop ] [
- [ [ heap-pop dup ] 2dip slip [ t ] compose [ 2drop f ] if ]
- [ roll [ slurp-heap-while ] [ 3drop ] if ] 3bi
- ] if ; inline recursive
+: setup-next-url ( spider -- spider spider-result )
+ dup todo>> peek-url url>> >>currently-spidering
+ dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;
+
+: spider-next-page ( spider -- )
+ setup-next-url
+ spider-page ;
PRIVATE>
+: run-spider-loop ( spider -- )
+ dup spider-page? [
+ [ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
+ ] [
+ drop
+ ] if ;
+
: run-spider ( spider -- spider )
"spider" [
- dup spider [
- queue-initial-links
- [ todo>> ] [ max-depth>> ] bi
- '[
- _ <= spider get
- [ count>> ] [ max-count>> ] bi < and
- ] [ spider-page spider-sleep ] slurp-heap-while
- spider get
- ] with-variable
+ queue-initial-links
+ [ run-spider-loop ] keep
] with-logging ;
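+
+! A minimal usage sketch (not part of the library): the URL and the filter
+! quotation below are placeholders. <spider> already queues the base URL,
+! so initial-links is left empty; filters are applied only to same-host
+! links before they are queued.
+!
+!     URL" http://concatenative.org" <spider>
+!         2 >>max-depth
+!         { } >>initial-links
+!         { [ path>> "/wiki" head? ] } >>filters
+!         t >>quiet?
+!         run-spider
+!         spidered>> assoc-size .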