! Copyright (C) 2009 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs calendar.parser combinators
combinators.short-circuit globs http.client kernel math.parser
namespaces present regexp regexp.combinators sequences splitting
splitting.monotonic unicode urls ;
IN: robots

! visit-time is GMT, request-rate is pages/second
! crawl-delay is seconds

SYMBOL: robot-identities
robot-identities [ { "FactorSpider" } ] initialize
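
! robot-identities holds the user-agent names this crawler answers to;
! robots.txt User-agent patterns are matched against them as globs.
! To crawl under a different identity, e.g. a hypothetical "MyBot":
!     { "MyBot" } robot-identities set-global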

TUPLE: robots site sitemap rules rules-quot ;

: <robots> ( site sitemap rules -- robots )
    robots new
        swap >>rules
        swap >>sitemap
        swap >>site ;

TUPLE: rules user-agents allows disallows
visit-time request-rate crawl-delay unknowns ;

<PRIVATE

: >robots.txt-url ( url -- url' )
    >url URL" robots.txt" derive-url ;
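
! Derives the conventional /robots.txt location from a site URL.
! Example (illustrative):
!     "https://example.com" >robots.txt-url
!     => URL" https://example.com/robots.txt"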

: get-robots.txt ( url -- robots.txt )
    >robots.txt-url http-get nip ;
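
! Fetches the robots.txt body for a site; http-get returns the response
! and the data, and nip keeps only the data. Example (network access assumed):
!     "https://factorcode.org" get-robots.txt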

: normalize-robots.txt ( string -- sitemaps seq )
    split-lines
    [ [ unicode:blank? ] trim ] map
    [ "#" head? ] reject harvest
    [ ":" split1 [ [ unicode:blank? ] trim ] bi@ [ >lower ] dip ] { } map>assoc
    [ first "sitemap" = ] partition [ values ] dip
    [
        {
            [ [ first "user-agent" = ] both? ]
            [ nip first "user-agent" = not ]
        } 2||
    ] monotonic-split ;
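
! Splits raw robots.txt text into lines, drops blanks and comments, lowercases
! the keys of "key: value" pairs, pulls out the Sitemap entries, and groups the
! remaining pairs so that each run of User-agent lines starts a new rule block.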

: <rules> ( -- rules )
    rules new
        V{ } clone >>user-agents
        V{ } clone >>allows
        V{ } clone >>disallows
        H{ } clone >>unknowns ;

: add-user-agent ( rules agent -- rules ) over user-agents>> push ;
: add-allow ( rules allow -- rules ) >url over allows>> push ;
: add-disallow ( rules disallow -- rules ) >url over disallows>> push ;
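
! Allow/Disallow values are stored as url objects; they are resolved against
! the site URL later, in robot-rules-quot.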

: parse-robots.txt-line ( rules seq -- rules )
    first2 swap {
        { "user-agent" [ add-user-agent ] }
        { "allow" [ add-allow ] }
        { "disallow" [ add-disallow ] }
        { "crawl-delay" [ string>number >>crawl-delay ] }
        { "request-rate" [ string>number >>request-rate ] }
        {
            "visit-time" [ "-" split1 [ hhmm>duration ] bi@ 2array
            >>visit-time ]
        }
        [ pick unknowns>> push-at ]
    } case ;
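
! Dispatches on the lowercased key of one "key: value" pair and fills the
! corresponding rules slot; unknown keys are collected in the unknowns assoc.
! Example (illustrative):
!     <rules> { "crawl-delay" "10" } parse-robots.txt-line
! leaves a rules tuple with crawl-delay set to 10.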

: derive-urls ( url seq -- seq' )
    [ derive-url present ] with { } map-as ;
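
! Resolves each stored path against the site URL and renders it as a string,
! ready to be compiled into a glob.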

: robot-rules-quot ( robots -- quot )
    [
        [ site>> ] [ rules>> allows>> ] bi
        derive-urls [ <glob> ] map
        <or>
    ] [
        [ site>> ] [ rules>> disallows>> ] bi
        derive-urls [ <glob> ] map <and> <not>
    ] bi 2array <or> '[ _ matches? ] ;
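
! Builds a predicate quotation over URL strings: it matches when the URL hits
! one of the Allow globs, or when it does not match all of the Disallow globs.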

: relevant-rules ( robots -- rules )
    [
        user-agents>> [
            robot-identities get [ swap glob-matches? ] with any?
        ] any?
    ] filter ;
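
! Keeps only the rule blocks whose User-agent patterns (treated as globs, so
! "*" matches) apply to one of the names in robot-identities.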

PRIVATE>

: parse-robots.txt ( string -- sitemaps rules-seq )
    normalize-robots.txt [
        [ <rules> dup ] dip [ parse-robots.txt-line drop ] with each
    ] map ;
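
! Produces the Sitemap URLs and one rules tuple per User-agent block.
! Example (illustrative):
!     "User-agent: *\nDisallow: /private/" parse-robots.txt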

: url>robots ( url -- robots )
    >url dup get-robots.txt parse-robots.txt <robots> ;
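
! Convenience word tying the pieces together. Example (illustrative, network
! access assumed):
!     "https://factorcode.org" url>robots
! => a robots tuple holding the site URL, Sitemap URLs, and parsed rule groups.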