! Copyright (C) 2009 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs calendar.format combinators
combinators.short-circuit fry globs http.client kernel make
math.parser multiline namespaces present regexp
regexp.combinators sequences sets splitting splitting.monotonic
unicode.case unicode.categories urls ;
IN: robots

! visit-time is GMT, request-rate is pages/second,
! crawl-delay is seconds

! The User-agent names this crawler answers to when matching
! robots.txt rules.
SYMBOL: robot-identities
robot-identities [ { "FactorSpider" } ] initialize
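
! Usage sketch: a crawler that identifies itself differently can
! override the default, e.g. (hypothetical name):
!
!     { "MyBot" } robot-identities set-global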

! A site's parsed robots.txt: origin URL, Sitemap URLs, parsed
! rule groups, and a slot for the compiled matching quotation.
TUPLE: robots site sitemap rules rules-quot ;

: <robots> ( site sitemap rules -- robots )
    robots new
        swap >>rules
        swap >>sitemap
        swap >>site ;

! One rules tuple per User-agent block in the file.
TUPLE: rules user-agents allows disallows
visit-time request-rate crawl-delay unknowns ;

: >robots.txt-url ( url -- url' )
    >url URL" robots.txt" derive-url ;

: get-robots.txt ( url -- robots.txt )
    >robots.txt-url http-get nip ;
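
! Usage sketch (hypothetical URL; performs a live HTTP request):
!
!     "http://example.com/" get-robots.txt print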

! Splits raw robots.txt text into its Sitemap URLs and a
! sequence of lowercased key/value pair groups, one group per
! User-agent block.
: normalize-robots.txt ( string -- sitemaps seq )
    string-lines
    [ [ blank? ] trim ] map
    [ "#" head? ] reject harvest
    [ ":" split1 [ [ blank? ] trim ] bi@ [ >lower ] dip ] { } map>assoc
    [ first "sitemap" = ] partition [ values ] dip
    [
        {
            [ [ first "user-agent" = ] both? ]
            [ nip first "user-agent" = not ]
        } 2||
    ] monotonic-split ;
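
! For example, the text
!
!     User-agent: *
!     Disallow: /private
!     Sitemap: http://example.com/sitemap.xml
!
! normalizes to the sitemaps { "http://example.com/sitemap.xml" }
! and one group { { "user-agent" "*" } { "disallow" "/private" } } .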

: <rules> ( -- rules )
    rules new
        V{ } clone >>user-agents
        V{ } clone >>allows
        V{ } clone >>disallows
        H{ } clone >>unknowns ;

! Accumulators used by the line parser; each leaves the rules
! tuple on the stack.
: add-user-agent ( rules agent -- rules ) over user-agents>> push ;

: add-allow ( rules allow -- rules ) >url over allows>> push ;

: add-disallow ( rules disallow -- rules ) >url over disallows>> push ;

! Dispatches on a { key value } pair's lowercased key and folds
! the value into the rules tuple; unrecognized keys are kept.
: parse-robots.txt-line ( rules seq -- rules )
    first2 swap {
        { "user-agent" [ add-user-agent ] }
        { "allow" [ add-allow ] }
        { "disallow" [ add-disallow ] }
        { "crawl-delay" [ string>number >>crawl-delay ] }
        { "request-rate" [ string>number >>request-rate ] }
        {
            "visit-time" [ "-" split1 [ hhmm>timestamp ] bi@ 2array
            >>visit-time ]
        }
        [ pick unknowns>> push-at ]
    } case ;
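
! For example, { "crawl-delay" "10" } sets crawl-delay to 10,
! while an unrecognized pair such as { "host" "example.com" }
! lands in the unknowns hashtable.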

! Resolves each (possibly relative) pattern against the site
! URL, yielding glob pattern strings.
: derive-urls ( url seq -- seq' )
    [ derive-url present ] with { } map-as ;
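
! e.g. URL" http://example.com/" { URL" /private/*" } derive-urls
! yields { "http://example.com/private/*" } .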

! Compiles the allow/disallow globs into a single regexp test: a
! URL passes if it matches an allow glob or falls outside the
! disallow globs.
: robot-rules-quot ( robots -- quot )
    [
        [ site>> ] [ rules>> allows>> ] bi
        derive-urls [ <glob> ] map
        <or>
    ] [
        [ site>> ] [ rules>> disallows>> ] bi
        derive-urls [ <glob> ] map <and> <not>
    ] bi 2array <or> '[ _ matches? ] ;
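
! Usage sketch: the result takes a URL string to a boolean, e.g.
! (hypothetical URL)
!
!     URL" http://example.com/" url>robots robot-rules-quot
!     [ "http://example.com/some/page" ] dip call( string -- ? )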

! Keeps only the rule groups whose User-agent globs match one of
! our robot-identities.
: relevant-rules ( robots -- rules )
    rules>> [
        user-agents>> [
            robot-identities get [ swap glob-matches? ] with any?
        ] any?
    ] filter ;

! Parses each normalized group into a rules tuple.
: parse-robots.txt ( string -- sitemaps rules-seq )
    normalize-robots.txt [
        [ <rules> dup ] dip [ parse-robots.txt-line drop ] with each
    ] map ;

: url>robots ( url -- robots )
    >url dup get-robots.txt parse-robots.txt <robots> ;
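
! End-to-end usage sketch (hypothetical site; performs an HTTP
! request):
!
!     URL" http://example.com/" url>robots
!     [ relevant-rules ] [ sitemap>> ] bi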