! Copyright (C) 2009 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors http.client kernel unicode.categories
sequences urls splitting combinators splitting.monotonic
combinators.short-circuit assocs unicode.case arrays
math.parser calendar.format make ;
IN: robots

! visit-time is GMT, request-rate is pages/second,
! crawl-delay is seconds
TUPLE: rules user-agents allows disallows
visit-time request-rate crawl-delay unknowns ;
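! An illustrative robots.txt excerpt (hypothetical site) and the slot
! each directive ends up in:
!
!   User-agent: *           -> user-agents
!   Disallow: /private/     -> disallows
!   Allow: /public/         -> allows
!   Crawl-delay: 10         -> crawl-delay (parsed as a number)
!   Visit-time: 0600-0845   -> visit-time (pair of GMT timestamps)
!   Sitemap: http://example.com/sitemap.xml  -> returned separately
!
! Any directive not handled below lands in the unknowns assoc.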
<PRIVATE

: >robots.txt-url ( url -- url' )
    >url URL" robots.txt" derive-url ;
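! For example, deriving from a site root (hypothetical URL) should give:
!   "http://example.com/" >robots.txt-url
!   => URL" http://example.com/robots.txt"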
: get-robots.txt ( url -- headers robots.txt )
    >robots.txt-url http-get ;

: normalize-robots.txt ( string -- sitemaps seq )
    string-lines
    [ [ blank? ] trim ] map
    [ "#" head? not ] filter harvest
    [ ":" split1 [ [ blank? ] trim ] bi@ [ >lower ] dip ] { } map>assoc
    [ first "sitemap" = ] partition [ values ] dip
    [
        {
            [ [ first "user-agent" = ] bi@ and ]
            [ nip first "user-agent" = not ]
        } 2||
    ] monotonic-split ;
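! normalize-robots.txt lowercases the keys, drops comments and blank
! lines, pulls the Sitemap: values out, and splits the remaining
! key/value pairs into groups, starting a new group at each run of
! user-agent lines. A sketch of the result for the excerpt above
! (hypothetical input):
!   { "http://example.com/sitemap.xml" }
!   { { { "user-agent" "*" } { "disallow" "/private/" } ... } }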

: <rules> ( -- rules )
    rules new
        V{ } clone >>user-agents
        V{ } clone >>allows
        V{ } clone >>disallows
        H{ } clone >>unknowns ;
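! Each slot gets a fresh clone so separate rules tuples never share
! their vectors or the unknowns hashtable.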
: add-user-agent ( rules agent -- rules ) over user-agents>> push ;
: add-allow ( rules allow -- rules ) over allows>> push ;
: add-disallow ( rules disallow -- rules ) over disallows>> push ;
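! These keep the rules tuple on the stack ( rules x -- rules ) so the
! case branches below can chain without extra stack shuffling.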

: parse-robots.txt-line ( rules seq -- rules )
    first2 swap {
        { "user-agent" [ add-user-agent ] }
        { "allow" [ add-allow ] }
        { "disallow" [ add-disallow ] }
        { "crawl-delay" [ string>number >>crawl-delay ] }
        { "request-rate" [ string>number >>request-rate ] }
        { "visit-time" [ "-" split1 [ hhmm>timestamp ] bi@ 2array
            >>visit-time ] }
        [ pick unknowns>> push-at ]
    } case ;
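! For example, a pair like { "crawl-delay" "10" } should set the
! crawl-delay slot to the number 10, while an unrecognized key is
! pushed into the unknowns assoc under that key.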
PRIVATE>

: parse-robots.txt ( string -- sitemaps rules-seq )
    normalize-robots.txt [
        [ <rules> dup ] dip [ parse-robots.txt-line drop ] with each
    ] map ;
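! Each group of directives produces one rules tuple; for instance a
! file with separate "User-agent: a" and "User-agent: b" sections
! should yield a two-element rules-seq.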
: robots ( url -- sitemaps rules-seq )
    get-robots.txt nip parse-robots.txt ;
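! Example (hypothetical URL, performs a live HTTP request):
!   "http://example.com" robots
! leaves a sequence of sitemap URLs and a sequence of rules tuples on
! the stack; the headers output of get-robots.txt is discarded by nip.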