1 ! Copyright (C) 2011-2012 John Benediktsson
2 ! See http://factorcode.org/license.txt for BSD license
4 USING: accessors assocs calendar classes.tuple colors.constants
5 colors.hex combinators formatting http.client io io.styles json
6 json.reader kernel make math math.statistics sequences urls
! A single reddit comment object. Slot names appear to mirror the
! JSON field names of the reddit API so the fields can be filled
! directly from the decoded assoc — verify against parse-data.
13 TUPLE: comment approved_by author author_flair_css_class
14 author_flair_text banned_by body body_html created created_utc
15 downs id levenshtein likes link_id link_title name num_reports
16 parent_id replies edited subreddit subreddit_id ups ;
! A reddit account object: karma totals, mail flags, gold/mod
! status. Slot names appear to mirror the API's JSON field names.
18 TUPLE: user comment_karma created created_utc has_mail
19 has_mod_mail id is_gold is_mod link_karma name ;
! A submitted link or self-post as it appears in listing children
! (score, permalink, selftext, subreddit, ...). Slot names appear
! to mirror the API's JSON field names.
21 TUPLE: story author author_flair_css_class author_flair_text
22 approved_by banned_by clicked created created_utc domain downs
23 hidden id is_self levenshtein likes link_flair_css_class
24 link_flair_text media media_embed name edited num_comments
25 num_reports over_18 permalink saved score selftext selftext_html
26 subreddit subreddit_id thumbnail title ups url ;
! A subreddit's metadata object (mapped from JSON kind "t5" in
! parse-data). Slot names appear to mirror the API's field names.
28 TUPLE: subreddit accounts_active created created_utc description
29 display_name id header_img header_size header_title name over18
30 public_description subscribers title url ;
! Convert one JSON envelope { "kind": ..., "data": ... } into one
! of the tuple types above by dispatching on the "kind" tag.
32 : parse-data ( assoc -- obj )
33 [ "data" of ] [ "kind" of ] bi {
! "t5" maps to the subreddit tuple; the remaining kind cases and
! the closing of this word are not visible in this view.
37 { "t5" [ subreddit ] }
! One page of a reddit listing: the request url, the parsed
! children in data, and the before/after paging cursors (f when
! there is no adjacent page — see json-page's when-json-null).
41 TUPLE: page url data before after ;
! Fetch a reddit listing URL and build a page from its "data"
! envelope: parsed children plus the before/after paging cursors.
! The closing of this word is not visible in this view.
43 : json-page ( url -- page )
44 >url dup http-get nip json> "data" of {
45 [ "children" of [ parse-data ] map ]
! JSON null means "no page in that direction"; normalize to f.
46 [ "before" of [ f ] when-json-null ]
47 [ "after" of [ f ] when-json-null ]
! Fetch the listing page of a user's recent activity (stories and
! comments) from the reddit JSON API.
: get-user ( username -- page )
    "http://api.reddit.com/user/" prepend json-page ;
! Fetch a single account object from the /about endpoint. Unlike
! the listing words, this yields one user tuple, not a page.
: get-user-info ( username -- user )
    "http://api.reddit.com/user/" "/about" surround
    http-get nip json> parse-data ;
! Look up every reddit submission of the given external URL.
! NOTE(review): the URL is interpolated verbatim, not URL-encoded.
: get-url-info ( url -- page )
    "http://api.reddit.com/api/info?url=" prepend json-page ;
! Run a site-wide reddit search for the given query string.
! NOTE(review): the query is interpolated verbatim, not URL-encoded.
: search-reddit ( query -- page )
    "http://api.reddit.com/search?q=" prepend json-page ;
! Search subreddit names/descriptions for the given query string.
: search-subreddits ( query -- page )
    "http://api.reddit.com/reddits/search?q=" prepend json-page ;
! Fetch the listing of reddit submissions from a given domain.
: get-domains ( query -- page )
    "http://api.reddit.com/domain/" prepend json-page ;
! Fetch the front-page listing of a subreddit by name.
: get-subreddit ( subreddit -- page )
    "http://api.reddit.com/r/" prepend json-page ;
! Fetch the page after this one: re-request the page's own URL
! with its "after" cursor set as the "after" query parameter.
: next-page ( page -- page' )
    [ url>> ] [ after>> ] bi "after" set-query-param json-page ;
! Walk the paging cursor from the given page onward, accumulating
! each page's data. The surrounding make wrapper of this loop is
! not visible in this view of the file.
75 : all-pages ( page -- data )
! Emit the current page's data, keep looping while an "after"
! cursor remains, fetching the next page each iteration.
77 [ [ data>> , ] [ dup after>> ] bi ]
78 [ next-page ] while drop
! URLs of the stories on the first page of a user's activity
! listing (no paging is performed). The declared output is named
! urls because the word returns the stories' url slots, not the
! story tuples themselves.
: user-links ( username -- urls )
    get-user data>> [ story? ] filter [ url>> ] map ;
! Bodies of the comments on the first page of a user's activity
! listing (no paging is performed).
86 : user-comments ( username -- comments )
87 get-user data>> [ comment? ] filter [ body>> ] map ;
! A user's link karma, taken from their /about account object.
89 : user-karma ( username -- karma )
90 get-user-info link_karma>> ;
! Total score of a URL across its reddit submissions, summed over
! the first page of matching stories.
: url-score ( url -- score )
    get-url-info data>> 0 [ score>> + ] reduce ;
! URLs of the stories on a subreddit's front-page listing.
95 : subreddit-links ( subreddit -- links )
96 get-subreddit data>> [ url>> ] map ;
! Absolute URL of a story's comments page, built from its
! site-relative permalink.
: story>comments-url ( story -- url )
    permalink>> "http://reddit.com%s" sprintf >url ;
! Absolute URL of the profile page of a story's author.
: story>author-url ( story -- url )
    author>> "http://reddit.com/user/%s" sprintf >url ;
! Print a story title styled as a blue link. Parts of this
! definition are not visible in this view of the file.
106 : write-title ( title url -- )
! Blue foreground marks the title as the primary, clickable text.
109 COLOR: blue foreground ,,
! Print secondary link text in muted gray. Parts of this
! definition are not visible in this view of the file.
112 : write-link ( title url -- )
! Same gray (#888888) as write-text, for de-emphasized metadata.
115 HEXCOLOR: 888888 foreground ,,
! Output str in the muted gray (#888888) used for the metadata
! annotations printed by subreddit. .
: write-text ( str -- )
    [ HEXCOLOR: 888888 foreground ,, ] H{ } make format ;
! Pretty-print a subreddit's front page: numbered, styled entries
! with title, domain, score, comment link, age and author.
! Several lines of this definition are not visible in this view.
123 : subreddit. ( subreddit -- )
124 get-subreddit data>> [
! 1-based rank, right-aligned to two columns ("%2d. ").
125 1 + "%2d. " sprintf write-text {
126 [ [ title>> ] [ url>> ] bi write-title ]
127 [ domain>> " (%s)\n" sprintf write-text ]
128 [ score>> " %d points, " sprintf write-text ]
129 [ [ num_comments>> "%d comments" sprintf ] [ story>comments-url ] bi write-link ]
! Age in whole hours from the UTC creation time to now.
131 created_utc>> unix-time>timestamp now swap time-
132 duration>hours ", posted %d hours ago" sprintf write-text
134 [ " by " write-text [ author>> ] [ story>author-url ] bi write-link nl nl ]
! Map each year to the total score of that year's submissions from
! the given domain, walking every page of the domain listing.
! NOTE(review): groups on the created slot (epoch seconds scaled to
! milliseconds), not created_utc — confirm that is intended.
: domain-stats ( domain -- stats )
    get-domains all-pages
    [ created>> 1000 * millis>timestamp year>> ] collect-by
    [ 0 [ score>> + ] reduce ] assoc-map ;