1 USING: combinators.short-circuit assocs math kernel sequences
2 io.files hashtables quotations splitting grouping arrays
3 math.parser hash2 math.order byte-arrays words namespaces words
4 compiler.units parser io.encodings.ascii values interval-maps
5 ascii sets combinators locals math.ranges sorting ;
! Global value holding the compatibility-decomposition table; it is
! filled at load time below (process-compatibility to: compatibility-map).
14 VALUE: compatibility-map
! Canonical decomposition for a code point: lookup in canonical-map
! ( at yields f when the char has no canonical decomposition ).
20 : canonical-entry ( char -- seq ) canonical-map at ;
! Primary composition: look up the pair ( a b ) in the combine-map
! hash2, yielding the composed character or f.
21 : combine-chars ( a b -- char/f ) combine-map hash2 ;
! Compatibility decomposition for a code point ( f if none recorded ).
22 : compatibility-entry ( char -- seq ) compatibility-map at ;
! Canonical combining class from class-map; f for starters, since
! process-combining below drops zero-class entries from the map.
23 : combining-class ( char -- n ) class-map at ;
! A char is a non-starter iff it appears in class-map ( only non-zero
! combining classes are stored there ).
24 : non-starter? ( char -- ? ) class-map key? ;
! Unicode name to code point; names are stored lowercased with spaces
! turned to dashes ( see process-names ).
25 : name>char ( string -- char ) name-map at ;
! Reverse lookup in name-map: code point back to its stored name.
26 : char>name ( char -- string ) name-map value-at ;
! Membership test of char in the interval map stored under property in
! the properties assoc ( built by load-properties below ).
27 : property? ( char property -- ? ) properties at interval-key? ;
29 ! Convenience functions
! between? that tolerates f in the number slot: pick copies n/f, and
! when it is f the else branch drops all three inputs and yields f
! instead of erroring inside between?.
30 : ?between? ( n/f from to -- ? )
31 pick [ between? ] [ 3drop f ] if ;
33 ! Loading data from UnicodeData.txt
! Split a data-file line on ";" and strip blanks from each field.
35 : split-; ( line -- array )
36 ";" split [ [ blank? ] trim ] map ;
! Read an ASCII-encoded file and split every line into ;-fields.
38 : data ( filename -- data )
39 ascii file-lines [ split-; ] map ;
! Parse UnicodeData.txt into a sequence of per-character field arrays.
41 : load-data ( -- data )
42 "resource:basis/unicode/data/UnicodeData.txt" data ;
! Keep the text before the first # or @ on each line ( split treats the
! string as a set of separator chars ), then harvest drops lines that
! became empty.
44 : filter-comments ( lines -- lines )
45 [ "#@" split first ] map harvest ;
! Build { hex-code-point field[index] } pairs from the raw rows: nth
! picks the requested field, keep/first recover field 0 ( the code
! point ), then >r hex> r> parses only the key, leaving values as text.
! NOTE(review): original line 48 is not shown in this listing — verify
! against upstream before editing this word.
47 : (process-data) ( index data -- newdata )
49 [ [ nth ] keep first swap ] with { } map>assoc
50 [ >r hex> r> ] assoc-map ;
! Like (process-data) but also hex-parses the values, discards entries
! whose value is f ( [ nip ] assoc-filter keeps truthy values ), and
! freezes the result into a hashtable.
52 : process-data ( index data -- hash )
53 (process-data) [ hex> ] assoc-map [ nip ] assoc-filter >hashtable ;
! Recursively expand a decomposition: per the visible ?if, an element
! with its own entry is recursively chained, otherwise it is kept as a
! 1array.
! NOTE(review): original lines 56-57 and 59-60 are not shown in this
! listing — the visible ?if is only part of the body; verify upstream.
55 : (chain-decomposed) ( hash value -- newvalue )
58 [ (chain-decomposed) ] [ 1array nip ] ?if
! Apply (chain-decomposed) to every value so all decompositions are
! fully expanded; the curried hash is swapped under each value before
! the recursive call.
61 : chain-decomposed ( hash -- newhash )
62 dup [ swap (chain-decomposed) ] curry assoc-map ;
! NOTE(review): fragment — the colon line of this word ( presumably
! first*, which is called by process-canonical / process-compatibility
! below ) is not shown in this listing; verify upstream. The visible
! body short-circuits: t when the second element is empty, otherwise
! that element's first item.
65 second { [ empty? ] [ first ] } 1|| ;
! Split each decomposition value on spaces and hex-parse every piece.
! NOTE(review): original line 68 is not shown — presumably the
! (process-data) call selecting the decomposition field; verify
! upstream before editing.
67 : (process-decomposed) ( data -- alist )
69 [ " " split [ hex> ] map ] assoc-map ;
! Canonical decompositions, built two ways via bi: the length-2
! decompositions go into a hash2 keyed on the two chars ( for
! composition ), and the full set goes into a chained hashtable ( for
! decomposition ).
! NOTE(review): original line 73 is not shown — probably the opening [
! of the bi's first quotation; verify upstream before editing.
71 : process-canonical ( data -- hash2 hash )
72 (process-decomposed) [ first* ] filter
74 [ second length 2 = ] filter
75 ! using 1009 as the size, the maximum load is 4
76 [ first2 first2 rot 3array ] map 1009 alist>hash2
77 ] [ >hashtable chain-decomposed ] bi ;
! Compatibility decompositions: when first* is f ( the first token of
! the decomposition failed hex parsing — apparently a <tag> marker ),
! drop that token with rest; then discard empty decompositions and
! chain-expand.
! NOTE(review): original line 80 is not shown — likely the
! (process-decomposed) call; verify upstream before editing.
79 : process-compatibility ( data -- hash )
81 [ dup first* [ first2 rest 2array ] unless ] map
82 [ second empty? not ] filter
83 >hashtable chain-decomposed ;
! Combining classes: parse the class field to a number and keep only
! the non-zero classes ( zero means starter and is left implicit ).
! NOTE(review): original lines 86 and 89-90 are not shown — the field
! selection before and the final conversion producing the declared hash
! are omitted from this listing; verify upstream before editing.
85 : process-combining ( data -- hash )
87 [ string>number ] assoc-map
88 [ nip zero? not ] assoc-filter
! General-category codes; their position in this array is the byte
! stored in the category table ( see process-category ). Cn
! ( unassigned ) is the default for characters not present.
! NOTE(review): original lines 93, 95-96 and 98-99 are not shown — the
! listing omits several rows of the literal array, including its
! opening brace; verify upstream before editing.
91 : categories ( -- names )
92 ! For non-existent characters, use Cn
94 "Lu" "Ll" "Lt" "Lm" "Lo"
97 "Pc" "Pd" "Ps" "Pe" "Pi" "Pf" "Po"
100 "Cc" "Cf" "Cs" "Co" } ;
! Size of the category table ( indexed directly by code point ); covers
! the first three Unicode planes, per the comment below.
102 : num-chars HEX: 2FA1E ;
103 ! the maximum unicode char in the first 3 planes
! set-nth that silently ignores out-of-bounds indices instead of
! throwing ( used for code points beyond the table's planes ).
105 : ?set-nth ( val index seq -- )
106 2dup bounds-check? [ set-nth ] [ 3drop ] if ;
! Fill <...first>/<...last> ranges of the category table: collect the
! range-marker names from name-map ( sorted by code point ), and for
! each pair it appears to build the [a,b] range plus the byte already
! stored in the table, writing that byte at every index in the range.
! NOTE(review): original lines 111 and 114-115 are not shown — the
! pairing of the names and the close of the :: body are omitted from
! this listing; verify upstream before editing.
108 :: fill-ranges ( table -- table )
109 name-map >alist sort-values keys
110 [ { [ "first>" tail? ] [ "last>" tail? ] } 1|| ] filter
112 [ name>char ] bi@ [ [a,b] ] [ table ?nth ] bi
113 [ swap table ?set-nth ] curry each
! Build the per-code-point category table: a num-chars byte array
! indexed by code point, whose entries are indices into categories
! ( field 2 of UnicodeData ), with first/last ranges filled afterwards.
116 :: process-category ( data -- category-listing )
117 [let | table [ num-chars <byte-array> ] |
118 2 data (process-data) [| char cat |
119 cat categories index char table ?set-nth
120 ] assoc-each table fill-ranges ] ;
! ASCII-only lowercasing: add HEX: 20 to chars in A-Z, leave all other
! chars untouched ( deliberately not full Unicode case folding ).
122 : ascii-lower ( string -- lower )
123 [ dup CHAR: A CHAR: Z between? [ HEX: 20 + ] when ] map ;
! Name table from field 1 of the data: each name is lowercased, spaces
! become dashes, and the swap flips the pairs so the result maps
! name -> code point.
125 : process-names ( data -- names-hash )
126 1 swap (process-data) [
127 ascii-lower { { CHAR: \s CHAR: - } } substitute swap
128 ] H{ } assoc-map-as ;
! Parse a string of space-separated hex numbers; sift drops the f
! results from pieces that failed to parse ( e.g. empty fields ).
130 : multihex ( hexstring -- string )
131 " " split [ hex> ] map sift ;
! Case-mapping record for SpecialCasing entries; each slot holds a
! full ( possibly multi-character ) mapping, and C: defines the
! boa-style constructor <code-point>.
133 TUPLE: code-point lower title upper ;
135 C: <code-point> code-point
! Parse one SpecialCasing row: the first four fields ( code, lower,
! title, upper ) each go through multihex, the last three build a
! code-point tuple, and set stores it under the row's code point in the
! current namespace ( an assoc when run under make-assoc below ).
137 : set-code-point ( seq -- )
138 4 head [ multihex ] map first4
139 <code-point> swap first set ;
! Lines of PropList.txt.
! NOTE(review): original line 144 is not shown — presumably the
! ascii file-lines call closing this word; verify upstream.
142 : properties-lines ( -- lines )
143 "resource:basis/unicode/data/PropList.txt"
! Parse PropList rows into { { a b } property } pairs. The code-point
! field is XXXX or XXXX..YYYY: split1 on ".." yields f for the single
! form, so [ dup ] unless* makes the point its own interval end before
! both ends are hex-parsed.
! NOTE(review): original lines 148 and 150-151 are not shown — the
! per-line field split and the close of the map are omitted from this
! listing; verify upstream before editing.
146 : parse-properties ( -- {{[a,b],prop}} )
147 properties-lines filter-comments [
149 [ ".." split1 [ dup ] unless* [ hex> ] bi@ 2array ] dip
! Invert { interval property } pairs into property -> interval-set:
! seed every distinct property with f, push each interval onto its
! property's collection with push-at, then turn each collection into
! an <interval-set>.
152 : properties>intervals ( properties -- assoc[str,interval] )
153 dup values prune [ f ] H{ } map>assoc
154 [ [ push-at ] curry assoc-each ] keep
155 [ <interval-set> ] assoc-map ;
! PropList.txt as a property -> interval-set assoc.
157 : load-properties ( -- assoc )
158 parse-properties properties>intervals ;
160 ! Special casing data
! SpecialCasing.txt as char -> code-point tuples. Only rows with
! exactly 5 fields are kept — presumably this excludes the
! conditional/language-specific rows, which carry extra fields; TODO
! confirm against the SpecialCasing.txt format. make-assoc collects
! the set calls done by set-code-point.
161 : load-special-casing ( -- special-casing )
162 "resource:basis/unicode/data/SpecialCasing.txt" data
163 [ length 5 = ] filter
164 [ [ set-code-point ] each ] H{ } make-assoc ;
! Load-time initialization: each quotation consumes the parsed
! UnicodeData rows and stores its table into the matching VALUE: via
! to: ( fields 13/12/14 are simple lower/upper/title mappings; title
! falls back to upper via assoc-union ).
! NOTE(review): original lines 165-166 and 175-176 are not shown — the
! load-data call opening this form and the cleave-style combinator that
! applies the quotations are omitted; verify upstream before editing.
167 [ process-names to: name-map ]
168 [ 13 swap process-data to: simple-lower ]
169 [ 12 swap process-data to: simple-upper ]
170 [ 14 swap process-data simple-upper assoc-union to: simple-title ]
171 [ process-combining to: class-map ]
172 [ process-canonical to: canonical-map to: combine-map ]
173 [ process-compatibility to: compatibility-map ]
174 [ process-category to: category-map ]
177 load-special-casing to: special-casing
179 load-properties to: properties