1 ! Copyright (C) 2008, 2009 Daniel Ehrenberg.
2 ! See http://factorcode.org/license.txt for BSD license.
3 USING: combinators.short-circuit assocs math kernel sequences
4 io.files hashtables quotations splitting grouping arrays io
5 math.parser hash2 math.order byte-arrays namespaces
6 compiler.units parser io.encodings.ascii values interval-maps
7 ascii sets combinators locals math.ranges sorting make
8 strings.parser io.encodings.utf8 memoize simple-flat-file ;
9 FROM: namespaces => set ;
! Global value mapping a code point to its full compatibility
! decomposition; populated below by process-compatibility.
20 VALUE: compatibility-map
! Canonical decomposition of char (canonical-map lookup), or f.
29 : canonical-entry ( char -- seq ) canonical-map at ; inline
! Primary composite for the pair (a, b) via the combine-map
! hash2, or f when the two characters do not compose.
30 : combine-chars ( a b -- char/f ) combine-map hash2 ; inline
! Compatibility decomposition of char, or f when it has none.
31 : compatibility-entry ( char -- seq ) compatibility-map at ; inline
! Canonical combining class of char; f when char is not listed.
32 : combining-class ( char -- n ) class-map at ; inline
! t when char has a non-zero combining class (neither 0 nor f).
33 : non-starter? ( char -- ? ) combining-class { 0 f } member? not ; inline
! Code point for a Unicode character name, or f if unknown.
34 : name>char ( name -- char ) name-map at ; inline
! Reverse lookup: character name for a code point (scans name-map).
35 : char>name ( char -- name ) name-map value-at ; inline
! Tests whether char lies in the interval set stored under
! property in the properties assoc (built from PropList.txt).
36 : property? ( char property -- ? ) properties at interval-key? ; inline
! Simple lowercase mapping; ?at leaves ch itself when there is
! no mapping, and drop discards the found? flag.
37 : ch>lower ( ch -- lower ) simple-lower ?at drop ; inline
! Simple uppercase mapping; unmapped characters pass through.
38 : ch>upper ( ch -- upper ) simple-upper ?at drop ; inline
! Simple titlecase mapping; unmapped characters pass through.
39 : ch>title ( ch -- title ) simple-title ?at drop ; inline
! Full casing tuple for ch from SpecialCasing.txt data, or f.
40 : special-case ( ch -- casing-tuple ) special-casing at ; inline
42 ! For non-existent characters, use Cn
45 "Lu" "Ll" "Lt" "Lm" "Lo"
48 "Pc" "Pd" "Ps" "Pe" "Pi" "Pf" "Po"
! Hashtable from general-category name (e.g. "Lu") to its index
! in the categories sequence; memoized since it never changes.
55 MEMO: categories-map ( -- hashtable )
56 categories <enum> [ swap ] H{ } assoc-map-as ;
! Size of the per-code-point category table allocated by
! process-category (one byte per character).
! NOTE(review): presumably one past the last code point covered
! by the data files -- confirm against UnicodeData.txt.
58 CONSTANT: num-chars HEX: 2FA1E
62 : category# ( char -- n )
63 ! There are a few characters that should be Cn
64 ! that this gives Cf or Mn
65 ! Cf = 26; Mn = 5; Cn = 29
66 ! Use a compressed array instead?
67 dup category-map ?nth [ ] [
68 dup HEX: E0001 HEX: E007F between?
70 HEX: E0100 HEX: E01EF between? 5 29 ?
! General-category name (e.g. "Lu") for a code point, by
! indexing the categories sequence with the char's category#.
74 : category ( char -- category )
75 category# categories nth ;
79 ! Loading data from UnicodeData.txt
! Parse UnicodeData.txt into a sequence of field rows
! (data comes from the simple-flat-file vocabulary).
81 : load-data ( -- data )
82 "vocab:unicode/data/UnicodeData.txt" data ;
! Extract one column from every row: key is the code point
! (column 0, hex-parsed by the final assoc-map), value is the
! row's index-th field, still a string.
84 : (process-data) ( index data -- newdata )
85 [ [ nth ] keep first swap ] with { } map>assoc
86 [ [ hex> ] dip ] assoc-map ;
! Like (process-data), but also hex-parses the values and keeps
! only truthy ones (hex> of an empty field yields f), returning
! a hashtable of char -> number.
88 : process-data ( index data -- hash )
89 (process-data) [ hex> ] assoc-map [ nip ] assoc-filter >hashtable ;
91 : (chain-decomposed) ( hash value -- newvalue )
94 [ (chain-decomposed) ] [ 1array nip ] ?if
! Fully expand every decomposition in the hash by recursively
! chaining each value through (chain-decomposed).
97 : chain-decomposed ( hash -- newhash )
98 dup [ swap (chain-decomposed) ] curry assoc-map ;
! t when the pair's decomposition (its second element) is
! canonical: either empty, or beginning with a real code point.
! Compatibility tags such as "<super>" hex-parse to f in
! (process-decomposed), so first yields f for those rows.
100 : first* ( seq -- ? )
101 second { [ empty? ] [ first ] } 1|| ;
! Column 5 of UnicodeData.txt holds the decomposition mapping:
! split on spaces and hex-parse each token (tags become f).
103 : (process-decomposed) ( data -- alist )
104 5 swap (process-data)
105 [ " " split [ hex> ] map ] assoc-map ;
! Path of the UCD file listing code points that are excluded
! from canonical composition.
107 : exclusions-file ( -- filename )
108 "vocab:unicode/data/CompositionExclusions.txt" ;
110 : exclusions ( -- set )
111 exclusions-file utf8 file-lines
112 [ "#" split1 drop [ blank? ] trim-tail hex> ] map
! Drop composition-exclusion characters from the alist by
! diffing against an identity map of the excluded code points.
115 : remove-exclusions ( alist -- alist )
116 exclusions [ dup ] H{ } map>assoc assoc-diff ;
118 : process-canonical ( data -- hash2 hash )
119 (process-decomposed) [ first* ] filter
121 [ second length 2 = ] filter remove-exclusions
122 ! using 1009 as the size, the maximum load is 4
123 [ first2 first2 rot 3array ] map 1009 alist>hash2
124 ] [ >hashtable chain-decomposed ] bi ;
126 : process-compatibility ( data -- hash )
128 [ dup first* [ first2 rest 2array ] unless ] map
129 [ second empty? not ] filter
130 >hashtable chain-decomposed ;
132 : process-combining ( data -- hash )
133 3 swap (process-data)
134 [ string>number ] assoc-map
135 [ nip zero? not ] assoc-filter
138 ! the maximum unicode char in the first 3 planes
! Bounds-checked set-nth: silently ignores out-of-range indices
! instead of throwing.
140 : ?set-nth ( val index seq -- )
141 2dup bounds-check? [ set-nth ] [ 3drop ] if ;
143 :: fill-ranges ( table -- table )
144 name-map >alist sort-values keys
145 [ { [ "first>" tail? ] [ "last>" tail? ] } 1|| ] filter
147 [ name>char ] bi@ [ [a,b] ] [ table ?nth ] bi
148 [ swap table ?set-nth ] curry each
! Build the per-code-point category table: a num-chars byte
! array of category indices taken from column 2 of
! UnicodeData.txt, with First>/Last> ranges expanded afterwards
! by fill-ranges. ?set-nth drops chars beyond num-chars.
151 :: process-category ( data -- category-listing )
152 num-chars <byte-array> :> table
153 2 data (process-data) [| char cat |
154 cat categories-map at char table ?set-nth
155 ] assoc-each table fill-ranges ;
! Hash from character name to code point: column 1 of
! UnicodeData.txt, lowercased with spaces turned into hyphens,
! swapped so the normalized name becomes the key.
157 : process-names ( data -- names-hash )
158 1 swap (process-data) [
159 >lower { { CHAR: \s CHAR: - } } substitute swap
160 ] H{ } assoc-map-as ;
! Parse a space-separated string of hex numbers; sift discards
! tokens that failed to parse (f's).
162 : multihex ( hexstring -- string )
163 " " split [ hex> ] map sift ;
! One SpecialCasing.txt entry: full lower/title/upper mappings,
! each a sequence of code points.
167 TUPLE: code-point lower title upper ;
! Boa constructor: <code-point> ( lower title upper -- code-point )
169 C: <code-point> code-point
! Record one SpecialCasing row in the current assoc being built:
! the first four fields are code;lower;title;upper, each a hex
! string list; the key is the (single) source code point.
173 : set-code-point ( seq -- )
174 4 head [ multihex ] map first4
175 <code-point> swap first set ;
178 : parse-properties ( -- {{[a,b],prop}} )
179 "vocab:unicode/data/PropList.txt" data [
181 ".." split1 [ dup ] unless*
! Invert {{[a,b],prop}} into prop -> interval-set: seed a hash
! keyed by every property name, push each interval under its
! property with push-at, then build an interval-set per group.
186 : properties>intervals ( properties -- assoc[str,interval] )
187 dup values members [ f ] H{ } map>assoc
188 [ [ push-at ] curry assoc-each ] keep
189 [ <interval-set> ] assoc-map ;
! Parse PropList.txt and convert it to prop -> interval-set.
191 : load-properties ( -- assoc )
192 parse-properties properties>intervals ;
194 ! Special casing data
! Hash from code point to code-point tuple. Only rows with
! exactly 5 fields are kept, i.e. the unconditional mappings;
! conditional rows in SpecialCasing.txt have extra fields.
195 : load-special-casing ( -- special-casing )
196 "vocab:unicode/data/SpecialCasing.txt" data
197 [ length 5 = ] filter
198 [ [ set-code-point ] each ] H{ } make-assoc ;
201 [ process-names to: name-map ]
202 [ 13 swap process-data to: simple-lower ]
203 [ 12 swap process-data to: simple-upper ]
204 [ 14 swap process-data simple-upper assoc-union to: simple-title ]
205 [ process-combining to: class-map ]
206 [ process-canonical to: canonical-map to: combine-map ]
207 [ process-compatibility to: compatibility-map ]
208 [ process-category to: category-map ]
! Every character producible by composition needs a combining
! class: assign class 0 to any composite missing from class-map.
! combine-map is a hash2 (see process-canonical), hence the
! two-level values/map/concat to enumerate its entries.
211 : postprocess-class ( -- )
212 combine-map [ values ] map concat
213 [ combining-class not ] filter
214 [ 0 swap class-map set-at ] each ;
! Top-level initialization: populate the remaining VALUEs and
! install the parser hook used for CHAR: name lookups.
218 load-special-casing to: special-casing
220 load-properties to: properties
! Throw rather than return f for an unknown character name.
222 [ name>char [ "Invalid character" throw ] unless* ]
223 name>char-hook set-global