! Copyright (C) 2008 Daniel Ehrenberg.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs combinators
combinators.short-circuit combinators.smart fry kernel locals make
math math.order math.parser namespaces sequences
simple-flat-file splitting strings unicode.data ;
IN: unicode.collation
] map ;
! Convert a field of whitespace-separated hex code points
! (e.g. "0041 0301") into a string of the corresponding characters.
: parse-keys ( string -- chars )
    split-words [ hex> ] "" map-as ;
! Build the DUCET hashtable from a data file: each entry's key field
! is parsed into a string of characters, its value field into weights.
: parse-ducet ( file -- ducet )
    load-data-file
    [ [ parse-keys ] dip parse-weight ] H{ } assoc-map-as ;
! Parse the DUCET from allkeys.txt once, at vocabulary load time,
! and store it in the ducet global variable.
"vocab:unicode/UCA/allkeys.txt" parse-ducet ducet set-global
! https://www.unicode.org/reports/tr10/tr10-41.html#Well_Formed_DUCET
! WF5 - Well-formedness 5 condition:
! https://www.unicode.org/reports/tr10/tr10-41.html#WF5
! { "0CC6" "0CC2" "0CD5" } ! 0CD5 is not a non-starter, don't add 2-gram "0CC6" "0CC2" to ducet
! { "0DD9" "0DCF" "0DCA" } ! already in allkeys.txt file
! { "0FB2" "0F71" "0F80" } ! added below
! { "0FB3" "0F71" "0F80" } ! added below
! This breaks the unicode tests that ship in CollationTest_SHIFTED.txt
! but it's supposedly more correct.
: fixup-ducet-for-tibetan ( -- )
    ! Each entry maps a key (a sequence of code points, converted to a
    ! string with >string) to its value, stored into the ducet global.
    ! NOTE(review): the single entry below appears to be missing its
    ! value half (the collation elements) — verify against upstream.
    {
        {
            { 0x0FB2 0x0F71 } ! CE(0FB2) CE(0F71)
        }
    } ducet get-global '[ swap >string _ set-at ] assoc-each ;
! These values actually break the collation unit tests in CollationTest_SHIFTED.txt
! So we disable those tests in favor of supposedly better collation for Tibetan.
! https://www.unicode.org/reports/tr10/tr10-41.html#Well_Formed_DUCET

fixup-ducet-for-tibetan
! True if char lies in a Tangut block, which gets dedicated implicit
! collation weights (UTS #10).
: tangut-block? ( char -- ? )
    {
        [ 0x17000 0x18AFF between? ] ! Tangut and Tangut Components
        [ 0x18D00 0x18D8F between? ] ! Tangut Supplement
    } 1|| ; inline
! True if char lies in the Nushu block (U+1B170..U+1B2FF), which gets
! dedicated implicit collation weights (UTS #10).
: nushu-block? ( char -- ? )
    0x1B170 0x1B2FF between? ; inline

! True if char lies in the Khitan Small Script block
! (U+18B00..U+18CFF), which gets dedicated implicit weights (UTS #10).
: khitan-block? ( char -- ? )
    0x18B00 0x18CFF between? ; inline
! https://wiki.computercraft.cc/Module:Unicode_data
! Unicode TR10 - Computing Implicit Weights
! Base primary weight for computing implicit weights (UTS #10):
! 0xFB80 for CJK extension/compatibility ideographs, 0xFB40 for core
! CJK ideographs, 0xFBC0 for any other unassigned/implicit character.
: base ( char -- base )
    {
        { [ dup 0x03400 0x04DBF between? ] [ drop 0xFB80 ] } ! Extension A
        { [ dup 0x20000 0x2A6DF between? ] [ drop 0xFB80 ] } ! Extension B
        { [ dup 0x2A700 0x2B739 between? ] [ drop 0xFB80 ] } ! Extension C
        { [ dup 0x2B740 0x2B81D between? ] [ drop 0xFB80 ] } ! Extension D
        { [ dup 0x2B820 0x2CEA1 between? ] [ drop 0xFB80 ] } ! Extension E
        { [ dup 0x2CEB0 0x2EBE0 between? ] [ drop 0xFB80 ] } ! Extension F
        { [ dup 0x30000 0x3134A between? ] [ drop 0xFB80 ] } ! Extension G
        { [ dup 0x31350 0x323AF between? ] [ drop 0xFB80 ] } ! Extension H
        { [ dup 0x2F800 0x2FA1D between? ] [ drop 0xFB80 ] } ! CJK Compatibility
        { [ dup 0x04E00 0x09FFF between? ] [ drop 0xFB40 ] } ! CJK
        { [ dup 0x0F900 0x0FAD9 between? ] [ drop 0xFB40 ] } ! CJK
        [ drop 0xFBC0 ] ! Other
    } cond ;
! Second implicit collation element for a Nushu character: primary is
! 0x8000 OR'd with the offset from the block start (0x1B170); the
! secondary and tertiary weights are zero.
: nushu-BBBB ( char -- weight-levels )
    0x1B170 - 0x8000 bitor 0 0 <weight-levels> ; inline
! First implicit collation element for a Khitan Small Script
! character: fixed primary 0xFB02 with secondary 0x0020 and
! tertiary 0x0002.
: khitan-AAAA ( char -- weight-levels )
    drop 0xFB02 0x0020 0x0002 <weight-levels> ; inline

! Second implicit collation element for a Khitan Small Script
! character: primary is 0x8000 OR'd with the offset from the block
! start (0x18B00); secondary and tertiary weights are zero.
: khitan-BBBB ( char -- weight-levels )
    0x18B00 - 0x8000 bitor 0 0 <weight-levels> ; inline

! First implicit collation element for the general case: primary is
! the character's base weight plus its high bits (char right-shifted
! by 15); secondary is 0x0020 and tertiary is 0x0002.
: AAAA ( char -- weight-levels )
    [ base ] [ -15 shift ] bi + 0x0020 0x0002 <weight-levels> ; inline
{
{ [ dup tangut-block? ] [ [ tangut-AAAA ] [ tangut-BBBB ] bi 2array ] }
{ [ dup nushu-block? ] [ [ nushu-AAAA ] [ nushu-BBBB ] bi 2array ] }
+ { [ dup khitan-block? ] [ [ khitan-AAAA ] [ khitan-BBBB ] bi 2array ] }
[ [ AAAA ] [ BBBB ] bi 2array ]
} cond ;
[ [ primary>> ] append-weights { 0 } ]
[ [ secondary>> ] append-weights { 0 } ]
[ [ tertiary>> ] append-weights { 0 } ]
- [ [ [ secondary>> ] [ tertiary>> ] bi [ zero? ] bi@ and not ] filter [ variable-weight ] map ]
+ [ [ [ secondary>> ] [ tertiary>> ] bi [ zero? ] both? ] reject [ variable-weight ] map ]
} cleave
] { } append-outputs-as ;