! See http://factorcode.org/license.txt for BSD license.
USING: tools.test core-text core-text.fonts core-foundation
core-foundation.dictionaries destructors arrays kernel generalizations
-math accessors core-foundation.utilities combinators hashtables colors
+locals math accessors core-foundation.utilities combinators hashtables colors
colors.constants ;
IN: core-text.tests
] with-destructors
] unit-test
-: test-typographic-bounds ( string font -- ? )
+:: test-typographic-bounds ( string font -- ? )
[
- test-font &CFRelease tuck COLOR: white <CTLine> &CFRelease
- compute-line-metrics {
+ font test-font &CFRelease :> ctfont
+ string ctfont COLOR: white <CTLine> &CFRelease :> ctline
+ ctfont ctline compute-line-metrics {
[ width>> float? ]
[ ascent>> float? ]
[ descent>> float? ]
[ t ] [ "Hello world" "Chicago" test-typographic-bounds ] unit-test
-[ t ] [ "日本語" "Helvetica" test-typographic-bounds ] unit-test
\ No newline at end of file
+[ t ] [ "日本語" "Helvetica" test-typographic-bounds ] unit-test
"can write csv too!"
[ "foo1,bar1\nfoo2,bar2\n" ]
-[ { { "foo1" "bar1" } { "foo2" "bar2" } } <string-writer> tuck write-csv >string ] named-unit-test
+[ { { "foo1" "bar1" } { "foo2" "bar2" } } <string-writer> [ write-csv ] keep >string ] named-unit-test
+
"escapes quotes commas and newlines when writing"
[ "\"fo\"\"o1\",bar1\n\"fo\no2\",\"b,ar2\"\n" ]
-[ { { "fo\"o1" "bar1" } { "fo\no2" "b,ar2" } } <string-writer> tuck write-csv >string ] named-unit-test ! "
+[ { { "fo\"o1" "bar1" } { "fo\no2" "b,ar2" } } <string-writer> [ write-csv ] keep >string ] named-unit-test ! "
[ { { "writing" "some" "csv" "tests" } } ]
[
io.files kernel math math.parser namespaces prettyprint fry
sequences strings classes.tuple alien.c-types continuations
db.sqlite.lib db.sqlite.ffi db.tuples words db.types combinators
-math.intervals io nmake accessors vectors math.ranges random
+math.intervals io locals nmake accessors vectors math.ranges random
math.bitwise db.queries destructors db.tuples.private interpolate
io.streams.string make db.private sequences.deep
db.errors.sqlite ;
nip [ key>> ] [ value>> ] [ type>> ] tri
<sqlite-low-level-binding> ;
-M: generator-bind sqlite-bind-conversion ( tuple generate-bind -- array )
- tuck
- [ generator-singleton>> eval-generator tuck ] [ slot-name>> ] bi
- rot set-slot-named
- [ [ key>> ] [ type>> ] bi ] dip
- swap <sqlite-low-level-binding> ;
+M:: generator-bind sqlite-bind-conversion ( tuple generate-bind -- array )
+ generate-bind generator-singleton>> eval-generator :> obj
+ generate-bind slot-name>> :> name
+ obj name tuple set-slot-named
+ generate-bind key>> obj generate-bind type>> <sqlite-low-level-binding> ;
M: sqlite-statement bind-tuple ( tuple statement -- )
[
{ { $link literalize } { $snippet ": literalize '[ _ ] ;" } }\r
{ { $link curry } { $snippet ": curry '[ _ @ ] ;" } }\r
{ { $link compose } { $snippet ": compose '[ @ @ ] ;" } }\r
- { { $link bi@ } { $snippet ": bi@ tuck '[ _ @ _ @ ] call ;" } }\r
} ;\r
\r
ARTICLE: "fry.philosophy" "Fried quotation philosophy"\r
get-controllers [ product-id = ] with filter ;
: find-controller-instance ( product-id instance-id -- controller/f )
get-controllers [
- tuck
[ product-id = ]
- [ instance-id = ] 2bi* and
+ [ instance-id = ] bi-curry bi* and
] with with find nip ;
! Holds the polled keyboard snapshot; the single slot 'keys' stores the
! current key data. NOTE(review): the slot's concrete representation
! (sequence/bit-set of pressed keys, presumably) is set by code outside
! this view — confirm against the vocabulary that fills this tuple.
TUPLE: keyboard-state keys ;
binary [
[
{ HEX: FF } read-until
- read1 tuck HEX: 00 = and
+ read1 [ HEX: 00 = and ] keep swap
]
[ drop ] produce
swap >marker { EOI } assert=
[ length ] dip buffer-reset ;
: string>buffer ( string -- buffer )
- dup length <buffer> tuck buffer-set ;
+ dup length <buffer> [ buffer-set ] keep ;
: buffer-read-all ( buffer -- byte-array )
[ [ pos>> ] [ ptr>> ] bi <displaced-alien> ]
! Windows NT implementation of file-system-info: canonicalize the path
! with normalize-path, reduce it to its volume root via root-directory,
! then delegate the actual query to the private (file-system-info) word.
M: winnt file-system-info ( path -- file-system-info )
normalize-path root-directory (file-system-info) ;
-: volume>paths ( string -- array )
- 16384 <ushort-array> tuck dup length
- 0 <uint> dup [ GetVolumePathNamesForVolumeName 0 = ] dip swap [
- win32-error-string throw
+:: volume>paths ( string -- array )
+ 16384 :> names-buf-length
+ names-buf-length <ushort-array> :> names
+ 0 <uint> :> names-length
+
+ string names names-buf-length names-length GetVolumePathNamesForVolumeName :> ret
+ ret 0 = [
+ ret win32-error-string throw
] [
- *uint "ushort" heap-size * head
+ names names-length *uint "ushort" heap-size * head
utf16n alien>string CHAR: \0 split
] if ;
FindFirstVolume dup win32-error=0/f
[ utf16n alien>string ] dip ;
-: find-next-volume ( handle -- string/f )
- MAX_PATH 1 + [ <ushort-array> tuck ] keep
- FindNextVolume 0 = [
+:: find-next-volume ( handle -- string/f )
+ MAX_PATH 1 + :> buf-length
+ buf-length <ushort-array> :> buf
+
+ handle buf buf-length FindNextVolume :> ret
+ ret 0 = [
GetLastError ERROR_NO_MORE_FILES =
[ drop f ] [ win32-error-string throw ] if
] [
- utf16n alien>string
+ buf utf16n alien>string
] if ;
: find-volumes ( -- array )
current-directory get absolute-path cd
dup make-CreateProcess-args
- tuck fill-redirection
+ [ fill-redirection ] keep
dup call-CreateProcess
lpProcessInformation>>
] with-destructors ;
cons>> car ;
M: lazy-until cdr ( lazy-until -- cdr )
- [ cons>> unswons ] keep quot>> tuck call( elt -- ? )
+ [ quot>> ] [ cons>> unswons ] bi over call( elt -- ? )
[ 2drop nil ] [ luntil ] if ;
M: lazy-until nil? ( lazy-until -- ? )
initial-values [ over 0 > ] [ next-values ] produce
[ 3drop ] dip ;
-: combination-indices ( m combo -- seq )
- [ tuck dual-index combinadic ] keep
- seq>> length 1 - swap [ - ] with map ;
+:: combination-indices ( m combo -- seq )
+ combo m combo dual-index combinadic
+ combo seq>> length 1 - swap [ - ] with map ;
! Realize the m-th combination: keep combo while computing its element
! indices with combination-indices, then select those positions out of
! the combination's underlying sequence (the seq>> slot) with nths.
: apply-combination ( m combo -- seq )
[ combination-indices ] keep seq>> nths ;
[ t ] [ 1 2 [a,b] dup empty-interval interval-union = ] unit-test
-[ t ] [ empty-interval 1 2 [a,b] tuck interval-union = ] unit-test
+[ t ] [ 1 2 [a,b] empty-interval over interval-union = ] unit-test
[ t ] [
0 1 (a,b) 0 1 [a,b] interval-union 0 1 [a,b] =
dup full-interval eq? [
drop 32 random-bits 31 2^ -
] [
- dup to>> first over from>> first tuck - random +
+ [ ] [ from>> first ] [ to>> first ] tri over - random +
2dup swap interval-contains? [
nip
] [
[ 2array ] [ drop level>> 1 + ] 2bi node boa ;
: new-child ( new-child node -- node' expansion/f )
- dup full? [ tuck level>> 1node ] [ node-add f ] if ;
+ dup full? [ [ level>> 1node ] keep swap ] [ node-add f ] if ;
! Replace the final element of seq with val: compute the last index
! ( length - 1 ) while keeping seq, then dispatch to new-nth.
! NOTE(review): new-nth presumably returns an updated copy rather than
! mutating in place (persistent-sequence protocol) — confirm at its
! definition site.
: new-last ( val seq -- seq' )
[ length 1 - ] keep new-nth ;
dup level>> 1 = [
new-child
] [
- tuck children>> last (ppush-new-tail)
+ [ nip ] 2keep children>> last (ppush-new-tail)
[ swap new-child ] [ swap node-set-last f ] ?if
] if ;
] unless ;
: epsilon-table ( states nfa -- table )
- [ H{ } clone tuck ] dip
+ [ [ H{ } clone ] dip over ] dip
'[ _ _ t epsilon-loop ] each ;
! Fry the states set into a quotation that strips duplicates from it,
! and apply that via change-transitions on the nfa.
! NOTE(review): what change-transitions does with the quotation (and how
! the result becomes a dfa-state) is defined elsewhere — verify there.
: find-epsilon-closure ( states nfa -- dfa-state )
'[ _ delete-duplicates ] change-transitions ;
: combine-state-transitions ( hash -- hash )
- H{ } clone tuck '[
+ [ H{ } clone ] dip over '[
_ [ 2array <or-class> ] change-at
] assoc-each [ swap ] assoc-map ;
: <funky-slice> ( from/f to/f seq -- slice )
[
- tuck
- [ drop 0 or ] [ length or ] 2bi*
+ [ drop 0 or ] [ length or ] bi-curry bi*
[ min ] keep
] keep <slice> ; inline
[ main-file-string ] dip utf8 set-file-contents ;
: scaffold-main ( vocab-root vocab -- )
- tuck ".factor" vocab-root/vocab/suffix>path scaffolding? [
+ [ ".factor" vocab-root/vocab/suffix>path ] keep swap scaffolding? [
set-scaffold-main-file
] [
2drop
] [
[
[ children>> swap first head-slice % ]
- [ tuck traverse-step traverse-to-path ]
- 2bi
+ [ nip ]
+ [ traverse-step traverse-to-path ]
+ 2tri
] make-node
] if
] if ;
] [
[
[ traverse-step traverse-from-path ]
- [ tuck children>> swap first 1 + tail-slice % ] 2bi
+ [ nip ]
+ [ children>> swap first 1 + tail-slice % ]
+ 2tri
] make-node
] if
] if ;
gr_mem>> utf8 alien>strings ;
: (group-struct) ( id -- group-struct id group-struct byte-array length void* )
- \ unix:group <struct> tuck 4096
+ [ \ unix:group <struct> ] dip over 4096
[ <byte-array> ] keep f <void*> ;
: check-group-struct ( group-struct ptr -- group-struct/f )
dup [ glob-matches? ] [ 2drop f ] if ;
: suitable-mode? ( file-name first-line mode -- ? )
- tuck first-line-glob>> ?glob-matches
+ [ nip ] 2keep first-line-glob>> ?glob-matches
[ 2drop t ] [ file-name-glob>> ?glob-matches ] if ;
! Look up the editor mode for a file: coerce file-name to a string
! (leaving first-line untouched via dip), run first-match over the mode
! table, and when a match object was found unwrap its to>> slot; a
! failed lookup (f) falls through unchanged, so callers get mode-or-f.
: find-mode ( file-name first-line -- mode )
[ >string ] dip first-match dup [ to>> ] when ;
: rule-start-matches? ( rule -- match-count/f )
- dup start>> tuck swap can-match-here? [
+ [ start>> dup ] keep can-match-here? [
rest-of-line swap text>> text-matches?
] [
drop f
dup mark-following-rule? [
dup start>> swap can-match-here? 0 and
] [
- dup end>> tuck swap can-match-here? [
+ [ end>> dup ] keep can-match-here? [
rest-of-line
swap text>> context get end>> or
text-matches?
?end-rule
mark-token
add-remaining-token
- tuck body-token>> next-token,
+ [ body-token>> next-token, ] keep
delegate>> [ push-context ] when* ;
! Class union over the two concrete span-rule flavors so that shared
! handler methods can dispatch on either span-rule or eol-span-rule
! through one class word.
UNION: abstract-span-rule span-rule eol-span-rule ;
?end-rule
mark-token
add-remaining-token
- tuck rule-match-token* next-token,
+ [ rule-match-token* next-token, ] keep
! ... end subst ...
dup context get (>>in-rule)
delegate>> push-context ;
M: mark-following-rule handle-rule-start
?end-rule
mark-token add-remaining-token
- tuck rule-match-token* next-token,
+ [ rule-match-token* next-token, ] keep
f context get (>>end)
context get (>>in-rule) ;
! Help entries for the basic shuffle words; each records the word's
! stack effect and pulls its prose from the shared $shuffle blurb.
HELP: pick ( x y z -- x y z x ) $shuffle ;
HELP: swap ( x y -- y x ) $shuffle ;
-HELP: spin $complex-shuffle ;
! Help entries for the heavier shuffle words, tagged with the shared
! $complex-shuffle blurb. NOTE(review): $complex-shuffle presumably adds
! a caution about preferring dataflow combinators — confirm at its
! definition.
HELP: rot ( x y z -- y z x ) $complex-shuffle ;
HELP: -rot ( x y z -- z x y ) $complex-shuffle ;
HELP: dupd ( x y -- x x y ) $complex-shuffle ;
swapd
rot
-rot
- spin
} ;
ARTICLE: "shuffle-words" "Shuffle words"