Add unlines and unlines-as words; rename string-lines to lines,
lines to read-lines, and contents to read-contents (compatibility
ALIAS: definitions retained for string-lines and contents).
Related to #2140
"." write flush
{
- lines prefix suffix unclip new-assoc assoc-union!
+ read-lines prefix suffix unclip new-assoc assoc-union!
word-prop set-word-prop 1array 2array 3array ?nth
} compile-unoptimized
: download-checksums ( -- alist )
url "checksums.txt" >url derive-url http-get nip
- string-lines [ " " split1 ] { } map>assoc ;
+ lines [ " " split1 ] { } map>assoc ;
: file-checksum ( image -- checksum )
md5 checksum-file bytes>hex-string ;
[
[ month-name 20 center. ]
[ days-header. days. nl nl ] bi
- ] with-string-writer string-lines
+ ] with-string-writer lines
] with map 3 <groups>
[ first3 [ "%-20s %-20s %-20s\n" printf ] 3each ] each ;
[ t >>inside-undo? ] dip keep f >>inside-undo? drop ; inline
: split-lines ( str -- seq )
- [ string-lines ] keep ?last
+ [ lines ] keep ?last
[ "\r\n" member? ] [ t ] if*
[ "" suffix ] when ;
[ sha1-escape-string ] { } map-as ;
M: string sha1-escape-strings ( str -- strs )
- string-lines sha1-escape-strings ;
\ No newline at end of file
+ lines sha1-escape-strings ;
\ No newline at end of file
IN: eval
: parse-string ( str -- quot )
- [ string-lines parse-lines ] with-compilation-unit ;
+ [ lines parse-lines ] with-compilation-unit ;
: (eval) ( str effect -- )
[ parse-string ] dip call-effect ; inline
} case ;
: parse-farkup ( string -- farkup )
- string-lines [ dup empty? not ] [ parse-item ] produce nip sift ;
+ lines [ dup empty? not ] [ parse-item ] produce nip sift ;
CONSTANT: invalid-url "javascript:alert('Invalid URL in farkup');"
} cond ;
: render-code ( string mode -- xml )
- [ string-lines ] dip htmlize-lines
+ [ lines ] dip htmlize-lines
[XML <pre><-></pre> XML] ;
GENERIC: (write-farkup) ( farkup -- xml )
start-directory [
utf8 encode-output [
"." directory.
- ] with-string-writer string-lines
+ ] with-string-writer lines
harvest [ ftp-send ] each
] with-output-stream finish-directory ;
] produce nip ;
: code-lines ( str -- seq )
- string-lines [ [ blank? ] trim ] map harvest ;
+ lines [ [ blank? ] trim ] map harvest ;
: make-example ( str -- seq )
code-lines dup { [ array? ] [ length 1 > ] } 1&& [
: <code> ( -- code )
code new ;
-: ?string-lines ( str/f -- seq )
- [ { } ] [ string-lines ] if-empty ;
+: ?lines ( str/f -- seq )
+ [ { } ] [ lines ] if-empty ;
M: code render*
- [ ?string-lines ] [ drop ] [ mode>> value ] tri* htmlize-lines ;
+ [ ?lines ] [ drop ] [ mode>> value ] tri* htmlize-lines ;
! Farkup component
TUPLE: farkup no-follow disable-images parsed ;
[
parser-quiet? on
"html.templates.fhtml" use-vocab
- string-lines parse-template-lines
+ lines parse-template-lines
] with-file-vocabs
] with-compilation-unit ;
[ read-request ] with-string-reader
[ write-request ] with-string-writer
! normalize crlf
- string-lines "\n" join
+ lines "\n" join
] unit-test
STRING: read-request-test-2
[ read-response ] with-string-reader
[ write-response ] with-string-writer
! normalize crlf
- string-lines "\n" join
+ lines "\n" join
] unit-test
{ t } [
{ $examples
{ $unchecked-example
"USING: io.launcher prettyprint io.encodings.utf8 ;"
- "\"ls -dl /etc\" utf8 [ contents ] with-process-reader ."
+ "\"ls -dl /etc\" utf8 [ read-contents ] with-process-reader ."
"\"drwxr-xr-x 213 root root 12288 mar 11 18:52 /etc\\n\""
}
} ;
launcher-test-path [
<process>
console-vm-path "-script" "env.factor" 3array >>command
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
] with-directory eval( -- alist )
os-envs =
console-vm-path "-script" "env.factor" 3array >>command
+replace-environment+ >>environment-mode
os-envs >>environment
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
] with-directory eval( -- alist )
os-envs =
<process>
console-vm-path "-script" "env.factor" 3array >>command
{ { "A" "B" } } >>environment
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
] with-directory eval( -- alist )
"A" of
console-vm-path "-script" "env.factor" 3array >>command
{ { "USERPROFILE" "XXX" } } >>environment
+prepend-environment+ >>environment-mode
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
] with-directory eval( -- alist )
"USERPROFILE" of "XXX" =
{ "asdf" } [
"asdf" <string-reader> 2 <limited-stream> [
- unlimited-input contents
+ unlimited-input read-contents
] with-input-stream
] unit-test
{ "asdf" } [
"asdf" <string-reader> 2 <limited-stream> [
- [ contents ] with-unlimited-input
+ [ read-contents ] with-unlimited-input
] with-input-stream
] unit-test
{ "gh" } [
"asdfgh" <string-reader> 4 <limited-stream> [
2 [
- [ contents drop ] with-unlimited-input
+ [ read-contents drop ] with-unlimited-input
] with-limited-input
- [ contents ] with-unlimited-input
+ [ read-contents ] with-unlimited-input
] with-input-stream
] unit-test
USING: io.streams.string io kernel arrays namespaces make
tools.test ;
-{ "" } [ "" [ contents ] with-string-reader ] unit-test
+{ "" } [ "" [ read-contents ] with-string-reader ] unit-test
{ "line 1" CHAR: l }
[
unit-test
{ { "line 1" "line 2" "line 3" } } [
- "line 1\nline 2\nline 3" [ lines ] with-string-reader
+ "line 1\nline 2\nline 3" [ read-lines ] with-string-reader
] unit-test
{ { "" "foo" "bar" "baz" } } [
- "\rfoo\r\nbar\rbaz\n" [ lines ] with-string-reader
+ "\rfoo\r\nbar\rbaz\n" [ read-lines ] with-string-reader
] unit-test
{ f } [ "" [ readln ] with-string-reader ] unit-test
check-log-message
log-service get
2dup [ log? ] [ ] bi* and [
- [ [ string-lines ] [ name>> ] [ name>> ] tri* ] dip
+ [ [ lines ] [ name>> ] [ name>> ] tri* ] dip
4array "log-message" send-to-log-server
] [
4drop
>byte-array write ;
: parse-headers ( string -- hashtable )
- string-lines harvest [ parse-header-line ] map >hashtable ;
+ lines harvest [ parse-header-line ] map >hashtable ;
: fill-bytes ( multipart -- multipart )
buffer-size read
H{
{ "dup" dup } { "nip" nip } { "over" over } ! kernel
{ "nth" nth } ! sequences
- } [ string-lines parse-lines ] with-words ;
+ } [ lines parse-lines ] with-words ;
M: ebnf-action (transform)
ebnf-transform check-action-effect action ;
55 [ "hello" ] replicate concat ;
{ f } [ message >quoted "=\r\n" swap subseq? ] unit-test
-{ 1 } [ message >quoted string-lines length ] unit-test
+{ 1 } [ message >quoted lines length ] unit-test
{ t } [ message >quoted-lines "=\r\n" swap subseq? ] unit-test
-{ 4 } [ message >quoted-lines string-lines length ] unit-test
-{ "===o" } [ message >quoted-lines string-lines [ last ] "" map-as ] unit-test
+{ 4 } [ message >quoted-lines lines length ] unit-test
+{ "===o" } [ message >quoted-lines lines [ last ] "" map-as ] unit-test
[ parse-resolv.conf-line ] each ;
: string>resolv.conf ( string -- resolv.conf )
- string-lines lines>resolv.conf ;
+ lines lines>resolv.conf ;
: path>resolv.conf ( path -- resolv.conf )
utf8 file-lines lines>resolv.conf ;
PRIVATE>
: format-table ( table -- seq )
- [ [ string-lines ] map format-row flip ] map concat flip
+ [ [ lines ] map format-row flip ] map concat flip
[ { } ] [
[ but-last-slice [ format-column ] map! drop ] keep
flip [ " " join ] map!
parse-fresh [ first assoc-union ] unless-empty ;
: set-deploy-config ( assoc vocab -- )
- [ [ unparse-use ] without-limits string-lines ] dip
+ [ [ unparse-use ] without-limits lines ] dip
"deploy.factor" set-vocab-file-lines ;
: set-deploy-flag ( value key vocab -- )
PRIVATE>
: ?string-lines ( string -- string/array )
- CHAR: \n over member-eq? [ string-lines ] when ;
+ CHAR: \n over member-eq? [ lines ] when ;
M: label string<<
[
M: f draw-cell 2drop ;
: single-line ( str -- str' )
- dup [ "\r\n" member? ] any? [ string-lines " " join ] when ;
+ dup [ "\r\n" member? ] any? [ lines " " join ] when ;
M: string cell-dim single-line text-dim first2 ceiling 0 ;
M: string draw-cell single-line draw-text ;
: collation-test-lines ( -- lines )
"vocab:unicode/UCA/CollationTest_SHIFTED.txt.zip"
- binary file-contents uncompress utf8 decode string-lines
+ binary file-contents uncompress utf8 decode lines
[ "#" head? ] reject harvest ;
: parse-collation-test-shifted ( -- lines )
: parse-platform-section ( string suffix -- )
[
- [ [ string-lines parse-lines ] curry with-nested-compilation-unit ]
+ [ [ lines parse-lines ] curry with-nested-compilation-unit ]
curry
] dip with-vocabulary drop ; inline
swap [ 0 over length <element> prefix ] when* ;
: split-lines ( string -- elements-lines )
- string-lines [ split-line ] map! ;
+ lines [ split-line ] map! ;
: join-elements ( wrapped-lines -- lines )
[ " " join ] map! ;
{ } [
"<style type=\"text/css\" media=\"screen\" >
* {margin:0; padding:0; border:0;}"
- string-lines "html" htmlize-lines drop
+ lines "html" htmlize-lines drop
] unit-test
{ } [
vocab-source-path highlight. ;
M: word highlight.
- [ see ] with-string-writer string-lines
+ [ see ] with-string-writer lines
"factor" highlight-lines ;
C: <computer> computer
{ "TUPLE: computer cpu ram ;" } [
- [ \ computer see ] with-string-writer string-lines second
+ [ \ computer see ] with-string-writer lines second
] unit-test
TUPLE: laptop < computer battery ;
test-laptop-slot-values
{ "TUPLE: laptop < computer battery ;" } [
- [ \ laptop see ] with-string-writer string-lines second
+ [ \ laptop see ] with-string-writer lines second
] unit-test
{ { tuple computer laptop } } [ laptop superclasses-of ] unit-test
{ f } [ \ + server? ] unit-test
{ "TUPLE: server < computer rackmount ;" } [
- [ \ server see ] with-string-writer string-lines second
+ [ \ server see ] with-string-writer lines second
] unit-test
[
{ $values { "stream" "an input stream" } { "seq" { $sequence string } } }
{ $description "Reads lines of text until the stream is exhausted, collecting them in a sequence of strings." } ;
-HELP: lines
+HELP: read-lines
{ $values { "seq" { $sequence string } } }
{ $description "Reads lines of text until from the " { $link input-stream } " until it is exhausted, collecting them in a sequence of strings." } ;
{ $description "Reads all elements in the given stream until the stream is exhausted. The type of the sequence depends on the stream's element type. The stream is closed after completion." }
$io-error ;
-HELP: contents
+HELP: read-contents
{ $values { "seq" { $or string byte-array } } }
{ $description "Reads all elements in the " { $link input-stream } " until the stream is exhausted. The type of the sequence depends on the stream's element type." }
$io-error ;
"Processing lines one by one:"
{ $subsections
stream-lines
- lines
+ read-lines
each-line
}
"Processing blocks of data:"
{ $subsections
stream-contents
- contents
+ read-contents
each-block
}
"Copying the contents of one stream to another:"
[ ] collector [ each-stream-line ] dip { } like
] with-disposal ; inline
-: lines ( -- seq )
+! Note: was `lines` before .99
+: read-lines ( -- seq )
input-stream get stream-lines ; inline
CONSTANT: each-block-size 65536
[ stream-exemplar produce-as nip ] bi
] with-disposal ; inline
-: contents ( -- seq )
+: read-contents ( -- seq )
input-stream get stream-contents ; inline
+ALIAS: contents read-contents
+
: stream-copy* ( in out -- )
[ stream-write ] curry each-stream-block ; inline
kernel math specialized-arrays strings tools.test ;
SPECIALIZED-ARRAY: int
-{ B{ } } [ B{ } binary [ contents ] with-byte-reader ] unit-test
+{ B{ } } [ B{ } binary [ read-contents ] with-byte-reader ] unit-test
! Issue #70 github
{ f } [ B{ } binary [ 0 read ] with-byte-reader ] unit-test
split-when-slice
}
"Splitting a string into lines:"
-{ $subsections string-lines }
+{ $subsections lines }
"Replacing subsequences with another subsequence:"
{ $subsections replace } ;
{ $values { "seq" sequence } { "end" sequence } { "newseq" slice } { "?" boolean } }
{ $description "Like " { $link ?tail } ", except the resulting sequence is a " { $link slice } "." } ;
-HELP: string-lines
+HELP: lines
{ $values { "seq" sequence } { "seq'" { $sequence string } } }
{ $description "Splits a string along line breaks." }
{ $examples
- { $example "USING: prettyprint splitting ;" "\"Hello\\r\\nworld\\n\" string-lines ." "{ \"Hello\" \"world\" }" }
+ { $example "USING: prettyprint splitting ;" "\"Hello\\r\\nworld\\n\" lines ." "{ \"Hello\" \"world\" }" }
} ;
HELP: replace
{ "" t } [ "\n" "\n" ?tail ] unit-test
{ "" f } [ "" "\n" ?tail ] unit-test
-{ { } } [ "" string-lines ] unit-test
-{ { "" } } [ "\n" string-lines ] unit-test
-{ { "" } } [ "\r" string-lines ] unit-test
-{ { "" } } [ "\r\n" string-lines ] unit-test
-{ { "hello" } } [ "hello" string-lines ] unit-test
-{ { "hello" } } [ "hello\n" string-lines ] unit-test
-{ { "hello" } } [ "hello\r" string-lines ] unit-test
-{ { "hello" } } [ "hello\r\n" string-lines ] unit-test
-{ { "hello" "hi" } } [ "hello\nhi" string-lines ] unit-test
-{ { "hello" "hi" } } [ "hello\rhi" string-lines ] unit-test
-{ { "hello" "hi" } } [ "hello\r\nhi" string-lines ] unit-test
-{ { "hello" "" "" } } [ "hello\n\n\n" string-lines ] unit-test
-
-{ { } } [ SBUF" " string-lines ] unit-test
-{ { "" } } [ SBUF" \n" string-lines ] unit-test
-{ { "" } } [ SBUF" \r" string-lines ] unit-test
-{ { "" } } [ SBUF" \r\n" string-lines ] unit-test
-{ { "hello" } } [ SBUF" hello" string-lines ] unit-test
-{ { "hello" } } [ SBUF" hello\n" string-lines ] unit-test
-{ { "hello" } } [ SBUF" hello\r" string-lines ] unit-test
-{ { "hello" } } [ SBUF" hello\r\n" string-lines ] unit-test
-{ { "hello" "hi" } } [ SBUF" hello\nhi" string-lines ] unit-test
-{ { "hello" "hi" } } [ SBUF" hello\rhi" string-lines ] unit-test
-{ { "hello" "hi" } } [ SBUF" hello\r\nhi" string-lines ] unit-test
-{ { "hello" "" "" } } [ SBUF" hello\n\n\n" string-lines ] unit-test
+{ { } } [ "" lines ] unit-test
+{ { "" } } [ "\n" lines ] unit-test
+{ { "" } } [ "\r" lines ] unit-test
+{ { "" } } [ "\r\n" lines ] unit-test
+{ { "hello" } } [ "hello" lines ] unit-test
+{ { "hello" } } [ "hello\n" lines ] unit-test
+{ { "hello" } } [ "hello\r" lines ] unit-test
+{ { "hello" } } [ "hello\r\n" lines ] unit-test
+{ { "hello" "hi" } } [ "hello\nhi" lines ] unit-test
+{ { "hello" "hi" } } [ "hello\rhi" lines ] unit-test
+{ { "hello" "hi" } } [ "hello\r\nhi" lines ] unit-test
+{ { "hello" "" "" } } [ "hello\n\n\n" lines ] unit-test
+
+{ { } } [ SBUF" " lines ] unit-test
+{ { "" } } [ SBUF" \n" lines ] unit-test
+{ { "" } } [ SBUF" \r" lines ] unit-test
+{ { "" } } [ SBUF" \r\n" lines ] unit-test
+{ { "hello" } } [ SBUF" hello" lines ] unit-test
+{ { "hello" } } [ SBUF" hello\n" lines ] unit-test
+{ { "hello" } } [ SBUF" hello\r" lines ] unit-test
+{ { "hello" } } [ SBUF" hello\r\n" lines ] unit-test
+{ { "hello" "hi" } } [ SBUF" hello\nhi" lines ] unit-test
+{ { "hello" "hi" } } [ SBUF" hello\rhi" lines ] unit-test
+{ { "hello" "hi" } } [ SBUF" hello\r\nhi" lines ] unit-test
+{ { "hello" "" "" } } [ SBUF" hello\n\n\n" lines ] unit-test
{ { "hey" "world" "what's" "happening" } }
[ "heyAworldBwhat'sChappening" [ LETTER? ] split-when ] unit-test
[ pick subseq ] keep swap
] map 2nip ;
-! string-lines uses string-nth-fast which is 50% faster over
+! lines uses string-nth-fast which is 50% faster over
! nth-unsafe. be careful when changing the definition so that
! you don't unoptimize it.
-GENERIC: string-lines ( seq -- seq' )
+GENERIC: lines ( seq -- seq' )
-M: string string-lines
+ALIAS: string-lines lines
+
+M: string lines
[ V{ } clone 0 ] dip [ 2dup bounds-check? ] [
2dup [ "\r\n" member? ] find-from swapd [
over [ [ nip length ] keep ] unless
] when
] while 2drop { } like ;
-M: sbuf string-lines "" like string-lines ;
+M: sbuf lines "" like lines ;
+
+: unlines-as ( seq exemplar -- seq ) "\n" swap join-as ; inline
+: unlines ( seq -- seq ) "" unlines-as ; inline
SYNTAX: `
"`" parse-multiline-string '[
_ interpolate>string
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
] append! ;
IN: compiler.cfg.graphviz
: left-justify ( str -- str' )
- string-lines "\\l" join ;
+ lines "\\l" join ;
: left-justified ( quot -- str )
with-string-writer left-justify ; inline
: gemini-print ( url body meta -- )
f pre [
PAGE delete-all
- gemini-charset decode string-lines [
+ gemini-charset decode lines [
{ [ pre get not ] [ "=>" ?head ] } 0&& [
swap gemini-link present over 2array PAGE push
PAGE length swap "[%s] %s\n" printf
PRIVATE>
: gemtext. ( base-url body -- )
- f pre [ string-lines [ gemini-line. ] with each ] with-variable ;
+ f pre [ lines [ gemini-line. ] with each ] with-variable ;
IN: git.tests
: run-process-stdout ( process -- string )
- >process utf8 [ contents ] with-process-reader ;
+ >process utf8 [ read-contents ] with-process-reader ;
: with-empty-test-git-repo ( quot -- )
'[
: git-string>assoc ( string -- assoc )
"\n\n" split1 [
- string-lines [ nip first CHAR: \s = ] monotonic-split
+ lines [ nip first CHAR: \s = ] monotonic-split
[
dup length 1 = [
first " " split1 2array
] if ;
: gopher-text ( object -- lines )
- utf8 decode string-lines { "." } split1 drop ;
+ utf8 decode lines { "." } split1 drop ;
: gopher-text. ( object -- )
gopher-text [ print ] each ;
IN: io.streams.counting.tests
{ 1306 0 } [
- "resource:LICENSE.txt" utf8 <file-reader> [ contents ] with-counting-stream nipd
+ "resource:LICENSE.txt" utf8 <file-reader> [ read-contents ] with-counting-stream nipd
] unit-test
] [ call-next-method ] if ;
SYNTAX: <LITERATE
- "LITERATE>" parse-multiline-string string-lines [
+ "LITERATE>" parse-multiline-string lines [
<literate-lexer> (parse-lines) append!
] with-nested-compilation-unit ;
utf8 file-contents ;
: load-tabular-file ( name -- lines )
- load-file [ blank? ] trim string-lines
+ load-file [ blank? ] trim lines
[ [ blank? ] split-when harvest ] map harvest ;
: numerify ( table -- data names )
: <taf-report> ( taf -- report )
[ taf-report new ] dip [ >>raw ] keep
- string-lines [ [ blank? ] trim ] map
+ lines [ [ blank? ] trim ] map
rest dup first "TAF" = [ rest ] when
harvest unclip swapd taf-body swap taf-partials ;
M: p pdf-width
[ style>> set-style ] keep
- [ font>> ] [ string>> ] bi* string-lines
+ [ font>> ] [ string>> ] bi* lines
[ dupd text-width ] map nip supremum ;
M: text pdf-width
[ style>> set-style ] keep
- [ font>> ] [ string>> ] bi* string-lines
+ [ font>> ] [ string>> ] bi* lines
[ dupd text-width ] map nip supremum ;
IN: pdf
: text-to-pdf ( str -- pdf )
- string-lines [
+ lines [
H{ { font-name "monospace" } { font-size 10 } } <p>
] map pdf>string ;
! FIXME: what about "proper" tab support?
: string>texts ( string style -- seq )
- [ string-lines ] dip '[ _ <text> 1array ] map
+ [ lines ] dip '[ _ <text> 1array ] map
<br> 1array join ;
PRIVATE>
>robots.txt-url http-get nip ;
: normalize-robots.txt ( string -- sitemaps seq )
- string-lines
+ lines
[ [ blank? ] trim ] map
[ "#" head? ] reject harvest
[ ":" split1 [ [ blank? ] trim ] bi@ [ >lower ] dip ] { } map>assoc
M: back-quoted-expr expand
expr>> expr command>> expansion
- utf8 [ contents ] with-process-reader
+ utf8 [ read-contents ] with-process-reader
" \n" split harvest ;
: expansion ( command -- command ) [ expand ] map flatten ;
[ parse-srt-chunk ] { } map-as ;
: parse-srt-string ( seq -- seq' )
- string-lines parse-srt-lines ;
+ lines parse-srt-lines ;
: parse-srt-file ( path -- seq )
utf8 file-lines parse-srt-lines ;
] { } make "\n" join ;
: string>lines ( str -- lines )
- "\t" split " " join string-lines
+ "\t" split " " join lines
[ [ " " ] when-empty ] map ;
: lines>pages ( lines -- pages )
( ] { } make "\\n" join ;)'
( )'
(: string>lines \( str -- lines \))'
-( "\\t" split " " join string-lines)'
+( "\\t" split " " join lines)'
( [ [ " " ] when-empty ] map ;)'
( )'
ET
DEFER: name/values
: (parse-value) ( string -- values )
- decode-value string-lines
+ decode-value lines
[ "" ] [ dup length 1 = [ first ] when ] if-empty ;
: parse-value ( string -- remain value )
[ "new" [ from-object ] nest-form ]
bi*
]
- [ [ content>> string-lines ] bi@ lcs-diff "diff" set-value ]
+ [ [ content>> lines ] bi@ lcs-diff "diff" set-value ]
2bi
] >>init
: article. ( name -- )
wikipedia-url http-get nip parse-html
"content" find-by-id-between
- html-text string-lines
+ html-text lines
[ [ blank? ] trim ] map harvest [
html-unescape 72 wrap-string print nl
] each ;
[ "title" attr "%s:\n" printf ]
[
"plaintext" deep-tags-named [
- children>string string-lines
+ children>string lines
[ " %s\n" printf ] each
] each
] bi
SynKeywordFactorWord factorWord_sequences | syn keyword factorWord_sequences contained 1sequence 2all? 2any? 2each 2each-from 2map 2map-as 2map-reduce 2reduce 2selector 2sequence 3append 3append-as 3each 3map 3map-as 3sequence 4sequence <iota> <repetition> <reversed> <slice> ?first ?last ?nth ?second ?set-nth accumulate accumulate! accumulate* accumulate*! accumulate*-as accumulate-as all? any? append append! append-as assert-sequence assert-sequence= assert-sequence? binary-reduce bounds-check bounds-check? bounds-error bounds-error? but-last but-last-slice cartesian-each cartesian-find cartesian-map cartesian-product cartesian-product-as change-nth check-slice clone-like collapse-slice collector collector-as collector-for collector-for-as concat concat-as copy count cut cut* cut-slice cut-slice* delete-all delete-slice drop-prefix each each-from each-index empty? exchange filter filter! filter-as find find-from find-index find-index-from find-last find-last-from first first2 first3 first4 flip follow fourth glue glue-as halves harvest head head* head-slice head-slice* head? if-empty immutable immutable-sequence immutable-sequence? immutable? index index-from indices infimum infimum-by insert-nth interleave iota iota? join join-as last last-index last-index-from length lengthen like longer longer? longest map map! map-as map-find map-find-last map-index map-index-as map-integers map-reduce map-sum max-length member-eq? member? midpoint@ min-length mismatch move new-like new-resizable new-sequence non-negative-integer-expected non-negative-integer-expected? none? nth nths pad-head pad-tail partition pop pop* prefix prepend prepend-as produce produce-as product push push-all push-either push-if reduce reduce-index reject reject! reject-as remove remove! remove-eq remove-eq! remove-nth remove-nth! repetition repetition? replace-slice replicate replicate-as rest rest-slice reverse reverse! reversed reversed? 
second selector selector-as sequence sequence-hashcode sequence= sequence? set-first set-fourth set-last set-length set-nth set-second set-third short shorten shorter shorter? shortest sift slice slice-error slice-error? slice? snip snip-slice subseq subseq-as subseq-start subseq-start-from subseq? suffix suffix! sum sum-lengths supremum supremum-by surround surround-as tail tail* tail-slice tail-slice* tail? third trim trim-head trim-head-slice trim-slice trim-tail trim-tail-slice unclip unclip-last unclip-last-slice unclip-slice unless-empty virtual-exemplar virtual-sequence virtual-sequence? virtual@ when-empty
SynKeywordFactorWord factorWord_sets | syn keyword factorWord_sets contained ?adjoin ?delete adjoin adjoin-all adjoin-at all-unique? cardinality clear-set delete diff diff! duplicates fast-set gather in? intersect intersect! intersect-all intersects? members null? set set-like set= set? subset? union union! union-all within without
SynKeywordFactorWord factorWord_sorting | syn keyword factorWord_sorting contained inv-sort-with natural-sort sort sort-keys sort-pair sort-values sort-with
-SynKeywordFactorWord factorWord_splitting | syn keyword factorWord_splitting contained ?head ?head-slice ?tail ?tail-slice replace split split-indices split-slice split-subseq split-when split-when-slice split1 split1-last split1-last-slice split1-slice split1-when split1-when-slice string-lines
+SynKeywordFactorWord factorWord_splitting | syn keyword factorWord_splitting contained ?head ?head-slice ?tail ?tail-slice replace split split-indices split-slice split-subseq split-when split-when-slice split1 split1-last split1-last-slice split1-slice split1-when split1-when-slice lines
SynKeywordFactorWord factorWord_strings | syn keyword factorWord_strings contained 1string <string> >string resize-string string string?
SynKeywordFactorWord factorWord_strings_parser | syn keyword factorWord_strings_parser contained bad-escape bad-escape? escape hex-escape name>char-hook next-escape parse-string unescape-string unicode-escape
SynKeywordFactorWord factorWord_syntax | syn keyword factorWord_syntax contained " 'HS{ 'H{ '[ '{ ( : :: :> ; << <<<<<< <<<<<<< <PRIVATE ====== ======= >> >>>>>> >>>>>>> @ ALIAS: B B: BUILTIN: BV{ B{ C: CHAR: CONSTANT: CS{ C{ DEFER: ERROR: EXCLUDE: FORGET: FROM: GENERIC#: GENERIC: HOOK: HS{ H{ IDENTITY-MEMO: IDENTITY-MEMO:: IH{ IN: INSTANCE: INTERSECTION: M: M:: MACRO: MACRO:: MAIN: MATH: MEMO: MEMO:: MIXIN: M\ NAN: P" POSTPONE: PREDICATE: PRIMITIVE: PRIVATE> QUALIFIED-WITH: QUALIFIED: RENAME: SBUF" SINGLETON: SINGLETONS: SLOT: SYMBOL: SYMBOLS: SYNTAX: TUPLE: T{ UNION: UNUSE: USE: USING: V{ W{ [ [let [\| \ ] _ call( call-next-method delimiter deprecated execute( f final flushable foldable initial: inline intersection{ maybe{ not{ read-only recursive t t? union{ { }