[
" " split1 [ "()" in? ] trim "," split
[ [ blank? ] trim ] map
- [ "OS ABI:" head? not ] filter
+ [ "OS ABI:" head? ] reject
] dip 3array
] map ;
: compile-unoptimized ( words -- )
[ [ subwords ] map ] keep suffix concat
- [ optimized? not ] filter compile ;
+ [ optimized? ] reject compile ;
"debug-compiler" get [
[ dup lookup-vocab [ drop ] [ no-vocab ] if ] require-hook [
dictionary get values
- [ docs-loaded?>> not ] filter
+ [ docs-loaded?>> ] reject
[ load-docs ] each
] with-variable ;
SYNTAX: HEX{
"}" parse-tokens concat
- [ blank? not ] filter
+ [ blank? ] reject
dup length even? [ odd-length-hex-string ] unless
2 <groups> [ hex> ] B{ } map-as
suffix! ;
1 [ 0 >>offset type>> heap-size max ] reduce ;
: struct-alignment ( slots -- align )
- [ struct-bit-slot-spec? not ] filter
+ [ struct-bit-slot-spec? ] reject
1 [ dup offset>> c-type-align-at max ] reduce ;
PRIVATE>
[ blank? ] trim-head H{ { CHAR: \s CHAR: - } } substitute swap ;
: parse-colors ( lines -- assoc )
- [ "!" head? not ] filter
+ [ "!" head? ] reject
[ 11 cut [ " \t" split harvest ] dip suffix ] map
[ parse-color ] H{ } map>assoc ;
: hat-effect ( insn -- effect )
"insn-slots" word-prop
- [ type>> { def temp } member-eq? not ] filter [ name>> ] map
+ [ type>> { def temp } member-eq? ] reject [ name>> ] map
{ "vreg" } <effect> ;
: define-hat ( insn -- )
: assign-registers ( cfg live-intervals -- )
init-assignment
- linearization-order [ kill-block?>> not ] filter
+ linearization-order [ kill-block?>> ] reject
[ assign-registers-in-block ] each ;
: admissible-registers ( cfg -- regs )
machine-registers swap frame-pointer?>> [
- [ [ frame-reg = not ] filter ] assoc-map
+ [ [ frame-reg = ] reject ] assoc-map
] when ;
: allocate-and-assign-registers ( cfg -- )
: compute-live-intervals ( cfg -- live-intervals sync-points )
init-live-intervals
- linearization-order <reversed> [ kill-block?>> not ] filter
+ linearization-order <reversed> [ kill-block?>> ] reject
[ compute-live-intervals-step ] each
live-intervals get finish-live-intervals
sync-points get ;
dup visited get ?adjoin [ dup , sorted-successors ] [ drop { } ] if
[ predecessors-ready? ] filter
[ dup loop-entry? [ find-alternate-loop-head ] when ] map
- [ visited? not ] filter ;
+ [ visited? ] reject ;
: (linearization-order) ( cfg -- bbs )
HS{ } clone visited set
to-do get push-all-back ;
: init-ready ( bs -- )
- locs get '[ _ key? not ] filter ready get push-all-front ;
+ locs get '[ _ key? ] reject ready get push-all-front ;
: init ( mapping -- )
<dlist> to-do set
drop ;
! Generate methods for everything else
-insn-classes get special-vreg-insns diff [ insn-def-slots empty? not ] filter [
+insn-classes get special-vreg-insns diff [ insn-def-slots empty? ] reject [
[ \ rename-insn-defs create-method-in ]
[ insn-def-slots [ name>> ] map DEF-QUOT slot-change-quot ] bi
define
] each
-insn-classes get special-vreg-insns diff [ insn-use-slots empty? not ] filter [
+insn-classes get special-vreg-insns diff [ insn-use-slots empty? ] reject [
[ \ rename-insn-uses create-method-in ]
[ insn-use-slots [ name>> ] map USE-QUOT slot-change-quot ] bi
define
] each
-insn-classes get [ insn-temp-slots empty? not ] filter [
+insn-classes get [ insn-temp-slots empty? ] reject [
[ \ rename-insn-temps create-method-in ]
[ insn-temp-slots [ name>> ] map TEMP-QUOT slot-change-quot ] bi
define
dup useful-copy? [ , ] [ drop ] if ;
M: ##parallel-copy cleanup-insn
- values>> [ leaders ] assoc-map [ first2 = not ] filter
+ values>> [ leaders ] assoc-map [ first2 = ] reject
parallel-copy-rep % ;
M: ##tagged>integer cleanup-insn
: height-state>insns ( state -- insns )
[ second ] map { ds-loc rs-loc } [ new swap >>n ] 2map
- [ n>> 0 = not ] filter [ ##inc new swap >>loc ] map ;
+ [ n>> 0 = ] reject [ ##inc new swap >>loc ] map ;
: translate-local-loc ( loc state -- loc' )
[ clone ] dip over >loc< 0 1 ? rot nth first - >>n ;
! in the middle of recompiling something
[ { } ] [
all-words dup [ subwords ] map concat append
- H{ } clone '[ _ dependencies-satisfied? not ] filter
+ H{ } clone '[ _ dependencies-satisfied? ] reject
] unit-test
GENERIC: delete-node ( node -- )
M: #call-recursive delete-node
- dup label>> calls>> [ node>> eq? not ] with filter! drop ;
+ dup label>> calls>> [ node>> eq? ] with reject! drop ;
M: #return-recursive delete-node
label>> f >>return drop ;
\ at* [ at-quot ] 1 define-partial-eval
: diff-quot ( seq -- quot: ( seq' -- seq'' ) )
- [ tester ] keep '[ members [ @ not ] filter _ set-like ] ;
+ [ tester ] keep '[ members [ @ ] reject _ set-like ] ;
M\ set diff [ diff-quot ] 1 define-partial-eval
: not-a-loop? ( label -- ? ) not-loops get in? ;
: non-tail-calls ( call-graph-node -- seq )
- calls>> [ tail?>> not ] filter ;
+ calls>> [ tail?>> ] reject ;
: visit-back-edges ( call-graph -- )
[
: filter-ignores ( tuple specs -- specs' )
[ <mirror> [ nip IGNORE = ] assoc-filter keys ] dip
- [ slot-name>> swap member? not ] with filter ;
+ [ slot-name>> swap member? ] with reject ;
ERROR: not-persistent class ;
dup number? [ number>string ] when ;
: remove-db-assigned-id ( specs -- obj )
- [ +db-assigned-id+? not ] filter ;
+ [ +db-assigned-id+? ] reject ;
: remove-relations ( specs -- newcolumns )
- [ relation? not ] filter ;
+ [ relation? ] reject ;
: remove-id ( specs -- obj )
- [ primary-key>> not ] filter ;
+ [ primary-key>> ] reject ;
! SQLite Types: http://www.sqlite.org/datatype3.html
! NULL INTEGER REAL TEXT BLOB
: find-and-remove-detached-devices ( -- )
+controller-devices+ get-global keys
- [ device-attached? not ] filter
+ [ device-attached? ] reject
[ remove-controller ] each ;
: ?device-interface ( dbt-broadcast-hdr -- ? )
: def-classes ( classes -- ) [ def-class ] each ;
: def-boxeds ( boxeds -- )
- [ find-existing-boxed-type not ] filter
+ [ find-existing-boxed-type ] reject
[ def-boxed-type ] each ;
: def-records ( records -- )
: orphan-articles ( -- seq )
articles get keys
- [ article-parent not ] filter ;
+ [ article-parent ] reject ;
: xref-help ( -- )
all-articles [ xref-article ] each ;
: all-vocabs-really ( -- seq )
all-vocabs-recursive no-roots remove-redundant-prefixes
- [ vocab-name "scratchpad" = not ] filter ;
+ [ vocab-name "scratchpad" = ] reject ;
: all-topics ( -- topics )
[
dup struct-class? [ struct-slots ] [ all-slots ] if
[ name>> ] map
] [ extract-slots ] bi*
- [ swap member? not ] with filter [
+ [ swap member? ] with reject [
", " join "Described $slot does not exist: " prepend
simple-lint-error
] unless-empty
: :lint-failures ( -- ) lint-failures get values errors. ;
: unlinked-words ( vocab -- seq )
- words all-word-help [ article-parent not ] filter ;
+ words all-word-help [ article-parent ] reject ;
: linked-undocumented-words ( -- seq )
all-words
- [ word-help not ] filter
+ [ word-help ] reject
[ article-parent ] filter
- [ predicate? not ] filter ;
+ [ predicate? ] reject ;
MEMO: article-words ( name -- words )
article-content [ element-value ] map " " join search-words
- [ [ digit? ] all? not ] filter
+ [ [ digit? ] all? ] reject
[ [ { [ letter? ] [ digit? ] } 1|| not ] trim ] map! harvest ;
: (search-articles) ( string -- seq' )
natural-sort
[ [ class? ] filter describe-classes ]
[
- [ [ class? ] [ symbol? ] bi and not ] filter
+ [ [ class? ] [ symbol? ] bi and ] reject
[ parsing-word? ] partition
[ generic? ] partition
[ macro? ] partition
[ ] [ reset-cache ] unit-test
: run-template ( quot -- string )
- with-string-writer [ "\r\n\t" member? not ] filter
+ with-string-writer [ "\r\n\t" member? ] reject
"?>" split1 nip ; inline
: test-template ( name -- template )
[ "<ul><li>1</li><li>2</li><li>3</li></ul>" ] [
[
"test7" test-template call-template
- ] run-template [ blank? not ] filter
+ ] run-template [ blank? ] reject
] unit-test
TUPLE: person first-name last-name ;
[ "<table><tr><td>RBaxter</td><td>Unknown</td></tr><tr><td>Doug</td><td>Coleman</td></tr></table>" ] [
[
"test8" test-template call-template
- ] run-template [ blank? not ] filter
+ ] run-template [ blank? ] reject
] unit-test
[ ] [
[ "<table><tr><td>RBaxter</td><td>Unknown</td></tr><tr><td>Doug</td><td>Coleman</td></tr></table>" ] [
[
"test8" test-template call-template
- ] run-template [ blank? not ] filter
+ ] run-template [ blank? ] reject
] unit-test
[ ] [ 1 "id" set-value ] unit-test
[ "<table><tr><td>RBaxter</td><td>Unknown</td></tr></table>" ] [
[
"test11" test-template call-template
- ] run-template [ blank? not ] filter
+ ] run-template [ blank? ] reject
] unit-test
[ ] [
M: tuple error. describe ;
: vars-in-scope ( seq -- alist )
- [ [ global eq? not ] filter [ keys ] gather ] keep
+ [ [ global eq? ] reject [ keys ] gather ] keep
'[ dup _ assoc-stack ] H{ } map>assoc ;
: .vars ( -- )
: directory-entries ( path -- seq )
normalize-path
(directory-entries)
- [ name>> { "." ".." } member? not ] filter ;
+ [ name>> { "." ".." } member? ] reject ;
: directory-files ( path -- seq )
directory-entries [ name>> ] map! ;
server-addrs [ secure? ] filter random ;
: insecure-addr ( -- addrspec )
- server-addrs [ secure? not ] filter random ;
+ server-addrs [ secure? ] reject random ;
: server. ( threaded-server -- )
[ [ "=== " write name>> ] [ ] bi write-object nl ]
[ { } ] [
"localhost" <icmp> resolve-host
- [ [ icmp4? ] [ icmp6? ] bi or not ] filter
+ [ [ icmp4? ] [ icmp6? ] bi or ] reject
] unit-test
[ 0 0 (echelon) ] with-matrix ;
: nonzero-rows ( matrix -- matrix' )
- [ [ zero? ] all? not ] filter ;
+ [ [ zero? ] all? ] reject ;
: null/rank ( matrix -- null rank )
echelon dup length swap nonzero-rows length [ - ] keep ;
] unit-test
[ { } ] [
- simd-classes [ '[ _ new ] compile-call [ zero? ] all? not ] filter
+ simd-classes [ '[ _ new ] compile-call [ zero? ] all? ] reject
] unit-test
"== Checking -with constructors" print
MEMO: mime-db ( -- seq )
"vocab:mime/types/mime.types" ascii file-lines
- [ "#" head? not ] filter [ " \t" split harvest ] map harvest ;
+ [ "#" head? ] reject [ " \t" split harvest ] map harvest ;
: nonstandard-mime-types ( -- assoc )
H{
dup gl-program-shaders-length 2 *
0 int <ref>
over uint <c-array>
- [ glGetAttachedShaders ] keep [ zero? not ] filter ;
+ [ glGetAttachedShaders ] keep [ zero? ] reject ;
: delete-gl-program-only ( program -- )
glDeleteProgram ; inline
: filter-hidden ( seq -- seq )
#! Remove elements that produce no AST from sequence
- [ ebnf-ensure-not? not ] filter [ ebnf-ensure? not ] filter ;
+ [ ebnf-ensure-not? ] reject [ ebnf-ensure? ] reject ;
: syntax ( string -- parser )
#! Parses the string, ignoring white space, and
"vocab:porter-stemmer/test/voc.txt" utf8 file-lines
[ stem ] map
"vocab:porter-stemmer/test/output.txt" utf8 file-lines
- [ 2array ] 2map [ first2 = not ] filter
+ [ 2array ] 2map [ first2 = ] reject
] unit-test
: pprint-sections ( block advancer -- )
[
- sections>> [ line-break? not ] filter
+ sections>> [ line-break? ] reject
unclip-slice pprint-section
] dip
[ [ pprint-section ] bi ] curry each ; inline
dup
[ simples>> ] [ not-simples>> ] [ and>> ] tri
3append or-class boa
- '[ [ _ class-member? not ] filter ] change-integers ;
+ '[ [ _ class-member? ] reject ] change-integers ;
: answer-ands ( partition -- partition' )
dup [ integers>> ] [ not-simples>> ] [ simples>> ] tri 3append
: find-transitions ( dfa-state nfa -- next-dfa-state )
transitions>>
'[ _ at keys [ condition-states ] map concat ] gather
- [ tagged-epsilon? not ] filter ;
+ [ tagged-epsilon? ] reject ;
: add-todo-state ( state visited-states new-states -- )
2over ?adjoin [ nip push ] [ 3drop ] if ;
: new-transitions ( transitions -- assoc ) ! assoc is class, partition
values [ keys ] gather
- [ tagged-epsilon? not ] filter
+ [ tagged-epsilon? ] reject
class-partitions ;
: get-transitions ( partition state-transitions -- next-states )
: simple ( str -- simple )
! Alternatively, first collation key level?
- >case-fold [ " \t_" member? not ] filter ;
+ >case-fold [ " \t_" member? ] reject ;
: simple-table ( seq -- table )
[ [ simple ] keep ] H{ } map>assoc ;
: seeing-implementors ( class -- seq )
dup implementors
- [ [ reader? ] [ writer? ] bi or not ] filter
+ [ [ reader? ] [ writer? ] bi or ] reject
[ lookup-method ] with map
natural-sort ;
! Make sure all primitives are covered
[ { } ] [
all-words [ primitive? ] filter
- [ "default-output-classes" word-prop not ] filter
- [ "special" word-prop not ] filter
- [ "shuffle" word-prop not ] filter
+ [ "default-output-classes" word-prop ] reject
+ [ "special" word-prop ] reject
+ [ "shuffle" word-prop ] reject
] unit-test
{ 1 0 } [ [ drop ] each ] must-infer-as
: defs-to-crossref ( -- seq )
[
all-words
- [ [ generic? not ] filter ]
+ [ [ generic? ] reject ]
[ [ subwords ] map concat ] bi
all-articles [ >link ] map
GENERIC: smart-usage ( defspec -- seq )
-M: object smart-usage usage [ irrelevant? not ] filter ;
+M: object smart-usage usage [ irrelevant? ] reject ;
M: method smart-usage "method-generic" word-prop smart-usage ;
[ "No usages." print ] [ sorted-definitions. ] if-empty ;
: vocab-xref ( vocab quot: ( defspec -- seq ) -- vocabs )
- [ [ vocab-name ] [ words [ generic? not ] filter ] bi ] dip map
+ [ [ vocab-name ] [ words [ generic? ] reject ] bi ] dip map
[
[ [ word? ] [ generic? not ] bi and ] filter [
dup method?
: strip-word-defs ( words -- )
"Stripping symbolic word definitions" show
- [ "no-def-strip" word-prop not ] filter
+ [ "no-def-strip" word-prop ] reject
[ [ ] >>def drop ] each ;
: strip-word-props ( stripped-props words -- )
:: (collect-subtrees) ( samples max-depth depth child-quot: ( samples -- child ) -- children )
max-depth depth > [
- samples [ sample-callstack leaf-callstack? not ] filter
+ samples [ sample-callstack leaf-callstack? ] reject
[ f ] [ child-quot call ] if-empty
] [ f ] if ; inline
:: collect-flat ( samples -- flat )
IH{ } clone :> per-word-samples
samples [| sample |
- sample sample-callstack members [ ignore-word? not ] filter [
+ sample sample-callstack members [ ignore-word? ] reject [
per-word-samples sample counts+at
] each
] each
: interesting-words ( vocab -- array )
words
- [ { [ "help" word-prop ] [ predicate? ] } 1|| not ] filter
+ [ { [ "help" word-prop ] [ predicate? ] } 1|| ] reject
natural-sort ;
: interesting-words. ( vocab -- )
[ descent>> ] map ?supremum ;
: max-graphics-height ( seq -- y )
- [ ascent>> not ] filter [ height>> ] map ?supremum 0 or ;
+ [ ascent>> ] reject [ height>> ] map ?supremum 0 or ;
:: combine-metrics ( graphics-height ascent descent cap-height -- ascent' descent' )
ascent [
[ [ length 1 - dup 1 - ] keep exchange ] [ drop ] if ;
: unregister-window ( handle -- )
- windows [ [ first = not ] with filter ] change-global ;
+ windows [ [ first = ] with reject ] change-global ;
: raised-window ( world -- )
windows get-global
[
"×" split
[ [ blank? ] trim hex> ] map
- [ { f 0 } member? not ] filter
+ [ { f 0 } member? ] reject
>string
] map
harvest
] { } map-as concat ;\r
\r
: append-weights ( weights quot -- )\r
- [ [ ignorable?>> not ] filter ] dip\r
- map [ zero? not ] filter % 0 , ; inline\r
+ [ [ ignorable?>> ] reject ] dip\r
+ map [ zero? ] reject % 0 , ; inline\r
\r
: variable-weight ( weight -- )\r
dup ignorable?>> [ primary>> ] [ drop 0xFFFF ] if , ;\r
: exclusions ( -- set )
exclusions-file utf8 file-lines
[ "#" split1 drop [ blank? ] trim-tail hex> ] map
- [ 0 = not ] filter ;
+ [ 0 = ] reject ;
: remove-exclusions ( alist -- alist )
exclusions unique assoc-diff ;
: process-compatibility ( data -- hash )
(process-decomposed)
[ dup first* [ first2 rest 2array ] unless ] map
- [ second empty? not ] filter
+ [ second empty? ] reject
>hashtable chain-decomposed ;
: process-combining ( data -- hash )
} cleave
combine-map keys [ 2ch> nip ] map
-[ combining-class not ] filter
+[ combining-class ] reject
[ 0 swap class-map set-at ] each
load-special-casing special-casing swap assoc-union! drop
[ CHAR: - 8 ] dip insert-nth ;
: string>uuid ( string -- n )
- [ CHAR: - = not ] filter hex> ;
+ [ CHAR: - = ] reject hex> ;
PRIVATE>
[ ensure-vocab-root ] dip\r
[ ((child-vocabs-recursive)) ] { } make ;\r
\r
-: no-rooted ( seq -- seq' ) [ find-vocab-root not ] filter ;\r
+: no-rooted ( seq -- seq' ) [ find-vocab-root ] reject ;\r
\r
: one-level-only? ( name prefix -- ? )\r
?head [ "." split1 nip not ] [ drop f ] if ;\r
\r
PRIVATE>\r
\r
-: no-prefixes ( seq -- seq' ) [ vocab-prefix? not ] filter ;\r
+: no-prefixes ( seq -- seq' ) [ vocab-prefix? ] reject ;\r
\r
: convert-prefixes ( seq -- seq' )\r
[ dup vocab-prefix? [ name>> <vocab-link> ] when ] map ;\r
[ vocab-prefix? ] partition\r
[\r
[ vocab-name ] map fast-set\r
- '[ name>> _ in? not ] filter\r
+ '[ name>> _ in? ] reject\r
convert-prefixes\r
] keep\r
append ;\r
\r
: (load-from-root) ( root prefix -- failures )\r
vocabs-in-root/prefix\r
- [ don't-load? not ] filter no-prefixes\r
+ [ don't-load? ] reject no-prefixes\r
require-all ;\r
\r
: load-from-root ( root prefix -- )\r
} 1|| ;
: filter-don't-load ( vocabs -- vocabs' )
- [ vocab-name don't-load? not ] filter ;
+ [ vocab-name don't-load? ] reject ;
: don't-test? ( vocab -- ? )
vocab-tags "not tested" swap member? ;
: filter-don't-test ( vocabs -- vocabs' )
- [ don't-test? not ] filter ;
+ [ don't-test? ] reject ;
TUPLE: unsupported-platform vocab requires ;
[ vocab-name ] sort-with ;
: pprint-using ( seq -- )
- [ "syntax" lookup-vocab = not ] filter
+ [ "syntax" lookup-vocab = ] reject
sort-vocabs [
\ USING: pprint-word
[ pprint-vocab ] each
] with-pprint ;
: filter-interesting ( seq -- seq' )
- [ [ vocab? ] [ extra-words? ] bi or not ] filter ;
+ [ [ vocab? ] [ extra-words? ] bi or ] reject ;
PRIVATE>
V{ } clone modified-sources set
V{ } clone modified-docs set
- child-vocabs [ ".private" tail? not ] filter [
+ child-vocabs [ ".private" tail? ] reject [
[
[
[ modified-sources ]
SYMBOL: windows-messages
"windows.messages" words
-[ name>> "windows-message" head? not ] filter
+[ name>> "windows-message" head? ] reject
[ dup execute swap ] { } map>assoc
windows-messages set-global
[ ] [ "<?xml version='1.0'?><!-- declarations for <head> & <body> --><foo/>" string>xml drop ] unit-test
: first-thing ( seq -- elt )
- [ "" = not ] filter first ;
+ [ "" = ] reject first ;
[ T{ element-decl f "br" "EMPTY" } ] [ "<!ELEMENT br EMPTY>" string>dtd directives>> first-thing ] unit-test
[ T{ element-decl f "p" "(#PCDATA|emph)*" } ] [ "<!ELEMENT p (#PCDATA|emph)*>" string>dtd directives>> first-thing ] unit-test
: ?filter-children ( children -- no-whitespace )\r
xml-pprint? get [\r
[ dup string? [ [ blank? ] trim ] when ] map\r
- [ "" = not ] filter\r
+ [ "" = ] reject\r
] when ;\r
\r
PRIVATE>\r
assoc>> >alist ;
: (keyword-map-no-word-sep) ( assoc -- str )
- keys combine [ alpha? not ] filter natural-sort ;
+ keys combine [ alpha? ] reject natural-sort ;
: keyword-map-no-word-sep* ( keyword-map -- str )
dup no-word-sep>> [ ] [
: check-classoids ( members -- members )
dup [ classoid? ] all?
- [ [ classoid? not ] filter not-classoids ] unless ;
+ [ [ classoid? ] reject not-classoids ] unless ;
ERROR: not-a-classoid object ;
: <anonymous-union> ( members -- classoid )
check-classoids
- [ null eq? not ] filter set-members
+ [ null eq? ] reject set-members
dup length 1 = [ first ] [ sort-classes f like anonymous-union boa ] if ;
M: anonymous-union rank-class drop 6 ;
M: sequence fast-set >hash-set ;
M: sequence duplicates
- dup length <hash-set> [ ?adjoin not ] curry filter ;
+ dup length <hash-set> [ ?adjoin ] curry reject ;
M: sequence all-unique?
dup length <hash-set> [ ?adjoin ] curry all? ;
[ ] [ 1000 iota [ dup sq ] H{ } map>assoc "testhash" set ] unit-test
[ V{ } ]
-[ 1000 iota [ dup sq swap "testhash" get at = not ] filter ]
+[ 1000 iota [ dup sq swap "testhash" get at = ] reject ]
unit-test
[ t ]
[ 3 ] [ 2 "lápis" >utf8-index ] unit-test
-[ V{ } ] [ 100000 iota [ [ code-point-length ] [ 1string utf8 encode length ] bi = not ] filter ] unit-test
+[ V{ } ] [ 100000 iota [ [ code-point-length ] [ 1string utf8 encode length ] bi = ] reject ] unit-test
[ { CHAR: replacement-character } ] [ { 0b110,00000 0b10,000000 } decode-utf8-w/stream ] unit-test
[ { CHAR: replacement-character } ] [ { 0b110,00001 0b10,111111 } decode-utf8-w/stream ] unit-test
] [ create-in ] if ;
: ignore-forwards ( seq -- seq' )
- [ forward-reference? not ] filter ;
+ [ forward-reference? ] reject ;
: private? ( word -- ? ) vocabulary>> ".private" tail? ;
USING: source-files tools.test assocs sequences strings
namespaces kernel ;
-[ { } ] [ source-files get keys [ string? not ] filter ] unit-test
+[ { } ] [ source-files get keys [ string? ] reject ] unit-test
SYNTAX: SUBROUTINE:
f current-library get scan-token ";" parse-tokens
- [ "()" subseq? not ] filter define-fortran-function ;
+ [ "()" subseq? ] reject define-fortran-function ;
SYNTAX: FUNCTION:
scan-token current-library get scan-token ";" parse-tokens
- [ "()" subseq? not ] filter define-fortran-function ;
+ [ "()" subseq? ] reject define-fortran-function ;
SYNTAX: LIBRARY:
scan-token
: elf-nm ( path -- )
[
sections dup ".symtab" find-section
- symbols [ name>> empty? not ] filter
+ symbols [ name>> empty? ] reject
[ print-symbol ] with each
] with-mapped-elf ;
MEMO: ip-db ( -- seq )
download-db ascii file-lines
- [ "#" head? not ] filter "\n" join string>csv
+ [ "#" head? ] reject "\n" join string>csv
[ parse-ip-entry ] map ;
: filter-overlaps ( alist -- alist' )
ExecName |
PathName)
-Tokens = Token* => [[ [ comment? not ] filter ]]
+Tokens = Token* => [[ [ comment? ] reject ]]
Program = Tokens Spaces !(.) => [[ parse-proc ]]
dup length 1 = [ first swap prefix ] [ [ ] 2sequence ] if ;
: uniform-tuple-texture-accessors ( uniform-type -- accessors )
- all-uniform-tuple-slots [ uniform-type>> uniform-type-texture-units zero? not ] filter
+ all-uniform-tuple-slots [ uniform-type>> uniform-type-texture-units zero? ] reject
[ uniform-slot-texture-accessor ] map ;
: uniform-texture-accessors ( uniform-type dim -- accessors )
} 3cleave ;
: true-subclasses ( class -- seq )
- [ subclasses ] keep [ = not ] curry filter ;
+ [ subclasses ] keep [ = ] curry reject ;
PRIVATE>
! Random salt is formed by ascii characters
! between 33 and 126
: available-chars ( -- seq )
- 33 126 [a,b] [ CHAR: : = not ] filter ;
+ 33 126 [a,b] [ CHAR: : = ] reject ;
PRIVATE>
[ bl bl bl bl [ write "=" write ] [ write bl ] bi* nl ] assoc-each ;
: form. ( vector -- )
- [ closing?>> not ] filter
+ [ closing?>> ] reject
[
{
{ [ dup name>> "form" = ]
] [ drop ] if* ;
: define-irc-class ( class params -- )
- [ { ":" "_" } member? not ] filter
+ [ { ":" "_" } member? ] reject
[ irc-message ] dip define-tuple-class ;
: define-irc-parameter-slots ( class params -- )
lintable-words load-definitions
! Remove words that are their own definition
- [ [ [ def>> ] [ 1quotation ] bi = not ] filter ] assoc-map
+ [ [ [ def>> ] [ 1quotation ] bi = ] reject ] assoc-map
! Add manual definitions
manual-substitutions over '[ _ push-at ] assoc-each
M: sequence run-lint ( seq -- seq )
[ dup lint ] { } map>assoc trim-self
- [ second empty? not ] filter filter-symbols ;
+ [ second empty? ] reject filter-symbols ;
M: word run-lint ( word -- seq ) 1array run-lint ;
[ symtab_command? ] filter ; inline
: read-array-string ( uchar-array -- string )
- ascii decode [ 0 = not ] filter ;
+ ascii decode [ 0 = ] reject ;
: segment-sections ( segment-command -- sections )
{
:: do-step ( errors summary-file details-file -- )
errors
- [ error-type +linkage-error+ eq? not ] filter
+ [ error-type +linkage-error+ eq? ] reject
[ file>> ] map members natural-sort summary-file to-file
errors details-file utf8 [ errors. ] with-file-writer ;
[ <clumps> ] [ '[ _ count ] map ] bi* ; inline
: nonzero ( seq -- seq' )
- [ zero? not ] filter ;
+ [ zero? ] reject ;
: bartlett ( n -- seq )
dup 1 <= [ 1 = [ 1 1array ] [ { } ] if ] [
0 [ dup fp-nan? [ drop ] [ + ] if ] binary-reduce ;
: nan-min ( seq -- n )
- [ fp-nan? not ] filter infimum ;
+ [ fp-nan? ] reject infimum ;
: nan-max ( seq -- n )
- [ fp-nan? not ] filter supremum ;
+ [ fp-nan? ] reject supremum ;
: fill-nans ( seq -- newseq )
[ first ] keep [
"Example:"
{ $code
"\"/etc/passwd\" ascii file-lines"
- "[ \"#\" head? not ] filter"
+ "[ \"#\" head? ] reject"
"[ \":\" split first ] map"
"."
}
}
{ $slide "Words"
{ "We can define new words with " { $snippet ": name ... ;" } " syntax" }
- { $code ": remove-comments ( lines -- lines' )" " [ \"#\" head? not ] filter ;" }
+ { $code ": remove-comments ( lines -- lines' )" " [ \"#\" head? ] reject ;" }
{ "Words are grouped into " { $emphasis "vocabularies" } }
{ $link "vocab-index" }
"Libraries and applications are vocabularies"
}
{ $slide "Constructing quotations"
{ "Suppose we want a " { $snippet "remove-comments*" } " word" }
- { $code ": remove-comments* ( lines string -- lines' )" " [ ??? head? not ] filter ;" }
+ { $code ": remove-comments* ( lines string -- lines' )" " [ ??? head? ] reject ;" }
{ "We use " { $link POSTPONE: '[ } " instead of " { $link POSTPONE: [ } }
{ "Create “holes” with " { $link _ } }
"Holes filled in left to right when quotation pushed on the stack"
}
{ $slide "Constructing quotations"
- { $code ": remove-comments* ( lines string -- lines' )" " '[ _ head? not ] filter ;" "" ": remove-comments ( lines -- lines' )" " \"#\" remove-comments* ;" }
+ { $code ": remove-comments* ( lines string -- lines' )" " '[ _ head? ] reject ;" "" ": remove-comments ( lines -- lines' )" " \"#\" remove-comments* ;" }
{ { $link @ } " inserts a quotation" }
{ $code ": replicate ( n quot -- seq )" " '[ drop @ ] map ;" }
{ $code "10 [ 1 10 [a,b] random ] replicate ." }
<PRIVATE
: source-004 ( -- seq )
- 100 999 [a,b] [ 10 divisor? not ] filter ;
+ 100 999 [a,b] [ 10 divisor? ] reject ;
: max-palindrome ( seq -- palindrome )
natural-sort [ palindrome? ] find-last nip ;
[ "Topological sort failed" throw ] [ first ] if-empty ;
: remove-source ( seq elt -- seq )
- [ swap member? not ] curry filter ;
+ [ swap member? ] curry reject ;
: (topological-sort) ( seq -- )
dup length 1 > [
[ <resolv.conf> ] dip
utf8 file-lines
[ [ blank? ] trim ] map harvest
- [ "#" head? not ] filter
+ [ "#" head? ] reject
[ parse-resolv.conf-line ] each ;
: default-resolv.conf ( -- resolv.conf )
: normalize-robots.txt ( string -- sitemaps seq )
string-lines
[ [ blank? ] trim ] map
- [ "#" head? not ] filter harvest
+ [ "#" head? ] reject harvest
[ ":" split1 [ [ blank? ] trim ] bi@ [ >lower ] dip ] { } map>assoc
[ first "sitemap" = ] partition [ values ] dip
[
[ empty? not ] swap filter-as ;
: harvest! ( seq -- newseq )
- [ empty? not ] filter! ;
+ [ empty? ] reject! ;
: head-as ( seq n exemplar -- seq' )
[ head-slice ] [ like ] bi* ; inline
{ { } } [
all-words [ normal? ] filter 50 sample
- [ [ word>gc-info-expected ] [ word>gc-info ] bi same-gc-info? not ] filter
+ [ [ word>gc-info-expected ] [ word>gc-info ] bi same-gc-info? ] reject
] unit-test
: base-pointer-groups-expected ( word -- seq )
[ "url" of ] [ "sig" of ] bi "&signature=" glue ;
: sanitize ( title -- title' )
- [ 0 31 between? not ] filter
- [ "\"#$%'*,./:;<>?^|~\\" member? not ] filter
+ [ 0 31 between? ] reject
+ [ "\"#$%'*,./:;<>?^|~\\" member? ] reject
200 short head ;
: download-video ( video-id -- )
[ 0 0 (echelon) ] with-matrix ;\r
\r
: nonzero-rows ( matrix -- matrix' )\r
- [ [ zero? ] all? not ] filter ;\r
+ [ [ zero? ] all? ] reject ;\r
\r
: null/rank ( matrix -- null rank )\r
echelon dup length swap nonzero-rows length [ - ] keep ;\r
: function-types-effect ( -- function types effect )
scan scan swap ")" parse-tokens
- [ "(" subseq? not ] filter swap parse-arglist ;
+ [ "(" subseq? ] reject swap parse-arglist ;
: prototype-string ( function types effect -- str )
[ [ cify-type ] map ] dip