--- /dev/null
+! Online help for the bit-sets vocabulary: one article plus per-word help.
+USING: help.markup help.syntax sequences math ;
+IN: bit-sets
+
+ARTICLE: "bit-sets" "Bit sets"
+"The " { $vocab-link "bit-sets" } " vocabulary implements bit-array-backed sets. Bitsets are efficient for implementing relatively dense sets whose members are in a contiguous range of integers starting from 0. One bit is required for each integer in this range in the underlying representation."
+"Bit sets are of the class"
+{ $subsection bit-set }
+"They can be instantiated with the word"
+{ $subsection <bit-set> } ;
+
+ABOUT: "bit-sets"
+
+HELP: bit-set
+{ $class-description "The class of bit-array-based sets. These implement the " { $link "sets" } "." } ;
+
+HELP: <bit-set>
+{ $values { "capacity" integer } { "bit-set" bit-set } }
+{ $description "Creates a new bit set with the given capacity. This set is initially empty and can contain as members integers between 0 and " { $snippet "capacity" } "-1." } ;
-USING: bit-sets tools.test bit-arrays ;
+USING: bit-sets tools.test sets kernel bit-arrays ;
IN: bit-sets.tests
+! Binary operations now go through the generic set protocol (union,
+! intersect, diff) and take/return bit-set tuples instead of raw
+! bit arrays, so the expected values are wrapped in T{ bit-set ... }.
-[ ?{ t f t f t f } ] [
- ?{ t f f f t f }
- ?{ f f t f t f } bit-set-union
+[ T{ bit-set f ?{ t f t f t f } } ] [
+ T{ bit-set f ?{ t f f f t f } }
+ T{ bit-set f ?{ f f t f t f } } union
] unit-test
-[ ?{ f f f f t f } ] [
- ?{ t f f f t f }
- ?{ f f t f t f } bit-set-intersect
+[ T{ bit-set f ?{ f f f f t f } } ] [
+ T{ bit-set f ?{ t f f f t f } }
+ T{ bit-set f ?{ f f t f t f } } intersect
] unit-test
-[ ?{ t f t f f f } ] [
- ?{ t t t f f f }
- ?{ f t f f t t } bit-set-diff
+[ T{ bit-set f ?{ t f t f f f } } ] [
+ T{ bit-set f ?{ t t t f f f } }
+ T{ bit-set f ?{ f t f f t t } } diff
] unit-test
+
+! subset? tests. NOTE(review): observed argument order in these tests is
+! t when every member of the SECOND set is contained in the FIRST —
+! confirm against the sets vocabulary of this era before relying on it.
+[ f ] [
+ T{ bit-set f ?{ t t t f f f } }
+ T{ bit-set f ?{ f t f f t t } } subset?
+] unit-test
+
+[ t ] [
+ T{ bit-set f ?{ t t t f f f } }
+ T{ bit-set f ?{ f t f f f f } } subset?
+] unit-test
+
+! A plain sequence may stand in for a set on one side.
+[ t ] [
+ { 0 1 2 }
+ T{ bit-set f ?{ f t f f f f } } subset?
+] unit-test
+
+[ f ] [
+ T{ bit-set f ?{ f t f f f f } }
+ T{ bit-set f ?{ t t t f f f } } subset?
+] unit-test
+
+[ f ] [
+ { 1 }
+ T{ bit-set f ?{ t t t f f f } } subset?
+] unit-test
+
+! members lists the set's elements in increasing order.
+[ V{ 0 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } members ] unit-test
+
+! set-like coerces a sequence into a bit-set of the given capacity;
+! the result is mutable via adjoin.
+[ t V{ 1 2 3 } ] [
+ { 1 2 } 5 <bit-set> set-like
+ [ bit-set? ] keep
+ 3 over adjoin
+ members
+] unit-test
+
+! adjoin fails on out-of-range or non-integer elements; delete does not.
+[ V{ 0 1 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } clone [ 1 swap adjoin ] keep members ] unit-test
+[ T{ bit-set f ?{ t f t f f t } } clone [ 9 swap adjoin ] keep members ] must-fail
+[ T{ bit-set f ?{ t f t f f t } } clone [ "foo" swap adjoin ] keep members ] must-fail
+
+[ V{ 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } clone [ 0 swap delete ] keep members ] unit-test
+[ V{ 0 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } clone [ 1 swap delete ] keep members ] unit-test
+[ V{ 0 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } clone [ 9 swap delete ] keep members ] unit-test
+[ V{ 0 2 5 } ] [ T{ bit-set f ?{ t f t f f t } } clone [ "foo" swap delete ] keep members ] unit-test
+
+! clone copies the underlying table: mutating the clone leaves the
+! original untouched.
+[ T{ bit-set f ?{ f } } T{ bit-set f ?{ t } } ]
+[ 1 <bit-set> dup clone 0 over adjoin ] unit-test
! Copyright (C) 2009 Slava Pestov.
! See http://factorcode.org/license.txt for BSD license.
-USING: kernel accessors sequences byte-arrays bit-arrays math hints ;
+USING: kernel accessors sequences byte-arrays bit-arrays math hints sets ;
IN: bit-sets
+! A set backed by a bit array: integer n is a member iff nth n of the
+! table is t. The table slot is read-only (bits mutate, the array does
+! not get replaced).
+TUPLE: bit-set { table bit-array read-only } ;
+
+! Creates an empty bit set able to hold members in [0, capacity).
+: <bit-set> ( capacity -- bit-set )
+ <bit-array> bit-set boa ;
+
+! bit-set participates in the generic set protocol from the sets vocab.
+INSTANCE: bit-set set
+
+! Membership test: non-integers are never members; ?nth yields f for
+! indices outside the table, so out-of-range integers are also non-members.
+M: bit-set in?
+ over integer? [ table>> ?nth ] [ 2drop f ] if ; inline
+
+! Adds elt by storing t at its index in the table.
+M: bit-set adjoin
+ ! This is allowed to crash when the elt couldn't go in the set
+ ! (non-integer or out-of-range elt makes set-nth throw).
+ [ t ] 2dip table>> set-nth ;
+
+! Removes elt by storing f at its index, silently ignoring elements
+! that could never have been members (non-integers, out-of-range).
+M: bit-set delete
+ ! This isn't allowed to crash if the elt wasn't in the set
+ over integer? [
+ table>> 2dup bounds-check? [
+ [ f ] 2dip set-nth
+ ] [ 2drop ] if
+ ] [ 2drop ] if ;
+
+! If you do binary set operations with a bitset, it's expected
+! that the other thing can also be represented as a bitset
+! of the same length.
<PRIVATE
: bit-set-map ( seq1 seq2 quot -- seq )
] dip 2map
] 3bi bit-array boa ; inline
+! Coerces set1 to a bit set shaped like set2 (via set-like, which may
+! crash on unrepresentable members) and extracts both tables.
+: (bit-set-op) ( set1 set2 -- table1 table2 )
+ [ set-like ] keep [ table>> ] bi@ ; inline
+
+! Combines the two tables element-wise with quot (see bit-set-map above)
+! and wraps the resulting bit array in a fresh bit-set.
+: bit-set-op ( set1 set2 quot: ( a b -- c ) -- bit-set )
+ [ (bit-set-op) ] dip bit-set-map bit-set boa ; inline
+
PRIVATE>
-: bit-set-union ( seq1 seq2 -- seq ) [ bitor ] bit-set-map ;
+! The old bit-set-* words become methods on the generic set words;
+! each is a bitwise operation on the underlying tables.
+M: bit-set union
+ [ bitor ] bit-set-op ;
-HINTS: bit-set-union bit-array bit-array ;
+M: bit-set intersect
+ [ bitand ] bit-set-op ;
-: bit-set-intersect ( seq1 seq2 -- seq ) [ bitand ] bit-set-map ;
+M: bit-set diff
+ [ bitnot bitand ] bit-set-op ;
-HINTS: bit-set-intersect bit-array bit-array ;
+! True when intersecting leaves the kept (second) set unchanged, i.e.
+! the intersection equals the second set under tuple equality.
+M: bit-set subset?
+ [ intersect ] keep = ;
-: bit-set-diff ( seq1 seq2 -- seq ) [ bitnot bitand ] bit-set-map ;
+! Enumerates 0 up to the table length and keeps the members, so the
+! result is in increasing order.
+M: bit-set members
+ [ table>> length iota ] keep [ in? ] curry filter ;
-HINTS: bit-set-diff bit-array bit-array ;
+! Reuses the set as-is when it is already a bit-set of matching
+! capacity; otherwise rebuilds one by adjoining each member.
+M: bit-set set-like
+ ! This crashes if there are keys that can't be put in the bit set
+ over bit-set? [ 2dup [ table>> ] bi@ length = ] [ f ] if
+ [ drop ] [
+ [ members ] dip table>> length <bit-set>
+ [ [ adjoin ] curry each ] keep
+ ] if ;
-: bit-set-subset? ( seq1 seq2 -- ? ) dupd bit-set-intersect = ;
\ No newline at end of file
+! Deep-copies the table so the clone can be mutated independently.
+M: bit-set clone
+ table>> clone bit-set boa ;
compiler.cfg.comparisons
compiler.cfg.instructions
compiler.cfg.representations.preferred ;
+FROM: namespaces => set ;
IN: compiler.cfg.alias-analysis
! We try to eliminate redundant slot operations using some simple heuristics.
histories get
values [
values [ [ store? ] filter [ insn#>> ] map ] map concat
- ] map concat unique
+ ] map concat fast-set
live-stores set ;
GENERIC: eliminate-dead-stores* ( insn -- insn' )
: (eliminate-dead-stores) ( insn -- insn' )
dup insn-slot# [
- insn# get live-stores get key? [
+ insn# get live-stores get in? [
drop f
] unless
] when ;
! Copyright (C) 2008, 2009 Slava Pestov, Daniel Ehrenberg.
! See http://factorcode.org/license.txt for BSD license.
-USING: accessors assocs sets kernel namespaces sequences
+USING: accessors assocs kernel namespaces sequences
compiler.cfg.instructions compiler.cfg.def-use
-compiler.cfg.rpo compiler.cfg.predecessors ;
+compiler.cfg.rpo compiler.cfg.predecessors hash-sets sets ;
+FROM: namespaces => set ;
IN: compiler.cfg.dce
! Maps vregs to sequences of vregs
SYMBOL: live-vregs
: live-vreg? ( vreg -- ? )
- live-vregs get key? ;
+ live-vregs get in? ;
! vregs which are the result of an allocation
SYMBOL: allocations
: allocation? ( vreg -- ? )
- allocations get key? ;
+ allocations get in? ;
: init-dead-code ( -- )
H{ } clone liveness-graph set
- H{ } clone live-vregs set
- H{ } clone allocations set ;
+ HS{ } clone live-vregs set
+ HS{ } clone allocations set ;
GENERIC: build-liveness-graph ( insn -- )
dup src>> setter-liveness-graph ;
M: ##allot build-liveness-graph
- [ dst>> allocations get conjoin ] [ call-next-method ] bi ;
+ [ dst>> allocations get adjoin ] [ call-next-method ] bi ;
M: insn build-liveness-graph
dup defs-vreg dup [ add-edges ] [ 2drop ] if ;
: (record-live) ( vregs -- )
[
- dup live-vregs get key? [ drop ] [
- [ live-vregs get conjoin ]
+ dup live-vreg? [ drop ] [
+ [ live-vregs get adjoin ]
[ liveness-graph get at (record-live) ]
bi
] if
namespaces quotations sequences sets slots words
compiler.cfg.instructions compiler.cfg.instructions.syntax
compiler.cfg.rpo ;
+FROM: namespaces => set ;
+FROM: sets => members ;
IN: compiler.cfg.def-use
GENERIC: defs-vreg ( insn -- vreg/f )
cfg [| block |
block instructions>> [
dup ##phi?
- [ inputs>> [ use conjoin-at ] assoc-each ]
- [ uses-vregs [ block swap use conjoin-at ] each ]
+ [ inputs>> [ use adjoin-at ] assoc-each ]
+ [ uses-vregs [ block swap use adjoin-at ] each ]
if
] each
] each-basic-block
- use [ keys ] assoc-map uses set ;
+ use [ members ] assoc-map uses set ;
USING: accessors assocs combinators sets math fry kernel math.order
dlists deques vectors namespaces sequences sorting locals
compiler.cfg.rpo compiler.cfg.predecessors ;
+FROM: namespaces => set ;
IN: compiler.cfg.dominance
! Reference:
[ accum push ]
[ dom-children work-list push-all-front ] bi
] slurp-deque
- accum ;
\ No newline at end of file
+ accum ;
compiler.cfg.linear-scan.allocation
compiler.cfg.linear-scan.allocation.state
compiler.cfg.linear-scan.live-intervals ;
+FROM: namespaces => set ;
IN: compiler.cfg.linear-scan.assignment
! This contains both active and inactive intervals; any interval
USING: accessors kernel sequences sets arrays math strings fry
namespaces prettyprint compiler.cfg.linear-scan.live-intervals
compiler.cfg.linear-scan.allocation compiler.cfg assocs ;
+FROM: namespaces => set ;
IN: compiler.cfg.linear-scan.debugger
: check-linear-scan ( live-intervals machine-registers -- )
-IN: compiler.cfg.linear-scan.tests
USING: tools.test random sorting sequences sets hashtables assocs
kernel fry arrays splitting namespaces math accessors vectors locals
math.order grouping strings strings.private classes layouts
compiler.cfg.linear-scan.allocation.splitting
compiler.cfg.linear-scan.allocation.spilling
compiler.cfg.linear-scan.debugger ;
+FROM: namespaces => set ;
+IN: compiler.cfg.linear-scan.tests
check-allocation? on
check-numbering? on
! See http://factorcode.org/license.txt for BSD license.
USING: accessors assocs deques dlists kernel make sorting
namespaces sequences combinators combinators.short-circuit
-fry math sets compiler.cfg.rpo compiler.cfg.utilities
-compiler.cfg.loop-detection compiler.cfg.predecessors ;
+fry math compiler.cfg.rpo compiler.cfg.utilities
+compiler.cfg.loop-detection compiler.cfg.predecessors
+sets hash-sets ;
+FROM: namespaces => set ;
IN: compiler.cfg.linearization.order
! This is RPO except loops are rotated. Based on SBCL's src/compiler/control.lisp
SYMBOLS: work-list loop-heads visited ;
-: visited? ( bb -- ? ) visited get key? ;
+: visited? ( bb -- ? ) visited get in? ;
: add-to-work-list ( bb -- )
- dup visited get key? [ drop ] [
+ dup visited? [ drop ] [
work-list get push-back
] if ;
: init-linearization-order ( cfg -- )
<dlist> work-list set
- H{ } clone visited set
+ HS{ } clone visited set
entry>> add-to-work-list ;
: (find-alternate-loop-head) ( bb -- bb' )
: process-block ( bb -- )
dup visited? [ drop ] [
[ , ]
- [ visited get conjoin ]
+ [ visited get adjoin ]
[ sorted-successors [ process-successor ] each ]
tri
] if ;
dup linear-order>> [ ] [
dup (linearization-order)
>>linear-order linear-order>>
- ] ?if ;
\ No newline at end of file
+ ] ?if ;
hashtables dlists compiler.cfg.def-use compiler.cfg.instructions
compiler.cfg.rpo compiler.cfg.liveness compiler.cfg.utilities
compiler.cfg.predecessors ;
+FROM: namespaces => set ;
IN: compiler.cfg.liveness.ssa
! TODO: merge with compiler.cfg.liveness
: live-in? ( vreg bb -- ? ) live-in key? ;
-: live-out? ( vreg bb -- ? ) live-out key? ;
\ No newline at end of file
+: live-out? ( vreg bb -- ? ) live-out key? ;
! See http://factorcode.org/license.txt for BSD license.
USING: accessors assocs combinators deques dlists fry kernel
namespaces sequences sets compiler.cfg compiler.cfg.predecessors ;
+FROM: namespaces => set ;
IN: compiler.cfg.loop-detection
TUPLE: natural-loop header index ends blocks ;
compiler.cfg.utilities compiler.cfg compiler.cfg.rpo
compiler.cfg.instructions compiler.cfg.def-use ;
FROM: compiler.cfg.instructions.syntax => insn-def-slot insn-use-slots insn-temp-slots scalar-rep ;
+FROM: namespaces => set ;
IN: compiler.cfg.representations.preferred
GENERIC: defs-vreg-rep ( insn -- rep/f )
compiler.cfg.loop-detection
compiler.cfg.renaming.functor
compiler.cfg.representations.preferred ;
+FROM: namespaces => set ;
IN: compiler.cfg.representations
! Virtual register representation selection.
! See http://factorcode.org/license.txt for BSD license.
USING: kernel accessors namespaces make math sequences sets
assocs fry compiler.cfg compiler.cfg.instructions ;
+FROM: namespaces => set ;
IN: compiler.cfg.rpo
SYMBOL: visited
compiler.cfg.renaming
compiler.cfg.renaming.functor
compiler.cfg.ssa.construction.tdmsc ;
+FROM: namespaces => set ;
IN: compiler.cfg.ssa.construction
! The phi placement algorithm is implemented in
[ compute-defs compute-phi-nodes insert-phi-nodes ]
[ rename ]
[ ]
- } cleave ;
\ No newline at end of file
+ } cleave ;
compiler.cfg.dominance compiler.cfg.predecessors
compiler.cfg.ssa.construction.tdmsc kernel namespaces sequences
tools.test vectors sets ;
+FROM: namespaces => set ;
IN: compiler.cfg.ssa.construction.tdmsc.tests
: test-tdmsc ( -- )
[ ] [ test-tdmsc ] unit-test
[ V{ 2 } ] [ { 2 3 4 5 } [ get ] map merge-set [ number>> ] map ] unit-test
-[ V{ } ] [ { 0 1 6 7 } [ get ] map merge-set ] unit-test
\ No newline at end of file
+[ V{ } ] [ { 0 1 6 7 } [ get ] map merge-set ] unit-test
USING: accessors arrays assocs bit-arrays bit-sets fry
hashtables hints kernel locals math namespaces sequences sets
compiler.cfg compiler.cfg.dominance compiler.cfg.rpo ;
+FROM: namespaces => set ;
IN: compiler.cfg.ssa.construction.tdmsc
! TDMSC-I algorithm from "A Practical and Fast Iterative Algorithm for
SYMBOLS: visited merge-sets levels again? ;
: init-merge-sets ( cfg -- )
- post-order dup length '[ _ <bit-array> ] H{ } map>assoc merge-sets set ;
+ post-order dup length '[ _ <bit-set> ] H{ } map>assoc merge-sets set ;
: compute-levels ( cfg -- )
0 over entry>> associate [
: level ( bb -- n ) levels get at ; inline
-: set-bit ( bit-array n -- )
- [ t ] 2dip swap set-nth ;
-
: update-merge-set ( tmp to -- )
[ merge-sets get ] dip
'[
_
- [ merge-sets get at bit-set-union ]
- [ dupd number>> set-bit ]
+ [ merge-sets get at union ]
+ [ number>> over adjoin ]
bi
] change-at ;
[ [ predecessors>> ] keep ] dip
'[ _ 2dup j-edge? _ [ 2drop ] if ] each ; inline
-: visited? ( pair -- ? ) visited get key? ;
+: visited? ( pair -- ? ) visited get in? ;
: consistent? ( snode lnode -- ? )
- [ merge-sets get at ] bi@ swap bit-set-subset? ;
+ [ merge-sets get at ] bi@ subset? ;
: (process-edge) ( from to -- )
f walk [
: process-edge ( from to -- )
2dup 2array dup visited? [ 3drop ] [
- visited get conjoin
+ visited get adjoin
(process-edge)
] if ;
[ process-edge ] each-incoming-j-edge ;
: compute-merge-set-step ( bfo -- )
- visited get clear-assoc
+ HS{ } clone visited set
[ process-block ] each ;
: compute-merge-set-loop ( cfg -- )
loop ;
: (merge-set) ( bbs -- flags rpo )
- merge-sets get '[ _ at ] [ bit-set-union ] map-reduce
+ merge-sets get '[ _ at ] [ union ] map-reduce
cfg get reverse-post-order ; inline
-: filter-by ( flags seq -- seq' )
- [ drop ] selector [ 2each ] dip ;
-
-HINTS: filter-by { bit-array object } ;
-
PRIVATE>
: compute-merge-sets ( cfg -- )
needs-dominance
- H{ } clone visited set
+ HS{ } clone visited set
[ compute-levels ]
[ init-merge-sets ]
[ compute-merge-set-loop ]
tri ;
-: merge-set-each ( ... bbs quot: ( ... bb -- ... ) -- ... )
- [ (merge-set) ] dip '[
- swap _ [ drop ] if
- ] 2each ; inline
-
: merge-set ( bbs -- bbs' )
- (merge-set) filter-by ;
+ (merge-set) [ members ] dip nths ;
+
+: merge-set-each ( bbs quot: ( bb -- ) -- )
+ [ merge-set ] dip each ; inline
compiler.cfg.ssa.interference.live-ranges
compiler.cfg.utilities
compiler.utilities ;
+FROM: namespaces => set ;
IN: compiler.cfg.ssa.destruction
! Maps vregs to leaders.
compiler.cfg.dominance
compiler.cfg.def-use
compiler.cfg.instructions ;
+FROM: namespaces => set ;
IN: compiler.cfg.ssa.liveness
! Liveness checking on SSA IR, as described in
compiler.cfg.registers
compiler.cfg.stacks.height
compiler.cfg.parallel-copy ;
+FROM: namespaces => set ;
IN: compiler.cfg.stacks.local
! Local stack analysis. We build three sets for every basic block
: peek-set ( bb -- assoc ) peek-sets get at ;
: replace-set ( bb -- assoc ) replace-sets get at ;
-: kill-set ( bb -- assoc ) kill-sets get at ;
\ No newline at end of file
+: kill-set ( bb -- assoc ) kill-sets get at ;
USING: accessors assocs combinators.short-circuit
compiler.cfg.instructions compiler.cfg.rpo kernel namespaces
sequences sets ;
+FROM: namespaces => set ;
IN: compiler.cfg.write-barrier
SYMBOL: fresh-allocations
compiler.cfg.builder
compiler.codegen.fixup
compiler.utilities ;
+FROM: namespaces => set ;
IN: compiler.codegen
SYMBOL: insn-counts
compiler.tree.def-use
compiler.tree.recursive
compiler.tree.combinators ;
+FROM: namespaces => set ;
IN: compiler.tree.checker
! Check some invariants; this can help catch compiler bugs.
arrays combinators columns stack-checker.backend
stack-checker.branches compiler.tree compiler.tree.combinators
compiler.tree.dead-code.liveness compiler.tree.dead-code.simple ;
+FROM: namespaces => set ;
IN: compiler.tree.dead-code.branches
M: #if mark-live-values* look-at-inputs ;
dlists kernel sequences compiler.utilities words sets
stack-checker.branches compiler.tree compiler.tree.def-use
compiler.tree.combinators ;
+FROM: namespaces => set ;
IN: compiler.tree.dead-code.liveness
SYMBOL: work-list
stack-checker.branches
compiler.tree
compiler.tree.combinators ;
+FROM: namespaces => set ;
+FROM: sets => members ;
IN: compiler.tree.def-use
SYMBOL: def-use
M: #introduce node-uses-values drop f ;
M: #push node-uses-values drop f ;
-M: #phi node-uses-values phi-in-d>> concat remove-bottom prune ;
+M: #phi node-uses-values phi-in-d>> concat remove-bottom members ;
M: #declare node-uses-values drop f ;
M: #terminate node-uses-values [ in-d>> ] [ in-r>> ] bi append ;
M: #shuffle node-uses-values [ in-d>> ] [ in-r>> ] bi append ;
! See http://factorcode.org/license.txt for BSD license.
USING: sequences kernel fry vectors accessors namespaces assocs sets
stack-checker.branches compiler.tree compiler.tree.def-use ;
+FROM: namespaces => set ;
IN: compiler.tree.def-use.simplified
! Simplified def-use follows chains of copies.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors assocs namespaces sequences kernel math
combinators sets disjoint-sets fry stack-checker.values ;
+FROM: namespaces => set ;
IN: compiler.tree.escape-analysis.allocations
! A map from values to classes. Only for #introduce outputs
compiler.tree.def-use
compiler.tree.def-use.simplified
compiler.tree.late-optimizations ;
+FROM: namespaces => set ;
IN: compiler.tree.modular-arithmetic
! This is a late-stage optimization.
sets combinators.short-circuit sequences.private locals growable
stack-checker namespaces compiler.tree.propagation.info ;
FROM: math => float ;
+FROM: sets => set ;
IN: compiler.tree.propagation.transforms
\ equal? [
in-d>> first value-info literal>> {
{ V{ } [ [ drop { } 0 vector boa ] ] }
{ H{ } [ [ drop 0 <hashtable> ] ] }
+ { HS{ } [ [ drop f fast-set ] ] }
[ drop f ]
} case
] "custom-inlining" set-word-prop
[ drop f ] swap
[ literalize [ t ] ] { } map>assoc linear-case-quot
] [
- unique [ key? ] curry
+ tester
] if ;
\ member? [
\ at* [ at-quot ] 1 define-partial-eval
: diff-quot ( seq -- quot: ( seq' -- seq'' ) )
- tester '[ [ @ not ] filter ] ;
+ tester '[ [ [ @ not ] filter ] keep set-like ] ;
-\ diff [ diff-quot ] 1 define-partial-eval
+M\ set diff [ diff-quot ] 1 define-partial-eval
: intersect-quot ( seq -- quot: ( seq' -- seq'' ) )
- tester '[ _ filter ] ;
+ tester '[ [ _ filter ] keep set-like ] ;
-\ intersect [ intersect-quot ] 1 define-partial-eval
+M\ set intersect [ intersect-quot ] 1 define-partial-eval
: fixnum-bits ( -- n )
cell-bits tag-bits get - ;
! See http://factorcode.org/license.txt for BSD license.
USING: kernel assocs arrays namespaces accessors sequences deques fry
search-deques dlists combinators.short-circuit make sets compiler.tree ;
+FROM: namespaces => set ;
IN: compiler.tree.recursive
TUPLE: call-site tail? node label ;
math combinators namespaces urls.encoding xml.syntax xmode.code2html
xml.data arrays strings vectors xml.writer io.streams.string locals
unicode.categories ;
+FROM: namespaces => set ;
IN: farkup
SYMBOL: relative-link-prefix
furnace.auth.providers\r
furnace.auth.providers.db ;\r
FROM: assocs => change-at ;\r
+FROM: namespaces => set ;\r
IN: furnace.auth\r
\r
SYMBOL: logged-in-user\r
ARTICLE: "crossref-test-2" "Crossref test 2"
{ $markup-example { $subsection "crossref-test-1" } } ;
-[ V{ } ] [ "crossref-test-2" >link article-children ] unit-test
+[ { } ] [ "crossref-test-2" >link article-children ] unit-test
}
{ $heading "Other collections" }
{ $subsections
+ "sets"
"lists"
"disjoint-sets"
"interval-maps"
namespaces sequences sequences.deep sets sorting splitting
strings unicode.categories values vocabs vocabs.loader words
words.symbol summary debugger io ;
+FROM: sets => members ;
IN: help.lint.checks
ERROR: simple-lint-error message ;
: effect-values ( word -- seq )
stack-effect
[ in>> ] [ out>> ] bi append
- [ dup pair? [ first ] when effect>string ] map prune ;
+ [ dup pair? [ first ] when effect>string ] map members ;
: effect-effects ( word -- seq )
stack-effect in>> [
: check-see-also ( element -- )
\ $see-also swap elements [
- rest dup prune [ length ] bi@ assert=
+ rest all-unique? t assert=
] each ;
: vocab-exists? ( name -- ? )
sorting splitting strings vectors vocabs vocabs.loader words
words.symbol ;
FROM: prettyprint.sections => with-pprint ;
+FROM: namespaces => set ;
IN: help.markup
PREDICATE: simple-element < array
: elements ( elt-type element -- seq ) [ elements* ] { } make ;
: collect-elements ( element seq -- elements )
- swap '[ _ elements [ rest ] map concat ] map concat prune ;
+ swap '[ _ elements [ rest ] map concat ] gather ;
: <$link> ( topic -- element )
1array \ $link prefix ;
sequences strings io.styles vectors words quotations mirrors
splitting math.parser classes vocabs sets sorting summary
debugger continuations fry combinators ;
+FROM: namespaces => set ;
IN: inspector
SYMBOL: +number-rows+
IN_MOVED_FROM +rename-file-old+ ?flag
IN_MOVED_TO +rename-file-new+ ?flag
drop
- ] { } make prune ;
+ ] { } make members ;
: parse-event-name ( event -- name )
dup len>> zero?
GENERIC: defs-vars* ( seq form -- seq' )
-: defs-vars ( form -- vars ) { } [ defs-vars* ] reduce prune ;
+: defs-vars ( form -- vars ) { } [ defs-vars* ] reduce members ;
M: def defs-vars* local>> unquote suffix ;
GENERIC: uses-vars* ( seq form -- seq' )
-: uses-vars ( form -- vars ) { } [ uses-vars* ] reduce prune ;
+: uses-vars ( form -- vars ) { } [ uses-vars* ] reduce members ;
M: local-writer uses-vars* "local-reader" word-prop suffix ;
[ { 0 1/3 2/3 1 } ] [ 1 0 -1/3 <range> >array reverse ] unit-test
[ 100 ] [
- 1 100 [a,b] [ 2^ [1,b] ] map prune length
+ 1 100 [a,b] [ 2^ [1,b] ] map members length
] unit-test
sets unicode.categories compiler.units parser effects.parser
words quotations memoize accessors locals splitting
combinators.short-circuit generalizations ;
+FROM: namespaces => set ;
IN: peg
TUPLE: parse-result remaining ast ;
io.pathnames io.styles kernel make math math.order math.parser
namespaces prettyprint.config prettyprint.custom
prettyprint.sections prettyprint.stylesheet quotations sbufs
-sequences strings vectors words words.symbol ;
+sequences strings vectors words words.symbol hash-sets ;
+FROM: sets => members ;
IN: prettyprint.backend
M: effect pprint* effect>string "(" ")" surround text ;
M: tuple pprint-delims drop \ T{ \ } ;
M: wrapper pprint-delims drop \ W{ \ } ;
M: callstack pprint-delims drop \ CS{ \ } ;
+M: hash-set pprint-delims drop \ HS{ \ } ;
M: object >pprint-sequence ;
M: vector >pprint-sequence ;
M: hashtable >pprint-sequence >alist ;
M: wrapper >pprint-sequence wrapped>> 1array ;
M: callstack >pprint-sequence callstack>array ;
+M: hash-set >pprint-sequence members ;
: class-slot-sequence ( class slots -- sequence )
[ 1array ] [ [ f 2array ] dip append ] if-empty ;
M: hashtable pprint* pprint-object ;
M: curry pprint* pprint-object ;
M: compose pprint* pprint-object ;
+M: hash-set pprint* pprint-object ;
M: wrapper pprint*
{
parser prettyprint.backend prettyprint.config prettyprint.custom
prettyprint.sections quotations sequences sorting strings vocabs
vocabs.prettyprint words sets generic ;
+FROM: namespaces => set ;
IN: prettyprint
: with-use ( obj quot -- )
namespaces make sequences strings io.styles vectors words
prettyprint.config splitting classes continuations
accessors sets vocabs.parser combinators vocabs ;
+FROM: namespaces => set ;
IN: prettyprint.sections
! State
[ t ] [ 10000 [ iota 0 [ drop 187 random + ] reduce ] keep / 2 * 187 10 ~ ] unit-test
[ t ] [ 10000 [ iota 0 [ drop 400 random + ] reduce ] keep / 2 * 400 10 ~ ] unit-test
-[ t ] [ 1000 [ 400 random ] replicate prune length 256 > ] unit-test
+[ t ] [ 1000 [ 400 random ] replicate members length 256 > ] unit-test
[ f ] [ 0 random ] unit-test
[ { 1 2 } 3 sample ] [ too-many-samples? ] must-fail-with
-[ 3 ] [ { 1 2 3 4 } 3 sample prune length ] unit-test
-[ 99 ] [ 100 iota 99 sample prune length ] unit-test
+[ 3 ] [ { 1 2 3 4 } 3 sample members length ] unit-test
+[ 99 ] [ 100 iota 99 sample members length ] unit-test
[ ]
[ [ 100 random-bytes ] with-system-random drop ] unit-test
! Making classes into nested conditionals
-[ V{ 1 2 3 4 } ] [ T{ and-class f { 1 T{ not-class f 2 } T{ or-class f { 3 4 } } 2 } } class>questions ] unit-test
+[ { 1 2 3 4 } ] [ T{ and-class f { 1 T{ not-class f 2 } T{ or-class f { 3 4 } } 2 } } class>questions ] unit-test
[ { 3 } ] [ { { 3 t } } table>condition ] unit-test
[ { T{ primitive-class } } ] [ { { 1 t } { 2 T{ primitive-class } } } table>questions ] unit-test
[ { { 1 t } { 2 t } } ] [ { { 1 t } { 2 T{ primitive-class } } } T{ primitive-class } t assoc-answer ] unit-test
fry macros arrays assocs sets classes mirrors unicode.script
unicode.data ;
FROM: ascii => ascii? ;
+FROM: sets => members ;
IN: regexp.classes
SINGLETONS: dot letter-class LETTER-class Letter-class digit-class
TUPLE: class-partition integers not-integers simples not-simples and or other ;
: partition-classes ( seq -- class-partition )
- prune
+ members
[ integer? ] partition
[ not-integer? ] partition
[ simple-class? ] partition
[ t swap remove ] change-other
dup contradiction?
[ drop f ]
- [ filter-not-integers class-partition>seq prune t and-class seq>instance ] if ;
+ [ filter-not-integers class-partition>seq members t and-class seq>instance ] if ;
: <and-class> ( seq -- class )
dup and-class flatten partition-classes
[ f swap remove ] change-other
dup tautology?
[ drop t ]
- [ filter-integers class-partition>seq prune f or-class seq>instance ] if ;
+ [ filter-integers class-partition>seq members f or-class seq>instance ] if ;
: <or-class> ( seq -- class )
dup or-class flatten partition-classes
: condition-states ( condition -- states )
dup condition? [
[ yes>> ] [ no>> ] bi
- [ condition-states ] bi@ append prune
+ [ condition-states ] bi@ union
] [ 1array ] if ;
: condition-at ( condition assoc -- new-condition )
! Copyright (C) 2009 Daniel Ehrenberg.
! See http://factorcode.org/license.txt for BSD license.
USING: regexp.classes kernel sequences regexp.negation
-quotations assocs fry math locals combinators
+quotations assocs fry math locals combinators sets
accessors words compiler.units kernel.private strings
sequences.private arrays namespaces unicode.breaks
regexp.transition-tables combinators.short-circuit ;
: word>quot ( word dfa -- quot )
[ transitions>> at ]
- [ final-states>> key? ] 2bi
+ [ final-states>> in? ] 2bi
transitions>quot ;
: states>code ( words dfa -- )
: set-final-states ( nfa dfa -- )
[
- [ final-states>> keys ]
+ [ final-states>> members ]
[ transitions>> keys ] bi*
[ intersects? ] with filter
- unique
+ fast-set
] keep (>>final-states) ;
: initialize-dfa ( nfa -- dfa )
{ 3 H{ } }
} }
{ start-state 0 }
- { final-states H{ { 3 3 } } }
+ { final-states HS{ 3 } }
}
] [
T{ transition-table
{ 6 H{ } }
} }
{ start-state 0 }
- { final-states H{ { 3 3 } { 6 6 } } }
+ { final-states HS{ 3 6 } }
} combine-states
] unit-test
{
[ drop <= ]
[ transitions>> '[ _ at keys ] bi@ set= ]
- [ final-states>> '[ _ key? ] bi@ = ]
+ [ final-states>> '[ _ in? ] bi@ = ]
} 3&& ;
:: initialize-partitions ( transition-table -- partitions )
{ -1 H{ { t -1 } } }
} }
{ start-state 0 }
- { final-states H{ { 0 0 } { -1 -1 } } }
+ { final-states HS{ 0 -1 } }
}
] [
! R/ a/
{ 1 H{ } }
} }
{ start-state 0 }
- { final-states H{ { 1 1 } } }
+ { final-states HS{ 1 } }
} negate-table
] unit-test
USING: regexp.nfa regexp.disambiguate kernel sequences
assocs regexp.classes hashtables accessors fry vectors
regexp.ast regexp.transition-tables regexp.minimize
-regexp.dfa namespaces ;
+regexp.dfa namespaces sets ;
IN: regexp.negation
CONSTANT: fail-state -1
fail-state-recurses ;
: inverse-final-states ( transition-table -- final-states )
- [ transitions>> assoc>set ] [ final-states>> ] bi assoc-diff ;
+ [ transitions>> keys ] [ final-states>> ] bi diff fast-set ;
: negate-table ( transition-table -- transition-table )
clone
[ [ [ 1vector ] assoc-map ] assoc-map ] change-transitions ;
: unify-final-state ( transition-table -- transition-table )
- dup [ final-states>> keys ] keep
+ dup [ final-states>> members ] keep
'[ -2 epsilon _ set-transition ] each
- H{ { -2 -2 } } >>final-states ;
+ HS{ -2 } clone >>final-states ;
: adjoin-dfa ( transition-table -- start end )
unify-final-state renumber-states box-transitions
[ start-state>> ]
- [ final-states>> keys first ]
+ [ final-states>> members first ]
[ nfa-table get [ transitions>> ] bi@ swap assoc-union! drop ] tri ;
: ast>dfa ( parse-tree -- minimal-dfa )
unicode.categories regexp.transition-tables words sets hashtables
combinators.short-circuit unicode.data regexp.ast
regexp.classes memoize ;
+FROM: namespaces => set ;
IN: regexp.nfa
! This uses unicode.data for ch>upper and ch>lower
<transition-table> nfa-table set
nfa-node
nfa-table get
- swap dup associate >>final-states
+ swap 1array fast-set >>final-states
swap >>start-state
] with-scope ;
[ [ simple ] keep ] H{ } map>assoc ;
MEMO: simple-script-table ( -- table )
- script-table interval-values prune simple-table ;
+ script-table interval-values members simple-table ;
MEMO: simple-category-table ( -- table )
categories simple-table ;
! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs fry hashtables kernel sequences
-vectors locals regexp.classes ;
+vectors locals regexp.classes sets ;
IN: regexp.transition-tables
TUPLE: transition-table transitions start-state final-states ;
: <transition-table> ( -- transition-table )
transition-table new
H{ } clone >>transitions
- H{ } clone >>final-states ;
+ HS{ } clone >>final-states ;
:: (set-transition) ( from to obj hash -- )
from hash at
: add-transition ( from to obj transition-table -- )
transitions>> (add-transition) ;
-: map-set ( assoc quot -- new-assoc )
- '[ drop @ dup ] assoc-map ; inline
+: map-set ( set quot -- new-set )
+ over [ [ members ] dip map ] dip set-like ; inline
: number-transitions ( transitions numbering -- new-transitions )
dup '[
prettyprint.backend prettyprint.config prettyprint.custom
prettyprint.sections sequences sets sorting strings summary words
words.symbol words.constant words.alias vocabs slots ;
+FROM: namespaces => set ;
+FROM: classes => members ;
+RENAME: members sets => set-members
IN: see
GENERIC: synopsis* ( defspec -- )
dup class? [ dup seeing-implementors % ] when
dup generic? [ dup seeing-methods % ] when
drop
- ] { } make prune ;
+ ] { } make set-members ;
: see-methods ( word -- )
methods see-all nl ;
] { } make <interval-map> ;
: process-interval-file ( ranges -- table )
- dup values prune interned
+ dup values members interned
[ expand-ranges ] with-variable ;
: load-interval-file ( filename -- table )
kernel logging sequences combinators splitting assocs strings
math.order math.parser random system calendar summary calendar.format
accessors sets hashtables base64 debugger classes prettyprint words ;
+FROM: namespaces => set ;
IN: smtp
SYMBOL: smtp-domain
stack-checker.visitor stack-checker.errors stack-checker.values
stack-checker.recursive-state stack-checker.dependencies summary ;
FROM: sequences.private => from-end ;
+FROM: namespaces => set ;
IN: stack-checker.backend
: push-d ( obj -- ) meta-d push ;
combinators.short-circuit classes.tuple alien.c-types ;
FROM: classes.tuple.private => tuple-layout ;
FROM: assocs => change-at ;
+FROM: namespaces => set ;
IN: stack-checker.dependencies
! Words that the current quotation depends on
stack-checker.state stack-checker.visitor stack-checker.errors
stack-checker.values stack-checker.recursive-state
stack-checker.dependencies ;
+FROM: namespaces => set ;
IN: stack-checker.transforms
: call-transformer ( stack quot -- newquot )
[ { } ]
[ SA{ } "something" swap query ] unit-test
-[ V{ "unit-test" "(unit-test)" } ]
+[ { "unit-test" "(unit-test)" } ]
[ "suffix-array" get "unit-test" swap query ] unit-test
[ t ]
[ "suffix-array" get "something else" swap query empty? ] unit-test
-[ V{ "rofl" } ] [ SA{ "rofl" } "r" swap query ] unit-test
-[ V{ "rofl" } ] [ SA{ "rofl" } "o" swap query ] unit-test
-[ V{ "rofl" } ] [ SA{ "rofl" } "f" swap query ] unit-test
-[ V{ "rofl" } ] [ SA{ "rofl" } "l" swap query ] unit-test
-[ V{ } ] [ SA{ "rofl" } "t" swap query ] unit-test
+[ { "rofl" } ] [ SA{ "rofl" } "r" swap query ] unit-test
+[ { "rofl" } ] [ SA{ "rofl" } "o" swap query ] unit-test
+[ { "rofl" } ] [ SA{ "rofl" } "f" swap query ] unit-test
+[ { "rofl" } ] [ SA{ "rofl" } "l" swap query ] unit-test
+[ { } ] [ SA{ "rofl" } "t" swap query ] unit-test
: query ( begin suffix-array -- matches )
2dup find-index dup
- [ -rot [ from-to ] keep <funky-slice> [ seq>> ] map prune ]
+ [ -rot [ from-to ] keep <funky-slice> [ seq>> ] map members ]
[ 3drop { } ] if ;
QUALIFIED: source-files.errors
QUALIFIED: vocabs
FROM: alien.libraries.private => >deployed-library-path ;
+FROM: namespaces => set ;
+FROM: sets => members ;
IN: tools.deploy.shaker
! This file is some hairy shit.
: write-vocab-manifest ( vocab-manifest-out -- )
"Writing vocabulary manifest to " write dup print flush
vocabs "VOCABS:" prefix
- deploy-libraries get [ libraries get at path>> ] map prune "LIBRARIES:" prefix append
+ deploy-libraries get [ libraries get at path>> ] map members "LIBRARIES:" prefix append
swap utf8 set-file-lines ;
: prepare-deploy-libraries ( -- )
combinators sorting math.parser vocabs definitions
tools.profiler.private tools.crossref continuations generic
compiler.units compiler.crossref sets classes fry ;
+FROM: sets => members ;
IN: tools.profiler
: profile ( quot -- )
[ smart-usage [ word? ] filter ]
[ generic-call-sites-of keys ]
[ effect-dependencies-of keys ]
- tri 3append prune ;
+ tri 3append members ;
: usage-counters ( word -- alist )
profiler-usage counters ;
tools.test namespaces models kernel dlists deques math
math.parser ui sequences hashtables assocs io arrays prettyprint
io.streams.string math.rectangles ui.gadgets.private sets generic ;
+FROM: namespaces => set ;
IN: ui.gadgets.tests
[ { 300 300 } ]
] each-integer ;
: status-flags ( -- seq )
- { "g" "1" "2" "3" } [ get graft-state>> ] map prune ;
+ { "g" "1" "2" "3" } [ get graft-state>> ] map members ;
: notify-combo ( ? ? -- )
nl "===== Combo: " write 2dup 2array . nl
<dlist> \ graft-queue [
<mock-gadget> "g" set
[ ] [ add-some-children ] unit-test
- [ V{ { f f } } ] [ status-flags ] unit-test
+ [ { { f f } } ] [ status-flags ] unit-test
[ ] [ "g" get graft ] unit-test
- [ V{ { f t } } ] [ status-flags ] unit-test
+ [ { { f t } } ] [ status-flags ] unit-test
dup [ [ ] [ notify-queued ] unit-test ] when
[ ] [ "g" get clear-gadget ] unit-test
[ [ t ] [ graft-queue [ front>> ] [ back>> ] bi eq? ] unit-test ] unless
[ { f t } ] [ "3" get graft-state>> ] unit-test
[ ] [ graft-queue [ "x" print notify ] slurp-deque ] unit-test
[ ] [ notify-queued ] unit-test
- [ V{ { t t } } ] [ status-flags ] unit-test
+ [ { { t t } } ] [ status-flags ] unit-test
] with-variable ;
{ { f f } { f t } { t f } { t t } } [ notify-combo ] assoc-each
math.vectors classes.tuple classes boxes calendar alarms combinators
sets columns fry deques ui.gadgets ui.gadgets.private ascii
combinators.short-circuit ;
+FROM: namespaces => set ;
+FROM: sets => members ;
IN: ui.gestures
: get-gesture-handler ( gesture gadget -- quot )
: modifier ( mod modifiers -- seq )
[ second swap bitand 0 > ] with filter
- 0 <column> prune [ f ] [ >array ] if-empty ;
+ 0 <column> members [ f ] [ >array ] if-empty ;
: drag-loc ( -- loc )
hand-loc get-global hand-click-loc get-global v- ;
ui.tools.listener.history ui.images ui.tools.error-list
tools.errors.model ;
FROM: source-files.errors => all-errors ;
+FROM: namespaces => set ;
IN: ui.tools.listener
! If waiting is t, we're waiting for user input, and invoking
layout-queue [
dup layout find-world [ , ] when*
] slurp-deque
- ] { } make prune ;
+ ] { } make members ;
: redraw-worlds ( seq -- )
[ dup update-hand draw-world ] each ;
compiler.units parser io.encodings.ascii values interval-maps
ascii sets combinators locals math.ranges sorting make
strings.parser io.encodings.utf8 memoize simple-flat-file ;
+FROM: namespaces => set ;
IN: unicode.data
<PRIVATE
] assoc-map ;
: properties>intervals ( properties -- assoc[str,interval] )
- dup values prune [ f ] H{ } map>assoc
+ dup values members [ f ] H{ } map>assoc
[ [ push-at ] curry assoc-each ] keep
[ <interval-set> ] assoc-map ;
#! Hack.\r
[ vocab-prefix? ] partition\r
[\r
- [ vocab-name ] map unique\r
- '[ name>> _ key? not ] filter\r
+ [ vocab-name ] map fast-set\r
+ '[ name>> _ in? not ] filter\r
convert-prefixes\r
] keep\r
append ;\r
dup vocab-tags-path set-vocab-file-contents ;
: add-vocab-tags ( tags vocab -- )
- [ vocab-tags append prune ] keep set-vocab-tags ;
+ [ vocab-tags append members ] keep set-vocab-tags ;
: remove-vocab-tags ( tags vocab -- )
[ vocab-tags swap diff ] keep set-vocab-tags ;
: monitor-thread ( -- )\r
[\r
[\r
- vocab-roots get prune [ add-monitor-for-path ] each\r
+ vocab-roots get [ add-monitor-for-path ] each\r
\r
H{ } clone changed-vocabs set-global\r
vocabs [ changed-vocab ] each\r
USING: accessors assocs checksums checksums.crc32
io.encodings.utf8 io.files kernel namespaces sequences sets
source-files vocabs vocabs.errors vocabs.loader ;
+FROM: namespaces => set ;
IN: vocabs.refresh
: source-modified? ( path -- ? )
[ [ vocab f >>docs-loaded? drop ] each ] bi*
]
[
- append prune
+ union
[ unchanged-vocabs ]
[ require-all load-failures. ] bi
] 2bi ;
: refresh ( prefix -- ) to-refresh do-refresh ;
-: refresh-all ( -- ) "" refresh ;
\ No newline at end of file
+: refresh-all ( -- ) "" refresh ;
xml.data accessors arrays make xml.char-classes fry assocs sequences
math xml.errors sets combinators io.encodings io.encodings.iana
unicode.case xml.dtd strings xml.entities unicode.categories ;
+FROM: namespaces => set ;
IN: xml.elements
: take-interpolated ( quot -- interpolated )
assoc>> >alist ;
: (keyword-map-no-word-sep) ( assoc -- str )
- keys concat [ alpha? not ] filter prune natural-sort ;
+ keys combine [ alpha? not ] filter natural-sort ;
: keyword-map-no-word-sep* ( keyword-map -- str )
dup no-word-sep>> [ ] [
USING: accessors alien alien.accessors alien.syntax byte-arrays arrays
kernel kernel.private namespaces tools.test sequences libc math
system prettyprint layouts alien.libraries sets ;
+FROM: namespaces => set ;
IN: alien.tests
[ t ] [ -1 <alien> alien-address 0 > ] unit-test
[ 7575 ] [ initialize-test [ 7575 ] initialize-alien ] unit-test
-[ V{ BAD-ALIEN } ] [ { BAD-ALIEN BAD-ALIEN BAD-ALIEN } prune ] unit-test
\ No newline at end of file
+[ { BAD-ALIEN } ] [ { BAD-ALIEN BAD-ALIEN BAD-ALIEN } members ] unit-test
"HEX:"
"HOOK:"
"H{"
+ "HS{"
"IN:"
"INSTANCE:"
"M:"
USING: kernel classes classes.private combinators accessors
sequences arrays vectors assocs namespaces words sorting layouts
math hashtables kernel.private sets math.order ;
+FROM: classes => members ;
+RENAME: members sets => set-members
IN: classes.algebra
<PRIVATE
TUPLE: anonymous-union { members read-only } ;
: <anonymous-union> ( members -- class )
- [ null eq? not ] filter prune
+ [ null eq? not ] filter set-members
dup length 1 = [ first ] [ anonymous-union boa ] if ;
TUPLE: anonymous-intersection { participants read-only } ;
: <anonymous-intersection> ( participants -- class )
- prune dup length 1 = [ first ] [ anonymous-intersection boa ] if ;
+ set-members dup length 1 =
+ [ first ] [ anonymous-intersection boa ] if ;
TUPLE: anonymous-complement { class read-only } ;
slots.private namespaces make sequences strings words words.symbol
vectors math quotations combinators sorting effects graphs
vocabs sets ;
+FROM: namespaces => set ;
IN: classes
ERROR: bad-inheritance class superclass ;
: contiguous-range? ( keys -- ? )
dup [ fixnum? ] all? [
dup all-unique? [
- [ prune length ]
+ [ length ]
[ [ supremum ] [ infimum ] bi - ]
bi - 1 =
] [ drop f ] if
sequences words vocabs definitions hashtables init sets math
math.order classes classes.private classes.algebra classes.tuple
classes.tuple.private generic source-files.errors kernel.private ;
+FROM: namespaces => set ;
IN: compiler.units
SYMBOL: old-definitions
! See http://factorcode.org/license.txt for BSD license.
USING: accessors continuations kernel namespaces make
sequences vectors sets assocs init math ;
+FROM: namespaces => set ;
IN: destructors
SYMBOL: disposables
hashtables definitions kernel.private classes classes.private
classes.algebra quotations arrays vocabs effects combinators
sets ;
+FROM: namespaces => set ;
IN: generic
! Method combination protocol
prettyprint byte-vectors bit-vectors specialized-vectors
definitions generic sets graphs assocs grouping see eval ;
QUALIFIED-WITH: alien.c-types c
+FROM: namespaces => set ;
SPECIALIZED-VECTOR: c:double
IN: generic.single.tests
--- /dev/null
+USING: help.markup help.syntax sequences ;
+IN: hash-sets
+
+ARTICLE: "hash-sets" "Hash sets"
+"The " { $vocab-link "hash-sets" } " vocabulary implements hashtable-backed sets. These are of the class:"
+{ $subsection hash-set }
+"They can be instantiated with the word"
+{ $subsection <hash-set> }
+"The syntax for hash sets is described in " { $link "syntax-hash-sets" } "." ;
+
+ABOUT: "hash-sets"
+
+HELP: hash-set
+{ $class-description "The class of hashtable-based sets. These implement the " { $link "sets" } "." } ;
+
+HELP: <hash-set>
+{ $values { "members" sequence } { "hash-set" hash-set } }
+{ $description "Creates a new hash set with the given members." } ;
--- /dev/null
+! Copyright (C) 2010 Daniel Ehrenberg
+! See http://factorcode.org/license.txt for BSD license.
+USING: sets tools.test kernel sorting prettyprint hash-sets ;
+IN: hash-sets.tests
+
+[ { 1 2 3 } ] [ HS{ 1 2 3 } members natural-sort ] unit-test
+
+[ "HS{ 1 2 3 4 }" ] [ HS{ 1 2 3 4 } unparse ] unit-test
+
+[ t ] [ 1 HS{ 0 1 2 } in? ] unit-test
+[ f ] [ 3 HS{ 0 1 2 } in? ] unit-test
+[ HS{ 1 2 3 } ] [ 3 HS{ 1 2 } clone [ adjoin ] keep ] unit-test
+[ HS{ 1 2 } ] [ 2 HS{ 1 2 } clone [ adjoin ] keep ] unit-test
+[ HS{ 1 2 3 } ] [ 4 HS{ 1 2 3 } clone [ delete ] keep ] unit-test
+[ HS{ 1 2 } ] [ 3 HS{ 1 2 3 } clone [ delete ] keep ] unit-test
+[ HS{ 1 2 } ] [ HS{ 1 2 } fast-set ] unit-test
+[ { 1 2 } ] [ HS{ 1 2 } members natural-sort ] unit-test
+
+[ HS{ 1 2 3 4 } ] [ HS{ 1 2 3 } HS{ 2 3 4 } union ] unit-test
+[ HS{ 2 3 } ] [ HS{ 1 2 3 } HS{ 2 3 4 } intersect ] unit-test
+[ t ] [ HS{ 1 2 3 } HS{ 2 3 4 } intersects? ] unit-test
+[ f ] [ HS{ 1 } HS{ 2 3 4 } intersects? ] unit-test
+[ f ] [ HS{ 1 } HS{ 2 3 4 } subset? ] unit-test
+[ f ] [ HS{ 1 2 3 } HS{ 2 3 4 } subset? ] unit-test
+[ t ] [ HS{ 2 3 } HS{ 2 3 4 } subset? ] unit-test
+[ t ] [ HS{ } HS{ 2 3 4 } subset? ] unit-test
+[ HS{ 1 } ] [ HS{ 1 2 3 } HS{ 2 3 4 } diff ] unit-test
+[ t ] [ HS{ 1 2 3 } HS{ 2 1 3 } set= ] unit-test
+[ t ] [ HS{ 1 2 3 } HS{ 2 1 3 } = ] unit-test
+[ f ] [ HS{ 2 3 } HS{ 2 1 3 } set= ] unit-test
+[ f ] [ HS{ 1 2 3 } HS{ 2 3 } set= ] unit-test
+
+[ HS{ 1 2 } HS{ 1 2 3 } ] [ HS{ 1 2 } clone dup clone [ 3 swap adjoin ] keep ] unit-test
--- /dev/null
+! Copyright (C) 2010 Daniel Ehrenberg
+! See http://factorcode.org/license.txt for BSD license.
+USING: accessors assocs hashtables kernel sets
+sequences parser ;
+QUALIFIED: sets
+IN: hash-sets
+
+! In a better implementation, less memory would be used
+TUPLE: hash-set { table hashtable read-only } ;
+
+: <hash-set> ( members -- hash-set )
+ [ dup ] H{ } map>assoc hash-set boa ;
+
+INSTANCE: hash-set set
+M: hash-set in? table>> key? ; inline
+M: hash-set adjoin table>> dupd set-at ; inline
+M: hash-set delete table>> delete-at ; inline
+M: hash-set members table>> keys ; inline
+M: hash-set set-like
+ drop dup hash-set? [ members <hash-set> ] unless ;
+M: hash-set clone
+ table>> clone hash-set boa ;
+
+M: sequence fast-set <hash-set> ;
+M: f fast-set drop H{ } clone hash-set boa ;
+
+M: sequence duplicates
+ HS{ } clone [ [ in? ] [ adjoin ] 2bi ] curry filter ;
USING: assocs hashtables help.markup help.syntax kernel
-quotations sequences ;
+quotations sequences vectors ;
IN: sets
-ARTICLE: "sets" "Set-theoretic operations on sequences"
-"Set-theoretic operations on sequences are defined on the " { $vocab-link "sets" } " vocabulary. All of these operations use hashtables internally to achieve linear running time."
-$nl
-"Remove duplicates:"
-{ $subsections prune }
-"Test for duplicates:"
+ARTICLE: "sets" "Sets"
+"A set is an unordered list of elements. Words for working with sets are in the " { $vocab-link "sets" } " vocabulary."
+"All sets are instances of a mixin class:"
{ $subsections
- all-unique?
- duplicates
+ set
+ set?
+}
+{ $subsections "set-operations" "set-implementations" } ;
+
+ABOUT: "sets"
+
+ARTICLE: "set-operations" "Operations on sets"
+"To test if an object is a member of a set:"
+{ $subsection in? }
+"All sets can be represented as a sequence, without duplicates, of their members:"
+{ $subsection members }
+"Sets can have members added or removed destructively:"
+{ $subsections
+ adjoin
+ delete
}
-"Set operations on sequences:"
+"Basic mathematical operations, which any type of set may override for efficiency:"
{ $subsections
diff
intersect
union
}
-"Set-theoretic predicates:"
+"Mathematical predicates on sets, which may be overridden for efficiency:"
{ $subsections
intersects?
subset?
set=
}
-"A word used to implement the above:"
-{ $subsections unique }
-"Adding elements to sets:"
+"An optional generic word for creating sets of the same class as a given set:"
+{ $subsection set-like }
+"An optional generic word for creating a set with a fast lookup operation, if the set itself has a slow lookup operation:"
+{ $subsection fast-set }
+"For set types that allow duplicates, like sequence sets, some additional words test for duplication:"
{ $subsections
- adjoin
-}
-{ $see-also member? member-eq? any? all? "assocs-sets" } ;
+ all-unique?
+ duplicates
+} ;
-ABOUT: "sets"
+ARTICLE: "set-implementations" "Set implementations"
+"There are several implementations of sets in the Factor library. More can be added if they implement the words of the set protocol, the basic set operations."
+{ $subsections
+ "sequence-sets"
+ "hash-sets"
+ "bit-sets"
+} ;
+
+ARTICLE: "sequence-sets" "Sequences as sets"
+"Any sequence can be used as a set. The members of this set are the elements of the sequence. Calling the word " { $link members } " on a sequence returns a copy of the sequence with only one listing of each member. Destructive operations " { $link adjoin } " and " { $link delete } " only work properly on growable sequences like " { $link vector } "s."
+$nl
+"Care must be taken in writing efficient code using sequence sets. Testing for membership with " { $link in? } ", as well as the destructive set operations, take time proportional to the size of the sequence. Another representation, like " { $link "hash-sets" } ", would take constant time for membership tests. But binary operations like " { $link union } " are asymptotically optimal, taking time proportional to the sum of the size of the inputs."
+$nl
+"As one particular example, " { $link POSTPONE: f } " is a representation of the empty set, as it represents the empty sequence." ;
+
+HELP: set
+{ $class-description "The class of all sets. Custom implementations of the set protocol should be declared as instances of this mixin for all set implementation to work correctly." } ;
HELP: adjoin
-{ $values { "elt" object } { "seq" "a resizable mutable sequence" } }
-{ $description "Removes all elements equal to " { $snippet "elt" } ", and adds " { $snippet "elt" } " at the end of the sequence." }
+{ $values { "elt" object } { "set" set } }
+{ $description "Destructively adds " { $snippet "elt" } " to " { $snippet "set" } ". For sequences, this guarantees that this element is not duplicated, and that it is at the end of the sequence." $nl "Each mutable set type is expected to implement a method on this generic word." }
{ $examples
{ $example
"USING: namespaces prettyprint sets ;"
"V{ \"beans\" \"cheese\" \"nachos\" \"salsa\" }"
}
}
-{ $side-effects "seq" } ;
+{ $side-effects "set" } ;
-HELP: conjoin
-{ $values { "elt" object } { "assoc" assoc } }
-{ $description "Stores a key/value pair, both equal to " { $snippet "elt" } ", into the assoc." }
-{ $examples
- { $example
- "USING: kernel prettyprint sets ;"
- "H{ } clone 1 over conjoin ."
- "H{ { 1 1 } }"
- }
-}
-{ $side-effects "assoc" } ;
+HELP: delete
+{ $values { "elt" object } { "set" set } }
+{ $description "Destructively removes " { $snippet "elt" } " from " { $snippet "set" } ". If the element is not present, this does nothing." $nl "Each mutable set type is expected to implement a method on this generic word." }
+{ $side-effects "set" } ;
-HELP: conjoin-at
-{ $values { "value" object } { "key" object } { "assoc" assoc } }
-{ $description "Adds " { $snippet "value" } " to the set stored at " { $snippet "key" } " of " { $snippet "assoc" } "." } ;
+HELP: members
+{ $values { "set" set } { "seq" sequence } }
+{ $description "Creates a sequence with a single copy of each member of the set." $nl "Each set type is expected to implement a method on this generic word." } ;
-HELP: unique
-{ $values { "seq" "a sequence" } { "assoc" assoc } }
-{ $description "Outputs a new assoc where the keys and values are equal." }
-{ $examples
- { $example "USING: sets prettyprint ;" "{ 1 1 2 2 3 3 } unique ." "H{ { 1 1 } { 2 2 } { 3 3 } }" }
-} ;
+HELP: in?
+{ $values { "elt" object } { "set" set } { "?" "a boolean" } }
+{ $description "Tests whether the element is a member of the set." $nl "Each set type is expected to implement a method on this generic word as part of the set protocol." } ;
-HELP: prune
-{ $values { "seq" "a sequence" } { "newseq" "a sequence" } }
-{ $description "Outputs a new sequence with each distinct element of " { $snippet "seq" } " appearing only once. Elements are compared for equality using " { $link = } " and elements are ordered according to their position in " { $snippet "seq" } "." }
-{ $examples
- { $example "USING: sets prettyprint ;" "{ 1 1 t 3 t } prune ." "V{ 1 t 3 }" }
-} ;
+HELP: adjoin-at
+{ $values { "value" object } { "key" object } { "assoc" assoc } }
+{ $description "Adds " { $snippet "value" } " to the set stored at " { $snippet "key" } " of " { $snippet "assoc" } "." }
+{ $side-effects "assoc" } ;
HELP: duplicates
-{ $values { "seq" "a sequence" } { "newseq" "a sequence" } }
-{ $description "Outputs a new sequence consisting of elements which occur more than once in " { $snippet "seq" } "." }
+{ $values { "set" set } { "seq" sequence } }
+{ $description "Outputs a sequence consisting of elements which occur more than once in " { $snippet "set" } "." }
{ $examples
- { $example "USING: sets prettyprint ;" "{ 1 2 3 1 2 1 } duplicates ." "{ 1 2 1 }" }
+ { $example "USING: sets prettyprint ;" "{ 1 2 3 1 2 1 } duplicates ." "{ 1 2 1 }" }
} ;
HELP: all-unique?
-{ $values { "seq" sequence } { "?" "a boolean" } }
-{ $description "Tests whether a sequence contains any repeated elements." }
+{ $values { "set" set } { "?" "a boolean" } }
+{ $description "Tests whether a set contains any repeated elements." }
{ $example
"USING: sets prettyprint ;"
"{ 0 1 1 2 3 5 } all-unique? ."
} ;
HELP: diff
-{ $values { "seq1" sequence } { "seq2" sequence } { "newseq" sequence } }
-{ $description "Outputs a sequence consisting of elements present in " { $snippet "seq1" } " but not " { $snippet "seq2" } ", comparing elements for equality."
+{ $values { "set1" set } { "set2" set } { "set" set } }
+{ $description "Outputs a set consisting of elements present in " { $snippet "set1" } " but not " { $snippet "set2" } ", comparing elements for equality."
+"This word has a default definition which works for all sets, but set implementations may override the default for efficiency."
} { $examples
{ $example "USING: sets prettyprint ;" "{ 1 2 3 } { 2 3 4 } diff ." "{ 1 }" }
} ;
HELP: intersect
-{ $values { "seq1" sequence } { "seq2" sequence } { "newseq" sequence } }
-{ $description "Outputs a sequence consisting of elements present in both " { $snippet "seq1" } " and " { $snippet "seq2" } "." }
+{ $values { "set1" set } { "set2" set } { "set" set } }
+{ $description "Outputs a set consisting of elements present in both " { $snippet "set1" } " and " { $snippet "set2" } "."
+"This word has a default definition which works for all sets, but set implementations may override the default for efficiency." }
{ $examples
{ $example "USING: sets prettyprint ;" "{ 1 2 3 } { 2 3 4 } intersect ." "{ 2 3 }" }
} ;
HELP: union
-{ $values { "seq1" sequence } { "seq2" sequence } { "newseq" sequence } }
-{ $description "Outputs a sequence consisting of elements present in " { $snippet "seq1" } " and " { $snippet "seq2" } " which does not contain duplicate values." }
+{ $values { "set1" set } { "set2" set } { "set" set } }
+{ $description "Outputs a set consisting of elements present in either " { $snippet "set1" } " or " { $snippet "set2" } " which does not contain duplicate values."
+"This word has a default definition which works for all sets, but set implementations may override the default for efficiency." }
{ $examples
- { $example "USING: sets prettyprint ;" "{ 1 2 3 } { 2 3 4 } union ." "V{ 1 2 3 4 }" }
+ { $example "USING: sets prettyprint ;" "{ 1 2 3 } { 2 3 4 } union ." "{ 1 2 3 4 }" }
} ;
{ diff intersect union } related-words
HELP: intersects?
-{ $values { "seq1" sequence } { "seq2" sequence } { "?" "a boolean" } }
-{ $description "Tests if " { $snippet "seq1" } " and " { $snippet "seq2" } " have any elements in common." }
-{ $notes "If one of the sequences is empty, the result is always " { $link f } "." } ;
+{ $values { "set1" set } { "set2" set } { "?" "a boolean" } }
+{ $description "Tests if " { $snippet "set1" } " and " { $snippet "set2" } " have any elements in common." }
+{ $notes "If one of the sets is empty, the result is always " { $link f } "." } ;
HELP: subset?
-{ $values { "seq1" sequence } { "seq2" sequence } { "?" "a boolean" } }
-{ $description "Tests if every element of " { $snippet "seq1" } " is contained in " { $snippet "seq2" } "." }
-{ $notes "If " { $snippet "seq1" } " is empty, the result is always " { $link t } "." } ;
+{ $values { "set1" set } { "set2" set } { "?" "a boolean" } }
+{ $description "Tests if every element of " { $snippet "set1" } " is contained in " { $snippet "set2" } "." }
+{ $notes "If " { $snippet "set1" } " is empty, the result is always " { $link t } "." } ;
HELP: set=
-{ $values { "seq1" sequence } { "seq2" sequence } { "?" "a boolean" } }
-{ $description "Tests if both sequences contain the same elements, disregrading order and duplicates." } ;
+{ $values { "set1" set } { "set2" set } { "?" "a boolean" } }
+{ $description "Tests if both sets contain the same elements, disregarding order and duplicates." } ;
HELP: gather
{ $values
{ "newseq" sequence } }
{ $description "Maps a quotation onto a sequence, concatenates the results of the mapping, and removes duplicates." } ;
+HELP: set-like
+{ $values { "set" set } { "exemplar" set } { "set'" set } }
+{ $description "If the conversion is defined for the exemplar, converts the set into a set of the exemplar's class. This is not guaranteed to create a new set, for example if the input set and exemplar are of the same class." $nl
+"Set implementations may optionally implement a method on this generic word. The default implementation returns its input set." }
+{ $examples
+ { $example "USING: sets prettyprint ;" "{ 1 2 3 } HS{ } set-like ." "HS{ 1 2 3 }" }
+} ;
-USING: kernel sets tools.test ;
+! Copyright (C) 2010 Daniel Ehrenberg
+! See http://factorcode.org/license.txt for BSD license.
+USING: sets tools.test kernel prettyprint hash-sets sorting ;
IN: sets.tests
-[ f ] [ { 0 1 1 2 3 5 } all-unique? ] unit-test
-[ t ] [ { 0 1 2 3 4 5 } all-unique? ] unit-test
-
-[ V{ 1 2 3 } ] [ { 1 2 2 3 3 } prune ] unit-test
-[ V{ 3 2 1 } ] [ { 3 3 2 2 1 } prune ] unit-test
-
[ { } ] [ { } { } intersect ] unit-test
[ { 2 3 } ] [ { 1 2 3 } { 2 3 4 } intersect ] unit-test
[ { } ] [ { } { } diff ] unit-test
[ { 1 } ] [ { 1 2 3 } { 2 3 4 } diff ] unit-test
-[ V{ } ] [ { } { } union ] unit-test
-[ V{ 1 2 3 4 } ] [ { 1 2 3 } { 2 3 4 } union ] unit-test
-
-[ V{ 1 2 3 } ]
-[ 3 V{ 1 2 } clone [ adjoin ] keep ] unit-test
-
-[ V{ 1 2 3 } ]
-[ 3 V{ 1 3 2 } clone [ adjoin ] keep ] unit-test
+[ { } ] [ { } { } union ] unit-test
+[ { 1 2 3 4 } ] [ { 1 2 3 } { 2 3 4 } union ] unit-test
[ t ] [ { 1 2 } { 1 3 } intersects? ] unit-test
[ f ] [ { 1 } { } intersects? ] unit-test
+[ t ] [ 4 { 2 4 5 } in? ] unit-test
+[ f ] [ 1 { 2 4 5 } in? ] unit-test
+
+[ V{ 1 2 3 } ] [ 3 V{ 1 2 } clone [ adjoin ] keep ] unit-test
+[ V{ 1 2 } ] [ 2 V{ 1 2 } clone [ adjoin ] keep ] unit-test
+[ V{ 1 2 } ] [ 3 V{ 1 2 } clone [ delete ] keep ] unit-test
+[ V{ 2 } ] [ 1 V{ 1 2 } clone [ delete ] keep ] unit-test
+
+[ t ] [ { 1 2 3 } { 2 1 3 } set= ] unit-test
+[ f ] [ { 2 3 } { 1 2 3 } set= ] unit-test
+[ f ] [ { 1 2 3 } { 2 3 } set= ] unit-test
+
+[ { 1 } ] [ { 1 } members ] unit-test
+
+[ { 1 2 3 } ] [ { 1 1 1 2 2 3 3 3 3 3 } dup set-like natural-sort ] unit-test
+[ { 1 2 3 } ] [ HS{ 1 2 3 } { } set-like natural-sort ] unit-test
+
+[ HS{ 1 2 3 } ] [ { 1 2 3 } fast-set ] unit-test
+
+[ { 1 2 3 } ] [ { { 1 } { 2 } { 1 3 } } combine ] unit-test
+
+[ f ] [ { 0 1 1 2 3 5 } all-unique? ] unit-test
+[ t ] [ { 0 1 2 3 4 5 } all-unique? ] unit-test
+
+[ { 1 2 3 } ] [ { 1 2 2 3 3 } { } set-like ] unit-test
+[ { 3 2 1 } ] [ { 3 3 2 2 1 } { } set-like ] unit-test
+
+[ { 2 1 2 1 } ] [ { 1 2 3 2 1 2 1 } duplicates ] unit-test
+[ f ] [ HS{ 1 2 3 1 2 1 } duplicates ] unit-test
+
+[ H{ { 3 HS{ 1 2 } } } ] [ H{ } clone 1 3 pick adjoin-at 2 3 pick adjoin-at ] unit-test
-! Copyright (C) 2008, 2009 Slava Pestov, Doug Coleman.
+! Copyright (C) 2010 Daniel Ehrenberg
! See http://factorcode.org/license.txt for BSD license.
-USING: assocs hashtables kernel sequences vectors ;
+USING: accessors assocs hashtables kernel vectors
+math sequences ;
IN: sets
-: adjoin ( elt seq -- ) [ remove! drop ] [ push ] 2bi ;
+! Set protocol
+MIXIN: set
+GENERIC: adjoin ( elt set -- )
+GENERIC: in? ( elt set -- ? )
+GENERIC: delete ( elt set -- )
+GENERIC: set-like ( set exemplar -- set' )
+GENERIC: fast-set ( set -- set' )
+GENERIC: members ( set -- sequence )
+GENERIC: union ( set1 set2 -- set )
+GENERIC: intersect ( set1 set2 -- set )
+GENERIC: intersects? ( set1 set2 -- ? )
+GENERIC: diff ( set1 set2 -- set )
+GENERIC: subset? ( set1 set2 -- ? )
+GENERIC: set= ( set1 set2 -- ? )
+GENERIC: duplicates ( set -- seq )
+GENERIC: all-unique? ( set -- ? )
+
+! Defaults for some methods.
+! Override them for efficiency
+
+M: set set-like drop ; inline
+
+M: set union
+ [ [ members ] bi@ append ] keep set-like ;
-: conjoin ( elt assoc -- ) dupd set-at ;
+<PRIVATE
-: conjoin-at ( value key assoc -- )
- [ dupd ?set-at ] change-at ;
+: tester ( set -- quot )
+ fast-set [ in? ] curry ; inline
-: (prune) ( elt hash vec -- )
- 3dup drop key? [ 3drop ] [
- [ drop conjoin ] [ nip push ] 3bi
- ] if ; inline
+: sequence/tester ( set1 set2 -- set1' quot )
+ [ members ] [ tester ] bi* ; inline
-: prune ( seq -- newseq )
- [ ] [ length <hashtable> ] [ length <vector> ] tri
- [ [ (prune) ] 2curry each ] keep ;
+PRIVATE>
-: duplicates ( seq -- newseq )
- H{ } clone [ [ key? ] [ conjoin ] 2bi ] curry filter ;
+M: set intersect
+ [ sequence/tester filter ] keep set-like ;
-: gather ( seq quot -- newseq )
- map concat prune ; inline
+M: set diff
+ [ sequence/tester [ not ] compose filter ] keep set-like ;
-: unique ( seq -- assoc )
- [ dup ] H{ } map>assoc ;
+M: set intersects?
+ sequence/tester any? ;
+
+M: set subset?
+ sequence/tester all? ;
+
+M: set set=
+ 2dup subset? [ swap subset? ] [ 2drop f ] if ;
-: (all-unique?) ( elt hash -- ? )
- 2dup key? [ 2drop f ] [ conjoin t ] if ;
+M: set fast-set ;
-: all-unique? ( seq -- ? )
- dup length <hashtable> [ (all-unique?) ] curry all? ;
+M: set duplicates drop f ;
+
+M: set all-unique? drop t ;
<PRIVATE
-: tester ( seq -- quot ) unique [ key? ] curry ; inline
+: (pruned) ( elt hash vec -- )
+ 3dup drop in? [ 3drop ] [
+ [ drop adjoin ] [ nip push ] 3bi
+ ] if ; inline
+
+: pruned ( seq -- newseq )
+ [ f fast-set ] [ length <vector> ] bi
+ [ [ (pruned) ] 2curry each ] keep ;
PRIVATE>
-: intersect ( seq1 seq2 -- newseq )
- tester filter ;
+! Sequences are sets
+INSTANCE: sequence set
-: intersects? ( seq1 seq2 -- ? )
- tester any? ;
+M: sequence in?
+ member? ; inline
-: diff ( seq1 seq2 -- newseq )
- tester [ not ] compose filter ;
+M: sequence adjoin
+ [ delete ] [ push ] 2bi ;
-: union ( seq1 seq2 -- newseq )
- append prune ;
+M: sequence delete
+ remove! drop ; inline
-: subset? ( seq1 seq2 -- ? )
- tester all? ;
+M: sequence set-like
+ [ members ] dip like ;
-: set= ( seq1 seq2 -- ? )
- [ unique ] bi@ = ;
+M: sequence members
+ [ pruned ] keep like ;
+
+M: sequence all-unique?
+ dup pruned sequence= ;
+
+: combine ( sets -- set )
+ f [ union ] reduce ;
+
+: gather ( seq quot -- newseq )
+ map concat members ; inline
+
+: adjoin-at ( value key assoc -- )
+ [ [ f fast-set ] unless* [ adjoin ] keep ] change-at ;
+
+! Temporarily for compatibility
+
+: unique ( seq -- assoc )
+ [ dup ] H{ } map>assoc ;
+: conjoin ( elt assoc -- )
+ dupd set-at ;
+: conjoin-at ( value key assoc -- )
+ [ dupd ?set-at ] change-at ;
{ $subsections POSTPONE: H{ }
"Hashtables are documented in " { $link "hashtables" } "." ;
+ARTICLE: "syntax-hash-sets" "Hash set syntax"
+{ $subsections POSTPONE: HS{ }
+"Hash sets are documented in " { $link "hash-sets" } "." ;
+
ARTICLE: "syntax-tuples" "Tuple syntax"
{ $subsections POSTPONE: T{ }
"Tuples are documented in " { $link "tuples" } "." ;
"syntax-vectors"
"syntax-sbufs"
"syntax-hashtables"
+ "syntax-hash-sets"
"syntax-tuples"
"syntax-pathnames"
"syntax-effects"
$nl
"Parsing words can use this word as a generic end delimiter." } ;
-{ POSTPONE: { POSTPONE: V{ POSTPONE: H{ POSTPONE: C{ POSTPONE: T{ POSTPONE: W{ POSTPONE: } } related-words
+{ POSTPONE: { POSTPONE: V{ POSTPONE: H{ POSTPONE: HS{ POSTPONE: C{ POSTPONE: T{ POSTPONE: W{ POSTPONE: } } related-words
HELP: {
{ $syntax "{ elements... }" }
{ $description "Marks the beginning of a literal hashtable, given as a list of two-element arrays holding key/value pairs. Literal hashtables are terminated by " { $link POSTPONE: } } "." }
{ $examples { $code "H{ { \"tuna\" \"fish\" } { \"jalapeno\" \"vegetable\" } }" } } ;
+HELP: HS{
+{ $syntax "HS{ members ... }" }
+{ $values { "members" "a list of objects" } }
+{ $description "Marks the beginning of a literal hash set, given as a list of its members. Literal hash sets are terminated by " { $link POSTPONE: } } "." }
+{ $examples { $code "HS{ 3 \"foo\" }" } } ;
+
HELP: C{
{ $syntax "C{ real-part imaginary-part }" }
{ $values { "real-part" "a real number" } { "imaginary-part" "a real number" } }
io.pathnames vocabs vocabs.parser classes.parser classes.union
classes.intersection classes.mixin classes.predicate
classes.singleton classes.tuple.parser compiler.units
-combinators effects.parser slots ;
+combinators effects.parser slots hash-sets ;
IN: bootstrap.syntax
! These words are defined as a top-level form, instead of with
"H{" [ \ } [ >hashtable ] parse-literal ] define-core-syntax
"T{" [ parse-tuple-literal suffix! ] define-core-syntax
"W{" [ \ } [ first <wrapper> ] parse-literal ] define-core-syntax
+ "HS{" [ \ } [ <hash-set> ] parse-literal ] define-core-syntax
"POSTPONE:" [ scan-word suffix! ] define-core-syntax
"\\" [ scan-word <wrapper> suffix! ] define-core-syntax
] with-directory ;
: patch-counts ( authors -- assoc )
- dup prune
+ dup members
[ dup rot [ = ] with count ] with
{ } map>assoc ;
[ word? ] filter [ word>xref ] map ;
: filter-prefix ( seq prefix -- seq )
- [ drop-prefix nip length 0 = ] curry filter prune ;
+ [ drop-prefix nip length 0 = ] curry filter members ;
MEMO: (vocab-words) ( name -- seq )
>vocab-link words [ name>> ] map ;
append H{ } [ assoc-union ] reduce keys ;
: vocabs-words ( names -- seq )
- prune [ (vocab-words) ] map concat ;
+ members [ (vocab-words) ] map concat ;
PRIVATE>
[ >url ] map ;
: find-all-links ( vector -- vector' )
- [ find-hrefs ] [ find-frame-links ] bi append prune ;
+ [ find-hrefs ] [ find-frame-links ] bi union ;
: find-forms ( vector -- vector' )
"form" over find-opening-tags-by-name
math.vectors math.matrices math.matrices.elimination namespaces
parser prettyprint sequences words combinators math.parser
splitting sorting shuffle sets math.order ;
+FROM: namespaces => set ;
IN: koszul
! Utilities
[ nth ] 2keep swap 1 + tail-slice (inversions) +
] curry each ;
-: duplicates? ( seq -- ? )
- dup prune [ length ] bi@ > ;
-
: (wedge) ( n basis1 basis2 -- n basis )
- append dup duplicates? [
+ append dup all-unique? not [
2drop 0 { }
] [
dup permutation inversions -1^ rot *
namespaces parser sequences sorting splitting strings.parser
unicode.case unicode.categories calendar calendar.format
locals io.encodings.binary io.encodings.string prettyprint ;
+FROM: namespaces => set ;
IN: managed-server.chat
TUPLE: chat-server < managed-server ;
io.streams.duplex fry kernel locals math math.ranges multiline
namespaces prettyprint random sequences sets splitting threads
tools.continuations ;
+FROM: namespaces => set ;
IN: managed-server
TUPLE: managed-server < threaded-server clients ;
:: do-step ( errors summary-file details-file -- )
errors
[ error-type +linkage-error+ eq? not ] filter
- [ file>> ] map prune natural-sort summary-file to-file
+ [ file>> ] map members natural-sort summary-file to-file
errors details-file utf8 [ errors. ] with-file-writer ;
: do-tests ( -- )
"" to-refresh drop 2dup [ empty? not ] either?
[
"Boot image is out of date. Changed vocabs:" print
- append prune [ print ] each
+ members [ print ] each
flush
1 exit
] [ 2drop ] if ;
quotations generalizations debugger io compiler.units
kernel.private effects accessors hashtables sorting shuffle
math.order sets see effects.parser ;
+FROM: namespaces => set ;
IN: multi-methods
! PART I: Converting hook specializers
PRIVATE>
: euler004 ( -- answer )
- source-004 dup [ * ] cartesian-map concat prune max-palindrome ;
+ source-004 dup [ * ] cartesian-map combine max-palindrome ;
! [ euler004 ] 100 ave-time
! 1164 ms ave run time - 39.35 SD (100 trials)
! --------
: euler029 ( -- answer )
- 2 100 [a,b] dup [ ^ ] cartesian-map concat prune length ;
+ 2 100 [a,b] dup [ ^ ] cartesian-map concat members length ;
! [ euler029 ] 100 ave-time
! 704 ms ave run time - 28.07 SD (100 trials)
PRIVATE>
: euler032 ( -- answer )
- source-032 [ valid? ] filter products prune sum ;
+ source-032 [ valid? ] filter products members sum ;
! [ euler032 ] 10 ave-time
! 16361 ms ave run time - 417.8 SD (10 trials)
50 [1,b] 2000 [1,b]
[ mmp ] cartesian-map concat
[ pandigital? ] filter
- products prune sum ;
+ products members sum ;
! [ euler032a ] 10 ave-time
! 2624 ms ave run time - 131.91 SD (10 trials)
USING: assocs kernel math math.combinatorics math.functions
math.parser math.primes namespaces project-euler.common
sequences sets strings grouping math.ranges arrays fry math.order ;
+FROM: namespaces => set ;
IN: project-euler.051
<PRIVATE
SYMBOL: family-count
over length <rollover> swap [ bitxor ] 2map ;
: frequency-analysis ( seq -- seq )
- dup prune [
+ dup members [
[ 2dup [ = ] curry count 2array , ] each
] { } make nip ; inline
] { } make ;
: find-source ( seq -- elt )
- unzip diff prune
+ unzip diff
[ "Topological sort failed" throw ] [ first ] if-empty ;
: remove-source ( seq elt -- seq )
: topological-sort ( seq -- seq )
[ [ (topological-sort) ] { } make ] keep
- concat prune over diff append ;
+ combine over diff append ;
: euler079 ( -- answer )
source-079 >edges topological-sort 10 digits>integer ;
! [ euler079 ] 100 ave-time
! 1 ms ave run time - 0.46 SD (100 trials)
-! TODO: prune and diff are relatively slow; topological sort could be
+! TODO: set words on sequences are relatively slow; topological sort could be
! cleaned up and generalized much better, but it works for this problem
SOLUTION: euler079
[ 0 prefix ] [ 0 suffix ] bi [ + ] 2map ;
: generate ( n -- seq )
- 1 - { 1 } [ (generate) ] iterate concat prune ;
+ 1 - { 1 } [ (generate) ] iterate combine ;
: squarefree ( n -- ? )
factors all-unique? ;
nonmatching>> push-links ;
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
- [ base>> host>> ] [ links>> prune ] bi*
+ [ base>> host>> ] [ links>> members ] bi*
[ host>> = ] with partition ;
: add-spidered ( spider spider-result -- )