- name: bootstrap
run: ./build.sh net-bootstrap
- name: load-all
- run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off load-all save"'
+ run: './factor -e="USING: memory namespaces parser.notes syntax vocabs.hierarchy ; parser-quiet? off auto-use? off load-all save"'
- name: test
run: './factor -run=tools.test resource:core'
- name: help-lint
- name: build
run: arch -x86_64 ./build.sh net-bootstrap
- name: load-all
- run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off load-all save"'
+ run: './factor -e="USING: memory namespaces parser.notes syntax vocabs.hierarchy ; parser-quiet? off auto-use? off load-all save"'
- name: test
run: './factor -run=tools.test resource:core'
- name: help-lint
run: build.cmd net-bootstrap
- name: load-all
shell: cmd
- run: 'factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off load-all save"'
+ run: 'factor -e="USING: memory namespaces parser.notes syntax vocabs.hierarchy ; parser-quiet? off load-all save"'
- name: test
shell: cmd
run: 'factor -run=tools.test resource:core'
- name: bootstrap
run: ./build.sh net-bootstrap
- name: load-all
- run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off load-all save"'
+ run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off auto-use? off load-all save"'
- name: test
run: './factor -run=tools.test resource:core'
- name: help-lint
- name: build
run: arch -x86_64 ./build.sh net-bootstrap
- name: load-all
- run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off load-all save"'
+ run: './factor -e="USING: memory namespaces parser.notes vocabs.hierarchy ; parser-quiet? off auto-use? off load-all save"'
- name: test
run: './factor -run=tools.test resource:core'
- name: help-lint
! See https://factorcode.org/license.txt for BSD license.
USING: accessors alien.c-types arrays cocoa cocoa.application
cocoa.classes cocoa.messages cocoa.runtime cocoa.subclassing
-compiler.units kernel locals.backend math.parser sequences ;
+compiler.units kernel locals.backend math.parser quotations
+sequences ;
IN: cocoa.statusbar
<< {
: get-system-statusbar ( -- alien )
NSStatusBar -> systemStatusBar ;
-TUPLE: platform-menu name items ;
-TUPLE: platform-menu-item title quot key-equivalent selector target ;
+TUPLE: platform-menu name items menu-alien statusbar-alien ;
+TUPLE: platform-menu-item title { quot callable } key-equivalent selector target ;
+
+: <platform-menu> ( name items -- platform-menu )
+ platform-menu new
+ swap >>items
+ swap >>name ;
+
+: <platform-menu-item> ( title quot -- platform-menu-item )
+ platform-menu-item new
+ swap >>quot
+ swap >>title ;
: menu>dummy-class ( menu -- object )
[ name>> "NSObject" V{ } ]
ns-menu ns-menu-items [ -> addItem: ] with each
ns-menu ;
-:: show-menu ( menu -- menu-alien statusbar-item-alien )
- menu menu>alien :> menu-alien
+:: show-menu* ( platform-menu -- menu-alien statusbar-alien )
+ platform-menu menu>alien :> menu-alien
get-system-statusbar :> system-alien
system-alien
NSVariableStatusItemLength -> statusItemWithLength: [ -> retain ] keep :> ns-status-item
- ns-status-item menu name>> <NSString> -> setTitle:
+ ns-status-item platform-menu name>> <NSString> -> setTitle:
menu-alien -> setMenu:
menu-alien ns-status-item ;
+: show-statusbar ( platform-menu -- platform-menu )
+ [ show-menu* ] keep
+ [ statusbar-alien<< ] keep
+ [ menu-alien<< ] keep ;
+
: enable-menu-item ( alien -- ) 1 -> setEnabled: ;
: disable-menu-item ( alien -- ) 0 -> setEnabled: ;
: hide-statusbar-item ( statusbar-item-alien -- )
[ get-system-statusbar ] dip -> removeStatusItem: ;
+
+: hide-statusbar* ( platform-menu -- )
+ [ get-system-statusbar ] dip -> removeStatusItem: ;
+
+: hide-statusbar ( platform-menu -- platform-menu )
+ [ statusbar-alien>> hide-statusbar* ] keep ;
--- /dev/null
+Doug Coleman
--- /dev/null
+! Copyright (C) 2024 Doug Coleman.
+! See https://factorcode.org/license.txt for BSD license.
+USING: editors io.pathnames io.standard-paths kernel make sequences system ;
+IN: editors.notepadnext
+
+SINGLETON: notepadnext
+
+HOOK: find-notepadnext-path os ( -- path line#? )
+
+M: macosx find-notepadnext-path
+ {
+ "com.yourcompany.NotepadNext"
+ "io.github.dail8859.NotepadNext"
+ } [
+ find-native-bundle [
+ "Contents/MacOS/NotepadNext" append-path
+ ] [
+ f
+ ] if*
+ ] map-find "io.github.dail8859.NotepadNext" = ;
+
+M: windows find-notepadnext-path
+ { "Notepad Next" } "NotepadNext.exe" find-in-applications
+ [ "NotepadNext.exe" ] unless* t ;
+
+M: linux find-notepadnext-path
+ "NotepadNext" find-in-path t ;
+
+M: notepadnext editor-command
+ '[
+ find-notepadnext-path
+ [ , _ , ] [ [ "-n" , _ , ] when ] bi*
+ ] { } make ;
--- /dev/null
+NotepadNext editor integration
{ "assoc-all-value?" { "all-values?" "0.100" } }
{ "assoc-any-key?" { "any-key?" "0.100" } }
{ "assoc-any-value?" { "any-value?" "0.100" } }
- { "?download-to" { "download-once-to" "0.100" } }
+ { "?download-to" { "download-once-into" "0.100" } }
+ { "download-to" { "download-into" "0.100" } }
}
: compute-assoc-fixups ( continuation name assoc -- seq )
strings urls ;
IN: http.download
+
HELP: download
{ $values { "url" { $or url string } } { "path" "a pathname string" } }
{ $description "Downloads the contents of the URL to a file in the " { $link current-directory } " having the same file name and returns the pathname." }
{ $notes "Use this to download the file every time." }
{ $errors "Throws an error if the HTTP request fails." } ;
-HELP: download-to
-{ $values { "url" { $or url string } } { "file" "a pathname string" } { "path" "a pathname string" } }
+HELP: download-into
+{ $values
+ { "url" url } { "directory" "a pathname string" }
+ { "path" "a pathname string" }
+}
+{ $description "Downloads the contents of the URL to a file in the given directory and returns the pathname." } ;
+
+HELP: download-as
+{ $values { "url" { $or url string } } { "path" "a pathname string" } }
{ $description "Downloads the contents of the URL to a file with the given pathname and returns the pathname." }
{ $notes "Use this to download the file every time." }
{ $errors "Throws an error if the HTTP request fails." } ;
-HELP: download-once-to
-{ $values { "url" { $or url string } } { "file" "a pathname string" } { "path" "a pathname string" } }
+
+HELP: download-once
+{ $values
+ { "url" url }
+ { "path" "a pathname string" }
+}
+{ $description "Downloads a file to " { $link current-directory } " and returns the path. If the path already exists, this word does not download it again." } ;
+
+HELP: download-once-into
+{ $values
+ { "url" url } { "directory" "a pathname string" }
+ { "path" "a pathname string" }
+}
+{ $description "Downloads a file to " { $snippet "directory" } " and returns the path. If the path already exists, this word does not download it again." } ;
+
+HELP: download-once-as
+{ $values { "url" { $or url string } } { "path" "a pathname string" } }
{ $description "If the file exists on disk, returns that pathname without downloading anything. Otherwise, downloads the contents of the URL to a file with the given pathname and returns the pathname." }
{ $notes "Use this if the contents of the URL are not expected to change." }
{ $errors "Throws an error if the HTTP request fails." } ;
+HELP: download-outdated
+{ $values
+ { "url" url } { "duration" duration }
+ { "path" "a pathname string" }
+}
+{ $description "Download a URL into " { $link current-directory } " unless an existing file has a timestamp newer than " { $snippet "duration" } " ago." } ;
+
+HELP: download-outdated-as
+{ $values
+ { "url" url } { "path" "a pathname string" } { "duration" duration }
+ { "path'" "a pathname string" }
+}
+{ $description "Download a URL to the given path unless an existing file has a timestamp newer than " { $snippet "duration" } " ago." } ;
+
+HELP: download-outdated-into
+{ $values
+ { "url" url } { "directory" "a pathname string" } { "duration" duration }
+ { "path" "a pathname string" }
+}
+{ $description "Download a URL into a directory unless an existing file has a timestamp newer than " { $snippet "duration" } " ago." } ;
+
+
+HELP: download-to-temporary-file
+{ $values
+ { "url" url }
+ { "path" "a pathname string" }
+}
+{ $description "Downloads a url to a unique temporary file in " { $link current-directory } " named " { $snippet "temp.XXXXXXXXXreal-file-name.ext.temp" } "." } ;
+
+HELP: download-name
+{ $values
+ { "url" url }
+ { "name" object }
+}
+{ $description "Turns a URL into a filename suitable for saving the downloaded content locally." } ;
ARTICLE: "http.download" "HTTP Download Utilities"
"The " { $vocab-link "http.download" } " vocabulary provides utilities for downloading files from the web."
"Utilities to retrieve a " { $link url } " and save the contents to a file:"
{ $subsections
download
- download-to
- download-once-to
+ download-into
+ download-as
+ download-once
+ download-once-into
+ download-once-as
+ download-outdated
+ download-outdated-into
+ download-outdated-as
}
-;
+
+"Helper words:"
+{ $subsections
+ download-to-temporary-file
+ download-name
+} ;
ABOUT: "http.download"
USING: accessors calendar checksums combinators.short-circuit
http.client io io.directories io.encodings.binary io.files
io.files.info io.files.unique io.pathnames kernel math
-math.order math.parser present sequences shuffle splitting ;
-
+math.order math.parser namespaces present sequences shuffle
+splitting ;
IN: http.download
-: file-too-old-or-not-exists? ( file duration -- ? )
+: file-too-old-or-not-exists? ( path duration -- ? )
[ ?file-info [ created>> ] ?call ]
[ ago ] bi*
over [ before? ] [ 2drop t ] if ;
-: delete-when-old ( file duration -- deleted? )
+: delete-when-old ( path duration -- deleted/missing? )
dupd file-too-old-or-not-exists? [ ?delete-file t ] [ drop f ] if ;
-: file-matches-checksum? ( file checksum-type bytes -- ? )
+: file-matches-checksum? ( path checksum-type bytes -- ? )
[ checksum-file ] dip = ;
-: delete-when-checksum-mismatches ( file checksum-type bytes -- deleted? )
+: delete-when-checksum-mismatches ( path checksum-type bytes -- deleted? )
dupdd file-matches-checksum? [ drop f ] [ ?delete-file t ] if ;
: file-size= ( path n -- ? ) [ ?file-info [ size>> ] ?call ] dip = ;
drop t
] if ;
-: delete-when-file-size-mismatches? ( file size -- deleted? )
+: delete-when-file-size-mismatches? ( path size -- deleted? )
dupd file-size= [ drop f ] [ ?delete-file t ] if ;
: download-name ( url -- name )
PRIVATE>
-: download-to ( url file -- path )
- [
- [ download-temporary-name binary ] keep
- '[ _ http-write-request ] with-unique-file-writer
- ] dip [ move-file ] keep ;
-
-: download-once-to ( url file -- path )
- dup file-exists? [ nip ] [ download-to ] if ;
+: download-to-temporary-file ( url -- path )
+ [ download-temporary-name binary ] keep
+ '[ _ http-write-request ] with-unique-file-writer ;
-: download-once ( url -- path )
- dup download-name download-once-to ;
+: download-as ( url path -- path )
+ [ download-to-temporary-file ] dip [ ?move-file ] keep ;
-: download-outdated-to ( url file duration -- path )
- 2dup delete-when-old [ drop download-to ] [ drop nip ] if ;
+: download-into ( url directory -- path )
+ [ [ download-to-temporary-file ] keep ] dip
+ dup make-directories to-directory nip
+ [ move-file ] keep ;
: download ( url -- path )
- dup download-name download-to ;
+ dup download-name download-as ;
+
+: download-once-as ( url path -- path )
+ dup file-exists? [ nip ] [ download-as ] if ;
+
+: download-once-into ( url directory -- path ) to-directory download-once-as ;
+
+: download-once ( url -- path ) current-directory get download-once-into ;
+
+: download-outdated-as ( url path duration -- path' )
+ 2dup delete-when-old [ drop download-as ] [ drop nip ] if ;
+
+: download-outdated-into ( url directory duration -- path )
+ [ to-directory ] dip download-outdated-as ;
+
+: download-outdated ( url duration -- path )
+ [ dup download-name current-directory get to-directory nip ] dip download-outdated-as ;
" } switch ;" }
{ $see-also undo } ;
+HELP: under
+{ $values { "invertible-quot" quotation } { "quot" quotation } }
+{ $description "Applies " { $snippet "invertible-quot" } ", then " { $snippet "quot" } " and finally the inverse of " { $snippet "invertible-quot" } "." }
+{ $examples
+ "Round a decimal number to two decimals:"
+ { $example
+ "USING: inverse math math.functions prettyprint ;"
+ "123.456 [ 100 * ] [ round ] under ."
+ "123.46"
+ }
+} ;
+
ARTICLE: { "inverse" "intro" } "Invertible quotations"
"The inverse vocab defines a way to 'undo' quotations, and builds a pattern matching framework on that basis. A quotation can be inverted by reversing it and inverting each word. To define the inverse for particular word, use"
{ $subsections
{ 0 } [ { 1 2 } [ [ 1 + 2 ] { } output>sequence ] undo ] unit-test
{ { 0 1 } } [ 1 2 [ [ [ 1 + ] bi@ ] input<sequence ] undo ] unit-test
+
+{ 123.46 } [ 123.456 [ 100 * ] [ round ] under ] unit-test
SYNTAX: INVERSE: scan-word parse-definition define-inverse ;
SYNTAX: DUAL: scan-word scan-word define-dual ;
+
+MACRO: under ( invertible-quot quot -- quot )
+ over [undo] '[ @ @ @ ] ;
"file6" link-info drop
] unit-test
+ { "file7 contents" } [
+ "file8" touch-file
+ "file7 contents" "file7" utf8 set-file-contents
+ "file7" "file8" move-file
+ "file8" utf8 file-contents
+ "file8" delete-file
+ ] unit-test
+
{ } [
{ "Hello world." }
"test-foo.txt" ascii set-file-lines
HOOK: move-file io-backend ( from to -- )
+: create-parent-directory ( path -- )
+ normalize-path parent-directory make-directories ;
+
: ?move-file ( from to -- )
- over file-exists? [ move-file ] [ 2drop ] if ;
+ over file-exists? [
+ dup create-parent-directory move-file
+ ] [ 2drop ] if ;
HOOK: move-file-atomically io-backend ( from to -- )
! Copyright (C) 2008 Doug Coleman.
! See https://factorcode.org/license.txt for BSD license.
USING: system io.directories alien.strings
-io.pathnames io.backend io.files.windows destructors
+io.pathnames io.backend io.files.windows literals destructors
kernel accessors calendar windows windows.errors
windows.kernel32 alien.c-types sequences splitting
fry continuations classes.struct windows.time ;
] with-disposal ;
M: windows move-file
- [ normalize-path ] bi@ MoveFile win32-error=0/f ;
+ [ normalize-path ] bi@
+ flags{ MOVEFILE_REPLACE_EXISTING MOVEFILE_COPY_ALLOWED }
+ MoveFileEx win32-error=0/f ;
M: windows move-file-atomically
- [ normalize-path ] bi@ 0 MoveFileEx win32-error=0/f ;
+ [ normalize-path ] bi@ MOVEFILE_REPLACE_EXISTING
+ MoveFileEx win32-error=0/f ;
ERROR: file-delete-failed path error ;
! See https://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs classes.tuple combinators hints
-kernel kernel.private make math math.functions math.order
-ranges sequences sequences.private sorting strings vectors ;
+kernel kernel.private make math math.functions math.order ranges
+sequences sequences.private sets sorting strings vectors ;
IN: math.combinatorics
<PRIVATE
: all-combinations ( seq k -- seq' )
[ ] map-combinations ;
+: all-unique-combinations ( seq n -- seq' )
+ HS{ } clone [ '[ _ adjoin ] each-combination ] keep members ;
+
: all-combinations? ( ... seq k quot: ( ... elt -- ... ? ) -- ... ? )
combinations-quot all? ; inline
{ $values
{ "obj1" object }
{ "obj2" object }
- { "sort-spec" "a sequence of sequences of accessors and a comparator" }
+ { "sort-spec" "a sequence of sequences of accessors/quotations and a comparator" }
{ "<=>" { $link +lt+ } ", " { $link +eq+ } " or " { $link +gt+ } }
}
{ $description "Compares two objects using a chain of intrinsic linear orders such that if two objects are " { $link +eq+ } ", then the next ordering is tried." } ;
arrays sequences kernel assocs multiline sorting.functor ;
IN: sorting.specification.tests
+
TUPLE: sort-test a b c tuple2 ;
TUPLE: tuple2 d ;
} { { a>> <=> } { b>> >=< } { c>> <=> } } sort-with-spec
] unit-test
+! Test with quotations too even though it's basically the same
+{
+ {
+ T{ sort-test { a 1 } { b 3 } { c 9 } }
+ T{ sort-test { a 1 } { b 1 } { c 10 } }
+ T{ sort-test { a 1 } { b 1 } { c 11 } }
+ T{ sort-test { a 2 } { b 5 } { c 2 } }
+ T{ sort-test { a 2 } { b 5 } { c 3 } }
+ }
+} [
+ {
+ T{ sort-test f 1 3 9 }
+ T{ sort-test f 1 1 10 }
+ T{ sort-test f 1 1 11 }
+ T{ sort-test f 2 5 3 }
+ T{ sort-test f 2 5 2 }
+ } { { [ a>> ] <=> } { [ b>> ] >=< } { [ c>> ] <=> } } sort-with-spec
+] unit-test
+
{ { } } [
{ } { { a>> <=> } { b>> >=< } { c>> <=> } } sort-with-spec
] unit-test
! Copyright (C) 2009 Slava Pestov, Doug Coleman.
! See https://factorcode.org/license.txt for BSD license.
-USING: arrays assocs kernel math.order sequences sorting ;
+USING: arrays assocs kernel math.order quotations sequences
+sorting ;
IN: sorting.specification
: execute-comparator ( obj1 obj2 word -- <=>/f )
execute( obj1 obj2 -- <=> ) dup +eq+ eq? [ drop f ] when ;
+: call-accessor ( obj1 obj2 quot -- obj1' obj2' )
+ '[ _ call( obj -- value ) ] bi@ ;
+
: execute-accessor ( obj1 obj2 word -- obj1' obj2' )
'[ _ execute( tuple -- value ) ] bi@ ;
[
dup array? [
unclip-last-slice
- [ [ execute-accessor ] each ] dip
+ [ [ dup quotation? [ call-accessor ] [ execute-accessor ] if ] each ] dip
] when execute-comparator
] 2with map-find drop +eq+ or ;
: grapheme-break-test ( -- filename )
"https://downloads.factorcode.org/misc/UCD/15.1.0/auxiliary/GraphemeBreakTest.txt"
- "GraphemeBreakTest-15.1.0.txt" cache-file download-once-to ;
+ "GraphemeBreakTest-15.1.0.txt" cache-file download-once-as ;
: word-break-test ( -- filename )
"https://downloads.factorcode.org/misc/UCD/15.1.0/auxiliary/WordBreakTest.txt"
- "WordBreakTest-15.1.0.txt" cache-file download-once-to ;
+ "WordBreakTest-15.1.0.txt" cache-file download-once-as ;
: parse-test-file ( file-name -- tests )
utf8 file-lines
: collation-test-lines ( -- lines )
"https://downloads.factorcode.org/misc/UCA/15.1.0/CollationTest_SHIFTED.txt"
- "CollationTest_SHIFTED_15.1.0.txt" cache-file download-once-to
+ "CollationTest_SHIFTED_15.1.0.txt" cache-file download-once-as
utf8 file-lines [ "#" head? ] reject harvest ;
: parse-collation-test-shifted ( -- lines )
! Could use simple-flat-file after some cleanup
: parse-normalization-tests ( -- tests )
"https://downloads.factorcode.org/misc/UCD/15.1.0/NormalizationTest.txt"
- "NormalizationTest-15.1.0.txt" cache-file download-once-to
+ "NormalizationTest-15.1.0.txt" cache-file download-once-as
utf8 file-lines [ "#" head? ] reject
[ "@" head? ] split*-when
2 <groups> [ first2 [ first ] dip 2array ] map
CONSTANT: FILE_SUPPORTS_USN_JOURNAL 0x02000000
CONSTANT: FILE_DAX_VOLUME 0x20000000
+CONSTANT: MOVEFILE_COPY_ALLOWED 0x2
+CONSTANT: MOVEFILE_CREATE_HARDLINK 0x10
+CONSTANT: MOVEFILE_DELAY_UNTIL_REBOOT 0x4
+CONSTANT: MOVEFILE_FAIL_IF_NOT_TRACKABLE 0x20
+CONSTANT: MOVEFILE_REPLACE_EXISTING 0x1
+CONSTANT: MOVEFILE_WRITE_THROUGH 0x8
+
CONSTANT: DONT_RESOLVE_DLL_REFERENCES 1
STRUCT: FILE_NOTIFY_INFORMATION
] if*
] with-directory ; inline
-: ?download ( path -- )
- dup file-name file-exists? [ drop ] [ download drop ] if ; inline
-
: with-tar-gz ( path quot -- )
'[
_ dup "build-from-source considering tar.gz %s" sprintf print
] [
"- building..." write
[
- [ ?download ]
- [ file-name { "tar" "xvfz" } swap suffix try-process ]
- [ file-name ".tar.gz" ?tail drop ] tri
- prepend-current-path _ with-directory
+ download-once
+ [ { "tar" "xvfz" } swap suffix try-process ]
+ [ ".tar.gz" ?tail drop ] bi
+ _ with-directory
now timestamp>rfc3339
] dip utf8 set-file-contents
"done!" print
! Copyright (C) 2023 Doug Coleman.
! See https://factorcode.org/license.txt for BSD license.
USING: build-from-source combinators.smart continuations
-environment http.client io.directories io.files.temp io.launcher
+environment http.download io.directories io.files.temp io.launcher
io.pathnames kernel layouts qw sequences windows.shell32 ;
IN: build-from-source.windows
! Probably not needed on Windows 10+
: install-windows-redistributable ( -- )
[
- "https://aka.ms/vs/17/release/vc_redist.x64.exe" download
+ "https://aka.ms/vs/17/release/vc_redist.x64.exe" download drop
qw{ vc_redist.x64.exe /install /passive /norestart } try-process
] with-temp-directory ;
"https://downloads.factorcode.org/misc/bun_zipper.ply"
: download-bunny ( -- path )
- model-url model-path download-once-to ;
+ model-url model-path download-once-as ;
:: (draw-triangle) ( ns vs triple -- )
triple [| elt |
: uses-yaml? ( paths -- ? ) [ yaml-file? ] any? ;
: docker-file? ( path -- ? ) >lower file-name { "dockerfile" ".dockerignore" "docker-compose.yaml" } member? ;
-: docker-files ( paths -- paths' ) [ docker-file? ] filter ;
+: docker-files ( paths -- paths' )
+ [ [ docker-file? ] filter ]
+ [ [ >lower "dockerfile" subseq-of? ] filter ] bi
+ append members ;
: uses-docker? ( paths -- ? ) [ docker-file? ] any? ;
: automake-file? ( path -- ? )
: analyze-codebase-path ( path -- )
{
- [ normalize-path "project at path `%s`" sprintf print nl ]
+ [ normalize-path "project at path `%s`" sprintf print ]
[ uses-git? [ "uses git" print ] when ]
[ has-package-json? [ "has a package.json file" print ] when ]
} cleave ;
+: file. ( path -- ) >pathname ... ;
+: files. ( paths -- ) [ file. ] each ;
+
: analyze-codebase-paths ( paths -- )
{
[
[ length "%d binary files" sprintf print ]
[ length "%d text files" sprintf print ] bi*
]
- [ github-files [ sort "has .github files" print ... ] unless-empty ]
- [ license-files [ sort [ length "has %d license files" sprintf print ] [ ... ] bi ] unless-empty ]
- [ readme-files [ sort "has readme files" print ... ] unless-empty ]
- [ owners-files [ sort "has owners files" print ... ] unless-empty ]
- [ codenotify-files [ sort "has codenotify files" print ... ] unless-empty ]
- [ contributing-files [ sort "has contributing files" print ... ] unless-empty ]
- [ changelog-files [ sort "has changelog files" print ... ] unless-empty ]
- [ security-files [ sort "has security files" print ... ] unless-empty ]
- [ notice-files [ sort "has notice files" print ... ] unless-empty ]
- [ version-files [ sort "has version files" print ... ] unless-empty ]
+ [ github-files [ sort "has .github files" print files. ] unless-empty ]
+ [ license-files [ sort [ length "has %d license files" sprintf print ] [ files. ] bi ] unless-empty ]
+ [ readme-files [ sort "has readme files" print files. ] unless-empty ]
+ [ owners-files [ sort "has owners files" print files. ] unless-empty ]
+ [ codenotify-files [ sort "has codenotify files" print files. ] unless-empty ]
+ [ contributing-files [ sort "has contributing files" print files. ] unless-empty ]
+ [ changelog-files [ sort "has changelog files" print files. ] unless-empty ]
+ [ security-files [ sort "has security files" print files. ] unless-empty ]
+ [ notice-files [ sort "has notice files" print files. ] unless-empty ]
+ [ version-files [ sort "has version files" print files. ] unless-empty ]
[
{ [ dot-files ] [ rc-files diff ] [ ignore-files diff ] } cleave
- [ sort "has dot files" print ... ] unless-empty
+ [ sort "has dot files" print files. ] unless-empty
]
- [ rc-files [ sort [ length "has %d rc files" sprintf print ] [ ... ] bi ] unless-empty ]
- [ configure-files [ sort "uses configure files" print ... ] unless-empty ]
- [ automake-files [ sort "uses automake" print ... ] unless-empty ]
- [ make-files [ sort "uses make" print ... ] unless-empty ]
- [ nmake-files [ sort "uses nmake" print ... ] unless-empty ]
- [ cmake-files [ sort "uses cmake" print ... ] unless-empty ]
- [ gradle-files [ sort "uses gradle" print ... ] unless-empty ]
- [ cargo-files [ sort "uses rust/cargo" print ... ] unless-empty ]
- [ julia-project-files [ sort "uses julia Project.toml" print ... ] unless-empty ]
- [ in-files [ sort "uses 'in' files" print ... ] unless-empty ]
- [ ignore-files [ sort [ length "has %d ignore files" sprintf print ] [ ... ] bi ] unless-empty nl ]
- [ [ rust-project-dir? ] filter [ [ "rust projects at " print . ] [ [ analyze-rust-project ] each ] bi ] unless-empty nl ]
+ [ rc-files [ sort [ length "has %d rc files" sprintf print ] [ files. ] bi ] unless-empty ]
+ [ configure-files [ sort "uses configure files" print files. ] unless-empty ]
+ [ automake-files [ sort "uses automake" print files. ] unless-empty ]
+ [ make-files [ sort "uses make" print files. ] unless-empty ]
+ [ nmake-files [ sort "uses nmake" print files. ] unless-empty ]
+ [ cmake-files [ sort "uses cmake" print files. ] unless-empty ]
+ [ docker-files [ sort "uses docker" print files. ] unless-empty ]
+ [ gradle-files [ sort "uses gradle" print files. ] unless-empty ]
+ [ cargo-files [ sort "uses rust/cargo" print files. ] unless-empty ]
+ [ julia-project-files [ sort "uses julia Project.toml" print files. ] unless-empty ]
+ [ in-files [ sort "uses 'in' files" print files. ] unless-empty ]
+ [ ignore-files [ sort [ length "has %d ignore files" sprintf print ] [ files. ] bi ] unless-empty ]
+ [ [ rust-project-dir? ] filter [ [ "rust projects at " print file. ] [ [ analyze-rust-project ] each ] bi ] unless-empty ]
[
[ upper-files ] keep
{
[ notice-files diff ]
[ version-files diff ]
} cleave
- [ sort [ length "has %d UPPER files (minus github,license,readme,owner,codenotify,contributing,changelog,security,notice,version)" sprintf print ] [ ... ] bi ] unless-empty nl
+ [ sort [ length "has %d UPPER files (minus github,license,readme,owner,codenotify,contributing,changelog,security,notice,version)" sprintf print ] [ files. ] bi ] unless-empty
]
- [ "Top 20 largest files" print file-sizes sort-values 20 index-or-length tail* [ normalize-path ] map-keys reverse assoc. nl ]
- [ "Top 10 file extension sizes" print sum-sizes-by-extension 10 index-or-length tail* reverse assoc. nl ]
- [ "Top 10 text file line counts" print sum-line-counts-by-extension 10 index-or-length tail* reverse assoc. nl ]
- [ "Top 10 file extension counts" print count-by-file-extension 10 index-or-length tail* reverse assoc. nl ]
+ [ "Top 20 largest files" print file-sizes sort-values 20 index-or-length tail* [ normalize-path ] map-keys reverse assoc. ]
+ [ "Top 10 file extension sizes" print sum-sizes-by-extension 10 index-or-length tail* reverse assoc. ]
+ [ "Top 10 text file line counts" print sum-line-counts-by-extension 10 index-or-length tail* reverse assoc. ]
+ [ "Top 10 file extension counts" print count-by-file-extension 10 index-or-length tail* reverse assoc. ]
} cleave ;
: analyze-codebase ( path -- )
! Copyright (C) 2014 John Benediktsson
! See https://factorcode.org/license.txt for BSD license
-USING: alien alien.c-types alien.data alien.destructors
-alien.syntax command-line curl.ffi destructors io
-io.encodings.string io.encodings.utf8 io.streams.c kernel math
-namespaces present sequences ;
+USING: alien.destructors command-line curl.ffi destructors
+http.download io.backend io.streams.c kernel namespaces present
+sequences ;
IN: curl
CURLOPT_URL swap present curl-set-opt ;
: curl-set-file ( CURL path -- )
- CURLOPT_FILE swap "wb" fopen &fclose curl-set-opt ;
+ CURLOPT_FILE swap normalize-path "wb" fopen &fclose curl-set-opt ;
: curl-perform ( CURL -- )
curl_easy_perform check-code ;
PRIVATE>
-: curl-download-to ( url path -- )
+: curl-download-as ( url path -- )
[
curl-init
[ swap curl-set-file ]
[ curl-perform ] tri
] with-destructors ;
+: curl-download ( url -- path )
+ dup download-name [ curl-download-as ] keep ;
+
: curl-main ( -- )
command-line get [
curl-init
TUPLE: discord-bot-config
client-id client-secret
- token application-id guild-id channel-id permissions
+ token application-id guild-id channel-id
+ permissions intents
user-callback obey-names
metadata
discord-bot mailbox connect-thread ;
discord-post-request json-request ;
: discord-post-json ( payload route -- json )
[ >json ] dip discord-post-request add-json-header json-request ;
+: discord-post-json-no-resp ( payload route -- )
+ [ >json ] dip discord-post-request add-json-header http-request 2drop ;
: discord-patch-json ( payload route -- json )
[ >json ] dip discord-patch-request add-json-header json-request ;
: discord-delete-json ( route -- json )
: delete-discord-application-guild-command ( application-id -- json )
"/applications/%s/commands" sprintf discord-delete-json ;
-: create-interaction-response ( interaction-id interaction-token -- json )
- [ H{ { "type" 4 } { "data" "pang" } } clone ] 2dip
- "/webhooks/%s/%s/messages/callback" sprintf discord-post ;
-
+: create-interaction-response ( json interaction-id interaction-token -- )
+ "/interactions/%s/%s/callback" sprintf discord-post-json-no-resp ;
: get-original-interaction-response ( application-id interaction-token -- json )
"/webhooks/%s/%s/messages/@original" sprintf discord-get ;
-
+: edit-interaction-response ( json application-id interaction-token -- json )
+ "/webhooks/%s/%s/messages/@original" sprintf discord-patch-json ;
: send-message* ( string channel-id -- json )
: get-discord-bot-gateway ( -- json ) "/gateway/bot" discord-get ;
: gateway-identify-json ( -- json )
- \ discord-bot get config>> token>> [[ {
+ \ discord-bot get
+ [ config>> ] ?call
+ [ [ token>> ] ?call "0" or ]
+ [ [ intents>> ] ?call 3276541 or ] bi
+ [[ {
"op": 2,
"d": {
"token": "%s",
"device": "discord.factor"
},
"large_threshold": 250,
- "intents": 3276541
+ "intents": %d
}
}]] sprintf json> >json ;
: translate-tts ( text -- file )
"https://translate.google.com/translate_tts?tl=en" >url
swap "q" set-query-param [
- "" ".mp3" unique-file download-to
+ "" ".mp3" unique-file download-as
] with-temp-directory ;
! Example:
"https://downloads.factorcode.org/misc/bun_zipper.ply"
: download-bunny ( -- path )
- bunny-model-url bunny-model-path download-once-to ;
+ bunny-model-url bunny-model-path download-once-as ;
: get-bunny-data ( bunny-state -- )
download-bunny bunny-data
-USING: arrays grouping help.markup help.syntax kernel math
+USING: arrays assocs grouping help.markup help.syntax kernel math
quotations sequences ;
IN: grouping.extras
HELP: group-by
{ $values { "seq" sequence } { "quot" { $quotation ( elt -- key ) } } { "groups" "a new assoc" } }
-{ $description "Groups the elements by the key received by applying quot to each element in the sequence." }
+{ $description "Groups consecutive elements by the key received by applying quot to each element in the sequence." }
{ $examples
{ $example
"USING: grouping.extras unicode.data prettyprint sequences strings ;"
"\"THis String Has CasE!\" [ category ] group-by [ last >string ] { } map-as ."
"{ \"TH\" \"is\" \" \" \"S\" \"tring\" \" \" \"H\" \"as\" \" \" \"C\" \"as\" \"E\" \"!\" }"
}
+ { $example
+ "USING: grouping.extras prettyprint sequences strings ;"
+ "{ \"apple\" \"anchovy\" \"banana\" \"anise\" \"bagel\" \"bratwurst\" } [ first 1string ] group-by ."
+ "V{\n { \"a\" V{ \"apple\" \"anchovy\" } }\n { \"b\" V{ \"banana\" } }\n { \"a\" V{ \"anise\" } }\n { \"b\" V{ \"bagel\" \"bratwurst\" } }\n}"
+ }
} ;
+{ group-by collect-by } related-words
+
HELP: <n-groups>
{ $values
{ "seq" sequence } { "n" integer }
"https://github.com/golbin/TensorFlow-MNIST/raw/master/mnist/data/t10k-images-idx3-ubyte.gz"
"https://github.com/golbin/TensorFlow-MNIST/raw/master/mnist/data/t10k-labels-idx1-ubyte.gz"
}
- [ [ download-once ] parallel-each ]
+ [ [ download-once-into ] parallel-each ]
[ [ dup file-stem file-exists? [ drop ] [ file-name gzip-decompress-file ] if ] each ]
[ [ file-stem binary file-contents ] map ] tri
first4 {
: a.macho ( -- path )
URL" https://downloads.factorcode.org/misc/a.macho"
- "a.macho" cache-file download-once-to ;
+ "a.macho" cache-file download-once-as ;
: a2.macho ( -- path )
URL" https://downloads.factorcode.org/misc/a2.macho"
- "a2.macho" cache-file download-once-to ;
+ "a2.macho" cache-file download-once-as ;
cpu ppc? [
{ $ validation-output }
: wordlist ( -- seq )
"https://projecteuler.net/project/resources/p098_words.txt"
- "p098_words.txt" temp-file download-once-to
+ "p098_words.txt" temp-file download-once-as
utf8 file-contents "," split [ rest-slice but-last ] map ;
: squarelist ( n -- seq )
: default-word-list ( -- path )
URL" https://raw.githubusercontent.com/quinnj/Rosetta-Julia/master/unixdict.txt"
- "unixdict.txt" temp-file download-once-to ;
+ "unixdict.txt" temp-file download-once-as ;
: longest-deranged-anagrams ( -- anagrams )
default-word-list (longest-deranged-anagrams) ;
MEMO: word-list ( -- seq )
URL" https://raw.githubusercontent.com/quinnj/Rosetta-Julia/master/unixdict.txt"
"unixdict.txt" temp-file
- download-once-to utf8 file-lines ;
+ download-once-as utf8 file-lines ;
: ordered-word? ( word -- ? )
>lower [ <= ] monotonic? ;
MEMO: mlijobs ( -- lines )
URL" https://raw.githubusercontent.com/def-/nim-unsorted/master/mlijobs.txt"
- "mlijobs.txt" temp-file download-once-to ascii file-lines ;
+ "mlijobs.txt" temp-file download-once-as ascii file-lines ;
PRIVATE>
! Copyright (C) 2024 Doug Coleman.
! See https://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs assocs.extras calendar
-combinators http.download images.loader images.viewer io
-io.directories json json.http kernel math math.parser
-math.statistics namespaces sequences sequences.extras sets
-sorting splitting ui.gadgets.panes unicode urls ;
+calendar.parser combinators combinators.short-circuit
+combinators.smart formatting grouping http.download
+images.loader images.viewer io io.directories json json.http
+kernel math math.combinatorics math.order math.parser
+math.statistics namespaces random sequences sequences.deep
+sequences.extras sequences.generalizations sets sorting
+sorting.specification splitting splitting.extras strings
+ui.gadgets.panes unicode urls ;
IN: scryfall
CONSTANT: scryfall-oracle-json-path "resource:scryfall-oracle-json"
CONSTANT: scryfall-rulings-json-path "resource:scryfall-rulings-json"
CONSTANT: scryfall-images-path "resource:scryfall-images/"
+: ?write ( str/f -- ) [ write ] when* ;
: ?print ( str/f -- ) [ print ] [ nl ] if* ;
: download-scryfall-bulk-json ( -- json )
: load-scryfall-json ( type path -- uri )
[ find-scryfall-json "download_uri" of ] dip
- 120 hours download-outdated-to path>json ;
+ 30 days download-outdated-as path>json ;
MEMO: mtg-oracle-cards ( -- json )
"oracle_cards" scryfall-oracle-json-path load-scryfall-json ;
-: mtg-artwork-cards ( -- json )
+MEMO: mtg-artwork-cards ( -- json )
"unique_artwork" scryfall-artwork-json-path load-scryfall-json ;
MEMO: scryfall-default-cards-json ( -- json )
>url path>> "/" ?head drop "/" "-" replace
scryfall-images-path "" prepend-as ;
-: map-card-faces ( assoc quot -- seq )
- [ "card_faces" of ] dip map ; inline
+: filter-multi-card-faces ( assoc -- seq )
+ [ "card_faces" of length 1 > ] filter ; inline
+
+: multi-card-faces? ( assoc -- seq )
+ "card_faces" of length 1 > ; inline
: card>image-uris ( assoc -- seq )
[ "image_uris" of ]
: download-scryfall-image ( assoc -- path )
dup scryfall-local-image-path dup delete-when-zero-size
- [ download-once-to ] [ nip ] if ;
+ [ download-once-as ] [ nip ] if ;
: download-normal-images ( seq -- seq' )
ensure-scryfall-images-directory
ensure-scryfall-images-directory
small-images [ download-scryfall-image load-image ] map ;
-MEMO: all-cards-by-name ( -- assoc )
- mtg-oracle-cards
- [ "name" of ] collect-by
- [ first ] map-values ;
-
-: find-card-by-name ( seq name -- card ) '[ "name" of _ = ] filter ;
-: cards-by-name ( seq -- assoc ) [ "name" of ] collect-by ;
-: cards-by-cmc ( seq -- assoc ) [ "cmc" of ] collect-by ;
-: cards-by-color-identity ( seq -- assoc ) [ "color_identity" of ] collect-by-multi ;
-: red-color-identity ( seq -- seq' ) cards-by-color-identity "R" of ;
-: blue-color-identity ( seq -- seq' ) cards-by-color-identity "U" of ;
-: green-color-identity ( seq -- seq' ) cards-by-color-identity "G" of ;
-: black-color-identity ( seq -- seq' ) cards-by-color-identity "B" of ;
-: white-color-identity ( seq -- seq' ) cards-by-color-identity "W" of ;
+: collect-cards-by-name ( seq -- assoc ) [ "name" of ] collect-by ;
+: collect-cards-by-cmc ( seq -- assoc ) [ "cmc" of ] collect-by ;
+: collect-cards-by-mana-cost ( seq -- assoc ) [ "mana_cost" of ] collect-by ;
+: collect-cards-by-color-identity ( seq -- assoc ) [ "color_identity" of ] collect-by-multi ;
+: red-color-identity ( seq -- seq' ) collect-cards-by-color-identity "R" of ;
+: blue-color-identity ( seq -- seq' ) collect-cards-by-color-identity "U" of ;
+: green-color-identity ( seq -- seq' ) collect-cards-by-color-identity "G" of ;
+: black-color-identity ( seq -- seq' ) collect-cards-by-color-identity "B" of ;
+: white-color-identity ( seq -- seq' ) collect-cards-by-color-identity "W" of ;
: find-card-by-color-identity-intersect ( cards colors -- cards' )
- [ cards-by-color-identity ] dip [ of ] with map intersect-all ;
+ [ collect-cards-by-color-identity ] dip [ of ] with map intersect-all ;
: find-any-color-identities ( cards colors -- cards' )
- [ cards-by-color-identity ] dip [ of ] with map union-all ;
+ [ collect-cards-by-color-identity ] dip [ of ] with map union-all ;
+
-: color-identity-complement ( seq -- seq' ) [ { "W" "U" "B" "R" "G" } ] dip diff ;
+: color-identity-complement ( seq -- seq' ) [ { "B" "G" "R" "U" "W" } ] dip diff ;
+: split-mana-cost ( string -- seq )
+ f like [ " // " split1 swap ] { } loop>sequence nip ;
+
+: casting-cost-combinations ( seq -- seq' )
+ sequence-cartesian-product [ [ first ] sort-by ] map ;
+
+: parse-mana-cost ( string -- seq )
+ split-mana-cost
+ [
+ "{}" split harvest
+ [ "/" split ] map
+ casting-cost-combinations
+ ] map ;
: remove-color-identities ( cards colors -- cards' )
dupd find-any-color-identities diff ;
: filter-sultai-exact ( seq -- seq' ) { "B" "G" "U" } find-exact-color-identities ;
: filter-temur-exact ( seq -- seq' ) { "G" "U" "R" } find-exact-color-identities ;
-: filter-non-white ( seq -- seq' ) { "U" "B" "R" "G" } find-any-color-identities ;
-: filter-non-blue ( seq -- seq' ) { "W" "B" "R" "G" } find-any-color-identities ;
-: filter-non-black ( seq -- seq' ) { "W" "U" "R" "G" } find-any-color-identities ;
-: filter-non-red ( seq -- seq' ) { "W" "U" "B" "G" } find-any-color-identities ;
-: filter-non-green ( seq -- seq' ) { "W" "U" "B" "R" } find-any-color-identities ;
+: filter-non-white ( seq -- seq' ) { "U" "B" "R" "G" } find-only-color-identities ;
+: filter-non-blue ( seq -- seq' ) { "W" "B" "R" "G" } find-only-color-identities ;
+: filter-non-black ( seq -- seq' ) { "W" "U" "R" "G" } find-only-color-identities ;
+: filter-non-red ( seq -- seq' ) { "W" "U" "B" "G" } find-only-color-identities ;
+: filter-non-green ( seq -- seq' ) { "W" "U" "B" "R" } find-only-color-identities ;
: filter-legalities ( seq name -- seq' ) '[ "legalities" of _ of "legal" = ] filter ;
: filter-standard ( seq -- seq' ) "standard" filter-legalities ;
: filter-premodern ( seq -- seq' ) "premodern" filter-legalities ;
: filter-predh ( seq -- seq' ) "predh" filter-legalities ;
+: spanish-standard-cards ( -- seq )
+ scryfall-all-cards-json
+ filter-standard
+ [ "lang" of "es" = ] filter ;
+
: filter-red-any ( seq -- seq' ) [ "colors" of "R" swap member? ] filter ;
: filter-red-only ( seq -- seq' ) [ "colors" of { "R" } = ] filter ;
: filter-blue-any ( seq -- seq' ) [ "colors" of "U" swap member? ] filter ;
] bi@ 2array sift ;
: type-line-of ( assoc -- string ) "type_line" of parse-type-line ;
-: any-type? ( seq name -- ? ) [ type-line-of ] dip '[ first _ member-of? ] any? ;
-: any-subtype? ( seq name -- ? ) [ type-line-of ] dip '[ second _ member-of? ] any? ;
-: filter-creature-type ( seq type -- seq' ) '[ _ any-subtype? ] filter ;
+: types-of ( assoc -- seq ) type-line-of [ first ] map concat ;
+: subtypes-of ( assoc -- seq ) type-line-of [ second ] map concat ;
+
+! cards can have several type lines (one for each face)
+: any-type? ( json name -- ? )
+ [ type-line-of ] dip >lower '[ first [ >lower ] map _ member-of? ] any? ;
+: any-subtype? ( json name -- ? )
+ [ type-line-of ] dip >lower '[ second [ >lower ] map _ member-of? ] any? ;
+: type-intersects? ( json types -- ? )
+ [ type-line-of ] dip [ >lower ] map '[ first [ >lower ] map _ intersects? ] any? ;
+: subtype-intersects? ( json subtypes -- ? )
+ [ type-line-of ] dip [ >lower ] map '[ second [ >lower ] map _ intersects? ] any? ;
+
+: filter-type ( seq text -- seq' ) '[ _ any-type? ] filter ;
+: filter-subtype ( seq text -- seq' ) '[ _ any-subtype? ] filter ;
+: filter-type-intersects ( seq text -- seq' ) '[ _ type-intersects? ] filter ;
+: filter-subtype-intersects ( seq text -- seq' ) '[ _ subtype-intersects? ] filter ;
+
+: filter-basic ( seq -- seq' ) [ "Basic" any-type? ] filter ;
+: filter-basic-subtype ( seq text -- seq' ) [ filter-basic ] dip filter-subtype ;
: filter-land ( seq -- seq' ) [ "Land" any-type? ] filter ;
+: filter-land-subtype ( seq text -- seq' ) [ filter-land ] dip filter-subtype ;
: filter-creature ( seq -- seq' ) [ "Creature" any-type? ] filter ;
+: filter-creature-subtype ( seq text -- seq' ) [ filter-creature ] dip filter-subtype ;
+: filter-emblem ( seq -- seq' ) [ "Emblem" any-type? ] filter ;
+: filter-emblem-subtype ( seq text -- seq' ) [ filter-emblem ] dip filter-subtype ;
: filter-enchantment ( seq -- seq' ) [ "Enchantment" any-type? ] filter ;
+: filter-enchantment-subtype ( seq text -- seq' ) [ filter-enchantment ] dip filter-subtype ;
+: filter-saga ( seq -- seq' ) "saga" filter-enchantment-subtype ;
: filter-instant ( seq -- seq' ) [ "Instant" any-type? ] filter ;
+: filter-instant-subtype ( seq text -- seq' ) [ filter-instant ] dip filter-subtype ;
: filter-sorcery ( seq -- seq' ) [ "Sorcery" any-type? ] filter ;
+: filter-sorcery-subtype ( seq text -- seq' ) [ filter-sorcery ] dip filter-subtype ;
: filter-planeswalker ( seq -- seq' ) [ "Planeswalker" any-type? ] filter ;
+: filter-planeswalker-subtype ( seq text -- seq' ) [ filter-planeswalker ] dip filter-subtype ;
+: filter-legendary ( seq -- seq' ) [ "Legendary" any-type? ] filter ;
+: filter-legendary-subtype ( seq text -- seq' ) [ filter-legendary ] dip filter-subtype ;
+: filter-battle ( seq -- seq' ) [ "Battle" any-type? ] filter ;
+: filter-battle-subtype ( seq text -- seq' ) [ filter-battle ] dip filter-subtype ;
+: filter-artifact ( seq -- seq' ) [ "Artifact" any-type? ] filter ;
+: filter-artifact-subtype ( seq text -- seq' ) [ filter-artifact ] dip filter-subtype ;
+
+: reject-basic ( seq -- seq' ) [ "Basic" any-type? ] reject ;
+: reject-land ( seq -- seq' ) [ "Land" any-type? ] reject ;
+: reject-creature ( seq -- seq' ) [ "Creature" any-type? ] reject ;
+: reject-emblem ( seq -- seq' ) [ "Emblem" any-type? ] reject ;
+: reject-enchantment ( seq -- seq' ) [ "Enchantment" any-type? ] reject ;
+: reject-instant ( seq -- seq' ) [ "Instant" any-type? ] reject ;
+: reject-sorcery ( seq -- seq' ) [ "Sorcery" any-type? ] reject ;
+: reject-planeswalker ( seq -- seq' ) [ "Planeswalker" any-type? ] reject ;
+: reject-legendary ( seq -- seq' ) [ "Legendary" any-type? ] reject ;
+: reject-battle ( seq -- seq' ) [ "Battle" any-type? ] reject ;
+: reject-artifact ( seq -- seq' ) [ "Artifact" any-type? ] reject ;
+
+: filter-mounts ( seq -- seq' ) "mount" filter-subtype ;
+: filter-vehicles ( seq -- seq' ) "vehicle" filter-subtype ;
+: filter-adventure ( seq -- seq' ) "adventure" filter-subtype ;
+: filter-aura ( seq -- seq' ) "aura" filter-subtype ;
+: filter-aura-subtype ( seq text -- seq' ) [ filter-aura ] dip filter-subtype ;
+: filter-equipment ( seq -- seq' ) "Equipment" filter-subtype ;
+: filter-equipment-subtype ( seq text -- seq' ) [ filter-equipment ] dip filter-subtype ;
: filter-common ( seq -- seq' ) '[ "rarity" of "common" = ] filter ;
: filter-uncommon ( seq -- seq' ) '[ "rarity" of "uncommon" = ] filter ;
: filter-mythic ( seq -- seq' ) '[ "rarity" of "mythic" = ] filter ;
: standard-cards ( -- seq' ) mtg-oracle-cards filter-standard ;
+: historic-cards ( -- seq' ) mtg-oracle-cards filter-historic ;
+: modern-cards ( -- seq' ) mtg-oracle-cards filter-modern ;
: sort-by-cmc ( assoc -- assoc' ) [ "cmc" of ] sort-by ;
: histogram-by-cmc ( assoc -- assoc' ) [ "cmc" of ] histogram-by sort-keys ;
+: filter-by-itext-prop ( seq string prop -- seq' )
+ swap >lower '[ _ of >lower _ subseq-of? ] filter ;
+
+: filter-by-text-prop ( seq string prop -- seq' )
+ swap '[ _ of _ subseq-of? ] filter ;
+
+: map-card-faces ( json quot -- seq )
+ '[ [ "card_faces" of ] [ ] [ 1array ] ?if _ map ] map ; inline
+
+: all-card-types ( seq -- seq' )
+ [ "type_line" of ] map-card-faces
+ concat members sort ;
+
+: card>faces ( assoc -- seq )
+ [ "card_faces" of ] [ ] [ 1array ] ?if ;
+
+: filter-card-faces-sub-card ( seq quot -- seq )
+ [ [ card>faces ] map concat ] dip filter ; inline
+
+: filter-card-faces-sub-card-prop ( seq string prop -- seq' )
+ swap '[ _ of _ subseq-of? ] filter-card-faces-sub-card ;
+
+: filter-card-faces-sub-card-iprop ( seq string prop -- seq' )
+ swap >lower '[ _ of >lower _ subseq-of? ] filter-card-faces-sub-card ;
+
+: filter-card-faces-main-card ( seq quot -- seq )
+ dup '[ [ "card_faces" of ] [ _ any? ] _ ?if ] filter ; inline
+
+: filter-card-faces-main-card-prop ( seq string prop -- seq' )
+ swap '[ _ of _ subseq-of? ] filter-card-faces-main-card ;
+
+: filter-card-faces-main-card-iprop ( seq string prop -- seq' )
+ swap >lower '[ _ of >lower _ subseq-of? ] filter-card-faces-main-card ;
+
+: filter-by-flavor-text ( seq string -- seq' )
+ "flavor_text" filter-card-faces-main-card-prop ;
+
+: filter-by-flavor-itext ( seq string -- seq' )
+ "flavor_text" filter-card-faces-main-card-iprop ;
+
: filter-by-oracle-text ( seq string -- seq' )
- '[ "oracle_text" of _ subseq-of? ] filter ;
+ "oracle_text" filter-card-faces-main-card-prop ;
: filter-by-oracle-itext ( seq string -- seq' )
- >lower
- '[ "oracle_text" of >lower _ subseq-of? ] filter ;
-
-: filter-flash ( seq -- seq' ) "Flash" filter-by-oracle-text ;
+ "oracle_text" filter-card-faces-main-card-iprop ;
+
+: filter-by-name-text ( seq string -- seq' ) "name" filter-by-text-prop ;
+: filter-by-name-itext ( seq string -- seq' ) "name" filter-by-itext-prop ;
+
+: filter-create-treasure ( seq -- seq' ) "create a treasure token" filter-by-oracle-itext ;
+: filter-treasure-token ( seq -- seq' ) "treasure token" filter-by-oracle-itext ;
+: filter-create-blood-token ( seq -- seq' ) "create a blood token" filter-by-oracle-itext ;
+: filter-blood-token ( seq -- seq' ) "blood token" filter-by-oracle-itext ;
+: filter-create-map-token ( seq -- seq' ) "create a map token" filter-by-oracle-itext ;
+: filter-map-token ( seq -- seq' ) "map token" filter-by-oracle-itext ;
+
+: filter-affinity ( seq -- seq' ) "affinity" filter-by-oracle-itext ;
+: filter-backup ( seq -- seq' ) "backup" filter-by-oracle-itext ;
+: filter-blitz ( seq -- seq' ) "blitz" filter-by-oracle-itext ;
+: filter-compleated ( seq -- seq' ) "compleated" filter-by-oracle-itext ;
+: filter-corrupted ( seq -- seq' ) "corrupted" filter-by-oracle-itext ;
+: filter-counter ( seq -- seq' ) "counter" filter-by-oracle-itext ;
+: filter-crew ( seq -- seq' ) "crew" filter-by-oracle-itext ;
+: filter-cycling ( seq -- seq' ) "cycling" filter-by-oracle-itext ;
+: filter-deathtouch ( seq -- seq' ) "deathtouch" filter-by-oracle-itext ;
+: filter-defender ( seq -- seq' ) "defender" filter-by-oracle-itext ;
+: filter-descend ( seq -- seq' ) "descend" filter-by-oracle-itext ;
+: filter-destroy-target ( seq -- seq' ) "destroy target" filter-by-oracle-itext ;
+: filter-discover ( seq -- seq' ) "discover" filter-by-oracle-itext ;
+: filter-disguise ( seq -- seq' ) "disguise" filter-by-oracle-itext ;
+: filter-domain ( seq -- seq' ) "domain" filter-by-oracle-itext ;
+: filter-double-strike ( seq -- seq' ) "double strike" filter-by-oracle-itext ;
+: filter-equip ( seq -- seq' ) "equip" filter-by-oracle-itext ;
+: filter-equip-n ( seq -- seq' ) "equip {" filter-by-oracle-itext ;
+: filter-exile ( seq -- seq' ) "exile" filter-by-oracle-itext ;
+: filter-fights ( seq -- seq' ) "fights" filter-by-oracle-itext ;
+: filter-first-strike ( seq -- seq' ) "first strike" filter-by-oracle-itext ;
+: filter-flash ( seq -- seq' ) "flash" filter-by-oracle-itext ;
+: filter-flying ( seq -- seq' ) "flying" filter-by-oracle-itext ;
+: filter-for-mirrodin ( seq -- seq' ) "for mirrodin!" filter-by-oracle-itext ;
+: filter-graveyard ( seq -- seq' ) "graveyard" filter-by-oracle-itext ;
+: filter-haste ( seq -- seq' ) "haste" filter-by-oracle-itext ;
+: filter-hideaway ( seq -- seq' ) "hideaway" filter-by-oracle-itext ;
+: filter-hexproof ( seq -- seq' ) "hexproof" filter-by-oracle-itext ;
+: filter-indestructible ( seq -- seq' ) "indestructible" filter-by-oracle-itext ;
+: filter-investigate ( seq -- seq' ) "investigate" filter-by-oracle-itext ;
+: filter-lifelink ( seq -- seq' ) "lifelink" filter-by-oracle-itext ;
+: filter-madness ( seq -- seq' ) "madness" filter-by-oracle-itext ;
+: filter-menace ( seq -- seq' ) "menace" filter-by-oracle-itext ;
+: filter-mill ( seq -- seq' ) "mill" filter-by-oracle-itext ;
+: filter-ninjutsu ( seq -- seq' ) "ninjutsu" filter-by-oracle-itext ;
+: filter-proliferate ( seq -- seq' ) "proliferate" filter-by-oracle-itext ;
+: filter-protection ( seq -- seq' ) "protection" filter-by-oracle-itext ;
+: filter-prowess ( seq -- seq' ) "prowess" filter-by-oracle-itext ;
+: filter-reach ( seq -- seq' ) "reach" filter-by-oracle-itext ;
+: filter-read-ahead ( seq -- seq' ) "read ahead" filter-by-oracle-itext ;
+: filter-reconfigure ( seq -- seq' ) "reconfigure" filter-by-oracle-itext ;
+: filter-role ( seq -- seq' ) "role" filter-by-oracle-itext ;
+: filter-sacrifice ( seq -- seq' ) "sacrifice" filter-by-oracle-itext ;
+: filter-scry ( seq -- seq' ) "scry" filter-by-oracle-itext ;
+: filter-shroud ( seq -- seq' ) "shroud" filter-by-oracle-itext ;
+: filter-token ( seq -- seq' ) "token" filter-by-oracle-itext ;
+: filter-toxic ( seq -- seq' ) "toxic" filter-by-oracle-itext ;
+: filter-trample ( seq -- seq' ) "trample" filter-by-oracle-itext ;
+: filter-vehicle ( seq -- seq' ) "vehicle" filter-by-oracle-itext ;
+: filter-vigilance ( seq -- seq' ) "vigilance" filter-by-oracle-itext ;
+: filter-ward ( seq -- seq' ) "ward" filter-by-oracle-itext ;
+
+: filter-day ( seq -- seq' ) "day" filter-by-oracle-itext ;
+: filter-night ( seq -- seq' ) "night" filter-by-oracle-itext ;
+: filter-daybound ( seq -- seq' ) "daybound" filter-by-oracle-itext ;
+: filter-nightbound ( seq -- seq' ) "nightbound" filter-by-oracle-itext ;
+
+: filter-cave ( seq -- seq' ) "cave" filter-land-subtype ;
+: filter-sphere ( seq -- seq' ) "sphere" filter-land-subtype ;
+
+: filter-mount ( seq -- seq' ) "mount" filter-by-oracle-itext ;
+: filter-outlaw ( seq -- seq' )
+ { "Assassin" "Mercenary" "Pirate" "Rogue" "Warlock" } filter-subtype-intersects ;
+: filter-plot ( seq -- seq' ) "plot" filter-by-oracle-itext ;
+: filter-saddle ( seq -- seq' ) "saddle" filter-by-oracle-itext ;
+: filter-spree ( seq -- seq' ) "spree" filter-by-oracle-itext ;
+
+: power>n ( string -- n/f )
+ [ "*" = ] [ drop -1 ] [ string>number ] ?if ;
+
+: mtg< ( string/n/f n -- seq' ) [ power>n ] dip { [ and ] [ < ] } 2&& ;
+: mtg<= ( string/n/f n -- seq' ) [ power>n ] dip { [ and ] [ <= ] } 2&& ;
+: mtg> ( string/n/f n -- seq' ) [ power>n ] dip { [ and ] [ > ] } 2&& ;
+: mtg>= ( string/n/f n -- seq' ) [ power>n ] dip { [ and ] [ >= ] } 2&& ;
+: mtg= ( string/n/f n -- seq' ) [ power>n ] dip { [ and ] [ = ] } 2&& ;
+
+: filter-power=* ( seq -- seq' ) [ "power" of "*" = ] filter-card-faces-main-card ;
+: filter-toughness=* ( seq -- seq' ) [ "toughness" of "*" = ] filter-card-faces-main-card ;
+
+: filter-power= ( seq n -- seq' ) '[ "power" of _ mtg= ] filter-card-faces-main-card ;
+: filter-power< ( seq n -- seq' ) '[ "power" of _ mtg< ] filter-card-faces-main-card ;
+: filter-power> ( seq n -- seq' ) '[ "power" of _ mtg> ] filter-card-faces-main-card ;
+: filter-power<= ( seq n -- seq' ) '[ "power" of _ mtg<= ] filter-card-faces-main-card ;
+: filter-power>= ( seq n -- seq' ) '[ "power" of _ mtg>= ] filter-card-faces-main-card ;
+
+: filter-toughness= ( seq n -- seq' ) '[ "toughness" of _ mtg= ] filter-card-faces-main-card ;
+: filter-toughness< ( seq n -- seq' ) '[ "toughness" of _ mtg< ] filter-card-faces-main-card ;
+: filter-toughness> ( seq n -- seq' ) '[ "toughness" of _ mtg> ] filter-card-faces-main-card ;
+: filter-toughness<= ( seq n -- seq' ) '[ "toughness" of _ mtg<= ] filter-card-faces-main-card ;
+: filter-toughness>= ( seq n -- seq' ) '[ "toughness" of _ mtg>= ] filter-card-faces-main-card ;
: map-props ( seq props -- seq' ) '[ _ intersect-keys ] map ;
: images. ( seq -- ) [ <image-gadget> ] map gadgets. ;
+: normal-images-grid. ( seq -- )
+ 4 group
+ [ [ card>image-uris ] map concat download-normal-images images. ] each ;
+
: small-card. ( assoc -- )
card>image-uris download-small-images images. ;
card>image-uris download-normal-images images. ;
: normal-cards. ( seq -- ) [ normal-card. ] each ;
+: standard-cards. ( seq -- ) filter-standard normal-cards. ;
+: historic-cards. ( seq -- ) filter-historic normal-cards. ;
+: modern-cards. ( seq -- ) filter-modern normal-cards. ;
-: card-face-summary. ( seq -- )
+! rarity is only on main card `json` (if there are two faces)
+: card-face-summary. ( json seq -- )
{
- [ "name" of write bl ]
- [ "mana_cost" of ?print ]
- [ "type_line" of ?print ]
- [ [ "power" of ] [ "toughness" of ] bi 2dup and [ "/" glue print ] [ 2drop ] if ]
- [ "oracle_text" of ?print ]
- } cleave nl ;
+ [ nip "name" of write bl ]
+ [ nip "mana_cost" of ?print ]
+ [ nip "type_line" of ?write ]
+ [ drop bl "--" write bl "rarity" of >title ?print ]
+ [ nip [ "power" of ] [ "toughness" of ] bi 2dup and [ "/" glue print ] [ 2drop ] if ]
+ [ nip "oracle_text" of ?print ]
+ } 2cleave nl ;
-: card-face-summaries. ( seq -- ) [ card-face-summary. ] each ;
+: card-face-summaries. ( json seq -- ) [ card-face-summary. ] with each ;
: card-summary. ( assoc -- )
- {
- [
- [ "card_faces" of ]
- [ [ length number>string "Card Faces: " prepend print ] [ card-face-summaries. ] bi ]
- [ card-face-summary. ] ?if
- ]
- } cleave nl nl nl ;
+ dup
+ [ "card_faces" of ]
+ [ [ length number>string "Card Faces: " prepend print ] [ card-face-summaries. ] bi ]
+ [ card-face-summary. ] ?if nl nl nl ;
: card-summaries. ( seq -- ) [ card-summary. ] each ;
: standard-dragons. ( -- )
standard-cards
- "Dragon" filter-creature-type
+ "Dragon" filter-creature-subtype
sort-by-cmc
normal-cards. ;
: collect-by-cmc ( seq -- seq' ) [ "cmc" of ] collect-by ;
+
+MEMO: mtg-sets-by-abbrev ( -- assoc )
+ scryfall-all-cards-json
+ [ [ "set" of ] [ "set_name" of ] bi ] H{ } map>assoc ;
+
+MEMO: mtg-sets-by-name ( -- assoc )
+ scryfall-all-cards-json
+ [ [ "set_name" of ] [ "set" of ] bi ] H{ } map>assoc ;
+
+: filter-mtg-set ( seq abbrev -- seq ) '[ "set" of _ = ] filter ;
+
+: unique-set-names ( seq -- seq' ) [ "set_name" of ] map members ;
+: unique-set-abbrevs ( seq -- seq' ) [ "set" of ] map members ;
+
+: standard-set-names ( -- seq ) standard-cards unique-set-names ;
+: standard-set-abbrevs ( -- seq ) standard-cards unique-set-abbrevs ;
+
+: sets-by-release-date ( -- assoc )
+ scryfall-all-cards-json
+ [ [ "set_name" of ] [ "released_at" of ] bi ] H{ } map>assoc
+ sort-values ;
+
+: collect-cards-by-set-abbrev ( seq -- assoc ) [ "set" of ] collect-by ;
+: collect-cards-by-set-name ( seq -- assoc ) [ "set_name" of ] collect-by ;
+: cards-by-set-abbrev ( -- assoc ) mtg-oracle-cards collect-cards-by-set-abbrev ;
+: cards-by-set-name ( -- assoc ) mtg-oracle-cards collect-cards-by-set-name ;
+
+: filter-set ( seq abbrev -- seq ) >lower '[ "set" of _ = ] filter ;
+: filter-set-intersect ( seq abbrevs -- seq ) [ >lower ] map '[ "set" of _ member? ] filter ;
+
+: mid-cards ( -- seq ) mtg-oracle-cards "mid" filter-set ;
+: vow-cards ( -- seq ) mtg-oracle-cards "vow" filter-set ;
+: neo-cards ( -- seq ) mtg-oracle-cards "neo" filter-set ;
+: snc-cards ( -- seq ) mtg-oracle-cards "snc" filter-set ;
+: dmu-cards ( -- seq ) mtg-oracle-cards "dmu" filter-set ;
+: bro-cards ( -- seq ) mtg-oracle-cards "bro" filter-set ;
+: one-cards ( -- seq ) mtg-oracle-cards "one" filter-set ;
+: mom-cards ( -- seq ) mtg-oracle-cards "mom" filter-set ;
+: mat-cards ( -- seq ) mtg-oracle-cards "mat" filter-set ;
+: woe-cards ( -- seq ) mtg-oracle-cards "woe" filter-set ;
+: woe-cards-bonus ( -- seq ) mtg-oracle-cards "wot" filter-set ;
+: woe-cards-all ( -- seq ) mtg-oracle-cards { "woe" "wot" } filter-set-intersect ;
+: lci-cards ( -- seq ) mtg-oracle-cards "lci" filter-set ;
+: mkm-cards ( -- seq ) mtg-oracle-cards "mkm" filter-set ;
+: otj-cards ( -- seq ) mtg-oracle-cards "otj" filter-set ;
+: otj-cards-bonus ( -- seq ) mtg-oracle-cards "big" filter-set ;
+: otj-cards-all ( -- seq ) mtg-oracle-cards { "otj" "big" } filter-set-intersect ;
+
+: sort-by-colors ( seq -- seq' )
+ {
+ { [ "color_identity" of length ] <=> }
+ { [ "color_identity" of sort ?first "A" or ] <=> }
+ { [ "cmc" of ] <=> }
+ { [ "mana_cost" of length ] <=> }
+ { [ "creature" any-type? -1 1 ? ] <=> }
+ { [ "power" of -1 1 ? ] <=> }
+ { [ "toughness" of -1 1 ? ] <=> }
+ { [ "name" of ] <=> }
+ } sort-with-spec ;
+
+: cards-by-color. ( seq -- ) sort-by-colors normal-cards. ;
+
+CONSTANT: rarity-to-number H{
+ { "common" 0 }
+ { "uncommon" 1 }
+ { "rare" 2 }
+ { "mythic" 3 }
+}
+
+: sort-by-rarity ( seq -- seq' )
+ {
+ { [ "rarity" of rarity-to-number at ] <=> }
+ { [ "color_identity" of length ] <=> }
+ { [ "color_identity" of sort ?first "A" or ] <=> }
+ { [ "cmc" of ] <=> }
+ { [ "mana_cost" of length ] <=> }
+ { [ "name" of ] <=> }
+ } sort-with-spec ;
+
+: cards-by-rarity. ( seq -- ) sort-by-rarity normal-cards. ;
+
+: sort-by-release ( seq -- seq' )
+ {
+ { [ "released_at" of ymd>timestamp ] <=> }
+ { [ "set" of ] <=> }
+ } sort-with-spec ;
+
+: cards-by-release. ( seq -- ) sort-by-release normal-cards. ;
+
+: sort-by-set-colors ( seq -- seq' )
+ {
+ { [ "released_at" of ymd>timestamp ] <=> }
+ { [ "set" of ] <=> }
+ { [ "color_identity" of length ] <=> }
+ { [ "color_identity" of sort ?first "A" or ] <=> }
+ { [ "cmc" of ] <=> }
+ { [ "mana_cost" of length ] <=> }
+ { [ "creature" any-type? -1 1 ? ] <=> }
+ { [ "power" of -1 1 ? ] <=> }
+ { [ "toughness" of -1 1 ? ] <=> }
+ { [ "name" of ] <=> }
+ } sort-with-spec ;
+
+: cards-by-set-colors. ( seq -- ) sort-by-set-colors normal-cards. ;
+
+: cards-by-name ( name -- seq' ) [ mtg-oracle-cards ] dip filter-by-name-itext sort-by-release ;
+: card-by-name ( name -- card )
+ [ mtg-oracle-cards ] dip >lower
+ [ '[ "name" of >lower _ = ] filter ?first ]
+ [ '[ "name" of >lower _ head? ] filter ?first ] 2bi or ;
+: cards-by-name. ( name -- ) cards-by-name normal-cards. ;
+: standard-cards-by-name. ( name -- ) cards-by-name standard-cards. ;
+: historic-cards-by-name. ( name -- ) cards-by-name historic-cards. ;
+: modern-cards-by-name. ( name -- ) cards-by-name modern-cards. ;
+
+: paren-set? ( string -- ? )
+ { [ "(" head? ] [ ")" tail? ] [ length 5 = ] } 1&& ;
+
+: remove-set-and-num ( string -- string' )
+ " " split
+ dup 2 ?lastn
+ [ paren-set? ] [ string>number ] bi* and [
+ 2 head*
+ ] when " " join ;
+
+: assoc>cards ( assoc -- seq )
+ [ card-by-name <array> ] { } assoc>map concat ;
+
+: parse-mtga-card-line ( string -- array )
+ [ blank? ] trim
+ " " split1
+ [ string>number ]
+ [ remove-set-and-num card-by-name ] bi* <array> ;
+
+: parse-mtga-cards ( strings -- seq )
+ [ parse-mtga-card-line ] map concat ;
+
+TUPLE: mtga-deck name deck sideboard section ;
+
+: <mtga-deck> ( -- mtga-deck )
+ mtga-deck new "Deck" >>section ;
+
+: <moxfield-deck> ( name deck sideboard -- deck )
+ mtga-deck new
+ swap >>sideboard
+ swap >>deck
+ swap >>name ;
+
+ERROR: unknown-mtga-deck-section section ;
+: parse-mtga-deck ( string -- mtga-deck )
+ string-lines [ [ blank? ] trim ] map harvest
+ { "About" "Deck" "Sideboard" } split*
+ [ <mtga-deck> ] dip
+ [
+ dup { "About" "Deck" "Sideboard" } intersects? [
+ first >>section
+ ] [
+ over section>> {
+ { "About" [ first "Name " ?head drop [ blank? ] trim >>name ] }
+ { "Deck" [ parse-mtga-cards >>deck ] }
+ { "Sideboard" [ parse-mtga-cards >>sideboard ] }
+ [
+ unknown-mtga-deck-section
+ ]
+ } case
+ ] if
+ ] each ;
+
+: sort-by-deck-order ( seq -- seq' )
+ [ "Land" any-type? not ] partition
+ [ sort-by-set-colors ] bi@ append ;
+
+: cards. ( seq -- ) sort-by-deck-order normal-cards. ;
+
+: sideboard. ( seq -- )
+ sideboard>> [ "Sideboard" print sort-by-deck-order normal-cards. ] when* ;
+
+GENERIC: deck. ( obj -- )
+
+M: string deck. parse-mtga-deck deck. ;
+
+M: mtga-deck deck. [ name>> ?print ] [ deck>> cards. ] bi ;
+
+M: sequence deck. cards. ;
+
+GENERIC: deck-and-sideboard. ( mtga-deck -- )
+
+M: string deck-and-sideboard. parse-mtga-deck deck-and-sideboard. ;
+
+M: mtga-deck deck-and-sideboard. [ deck. ] [ sideboard. ] bi ;
+
+M: sequence deck-and-sideboard. deck. ;
+
+: filter-mtg-cheat-sheet ( seq -- seq' )
+ [
+ {
+ [ filter-instant ]
+ [ filter-flash ]
+ [ filter-cycling ]
+ [ filter-disguise ]
+ [ filter-madness ]
+ } cleave
+ ] { } append-outputs-as sort-by-colors ;
+
+: mtg-cheat-sheet. ( seq -- ) filter-mtg-cheat-sheet normal-cards. ;
+: mtg-cheat-sheet-text. ( seq -- ) filter-mtg-cheat-sheet card-summaries. ;
+
+MEMO: get-moxfield-user ( username -- json )
+ "https://api2.moxfield.com/v2/users/%s/decks?pageNumber=1&pageSize=100" sprintf http-get-json nip ;
+
+MEMO: get-moxfield-deck ( public-id -- json )
+ "https://api2.moxfield.com/v3/decks/all/" prepend http-get-json nip ;
+
+: moxfield-board>cards ( board -- seq )
+ "cards" of values [
+ [ "quantity" of ] [ "card" of "name" of ] bi 2array
+ ] map assoc>cards ;
+
+: json>moxfield-deck ( json -- mtga-deck )
+ [ "name" of ]
+ [
+ "boards" of
+ [ "mainboard" of moxfield-board>cards ]
+ [ "sideboard" of moxfield-board>cards ] bi
+ ] bi
+ <moxfield-deck> ;
+
+: moxfield-decks-for-username ( username -- json )
+ get-moxfield-user "data" of ;
+
+: moxfield-random-deck-for-username ( username -- json )
+ moxfield-decks-for-username
+ random "publicId" of get-moxfield-deck
+ json>moxfield-deck ;
+
+: moxfield-latest-deck-for-username ( username -- json )
+ get-moxfield-user
+ "data" of ?first "publicId" of get-moxfield-deck
+ json>moxfield-deck ;
+
+: moxfield-latest-deck-for-username. ( username -- )
+ moxfield-latest-deck-for-username deck. ;
+
+: moxfield-latest-deck-and-sideboard-for-username. ( username -- )
+ moxfield-latest-deck-for-username deck-and-sideboard. ;
{ "34_01_" } [ 2 0 3 "01_34_" [ exchange-subseq ] keep ] unit-test
{ "cdebaf" } [ 3 0 2 "abcdef" [ exchange-subseq ] keep ] unit-test
+
+{ { } } [ { } sequence-cartesian-product ] unit-test
+{ { } } [ { { } } sequence-cartesian-product ] unit-test
+{ { } } [ { { 1 2 } { } } sequence-cartesian-product ] unit-test
+{ { { 1 } { 2 } } } [ { { 1 2 } } sequence-cartesian-product ] unit-test
+
+{
+ {
+ { 1 3 5 6 { 9 } }
+ { 1 3 5 7 { 9 } }
+ { 1 4 5 6 { 9 } }
+ { 1 4 5 7 { 9 } }
+ { 2 3 5 6 { 9 } }
+ { 2 3 5 7 { 9 } }
+ { 2 4 5 6 { 9 } }
+ { 2 4 5 7 { 9 } }
+ }
+} [
+ { { 1 2 } { 3 4 } { 5 } { 6 7 } { { 9 } } } sequence-cartesian-product
+] unit-test
[ find drop ] keepd swap
[ cut ] [ f over like ] if* ; inline
+: ?cut ( seq n -- before after ) [ index-or-length head ] [ index-or-length tail ] 2bi ;
+
: nth* ( n seq -- elt )
[ length 1 - swap - ] [ nth ] bi ; inline
2dup _ exchange-unsafe
[ 1 - ] [ 1 + ] [ 1 + ] tri*
] [ pick 0 > ] swap while 3drop ;
+
+: sequence-cartesian-product ( seqs -- seqs' )
+ dup length 1 <= [
+ [ [ 1array ] map ] map concat
+ ] [
+ 2 cut [ first2 cartesian-product concat ] dip swap
+ [ [ suffix ] cartesian-map concat ] reduce
+ ] if ;
MEMO: default-dictionary ( -- counts )
URL" https://norvig.com/big.txt" "big.txt" temp-file
- download-once-to load-dictionary ;
+ download-once-as load-dictionary ;
: (correct) ( word dictionary -- word/f )
corrections ?first ;
: download-tldr ( -- )
"tldr" cache-file dup make-directory [
- tldr-zip "tldr.zip" download-once-to drop
+ tldr-zip "tldr.zip" download-once-as drop
{ "unzip" "tldr.zip" } try-process
] with-directory ;
"At Tracy's." >>body
send-email
----
-USING: io.files io.encodings.utf8 kernel
-sequences splitting ;
+USING: io.encodings.utf8 io.files sequences
+splitting ;
"table.txt" utf8 [
- file-lines
[ "|" split ] map flip [ "|" join ] map
-] 2keep
-set-file-lines
+] change-file-lines
----
USING: sequences xml.syntax xml.writer ;
video-formats [ "type" of "video/mp4" head? ] find nip
video-download-url
] [
- "title" of sanitize ".mp4" append download-once-to
+ "title" of sanitize ".mp4" append download-once-as
] tri ;
[ download-boot-checksum-branch ] if ;
: download-boot-image ( path url -- )
- '[ _ my-arch-name "boot.%s.image" sprintf download-to drop ] with-directory ;
+ '[ _ my-arch-name "boot.%s.image" sprintf download-as drop ] with-directory ;
: arch-git-boot-image-path ( arch git-id -- str )
"https://downloads.factorcode.org/images/build/boot.%s.image.%s" sprintf ;