gitweb.factorcode.org Git - factor.git/commitdiff
Rename io.launcher.unix.parser to simple-tokenizer since ftp.server uses it
author: Slava Pestov <slava@slava-pestovs-macbook-pro.local>
Sun, 21 Feb 2010 04:00:48 +0000 (17:00 +1300)
committer: Slava Pestov <slava@slava-pestovs-macbook-pro.local>
Sun, 21 Feb 2010 04:48:04 +0000 (17:48 +1300)
basis/ftp/server/server.factor
basis/io/launcher/unix/parser/parser-tests.factor [deleted file]
basis/io/launcher/unix/parser/parser.factor [deleted file]
basis/io/launcher/unix/parser/platforms.txt [deleted file]
basis/io/launcher/unix/unix.factor
basis/simple-tokenizer/authors.txt [new file with mode: 0644]
basis/simple-tokenizer/simple-tokenizer-docs.factor [new file with mode: 0644]
basis/simple-tokenizer/simple-tokenizer-tests.factor [new file with mode: 0644]
basis/simple-tokenizer/simple-tokenizer.factor [new file with mode: 0644]
basis/simple-tokenizer/tags.txt [new file with mode: 0644]

index 1077aebf079f954bcf61cc794b9c6f61db6bc683..f1bc8adef996aff83726defec4233bec8950e1d0 100644 (file)
@@ -1,15 +1,14 @@
 ! Copyright (C) 2008 Doug Coleman.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: accessors assocs byte-arrays calendar classes
-combinators combinators.short-circuit concurrency.promises
-continuations destructors ftp io io.backend io.directories
-io.encodings io.encodings.binary
-tools.files io.encodings.utf8 io.files io.files.info
-io.pathnames io.launcher.unix.parser io.servers.connection
-io.sockets io.streams.duplex io.streams.string io.timeouts
-kernel make math math.bitwise math.parser namespaces sequences
-splitting threads unicode.case logging calendar.format
-strings io.files.links io.files.types io.encodings.8-bit.latin1 ;
+USING: accessors assocs byte-arrays calendar classes combinators
+combinators.short-circuit concurrency.promises continuations
+destructors ftp io io.backend io.directories io.encodings
+io.encodings.binary tools.files io.encodings.utf8 io.files
+io.files.info io.pathnames io.servers.connection io.sockets
+io.streams.duplex io.streams.string io.timeouts kernel make math
+math.bitwise math.parser namespaces sequences splitting threads
+unicode.case logging calendar.format strings io.files.links
+io.files.types io.encodings.8-bit.latin1 simple-tokenizer ;
 IN: ftp.server
 
 SYMBOL: server
@@ -24,7 +23,7 @@ TUPLE: ftp-command raw tokenized ;
     dup \ <ftp-command> DEBUG log-message
     ftp-command new
         over >>raw
-        swap tokenize-command >>tokenized ;
+        swap tokenize >>tokenized ;
 
 TUPLE: ftp-get path ;
 : <ftp-get> ( path -- obj )
diff --git a/basis/io/launcher/unix/parser/parser-tests.factor b/basis/io/launcher/unix/parser/parser-tests.factor
deleted file mode 100644 (file)
index 90504cc..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-IN: io.launcher.unix.parser.tests
-USING: io.launcher.unix.parser tools.test ;
-
-[ "" tokenize-command ] must-fail
-[ "   " tokenize-command ] must-fail
-[ V{ "a" } ] [ "a" tokenize-command ] unit-test
-[ V{ "abc" } ] [ "abc" tokenize-command ] unit-test
-[ V{ "abc" } ] [ "abc   " tokenize-command ] unit-test
-[ V{ "abc" } ] [ "   abc" tokenize-command ] unit-test
-[ V{ "abc" "def" } ] [ "abc def" tokenize-command ] unit-test
-[ V{ "abc def" } ] [ "abc\\ def" tokenize-command ] unit-test
-[ V{ "abc\\" "def" } ] [ "abc\\\\ def" tokenize-command ] unit-test
-[ V{ "abc\\ def" } ] [ "\"abc\\\\ def\"" tokenize-command ] unit-test
-[ V{ "abc\\ def" } ] [ "  \"abc\\\\ def\"" tokenize-command ] unit-test
-[ V{ "abc\\ def" "hey" } ] [ "\"abc\\\\ def\" hey" tokenize-command ] unit-test
-[ V{ "abc def" "hey" } ] [ "\"abc def\" \"hey\"" tokenize-command ] unit-test
-[ "\"abc def\" \"hey" tokenize-command ] must-fail
-[ "\"abc def" tokenize-command ] must-fail
-[ V{ "abc def" "h\"ey" } ] [ "\"abc def\" \"h\\\"ey\"  " tokenize-command ] unit-test
-
-[
-    V{
-        "Hello world.app/Contents/MacOS/hello-ui"
-        "-i=boot.macosx-ppc.image"
-        "-include= math compiler ui"
-        "-deploy-vocab=hello-ui"
-        "-output-image=Hello world.app/Contents/Resources/hello-ui.image"
-        "-no-stack-traces"
-        "-no-user-init"
-    }
-] [
-    "\"Hello world.app/Contents/MacOS/hello-ui\" -i=boot.macosx-ppc.image \"-include= math compiler ui\" -deploy-vocab=hello-ui \"-output-image=Hello world.app/Contents/Resources/hello-ui.image\" -no-stack-traces -no-user-init" tokenize-command
-] unit-test
diff --git a/basis/io/launcher/unix/parser/parser.factor b/basis/io/launcher/unix/parser/parser.factor
deleted file mode 100644 (file)
index bcc5f96..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-! Copyright (C) 2008 Slava Pestov
-! See http://factorcode.org/license.txt for BSD license.
-USING: peg peg.ebnf arrays sequences strings kernel ;
-IN: io.launcher.unix.parser
-
-! Our command line parser. Supported syntax:
-! foo bar baz -- simple tokens
-! foo\ bar -- escaping the space
-! "foo bar" -- quotation
-EBNF: tokenize-command
-space = " "
-escaped-char = "\" .:ch => [[ ch ]]
-quoted = '"' (escaped-char | [^"])*:a '"' => [[ a ]]
-unquoted = (escaped-char | [^ "])+
-argument = (quoted | unquoted) => [[ >string ]]
-command = space* (argument:a space* => [[ a ]])+:c !(.) => [[ c ]]
-;EBNF
diff --git a/basis/io/launcher/unix/parser/platforms.txt b/basis/io/launcher/unix/parser/platforms.txt
deleted file mode 100644 (file)
index 509143d..0000000
+++ /dev/null
@@ -1 +0,0 @@
-unix
index d8b55d3d1710d73b5026f3d614e0a1bbe6cc8fb8..aaaccd4719e1d9b92a32324763a33da2459accdc 100644 (file)
@@ -1,15 +1,14 @@
-! Copyright (C) 2007, 2008 Slava Pestov.
+! Copyright (C) 2007, 2010 Slava Pestov.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: accessors alien.c-types arrays assocs combinators
 continuations environment io io.backend io.backend.unix
-io.files io.files.private io.files.unix io.launcher
-io.launcher.unix.parser io.pathnames io.ports kernel math
-namespaces sequences strings system threads unix
-unix.process unix.ffi ;
+io.files io.files.private io.files.unix io.launcher io.pathnames
+io.ports kernel math namespaces sequences strings system threads
+unix unix.process unix.ffi simple-tokenizer ;
 IN: io.launcher.unix
 
 : get-arguments ( process -- seq )
-    command>> dup string? [ tokenize-command ] when ;
+    command>> dup string? [ tokenize ] when ;
 
 : assoc>env ( assoc -- env )
     [ "=" glue ] { } assoc>map ;
diff --git a/basis/simple-tokenizer/authors.txt b/basis/simple-tokenizer/authors.txt
new file mode 100644 (file)
index 0000000..1901f27
--- /dev/null
@@ -0,0 +1 @@
+Slava Pestov
diff --git a/basis/simple-tokenizer/simple-tokenizer-docs.factor b/basis/simple-tokenizer/simple-tokenizer-docs.factor
new file mode 100644 (file)
index 0000000..57e14f0
--- /dev/null
@@ -0,0 +1,13 @@
+USING: help.markup help.syntax strings ;
+IN: simple-tokenizer
+
+HELP: tokenize
+{ $values { "input" string } { "ast" "a sequence of strings" } }
+{ $description
+    "Tokenize a string. Supported syntax:"
+    { $list
+        { { $snippet "foo bar baz" } " - simple tokens" }
+        { { $snippet "foo\\ bar" } " - token with an escaped space"}
+        { { $snippet "\"foo bar\"" } " - quoted token" }
+    }
+} ;
diff --git a/basis/simple-tokenizer/simple-tokenizer-tests.factor b/basis/simple-tokenizer/simple-tokenizer-tests.factor
new file mode 100644 (file)
index 0000000..3b44f03
--- /dev/null
@@ -0,0 +1,33 @@
+IN: simple-tokenizer.tests
+USING: simple-tokenizer tools.test ;
+
+[ "" tokenize ] must-fail
+[ "   " tokenize ] must-fail
+[ V{ "a" } ] [ "a" tokenize ] unit-test
+[ V{ "abc" } ] [ "abc" tokenize ] unit-test
+[ V{ "abc" } ] [ "abc   " tokenize ] unit-test
+[ V{ "abc" } ] [ "   abc" tokenize ] unit-test
+[ V{ "abc" "def" } ] [ "abc def" tokenize ] unit-test
+[ V{ "abc def" } ] [ "abc\\ def" tokenize ] unit-test
+[ V{ "abc\\" "def" } ] [ "abc\\\\ def" tokenize ] unit-test
+[ V{ "abc\\ def" } ] [ "\"abc\\\\ def\"" tokenize ] unit-test
+[ V{ "abc\\ def" } ] [ "  \"abc\\\\ def\"" tokenize ] unit-test
+[ V{ "abc\\ def" "hey" } ] [ "\"abc\\\\ def\" hey" tokenize ] unit-test
+[ V{ "abc def" "hey" } ] [ "\"abc def\" \"hey\"" tokenize ] unit-test
+[ "\"abc def\" \"hey" tokenize ] must-fail
+[ "\"abc def" tokenize ] must-fail
+[ V{ "abc def" "h\"ey" } ] [ "\"abc def\" \"h\\\"ey\"  " tokenize ] unit-test
+
+[
+    V{
+        "Hello world.app/Contents/MacOS/hello-ui"
+        "-i=boot.macosx-ppc.image"
+        "-include= math compiler ui"
+        "-deploy-vocab=hello-ui"
+        "-output-image=Hello world.app/Contents/Resources/hello-ui.image"
+        "-no-stack-traces"
+        "-no-user-init"
+    }
+] [
+    "\"Hello world.app/Contents/MacOS/hello-ui\" -i=boot.macosx-ppc.image \"-include= math compiler ui\" -deploy-vocab=hello-ui \"-output-image=Hello world.app/Contents/Resources/hello-ui.image\" -no-stack-traces -no-user-init" tokenize
+] unit-test
diff --git a/basis/simple-tokenizer/simple-tokenizer.factor b/basis/simple-tokenizer/simple-tokenizer.factor
new file mode 100644 (file)
index 0000000..f6698a6
--- /dev/null
@@ -0,0 +1,13 @@
+! Copyright (C) 2008, 2010 Slava Pestov
+! See http://factorcode.org/license.txt for BSD license.
+USING: peg peg.ebnf arrays sequences strings kernel ;
+IN: simple-tokenizer
+
+EBNF: tokenize
+space = " "
+escaped-char = "\" .:ch => [[ ch ]]
+quoted = '"' (escaped-char | [^"])*:a '"' => [[ a ]]
+unquoted = (escaped-char | [^ "])+
+argument = (quoted | unquoted) => [[ >string ]]
+command = space* (argument:a space* => [[ a ]])+:c !(.) => [[ c ]]
+;EBNF
diff --git a/basis/simple-tokenizer/tags.txt b/basis/simple-tokenizer/tags.txt
new file mode 100644 (file)
index 0000000..8e27be7
--- /dev/null
@@ -0,0 +1 @@
+text