From eab105590be0f1c7b59ae260198eec88e8219796 Mon Sep 17 00:00:00 2001 From: Slava Pestov Date: Sun, 21 Feb 2010 17:00:48 +1300 Subject: [PATCH] Rename io.launcher.unix.parser to simple-tokenizer since ftp.server uses it --- basis/ftp/server/server.factor | 21 ++++++------ .../launcher/unix/parser/parser-tests.factor | 33 ------------------- basis/io/launcher/unix/parser/platforms.txt | 1 - basis/io/launcher/unix/unix.factor | 11 +++---- basis/simple-tokenizer/authors.txt | 1 + .../simple-tokenizer-docs.factor | 13 ++++++++ .../simple-tokenizer-tests.factor | 33 +++++++++++++++++++ .../simple-tokenizer.factor} | 10 ++---- basis/simple-tokenizer/tags.txt | 1 + 9 files changed, 66 insertions(+), 58 deletions(-) delete mode 100644 basis/io/launcher/unix/parser/parser-tests.factor delete mode 100644 basis/io/launcher/unix/parser/platforms.txt create mode 100644 basis/simple-tokenizer/authors.txt create mode 100644 basis/simple-tokenizer/simple-tokenizer-docs.factor create mode 100644 basis/simple-tokenizer/simple-tokenizer-tests.factor rename basis/{io/launcher/unix/parser/parser.factor => simple-tokenizer/simple-tokenizer.factor} (62%) create mode 100644 basis/simple-tokenizer/tags.txt diff --git a/basis/ftp/server/server.factor b/basis/ftp/server/server.factor index 1077aebf07..f1bc8adef9 100644 --- a/basis/ftp/server/server.factor +++ b/basis/ftp/server/server.factor @@ -1,15 +1,14 @@ ! Copyright (C) 2008 Doug Coleman. ! See http://factorcode.org/license.txt for BSD license. -USING: accessors assocs byte-arrays calendar classes -combinators combinators.short-circuit concurrency.promises -continuations destructors ftp io io.backend io.directories -io.encodings io.encodings.binary -tools.files io.encodings.utf8 io.files io.files.info -io.pathnames io.launcher.unix.parser io.servers.connection -io.sockets io.streams.duplex io.streams.string io.timeouts -kernel make math math.bitwise math.parser namespaces sequences -splitting threads unicode.case logging calendar.format -strings io.files.links io.files.types io.encodings.8-bit.latin1 ; +USING: accessors assocs byte-arrays calendar classes combinators +combinators.short-circuit concurrency.promises continuations +destructors ftp io io.backend io.directories io.encodings +io.encodings.binary tools.files io.encodings.utf8 io.files +io.files.info io.pathnames io.servers.connection io.sockets +io.streams.duplex io.streams.string io.timeouts kernel make math +math.bitwise math.parser namespaces sequences splitting threads +unicode.case logging calendar.format strings io.files.links +io.files.types io.encodings.8-bit.latin1 simple-tokenizer ; IN: ftp.server SYMBOL: server @@ -24,7 +23,7 @@ TUPLE: ftp-command raw tokenized ; dup \ DEBUG log-message ftp-command new over >>raw - swap tokenize-command >>tokenized ; + swap tokenize >>tokenized ; TUPLE: ftp-get path ; : ( path -- obj ) diff --git a/basis/io/launcher/unix/parser/parser-tests.factor b/basis/io/launcher/unix/parser/parser-tests.factor deleted file mode 100644 index 90504ccac2..0000000000 --- a/basis/io/launcher/unix/parser/parser-tests.factor +++ /dev/null @@ -1,33 +0,0 @@ -IN: io.launcher.unix.parser.tests -USING: io.launcher.unix.parser tools.test ; - -[ "" tokenize-command ] must-fail -[ " " tokenize-command ] must-fail -[ V{ "a" } ] [ "a" tokenize-command ] unit-test -[ V{ "abc" } ] [ "abc" tokenize-command ] unit-test -[ V{ "abc" } ] [ "abc " tokenize-command ] unit-test -[ V{ "abc" } ] [ " abc" tokenize-command ] unit-test -[ V{ "abc" "def" } ] [ "abc def" tokenize-command ] unit-test 
-[ V{ "abc def" } ] [ "abc\\ def" tokenize-command ] unit-test -[ V{ "abc\\" "def" } ] [ "abc\\\\ def" tokenize-command ] unit-test -[ V{ "abc\\ def" } ] [ "\"abc\\\\ def\"" tokenize-command ] unit-test -[ V{ "abc\\ def" } ] [ " \"abc\\\\ def\"" tokenize-command ] unit-test -[ V{ "abc\\ def" "hey" } ] [ "\"abc\\\\ def\" hey" tokenize-command ] unit-test -[ V{ "abc def" "hey" } ] [ "\"abc def\" \"hey\"" tokenize-command ] unit-test -[ "\"abc def\" \"hey" tokenize-command ] must-fail -[ "\"abc def" tokenize-command ] must-fail -[ V{ "abc def" "h\"ey" } ] [ "\"abc def\" \"h\\\"ey\" " tokenize-command ] unit-test - -[ - V{ - "Hello world.app/Contents/MacOS/hello-ui" - "-i=boot.macosx-ppc.image" - "-include= math compiler ui" - "-deploy-vocab=hello-ui" - "-output-image=Hello world.app/Contents/Resources/hello-ui.image" - "-no-stack-traces" - "-no-user-init" - } -] [ - "\"Hello world.app/Contents/MacOS/hello-ui\" -i=boot.macosx-ppc.image \"-include= math compiler ui\" -deploy-vocab=hello-ui \"-output-image=Hello world.app/Contents/Resources/hello-ui.image\" -no-stack-traces -no-user-init" tokenize-command -] unit-test diff --git a/basis/io/launcher/unix/parser/platforms.txt b/basis/io/launcher/unix/parser/platforms.txt deleted file mode 100644 index 509143d863..0000000000 --- a/basis/io/launcher/unix/parser/platforms.txt +++ /dev/null @@ -1 +0,0 @@ -unix diff --git a/basis/io/launcher/unix/unix.factor b/basis/io/launcher/unix/unix.factor index d8b55d3d17..aaaccd4719 100644 --- a/basis/io/launcher/unix/unix.factor +++ b/basis/io/launcher/unix/unix.factor @@ -1,15 +1,14 @@ -! Copyright (C) 2007, 2008 Slava Pestov. +! Copyright (C) 2007, 2010 Slava Pestov. ! See http://factorcode.org/license.txt for BSD license. USING: accessors alien.c-types arrays assocs combinators continuations environment io io.backend io.backend.unix -io.files io.files.private io.files.unix io.launcher -io.launcher.unix.parser io.pathnames io.ports kernel math -namespaces sequences strings system threads unix -unix.process unix.ffi ; +io.files io.files.private io.files.unix io.launcher io.pathnames +io.ports kernel math namespaces sequences strings system threads +unix unix.process unix.ffi simple-tokenizer ; IN: io.launcher.unix : get-arguments ( process -- seq ) - command>> dup string? [ tokenize-command ] when ; + command>> dup string? [ tokenize ] when ; : assoc>env ( assoc -- env ) [ "=" glue ] { } assoc>map ; diff --git a/basis/simple-tokenizer/authors.txt b/basis/simple-tokenizer/authors.txt new file mode 100644 index 0000000000..1901f27a24 --- /dev/null +++ b/basis/simple-tokenizer/authors.txt @@ -0,0 +1 @@ +Slava Pestov diff --git a/basis/simple-tokenizer/simple-tokenizer-docs.factor b/basis/simple-tokenizer/simple-tokenizer-docs.factor new file mode 100644 index 0000000000..57e14f09ba --- /dev/null +++ b/basis/simple-tokenizer/simple-tokenizer-docs.factor @@ -0,0 +1,13 @@ +USING: help.markup help.syntax strings ; +IN: simple-tokenizer + +HELP: tokenize +{ $values { "input" string } { "ast" "a sequence of strings" } } +{ $description + "Tokenize a string. 
Supported syntax:" + { $list + { { $snippet "foo bar baz" } " - simple tokens" } + { { $snippet "foo\\ bar" } " - token with an escaped space"} + { { $snippet "\"foo bar\"" } " - quoted token" } + } +} ; diff --git a/basis/simple-tokenizer/simple-tokenizer-tests.factor b/basis/simple-tokenizer/simple-tokenizer-tests.factor new file mode 100644 index 0000000000..3b44f03650 --- /dev/null +++ b/basis/simple-tokenizer/simple-tokenizer-tests.factor @@ -0,0 +1,33 @@ +IN: simple-tokenizer.tests +USING: simple-tokenizer tools.test ; + +[ "" tokenize ] must-fail +[ " " tokenize ] must-fail +[ V{ "a" } ] [ "a" tokenize ] unit-test +[ V{ "abc" } ] [ "abc" tokenize ] unit-test +[ V{ "abc" } ] [ "abc " tokenize ] unit-test +[ V{ "abc" } ] [ " abc" tokenize ] unit-test +[ V{ "abc" "def" } ] [ "abc def" tokenize ] unit-test +[ V{ "abc def" } ] [ "abc\\ def" tokenize ] unit-test +[ V{ "abc\\" "def" } ] [ "abc\\\\ def" tokenize ] unit-test +[ V{ "abc\\ def" } ] [ "\"abc\\\\ def\"" tokenize ] unit-test +[ V{ "abc\\ def" } ] [ " \"abc\\\\ def\"" tokenize ] unit-test +[ V{ "abc\\ def" "hey" } ] [ "\"abc\\\\ def\" hey" tokenize ] unit-test +[ V{ "abc def" "hey" } ] [ "\"abc def\" \"hey\"" tokenize ] unit-test +[ "\"abc def\" \"hey" tokenize ] must-fail +[ "\"abc def" tokenize ] must-fail +[ V{ "abc def" "h\"ey" } ] [ "\"abc def\" \"h\\\"ey\" " tokenize ] unit-test + +[ + V{ + "Hello world.app/Contents/MacOS/hello-ui" + "-i=boot.macosx-ppc.image" + "-include= math compiler ui" + "-deploy-vocab=hello-ui" + "-output-image=Hello world.app/Contents/Resources/hello-ui.image" + "-no-stack-traces" + "-no-user-init" + } +] [ + "\"Hello world.app/Contents/MacOS/hello-ui\" -i=boot.macosx-ppc.image \"-include= math compiler ui\" -deploy-vocab=hello-ui \"-output-image=Hello world.app/Contents/Resources/hello-ui.image\" -no-stack-traces -no-user-init" tokenize +] unit-test diff --git a/basis/io/launcher/unix/parser/parser.factor b/basis/simple-tokenizer/simple-tokenizer.factor similarity index 62% rename from basis/io/launcher/unix/parser/parser.factor rename to basis/simple-tokenizer/simple-tokenizer.factor index bcc5f965e9..f6698a65f0 100644 --- a/basis/io/launcher/unix/parser/parser.factor +++ b/basis/simple-tokenizer/simple-tokenizer.factor @@ -1,13 +1,9 @@ -! Copyright (C) 2008 Slava Pestov +! Copyright (C) 2008, 2010 Slava Pestov ! See http://factorcode.org/license.txt for BSD license. USING: peg peg.ebnf arrays sequences strings kernel ; -IN: io.launcher.unix.parser +IN: simple-tokenizer -! Our command line parser. Supported syntax: -! foo bar baz -- simple tokens -! foo\ bar -- escaping the space -! "foo bar" -- quotation -EBNF: tokenize-command +EBNF: tokenize space = " " escaped-char = "\" .:ch => [[ ch ]] quoted = '"' (escaped-char | [^"])*:a '"' => [[ a ]] diff --git a/basis/simple-tokenizer/tags.txt b/basis/simple-tokenizer/tags.txt new file mode 100644 index 0000000000..8e27be7d61 --- /dev/null +++ b/basis/simple-tokenizer/tags.txt @@ -0,0 +1 @@ +text