peg.ebnf: Clean up EBNF: syntax.
The syntax for an EBNF parser definition is now ``EBNF: identifier string`` -- the word name followed by a multiline string. The inline syntax went from ``[EBNF ... EBNF]`` to ``EBNF[[ ]]``, ``EBNF[=[ ]=]``, etc. ``<EBNF`` looked for a ``main`` rule, but that functionality is now just ``EBNF[[ ]]``.
parent de1cdfec52
commit 15b13870ae
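
For reference, a minimal before/after sketch of the change, based on the examples in the updated help text (the new delimiters are the multiline string syntax from the ``multiline`` vocabulary):

    ! Old inline syntax:
    USING: prettyprint peg.ebnf ;
    "ab" [EBNF rule="a" "b" EBNF] .      ! prints V{ "a" "b" }

    ! New inline syntax:
    USING: multiline prettyprint peg.ebnf ;
    "ab" EBNF[[ rule="a" "b" ]] .        ! prints V{ "a" "b" }

    ! Old defining-word syntax:
    EBNF: foo rule="a" "b" ;EBNF

    ! New defining-word syntax (word name, then a multiline string):
    EBNF: foo [=[ rule="a" "b" ]=]
    "ab" foo .                           ! prints V{ "a" "b" }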
|
@ -1,15 +1,15 @@
|
||||||
USING: peg.ebnf strings tools.test ;
|
USING: peg.ebnf strings tools.test multiline ;
|
||||||
IN: compiler.tests.peg-regression-2
|
IN: compiler.tests.peg-regression-2
|
||||||
|
|
||||||
GENERIC: <times> ( times -- term' )
|
GENERIC: <times> ( times -- term' )
|
||||||
M: string <times> ;
|
M: string <times> ;
|
||||||
|
|
||||||
EBNF: parse-regexp
|
EBNF: parse-regexp [=[
|
||||||
|
|
||||||
Times = .* => [[ "foo" ]]
|
Times = .* => [[ "foo" ]]
|
||||||
|
|
||||||
Regexp = Times:t => [[ t <times> ]]
|
Regexp = Times:t => [[ t <times> ]]
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
[ "foo" ] [ "a" parse-regexp ] unit-test
|
[ "foo" ] [ "a" parse-regexp ] unit-test
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
! optimization, which would batch generic word updates at the
|
! optimization, which would batch generic word updates at the
|
||||||
! end of a compilation unit.
|
! end of a compilation unit.
|
||||||
|
|
||||||
USING: kernel accessors peg.ebnf words ;
|
USING: kernel accessors peg.ebnf words multiline ;
|
||||||
IN: compiler.tests.peg-regression
|
IN: compiler.tests.peg-regression
|
||||||
|
|
||||||
TUPLE: pipeline-expr background ;
|
TUPLE: pipeline-expr background ;
|
||||||
|
@ -16,9 +16,9 @@ M: pipeline-expr blah ;
|
||||||
: ast>pipeline-expr ( -- obj )
|
: ast>pipeline-expr ( -- obj )
|
||||||
pipeline-expr new blah ;
|
pipeline-expr new blah ;
|
||||||
|
|
||||||
EBNF: expr
|
EBNF: expr [=[
|
||||||
pipeline = "hello" => [[ ast>pipeline-expr ]]
|
pipeline = "hello" => [[ ast>pipeline-expr ]]
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
USE: tools.test
|
USE: tools.test
|
||||||
|
|
||||||
|
|
|
@ -2,10 +2,10 @@
|
||||||
! See http://factorcode.org/license.txt for BSD license.
|
! See http://factorcode.org/license.txt for BSD license.
|
||||||
USING: kernel db.errors peg.ebnf strings sequences math
|
USING: kernel db.errors peg.ebnf strings sequences math
|
||||||
combinators.short-circuit accessors math.parser quoting
|
combinators.short-circuit accessors math.parser quoting
|
||||||
locals ;
|
locals multiline ;
|
||||||
IN: db.postgresql.errors
|
IN: db.postgresql.errors
|
||||||
|
|
||||||
EBNF: parse-postgresql-sql-error
|
EBNF: parse-postgresql-sql-error [=[
|
||||||
|
|
||||||
Error = "ERROR:" [ ]+
|
Error = "ERROR:" [ ]+
|
||||||
|
|
||||||
|
@ -35,18 +35,18 @@ UnknownError = .* => [[ >string <sql-unknown-error> ]]
|
||||||
|
|
||||||
PostgresqlSqlError = (TableError | DatabaseError | FunctionError | SyntaxError | UnknownError)
|
PostgresqlSqlError = (TableError | DatabaseError | FunctionError | SyntaxError | UnknownError)
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
|
|
||||||
TUPLE: parse-postgresql-location column line text ;
|
TUPLE: parse-postgresql-location column line text ;
|
||||||
C: <parse-postgresql-location> parse-postgresql-location
|
C: <parse-postgresql-location> parse-postgresql-location
|
||||||
|
|
||||||
EBNF: parse-postgresql-line-error
|
EBNF: parse-postgresql-line-error [=[
|
||||||
|
|
||||||
Line = "LINE " [0-9]+:line ": " .+:sql
|
Line = "LINE " [0-9]+:line ": " .+:sql
|
||||||
=> [[ f line >string string>number sql >string <parse-postgresql-location> ]]
|
=> [[ f line >string string>number sql >string <parse-postgresql-location> ]]
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
:: set-caret-position ( error caret-line -- error )
|
:: set-caret-position ( error caret-line -- error )
|
||||||
caret-line length
|
caret-line length
|
||||||
|
|
|
@ -1,13 +1,13 @@
|
||||||
! Copyright (C) 2009 Doug Coleman.
|
! Copyright (C) 2009 Doug Coleman.
|
||||||
! See http://factorcode.org/license.txt for BSD license.
|
! See http://factorcode.org/license.txt for BSD license.
|
||||||
USING: accessors combinators db kernel sequences peg.ebnf
|
USING: accessors combinators db kernel sequences peg.ebnf
|
||||||
strings db.errors ;
|
strings db.errors multiline ;
|
||||||
IN: db.sqlite.errors
|
IN: db.sqlite.errors
|
||||||
|
|
||||||
TUPLE: unparsed-sqlite-error error ;
|
TUPLE: unparsed-sqlite-error error ;
|
||||||
C: <unparsed-sqlite-error> unparsed-sqlite-error
|
C: <unparsed-sqlite-error> unparsed-sqlite-error
|
||||||
|
|
||||||
EBNF: parse-sqlite-sql-error
|
EBNF: parse-sqlite-sql-error [=[
|
||||||
|
|
||||||
AlreadyExists = " already exists"
|
AlreadyExists = " already exists"
|
||||||
|
|
||||||
|
@ -20,4 +20,4 @@ SqliteError =
|
||||||
=> [[ table >string <sql-table-missing> ]]
|
=> [[ table >string <sql-table-missing> ]]
|
||||||
| .*:error
|
| .*:error
|
||||||
=> [[ error >string <unparsed-sqlite-error> ]]
|
=> [[ error >string <unparsed-sqlite-error> ]]
|
||||||
;EBNF
|
]=]
|
||||||
|
|
|
@ -5,7 +5,7 @@ combinators.smart fry generalizations io io.streams.string
|
||||||
kernel macros math math.functions math.parser namespaces
|
kernel macros math math.functions math.parser namespaces
|
||||||
peg.ebnf present prettyprint quotations sequences
|
peg.ebnf present prettyprint quotations sequences
|
||||||
sequences.generalizations strings unicode vectors
|
sequences.generalizations strings unicode vectors
|
||||||
math.functions.integer-logs splitting ;
|
math.functions.integer-logs splitting multiline ;
|
||||||
FROM: math.parser.private => format-float ;
|
FROM: math.parser.private => format-float ;
|
||||||
IN: formatting
|
IN: formatting
|
||||||
|
|
||||||
|
@ -93,7 +93,7 @@ IN: formatting
|
||||||
|
|
||||||
ERROR: unknown-printf-directive ;
|
ERROR: unknown-printf-directive ;
|
||||||
|
|
||||||
EBNF: parse-printf
|
EBNF: parse-printf [=[
|
||||||
|
|
||||||
zero = "0" => [[ CHAR: 0 ]]
|
zero = "0" => [[ CHAR: 0 ]]
|
||||||
char = "'" (.) => [[ second ]]
|
char = "'" (.) => [[ second ]]
|
||||||
|
@ -146,7 +146,7 @@ plain-text = (!("%").)+ => [[ >string ]]
|
||||||
|
|
||||||
text = (formats|plain-text)* => [[ ]]
|
text = (formats|plain-text)* => [[ ]]
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
: printf-quot ( format-string -- format-quot n )
|
: printf-quot ( format-string -- format-quot n )
|
||||||
parse-printf [ [ callable? ] count ] keep [
|
parse-printf [ [ callable? ] count ] keep [
|
||||||
|
@ -211,7 +211,7 @@ MACRO: sprintf ( format-string -- quot )
|
||||||
|
|
||||||
: week-of-year-monday ( timestamp -- n ) 1 week-of-year ; inline
|
: week-of-year-monday ( timestamp -- n ) 1 week-of-year ; inline
|
||||||
|
|
||||||
EBNF: parse-strftime
|
EBNF: parse-strftime [=[
|
||||||
|
|
||||||
fmt-% = "%" => [[ "%" ]]
|
fmt-% = "%" => [[ "%" ]]
|
||||||
fmt-a = "a" => [[ [ day-of-week day-abbreviation3 ] ]]
|
fmt-a = "a" => [[ [ day-of-week day-abbreviation3 ] ]]
|
||||||
|
@ -247,7 +247,7 @@ plain-text = (!("%").)+ => [[ >string ]]
|
||||||
|
|
||||||
text = (formats|plain-text)* => [[ ]]
|
text = (formats|plain-text)* => [[ ]]
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
PRIVATE>
|
PRIVATE>
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
USING: accessors arrays combinators combinators.short-circuit
|
USING: accessors arrays combinators combinators.short-circuit
|
||||||
io.directories io.files io.files.info io.pathnames kernel locals
|
io.directories io.files io.files.info io.pathnames kernel locals
|
||||||
make peg.ebnf regexp regexp.combinators sequences strings system
|
make peg.ebnf regexp regexp.combinators sequences strings system
|
||||||
unicode ;
|
unicode multiline ;
|
||||||
IN: globs
|
IN: globs
|
||||||
|
|
||||||
: not-path-separator ( -- sep )
|
: not-path-separator ( -- sep )
|
||||||
|
@ -12,7 +12,7 @@ IN: globs
|
||||||
: wild-path-separator ( -- sep )
|
: wild-path-separator ( -- sep )
|
||||||
os windows? R/ [^\\/\\][\\/\\]|[^\\/\\]/ R/ [^\\/][\\/]|[^\\/]/ ? ; foldable
|
os windows? R/ [^\\/\\][\\/\\]|[^\\/\\]/ R/ [^\\/][\\/]|[^\\/]/ ? ; foldable
|
||||||
|
|
||||||
EBNF: <glob>
|
EBNF: <glob> [=[
|
||||||
|
|
||||||
Character = "\\" .:c => [[ c 1string <literal> ]]
|
Character = "\\" .:c => [[ c 1string <literal> ]]
|
||||||
| !(","|"}") . => [[ 1string <literal> ]]
|
| !(","|"}") . => [[ 1string <literal> ]]
|
||||||
|
@ -45,7 +45,7 @@ End = !(.)
|
||||||
|
|
||||||
Main = Concatenation End
|
Main = Concatenation End
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
: glob-matches? ( input glob -- ? )
|
: glob-matches? ( input glob -- ? )
|
||||||
[ >case-fold ] bi@ <glob> matches? ;
|
[ >case-fold ] bi@ <glob> matches? ;
|
||||||
|
|
|
@ -1,27 +1,11 @@
|
||||||
! Copyright (C) 2009 Chris Double.
|
! Copyright (C) 2009 Chris Double.
|
||||||
! See http://factorcode.org/license.txt for BSD license.
|
! See http://factorcode.org/license.txt for BSD license.
|
||||||
USING: help.syntax help.markup peg peg.search words ;
|
USING: help.syntax help.markup peg peg.search words
|
||||||
|
multiline ;
|
||||||
IN: peg.ebnf
|
IN: peg.ebnf
|
||||||
|
|
||||||
HELP: <EBNF
|
HELP: EBNF[[
|
||||||
{ $syntax "<EBNF ...ebnf... EBNF>" }
|
{ $syntax "EBNF[[ ...ebnf... ]]" }
|
||||||
{ $values { "...ebnf..." "EBNF DSL text" } }
|
|
||||||
{ $description
|
|
||||||
"Creates a " { $vocab-link "peg" }
|
|
||||||
" object that parses a string using the syntax "
|
|
||||||
"defined with the EBNF DSL. The peg object can be run using the " { $link parse }
|
|
||||||
" word and can be used with the " { $link search } " and " { $link replace } " words."
|
|
||||||
}
|
|
||||||
{ $examples
|
|
||||||
{ $example
|
|
||||||
"USING: kernel prettyprint peg.ebnf peg.search ;"
|
|
||||||
"\"abcdab\" <EBNF rule=\"a\" \"b\" => [[ drop \"foo\" ]] EBNF> replace ."
|
|
||||||
"\"foocdfoo\""
|
|
||||||
}
|
|
||||||
} ;
|
|
||||||
|
|
||||||
HELP: [EBNF
|
|
||||||
{ $syntax "[EBNF ...ebnf... EBNF]" }
|
|
||||||
{ $values { "...ebnf..." "EBNF DSL text" } }
|
{ $values { "...ebnf..." "EBNF DSL text" } }
|
||||||
{ $description
|
{ $description
|
||||||
"Creates and calls a quotation that parses a string using the syntax "
|
"Creates and calls a quotation that parses a string using the syntax "
|
||||||
|
@ -32,14 +16,14 @@ HELP: [EBNF
|
||||||
}
|
}
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: multiline prettyprint peg.ebnf ;"
|
||||||
"\"ab\" [EBNF rule=\"a\" \"b\" EBNF] ."
|
"\"ab\" EBNF[[ rule=\"a\" \"b\" ]] ."
|
||||||
"V{ \"a\" \"b\" }"
|
"V{ \"a\" \"b\" }"
|
||||||
}
|
}
|
||||||
} ;
|
} ;
|
||||||
|
|
||||||
HELP: EBNF:
|
HELP: EBNF:
|
||||||
{ $syntax "EBNF: word ...ebnf... ;EBNF" }
|
{ $syntax "EBNF: word [=[ ...ebnf... ]=]" }
|
||||||
{ $values { "word" word } { "...ebnf..." "EBNF DSL text" } }
|
{ $values { "word" word } { "...ebnf..." "EBNF DSL text" } }
|
||||||
{ $description
|
{ $description
|
||||||
"Defines a word that when called will parse a string using the syntax "
|
"Defines a word that when called will parse a string using the syntax "
|
||||||
|
@ -50,9 +34,9 @@ HELP: EBNF:
|
||||||
}
|
}
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint multiline peg.ebnf ;"
|
||||||
"IN: scratchpad"
|
"IN: scratchpad"
|
||||||
"EBNF: foo rule=\"a\" \"b\" ;EBNF"
|
"EBNF: foo [=[ rule=\"a\" \"b\" ]=]"
|
||||||
"\"ab\" foo ."
|
"\"ab\" foo ."
|
||||||
"V{ \"a\" \"b\" }"
|
"V{ \"a\" \"b\" }"
|
||||||
}
|
}
|
||||||
|
@ -67,22 +51,22 @@ ARTICLE: "peg.ebnf.strings" "EBNF Rule: Strings"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"helloworld\" [EBNF rule=\"hello\" \"world\" EBNF] ."
|
"\"helloworld\" EBNF[[ rule=\"hello\" \"world\" ]] ."
|
||||||
"V{ \"hello\" \"world\" }"
|
"V{ \"hello\" \"world\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"AΣ𝄞\" [EBNF rule='\\x41' '\\u{greek-capital-letter-sigma}' '\\u01D11E' EBNF] ."
|
"\"AΣ𝄞\" EBNF[[ rule='\\x41' '\\u{greek-capital-letter-sigma}' '\\u01D11E' ]] ."
|
||||||
"V{ \"A\" \"Σ\" \"𝄞\" }"
|
"V{ \"A\" \"Σ\" \"𝄞\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: io peg.ebnf ;"
|
"USING: io peg.ebnf ;"
|
||||||
"\"A double quote: \\\"\" [EBNF rule='A double quote: \"' EBNF] print"
|
"\"A double quote: \\\"\" EBNF[[ rule='A double quote: \"' ]] print"
|
||||||
"A double quote: \""
|
"A double quote: \""
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: io peg.ebnf ;"
|
"USING: io peg.ebnf ;"
|
||||||
"\"' and \\\"\" [EBNF rule=\"' and \\\"\" EBNF] print"
|
"\"' and \\\"\" EBNF[[ rule=\"' and \\\"\" ]] print"
|
||||||
"' and \""
|
"' and \""
|
||||||
}
|
}
|
||||||
} ;
|
} ;
|
||||||
|
@ -93,7 +77,7 @@ ARTICLE: "peg.ebnf.any" "EBNF Rule: Any"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"abc\" [EBNF rule=\"a\" . \"c\" EBNF] ."
|
"\"abc\" EBNF[[ rule=\"a\" . \"c\" ]] ."
|
||||||
"V{ \"a\" 98 \"c\" }"
|
"V{ \"a\" 98 \"c\" }"
|
||||||
}
|
}
|
||||||
} ;
|
} ;
|
||||||
|
@ -106,7 +90,7 @@ ARTICLE: "peg.ebnf.sequence" "EBNF Rule: Sequence"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"abbba\" [EBNF rule=\"a\" (\"b\")* \"a\" EBNF] ."
|
"\"abbba\" EBNF[[ rule=\"a\" (\"b\")* \"a\" ]] ."
|
||||||
"V{ \"a\" V{ \"b\" \"b\" \"b\" } \"a\" }"
|
"V{ \"a\" V{ \"b\" \"b\" \"b\" } \"a\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -123,12 +107,12 @@ $nl
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"abcca\" [EBNF rule=\"a\" (\"b\" | \"c\")* \"a\" EBNF] ."
|
"\"abcca\" EBNF[[ rule=\"a\" (\"b\" | \"c\")* \"a\" ]] ."
|
||||||
"V{ \"a\" V{ \"b\" \"c\" \"c\" } \"a\" }"
|
"V{ \"a\" V{ \"b\" \"c\" \"c\" } \"a\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"ab c\nd \" [EBNF rule={\"a\" \"b\" \"c\" \"d\"} EBNF] ."
|
"\"ab c\nd \" EBNF[[ rule={\"a\" \"b\" \"c\" \"d\"} ]] ."
|
||||||
"V{ \"a\" \"b\" \"c\" \"d\" }"
|
"V{ \"a\" \"b\" \"c\" \"d\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -141,17 +125,17 @@ ARTICLE: "peg.ebnf.choice" "EBNF Rule: Choice"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"a\" [EBNF rule=\"a\" | \"b\" | \"c\" EBNF] ."
|
"\"a\" EBNF[[ rule=\"a\" | \"b\" | \"c\" ]] ."
|
||||||
"\"a\""
|
"\"a\""
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"b\" [EBNF rule=\"a\" | \"b\" | \"c\" EBNF] ."
|
"\"b\" EBNF[[ rule=\"a\" | \"b\" | \"c\" ]] ."
|
||||||
"\"b\""
|
"\"b\""
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"d\" [EBNF rule=\"a\" | \"b\" | \"c\" EBNF] ."
|
"\"d\" EBNF[[ rule=\"a\" | \"b\" | \"c\" ]] ."
|
||||||
"Peg parsing error at character position 0.\nExpected 'a' or 'b' or 'c'\nGot 'd'"
|
"Peg parsing error at character position 0.\nExpected 'a' or 'b' or 'c'\nGot 'd'"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -164,7 +148,7 @@ ARTICLE: "peg.ebnf.ignore" "EBNF Rule: Ignore"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"abc\" [EBNF rule=\"a\" \"b\"~ \"c\" EBNF] ."
|
"\"abc\" EBNF[[ rule=\"a\" \"b\"~ \"c\" ]] ."
|
||||||
"V{ \"a\" \"c\" }"
|
"V{ \"a\" \"c\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -177,12 +161,12 @@ ARTICLE: "peg.ebnf.option" "EBNF Rule: Option"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"abc\" [EBNF rule=\"a\" \"b\"? \"c\" EBNF] ."
|
"\"abc\" EBNF[[ rule=\"a\" \"b\"? \"c\" ]] ."
|
||||||
"V{ \"a\" \"b\" \"c\" }"
|
"V{ \"a\" \"b\" \"c\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"ac\" [EBNF rule=\"a\" \"b\"? \"c\" EBNF] ."
|
"\"ac\" EBNF[[ rule=\"a\" \"b\"? \"c\" ]] ."
|
||||||
"V{ \"a\" f \"c\" }"
|
"V{ \"a\" f \"c\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -199,7 +183,7 @@ ARTICLE: "peg.ebnf.character-class" "EBNF Rule: Character Class"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"123\" [EBNF rule=[0-9]+ EBNF] ."
|
"\"123\" EBNF[[ rule=[0-9]+ ]] ."
|
||||||
"V{ 49 50 51 }"
|
"V{ 49 50 51 }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -212,7 +196,7 @@ ARTICLE: "peg.ebnf.one-or-more" "EBNF Rule: One or more"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"aab\" [EBNF rule=\"a\"+ \"b\" EBNF] ."
|
"\"aab\" EBNF[[ rule=\"a\"+ \"b\" ]] ."
|
||||||
"V{ V{ \"a\" \"a\" } \"b\" }"
|
"V{ V{ \"a\" \"a\" } \"b\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -225,12 +209,12 @@ ARTICLE: "peg.ebnf.zero-or-more" "EBNF Rule: Zero or more"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"aab\" [EBNF rule=\"a\"* \"b\" EBNF] ."
|
"\"aab\" EBNF[[ rule=\"a\"* \"b\" ]] ."
|
||||||
"V{ V{ \"a\" \"a\" } \"b\" }"
|
"V{ V{ \"a\" \"a\" } \"b\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"b\" [EBNF rule=\"a\"* \"b\" EBNF] ."
|
"\"b\" EBNF[[ rule=\"a\"* \"b\" ]] ."
|
||||||
"V{ V{ } \"b\" }"
|
"V{ V{ } \"b\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -246,7 +230,7 @@ ARTICLE: "peg.ebnf.and" "EBNF Rule: And"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"ab\" [EBNF rule=&(\"a\") \"a\" \"b\" EBNF] ."
|
"\"ab\" EBNF[[ rule=&(\"a\") \"a\" \"b\" ]] ."
|
||||||
"V{ \"a\" \"b\" }"
|
"V{ \"a\" \"b\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -262,7 +246,7 @@ ARTICLE: "peg.ebnf.not" "EBNF Rule: Not"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"\"<abcd>\" [EBNF rule=\"<\" (!(\">\") .)* \">\" EBNF] ."
|
"\"<abcd>\" EBNF[[ rule=\"<\" (!(\">\") .)* \">\" ]] ."
|
||||||
"V{ \"<\" V{ 97 98 99 100 } \">\" }"
|
"V{ \"<\" V{ 97 98 99 100 } \">\" }"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -283,12 +267,12 @@ ARTICLE: "peg.ebnf.action" "EBNF Action"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf strings ;"
|
"USING: prettyprint peg.ebnf strings ;"
|
||||||
"\"<abcd>\" [EBNF rule=\"<\" ((!(\">\") .)* => [[ >string ]]) \">\" EBNF] ."
|
"\"<abcd>\" EBNF[=[ rule=\"<\" ((!(\">\") .)* => [[ >string ]]) \">\" ]=] ."
|
||||||
"V{ \"<\" \"abcd\" \">\" }"
|
"V{ \"<\" \"abcd\" \">\" }"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf math.parser ;"
|
"USING: prettyprint peg.ebnf math.parser ;"
|
||||||
"\"123\" [EBNF rule=[0-9]+ => [[ string>number ]] EBNF] ."
|
"\"123\" EBNF[=[ rule=[0-9]+ => [[ string>number ]] ]=] ."
|
||||||
"123"
|
"123"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -303,12 +287,12 @@ ARTICLE: "peg.ebnf.semantic-action" "EBNF Semantic Action"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf math math.parser ;"
|
"USING: prettyprint peg.ebnf math math.parser ;"
|
||||||
"\"1\" [EBNF rule=[0-9] ?[ digit> odd? ]? EBNF] ."
|
"\"1\" EBNF[[ rule=[0-9] ?[ digit> odd? ]? ]] ."
|
||||||
"49"
|
"49"
|
||||||
}
|
}
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf math math.parser ;"
|
"USING: prettyprint peg.ebnf math math.parser ;"
|
||||||
"\"2\" [EBNF rule=[0-9] ?[ digit> odd? ]? EBNF] ."
|
"\"2\" EBNF[[ rule=[0-9] ?[ digit> odd? ]? ]] ."
|
||||||
"Peg parsing error at character position 0.\nExpected \nGot '2'"
|
"Peg parsing error at character position 0.\nExpected \nGot '2'"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -321,7 +305,7 @@ ARTICLE: "peg.ebnf.variable" "EBNF Variable"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg.ebnf math.parser ;"
|
"USING: prettyprint peg.ebnf math.parser ;"
|
||||||
"\"1+2\" [EBNF rule=[0-9]:a \"+\" [0-9]:b => [[ a digit> b digit> + ]] EBNF] ."
|
"\"1+2\" EBNF[=[ rule=[0-9]:a \"+\" [0-9]:b => [[ a digit> b digit> + ]] ]=] ."
|
||||||
"3"
|
"3"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -339,22 +323,22 @@ ARTICLE: "peg.ebnf.foreign-rules" "EBNF Foreign Rules"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $code
|
{ $code
|
||||||
"USING: prettyprint peg.ebnf ;"
|
"USING: prettyprint peg.ebnf ;"
|
||||||
"EBNF: parse-string"
|
"EBNF: parse-string [=["
|
||||||
"StringBody = (!('\"') .)*"
|
"StringBody = (!('\"') .)*"
|
||||||
"String= '\"' StringBody:b '\"' => [[ b >string ]]"
|
"String= '\"' StringBody:b '\"' => [[ b >string ]]"
|
||||||
";EBNF"
|
"]=]"
|
||||||
"EBNF: parse-two-strings"
|
"EBNF: parse-two-strings [=["
|
||||||
"TwoStrings = <foreign parse-string String> <foreign parse-string String>"
|
"TwoStrings = <foreign parse-string String> <foreign parse-string String>"
|
||||||
";EBNF"
|
"]=]"
|
||||||
"EBNF: parse-two-strings"
|
"EBNF: parse-two-strings [=["
|
||||||
"TwoString = <foreign parse-string> <foreign parse-string>"
|
"TwoString = <foreign parse-string> <foreign parse-string>"
|
||||||
";EBNF"
|
"]=]"
|
||||||
}
|
}
|
||||||
{ $code
|
{ $code
|
||||||
": a-token ( -- parser ) \"a\" token ;"
|
": a-token ( -- parser ) \"a\" token ;"
|
||||||
"EBNF: parse-abc"
|
"EBNF: parse-abc [=["
|
||||||
"abc = <foreign a-token> 'b' 'c'"
|
"abc = <foreign a-token> 'b' 'c'"
|
||||||
";EBNF"
|
"]=]"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
@ -365,9 +349,10 @@ ARTICLE: "peg.ebnf.tokenizers" "EBNF Tokenizers"
|
||||||
"Terminals in a rule match successive characters in the array or string. "
|
"Terminals in a rule match successive characters in the array or string. "
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $code
|
{ $code
|
||||||
"EBNF: foo"
|
"USING: multiline ;"
|
||||||
|
"EBNF: foo [=["
|
||||||
"rule = \"++\" \"--\""
|
"rule = \"++\" \"--\""
|
||||||
";EBNF"
|
"]=]"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"This parser when run with the string \"++--\" or the array "
|
"This parser when run with the string \"++--\" or the array "
|
||||||
|
@ -376,11 +361,12 @@ ARTICLE: "peg.ebnf.tokenizers" "EBNF Tokenizers"
|
||||||
"between the terminals:"
|
"between the terminals:"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $code
|
{ $code
|
||||||
"EBNF: foo"
|
"USING: multiline ;"
|
||||||
|
"EBNF: foo [=["
|
||||||
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
||||||
"spaces = space* => [[ drop ignore ]]"
|
"spaces = space* => [[ drop ignore ]]"
|
||||||
"rule = spaces \"++\" spaces \"--\" spaces"
|
"rule = spaces \"++\" spaces \"--\" spaces"
|
||||||
";EBNF"
|
"]=]"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"In a large grammar this gets tedious and makes the grammar hard to read. "
|
"In a large grammar this gets tedious and makes the grammar hard to read. "
|
||||||
|
@ -389,12 +375,13 @@ ARTICLE: "peg.ebnf.tokenizers" "EBNF Tokenizers"
|
||||||
"might look:"
|
"might look:"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $code
|
{ $code
|
||||||
"EBNF: foo"
|
"USING: multiline ;"
|
||||||
|
"EBNF: foo [=["
|
||||||
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
||||||
"spaces = space* => [[ drop ignore ]]"
|
"spaces = space* => [[ drop ignore ]]"
|
||||||
"tokenizer = spaces ( \"++\" | \"--\" )"
|
"tokenizer = spaces ( \"++\" | \"--\" )"
|
||||||
"rule = \"++\" \"--\""
|
"rule = \"++\" \"--\""
|
||||||
";EBNF"
|
"]=]"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"'tokenizer' is the name of a built in rule. Once defined it is called to "
|
"'tokenizer' is the name of a built in rule. Once defined it is called to "
|
||||||
|
@ -413,13 +400,13 @@ $nl
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $example
|
{ $example
|
||||||
"USING: prettyprint peg peg.ebnf kernel math.parser strings"
|
"USING: prettyprint peg peg.ebnf kernel math.parser strings"
|
||||||
"accessors math arrays ;"
|
"accessors math arrays multiline ;"
|
||||||
"IN: scratchpad"
|
"IN: scratchpad"
|
||||||
""
|
""
|
||||||
"TUPLE: ast-number value ;"
|
"TUPLE: ast-number value ;"
|
||||||
"TUPLE: ast-string value ;"
|
"TUPLE: ast-string value ;"
|
||||||
""
|
""
|
||||||
"EBNF: foo-tokenizer"
|
"EBNF: foo-tokenizer [=["
|
||||||
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
"space = (\" \" | \"\\r\" | \"\\t\" | \"\\n\")"
|
||||||
"spaces = space* => [[ drop ignore ]]"
|
"spaces = space* => [[ drop ignore ]]"
|
||||||
""
|
""
|
||||||
|
@ -428,16 +415,16 @@ $nl
|
||||||
""
|
""
|
||||||
"token = spaces ( number | operator )"
|
"token = spaces ( number | operator )"
|
||||||
"tokens = token*"
|
"tokens = token*"
|
||||||
";EBNF"
|
"]=]"
|
||||||
""
|
""
|
||||||
"EBNF: foo"
|
"EBNF: foo [=["
|
||||||
"tokenizer = <foreign foo-tokenizer token>"
|
"tokenizer = <foreign foo-tokenizer token>"
|
||||||
""
|
""
|
||||||
"number = . ?[ ast-number? ]? => [[ value>> ]]"
|
"number = . ?[ ast-number? ]? => [[ value>> ]]"
|
||||||
"string = . ?[ ast-string? ]? => [[ value>> ]]"
|
"string = . ?[ ast-string? ]? => [[ value>> ]]"
|
||||||
""
|
""
|
||||||
"rule = string:a number:b \"+\" number:c => [[ a b c + 2array ]]"
|
"rule = string:a number:b \"+\" number:c => [[ a b c + 2array ]]"
|
||||||
";EBNF"
|
"]=]"
|
||||||
""
|
""
|
||||||
"\"123 456 +\" foo-tokenizer ."
|
"\"123 456 +\" foo-tokenizer ."
|
||||||
"V{\n T{ ast-number { value 123 } }\n T{ ast-number { value 456 } }\n \"+\"\n}"
|
"V{\n T{ ast-number { value 123 } }\n T{ ast-number { value 456 } }\n \"+\"\n}"
|
||||||
|
@ -454,7 +441,8 @@ $nl
|
||||||
"was defined lexically before the rule. This is useful in the JavaScript grammar:"
|
"was defined lexically before the rule. This is useful in the JavaScript grammar:"
|
||||||
{ $examples
|
{ $examples
|
||||||
{ $code
|
{ $code
|
||||||
"EBNF: javascript"
|
"USING: multiline ;"
|
||||||
|
"EBNF: javascript [=["
|
||||||
"tokenizer = default"
|
"tokenizer = default"
|
||||||
"nl = \"\\r\" \"\\n\" | \"\\n\""
|
"nl = \"\\r\" \"\\n\" | \"\\n\""
|
||||||
"tokenizer = <foreign tokenize-javascript Tok>"
|
"tokenizer = <foreign tokenize-javascript Tok>"
|
||||||
|
@ -466,6 +454,7 @@ $nl
|
||||||
"RegExp = . ?[ ast-regexp? ]? => [[ value>> ]]"
|
"RegExp = . ?[ ast-regexp? ]? => [[ value>> ]]"
|
||||||
"SpacesNoNl = (!(nl) Space)* => [[ ignore ]]"
|
"SpacesNoNl = (!(nl) Space)* => [[ ignore ]]"
|
||||||
"Sc = SpacesNoNl (nl | &(\"}\") | End)| \";\""
|
"Sc = SpacesNoNl (nl | &(\"}\") | End)| \";\""
|
||||||
|
"]=]"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"Here the rule 'nl' is defined using the default tokenizer of sequential "
|
"Here the rule 'nl' is defined using the default tokenizer of sequential "
|
||||||
|
@ -490,9 +479,12 @@ ARTICLE: "peg.ebnf" "EBNF"
|
||||||
"EBNF syntax. It provides three parsing words described below. These words all "
|
"EBNF syntax. It provides three parsing words described below. These words all "
|
||||||
"accept the same EBNF syntax. The difference is in how they are used. "
|
"accept the same EBNF syntax. The difference is in how they are used. "
|
||||||
{ $subsections
|
{ $subsections
|
||||||
POSTPONE: <EBNF
|
|
||||||
POSTPONE: [EBNF
|
|
||||||
POSTPONE: EBNF:
|
POSTPONE: EBNF:
|
||||||
|
POSTPONE: EBNF[[
|
||||||
|
POSTPONE: EBNF[=[
|
||||||
|
POSTPONE: EBNF[==[
|
||||||
|
POSTPONE: EBNF[===[
|
||||||
|
POSTPONE: EBNF[====[
|
||||||
}
|
}
|
||||||
"The EBNF syntax is composed of a series of rules of the form:"
|
"The EBNF syntax is composed of a series of rules of the form:"
|
||||||
{ $code
|
{ $code
|
||||||
|
|
|
@ -145,149 +145,149 @@ IN: peg.ebnf.tests
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "b" } } [
|
{ V{ "a" "b" } } [
|
||||||
"ab" [EBNF foo='a' 'b' EBNF]
|
"ab" EBNF[=[ foo='a' 'b' ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ 1 "b" } } [
|
{ V{ 1 "b" } } [
|
||||||
"ab" [EBNF foo=('a')[[ drop 1 ]] 'b' EBNF]
|
"ab" EBNF[=[ foo=('a')[[ drop 1 ]] 'b' ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ 1 2 } } [
|
{ V{ 1 2 } } [
|
||||||
"ab" [EBNF foo=('a') [[ drop 1 ]] ('b') [[ drop 2 ]] EBNF]
|
"ab" EBNF[=[ foo=('a') [[ drop 1 ]] ('b') [[ drop 2 ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ CHAR: A } [
|
{ CHAR: A } [
|
||||||
"A" [EBNF foo=[A-Z] EBNF]
|
"A" EBNF[=[ foo=[A-Z] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ CHAR: Z } [
|
{ CHAR: Z } [
|
||||||
"Z" [EBNF foo=[A-Z] EBNF]
|
"Z" EBNF[=[ foo=[A-Z] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"0" [EBNF foo=[A-Z] EBNF]
|
"0" EBNF[=[ foo=[A-Z] ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ CHAR: 0 } [
|
{ CHAR: 0 } [
|
||||||
"0" [EBNF foo=[^A-Z] EBNF]
|
"0" EBNF[=[ foo=[^A-Z] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"A" [EBNF foo=[^A-Z] EBNF]
|
"A" EBNF[=[ foo=[^A-Z] ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"Z" [EBNF foo=[^A-Z] EBNF]
|
"Z" EBNF[=[ foo=[^A-Z] ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ V{ "1" "+" "foo" } } [
|
{ V{ "1" "+" "foo" } } [
|
||||||
"1+1" [EBNF foo='1' '+' '1' [[ drop "foo" ]] EBNF]
|
"1+1" EBNF[=[ foo='1' '+' '1' [[ drop "foo" ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "foo" } [
|
{ "foo" } [
|
||||||
"1+1" [EBNF foo='1' '+' '1' => [[ drop "foo" ]] EBNF]
|
"1+1" EBNF[=[ foo='1' '+' '1' => [[ drop "foo" ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "foo" } [
|
{ "foo" } [
|
||||||
"1+1" [EBNF foo='1' '+' '1' => [[ drop "foo" ]] | '1' '-' '1' => [[ drop "bar" ]] EBNF]
|
"1+1" EBNF[=[ foo='1' '+' '1' => [[ drop "foo" ]] | '1' '-' '1' => [[ drop "bar" ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "bar" } [
|
{ "bar" } [
|
||||||
"1-1" [EBNF foo='1' '+' '1' => [[ drop "foo" ]] | '1' '-' '1' => [[ drop "bar" ]] EBNF]
|
"1-1" EBNF[=[ foo='1' '+' '1' => [[ drop "foo" ]] | '1' '-' '1' => [[ drop "bar" ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ 6 } [
|
{ 6 } [
|
||||||
"4+2" [EBNF num=[0-9] => [[ digit> ]] foo=num:x '+' num:y => [[ x y + ]] EBNF]
|
"4+2" EBNF[=[ num=[0-9] => [[ digit> ]] foo=num:x '+' num:y => [[ x y + ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ 6 } [
|
{ 6 } [
|
||||||
"4+2" [EBNF foo=[0-9]:x '+' [0-9]:y => [[ x digit> y digit> + ]] EBNF]
|
"4+2" EBNF[=[ foo=[0-9]:x '+' [0-9]:y => [[ x digit> y digit> + ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ 10 } [
|
{ 10 } [
|
||||||
{ 1 2 3 4 } [EBNF num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num EBNF]
|
{ 1 2 3 4 } EBNF[=[ num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
{ "a" 2 3 4 } [EBNF num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num EBNF]
|
{ "a" 2 3 4 } EBNF[=[ num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ 3 } [
|
{ 3 } [
|
||||||
{ 1 2 "a" 4 } [EBNF num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num EBNF]
|
{ 1 2 "a" 4 } EBNF[=[ num=. ?[ number? ]? list=list:x num:y => [[ x y + ]] | num ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"ab" [EBNF -=" " | "\t" | "\n" foo="a" - "b" EBNF]
|
"ab" EBNF[=[ -=" " | "\t" | "\n" foo="a" - "b" ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ V{ "a" " " "b" } } [
|
{ V{ "a" " " "b" } } [
|
||||||
"a b" [EBNF -=" " | "\t" | "\n" foo="a" - "b" EBNF]
|
"a b" EBNF[=[ -=" " | "\t" | "\n" foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "\t" "b" } } [
|
{ V{ "a" "\t" "b" } } [
|
||||||
"a\tb" [EBNF -=" " | "\t" | "\n" foo="a" - "b" EBNF]
|
"a\tb" EBNF[=[ -=" " | "\t" | "\n" foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "\n" "b" } } [
|
{ V{ "a" "\n" "b" } } [
|
||||||
"a\nb" [EBNF -=" " | "\t" | "\n" foo="a" - "b" EBNF]
|
"a\nb" EBNF[=[ -=" " | "\t" | "\n" foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" f "b" } } [
|
{ V{ "a" f "b" } } [
|
||||||
"ab" [EBNF -=" " | "\t" | "\n" foo="a" (-)? "b" EBNF]
|
"ab" EBNF[=[ -=" " | "\t" | "\n" foo="a" (-)? "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" " " "b" } } [
|
{ V{ "a" " " "b" } } [
|
||||||
"a b" [EBNF -=" " | "\t" | "\n" foo="a" (-)? "b" EBNF]
|
"a b" EBNF[=[ -=" " | "\t" | "\n" foo="a" (-)? "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
|
|
||||||
{ V{ "a" "\t" "b" } } [
|
{ V{ "a" "\t" "b" } } [
|
||||||
"a\tb" [EBNF -=" " | "\t" | "\n" foo="a" (-)? "b" EBNF]
|
"a\tb" EBNF[=[ -=" " | "\t" | "\n" foo="a" (-)? "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "\n" "b" } } [
|
{ V{ "a" "\n" "b" } } [
|
||||||
"a\nb" [EBNF -=" " | "\t" | "\n" foo="a" (-)? "b" EBNF]
|
"a\nb" EBNF[=[ -=" " | "\t" | "\n" foo="a" (-)? "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "b" } } [
|
{ V{ "a" "b" } } [
|
||||||
"ab" [EBNF -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" EBNF]
|
"ab" EBNF[=[ -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "b" } } [
|
{ V{ "a" "b" } } [
|
||||||
"a\tb" [EBNF -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" EBNF]
|
"a\tb" EBNF[=[ -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "b" } } [
|
{ V{ "a" "b" } } [
|
||||||
"a\nb" [EBNF -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" EBNF]
|
"a\nb" EBNF[=[ -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"axb" [EBNF -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" EBNF]
|
"axb" EBNF[=[ -=(" " | "\t" | "\n")? => [[ drop ignore ]] foo="a" - "b" ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ V{ V{ 49 } "+" V{ 49 } } } [
|
{ V{ V{ 49 } "+" V{ 49 } } } [
|
||||||
! Test direct left recursion.
|
! Test direct left recursion.
|
||||||
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
||||||
"1+1" [EBNF num=([0-9])+ expr=expr "+" num | num EBNF]
|
"1+1" EBNF[=[ num=([0-9])+ expr=expr "+" num | num ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ V{ 49 } "+" V{ 49 } } "+" V{ 49 } } } [
|
{ V{ V{ V{ 49 } "+" V{ 49 } } "+" V{ 49 } } } [
|
||||||
! Test direct left recursion.
|
! Test direct left recursion.
|
||||||
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
||||||
"1+1+1" [EBNF num=([0-9])+ expr=expr "+" num | num EBNF]
|
"1+1+1" EBNF[=[ num=([0-9])+ expr=expr "+" num | num ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ V{ 49 } "+" V{ 49 } } "+" V{ 49 } } } [
|
{ V{ V{ V{ 49 } "+" V{ 49 } } "+" V{ 49 } } } [
|
||||||
! Test indirect left recursion.
|
! Test indirect left recursion.
|
||||||
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
! Using packrat, so first part of expr fails, causing 2nd choice to be used
|
||||||
"1+1+1" [EBNF num=([0-9])+ x=expr expr=x "+" num | num EBNF]
|
"1+1+1" EBNF[=[ num=([0-9])+ x=expr expr=x "+" num | num ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ t } [
|
{ t } [
|
||||||
"abcd='9' | ('8'):x => [[ x ]]" ebnf-parser (parse) remaining>> empty?
|
"abcd='9' | ('8'):x => [[ x ]]" ebnf-parser (parse) remaining>> empty?
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
EBNF: primary
|
EBNF: primary [=[
|
||||||
Primary = PrimaryNoNewArray
|
Primary = PrimaryNoNewArray
|
||||||
PrimaryNoNewArray = ClassInstanceCreationExpression
|
PrimaryNoNewArray = ClassInstanceCreationExpression
|
||||||
| MethodInvocation
|
| MethodInvocation
|
||||||
|
@ -310,7 +310,7 @@ MethodName = "m" | "n"
|
||||||
ExpressionName = Identifier
|
ExpressionName = Identifier
|
||||||
Expression = "i" | "j"
|
Expression = "i" | "j"
|
||||||
main = Primary
|
main = Primary
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
{ "this" } [
|
{ "this" } [
|
||||||
"this" primary
|
"this" primary
|
||||||
|
@ -333,113 +333,113 @@ main = Primary
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ "a" "b" } "c" } } [
|
{ V{ V{ "a" "b" } "c" } } [
|
||||||
"abc" [EBNF a="a" "b" foo=(a "c") EBNF]
|
"abc" EBNF[=[ a="a" "b" foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "c" } } [
|
{ V{ "a" "c" } } [
|
||||||
"abc" [EBNF a="a" "b"~ foo=(a "c") EBNF]
|
"abc" EBNF[=[ a="a" "b"~ foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ "a" V{ "b" "b" } } "c" } } [
|
{ V{ V{ "a" V{ "b" "b" } } "c" } } [
|
||||||
"abbc" [EBNF a=("a" "b"*) foo=(a "c") EBNF]
|
"abbc" EBNF[=[ a=("a" "b"*) foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "c" } } [
|
{ V{ "a" "c" } } [
|
||||||
"abc" [EBNF a=("a" ("b")~) foo=(a "c") EBNF]
|
"abc" EBNF[=[ a=("a" ("b")~) foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "c" } } [
|
{ V{ "a" "c" } } [
|
||||||
"abc" [EBNF a=("a" "b"~) foo=(a "c") EBNF]
|
"abc" EBNF[=[ a=("a" "b"~) foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "c" } [
|
{ "c" } [
|
||||||
"abc" [EBNF a=("a" "b")~ foo=(a "c") EBNF]
|
"abc" EBNF[=[ a=("a" "b")~ foo=(a "c") ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ "a" "b" } "c" } } [
|
{ V{ V{ "a" "b" } "c" } } [
|
||||||
"abc" [EBNF a="a" "b" foo={a "c"} EBNF]
|
"abc" EBNF[=[ a="a" "b" foo={a "c"} ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ "a" "b" } "c" } } [
|
{ V{ V{ "a" "b" } "c" } } [
|
||||||
"abc" [EBNF a="a" "b" foo=a "c" EBNF]
|
"abc" EBNF[=[ a="a" "b" foo=a "c" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"a bc" [EBNF a="a" "b" foo=(a "c") EBNF]
|
"a bc" EBNF[=[ a="a" "b" foo=(a "c") ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"a bc" [EBNF a="a" "b" foo=a "c" EBNF]
|
"a bc" EBNF[=[ a="a" "b" foo=a "c" ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"a bc" [EBNF a="a" "b" foo={a "c"} EBNF]
|
"a bc" EBNF[=[ a="a" "b" foo={a "c"} ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"ab c" [EBNF a="a" "b" foo=a "c" EBNF]
|
"ab c" EBNF[=[ a="a" "b" foo=a "c" ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ V{ V{ "a" "b" } "c" } } [
|
{ V{ V{ "a" "b" } "c" } } [
|
||||||
"ab c" [EBNF a="a" "b" foo={a "c"} EBNF]
|
"ab c" EBNF[=[ a="a" "b" foo={a "c"} ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"ab c" [EBNF a="a" "b" foo=(a "c") EBNF]
|
"ab c" EBNF[=[ a="a" "b" foo=(a "c") ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"a b c" [EBNF a="a" "b" foo=a "c" EBNF]
|
"a b c" EBNF[=[ a="a" "b" foo=a "c" ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"a b c" [EBNF a="a" "b" foo=(a "c") EBNF]
|
"a b c" EBNF[=[ a="a" "b" foo=(a "c") ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
[
|
[
|
||||||
"a b c" [EBNF a="a" "b" foo={a "c"} EBNF]
|
"a b c" EBNF[=[ a="a" "b" foo={a "c"} ]=]
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ V{ V{ V{ "a" "b" } "c" } V{ V{ "a" "b" } "c" } } } [
|
{ V{ V{ V{ "a" "b" } "c" } V{ V{ "a" "b" } "c" } } } [
|
||||||
"ab cab c" [EBNF a="a" "b" foo={a "c"}* EBNF]
|
"ab cab c" EBNF[=[ a="a" "b" foo={a "c"}* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ } } [
|
{ V{ } } [
|
||||||
"ab cab c" [EBNF a="a" "b" foo=(a "c")* EBNF]
|
"ab cab c" EBNF[=[ a="a" "b" foo=(a "c")* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ V{ "a" "b" } "c" } V{ V{ "a" "b" } "c" } } } [
|
{ V{ V{ V{ "a" "b" } "c" } V{ V{ "a" "b" } "c" } } } [
|
||||||
"ab c ab c" [EBNF a="a" "b" foo={a "c"}* EBNF]
|
"ab c ab c" EBNF[=[ a="a" "b" foo={a "c"}* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ V{ "a" "c" } V{ "a" "c" } } } [
|
{ V{ V{ "a" "c" } V{ "a" "c" } } } [
|
||||||
"ab c ab c" [EBNF a="a" "b"~ foo={a "c"}* EBNF]
|
"ab c ab c" EBNF[=[ a="a" "b"~ foo={a "c"}* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ } } [
|
{ V{ } } [
|
||||||
"ab c ab c" [EBNF a="a" "b" foo=(a "c")* EBNF]
|
"ab c ab c" EBNF[=[ a="a" "b" foo=(a "c")* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ } } [
|
{ V{ } } [
|
||||||
"ab c ab c" [EBNF a="a" "b" foo=(a "c")* EBNF]
|
"ab c ab c" EBNF[=[ a="a" "b" foo=(a "c")* ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "a" "a" } } [
|
{ V{ "a" "a" "a" } } [
|
||||||
"aaa" [EBNF a=('a')* b=!('b') a:x => [[ x ]] EBNF]
|
"aaa" EBNF[=[ a=('a')* b=!('b') a:x => [[ x ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ t } [
|
{ t } [
|
||||||
"aaa" [EBNF a=('a')* b=!('b') a:x => [[ x ]] EBNF]
|
"aaa" EBNF[=[ a=('a')* b=!('b') a:x => [[ x ]] ]=]
|
||||||
"aaa" [EBNF a=('a')* b=!('b') (a):x => [[ x ]] EBNF] =
|
"aaa" EBNF[=[ a=('a')* b=!('b') (a):x => [[ x ]] ]=] =
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ "a" "a" "a" } } [
|
{ V{ "a" "a" "a" } } [
|
||||||
"aaa" [EBNF a=('a')* b=a:x => [[ x ]] EBNF]
|
"aaa" EBNF[=[ a=('a')* b=a:x => [[ x ]] ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ t } [
|
{ t } [
|
||||||
"aaa" [EBNF a=('a')* b=a:x => [[ x ]] EBNF]
|
"aaa" EBNF[=[ a=('a')* b=a:x => [[ x ]] ]=]
|
||||||
"aaa" [EBNF a=('a')* b=(a):x => [[ x ]] EBNF] =
|
"aaa" EBNF[=[ a=('a')* b=(a):x => [[ x ]] ]=] =
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ t } [
|
{ t } [
|
||||||
|
@ -469,22 +469,22 @@ main = Primary
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
<<
|
<<
|
||||||
EBNF: parser1
|
EBNF: parser1 [=[
|
||||||
foo='a'
|
foo='a'
|
||||||
;EBNF
|
]=]
|
||||||
>>
|
>>
|
||||||
|
|
||||||
EBNF: parser2
|
EBNF: parser2 [=[
|
||||||
foo=<foreign parser1 foo> 'b'
|
foo=<foreign parser1 foo> 'b'
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
EBNF: parser3
|
EBNF: parser3 [=[
|
||||||
foo=<foreign parser1> 'c'
|
foo=<foreign parser1> 'c'
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
EBNF: parser4
|
EBNF: parser4 [=[
|
||||||
foo=<foreign any-char> 'd'
|
foo=<foreign any-char> 'd'
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
{ "a" } [
|
{ "a" } [
|
||||||
"a" parser1
|
"a" parser1
|
||||||
|
@ -503,11 +503,11 @@ foo=<foreign any-char> 'd'
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ } [
|
{ } [
|
||||||
"USING: kernel peg.ebnf ; \"a\\n\" [EBNF foo='a' '\n' => [[ drop \"\n\" ]] EBNF] drop" eval( -- )
|
"USING: kernel peg.ebnf ; \"a\\n\" EBNF[=[ foo='a' '\n' => [[ drop \"\n\" ]] ]=] drop" eval( -- )
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[
|
[
|
||||||
"USING: peg.ebnf ; <EBNF foo='a' foo='b' EBNF>" eval( -- ) drop
|
"USING: peg.ebnf ; EBNF[=[ foo='a' foo='b' ]=]" eval( -- ) drop
|
||||||
] must-fail
|
] must-fail
|
||||||
|
|
||||||
{ t } [
|
{ t } [
|
||||||
|
@ -518,12 +518,12 @@ foo=<foreign any-char> 'd'
|
||||||
|
|
||||||
! Tokenizer tests
|
! Tokenizer tests
|
||||||
{ V{ "a" CHAR: b } } [
|
{ V{ "a" CHAR: b } } [
|
||||||
"ab" [EBNF tokenizer=default foo="a" . EBNF]
|
"ab" EBNF[=[ tokenizer=default foo="a" . ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
TUPLE: ast-number value ;
|
TUPLE: ast-number value ;
|
||||||
|
|
||||||
EBNF: a-tokenizer
|
EBNF: a-tokenizer [=[
|
||||||
Letter = [a-zA-Z]
|
Letter = [a-zA-Z]
|
||||||
Digit = [0-9]
|
Digit = [0-9]
|
||||||
Digits = Digit+
|
Digits = Digit+
|
||||||
|
@ -539,52 +539,52 @@ Special = "(" | ")" | "{" | "}" | "[" | "]" | "," | "
|
||||||
| "-" | "*=" | "*" | "/=" | "/" | "%=" | "%" | "&&="
|
| "-" | "*=" | "*" | "/=" | "/" | "%=" | "%" | "&&="
|
||||||
| "&&" | "||=" | "||" | "." | "!"
|
| "&&" | "||=" | "||" | "." | "!"
|
||||||
Tok = Spaces (Number | Special )
|
Tok = Spaces (Number | Special )
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
{ V{ CHAR: 1 T{ ast-number f 23 } ";" CHAR: x } } [
|
{ V{ CHAR: 1 T{ ast-number f 23 } ";" CHAR: x } } [
|
||||||
"123;x" [EBNF bar = .
|
"123;x" EBNF[=[ bar = .
|
||||||
tokenizer = <foreign a-tokenizer Tok> foo=.
|
tokenizer = <foreign a-tokenizer Tok> foo=.
|
||||||
tokenizer=default baz=.
|
tokenizer=default baz=.
|
||||||
main = bar foo foo baz
|
main = bar foo foo baz
|
||||||
EBNF]
|
]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ CHAR: 5 "+" CHAR: 2 } } [
|
{ V{ CHAR: 5 "+" CHAR: 2 } } [
|
||||||
"5+2" [EBNF
|
"5+2" EBNF[=[
|
||||||
space=(" " | "\n")
|
space=(" " | "\n")
|
||||||
number=[0-9]
|
number=[0-9]
|
||||||
operator=("*" | "+")
|
operator=("*" | "+")
|
||||||
spaces=space* => [[ ignore ]]
|
spaces=space* => [[ ignore ]]
|
||||||
tokenizer=spaces (number | operator)
|
tokenizer=spaces (number | operator)
|
||||||
main= . . .
|
main= . . .
|
||||||
EBNF]
|
]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ V{ CHAR: 5 "+" CHAR: 2 } } [
|
{ V{ CHAR: 5 "+" CHAR: 2 } } [
|
||||||
"5 + 2" [EBNF
|
"5 + 2" EBNF[=[
|
||||||
space=(" " | "\n")
|
space=(" " | "\n")
|
||||||
number=[0-9]
|
number=[0-9]
|
||||||
operator=("*" | "+")
|
operator=("*" | "+")
|
||||||
spaces=space* => [[ ignore ]]
|
spaces=space* => [[ ignore ]]
|
||||||
tokenizer=spaces (number | operator)
|
tokenizer=spaces (number | operator)
|
||||||
main= . . .
|
main= . . .
|
||||||
EBNF]
|
]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "++" } [
|
{ "++" } [
|
||||||
"++--" [EBNF tokenizer=("++" | "--") main="++" EBNF]
|
"++--" EBNF[=[ tokenizer=("++" | "--") main="++" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{ "\\" } [
|
{ "\\" } [
|
||||||
"\\" [EBNF foo="\\" EBNF]
|
"\\" EBNF[=[ foo="\\" ]=]
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
[ "USE: peg.ebnf [EBNF EBNF]" eval( -- ) ] must-fail
|
[ "USE: peg.ebnf EBNF[=[ ]=]" eval( -- ) ] must-fail
|
||||||
|
|
||||||
[ "USE: peg.ebnf [EBNF
|
[ "USE: peg.ebnf EBNF[=[
|
||||||
lol = a
|
lol = a
|
||||||
lol = b
|
lol = b
|
||||||
EBNF]" eval( -- )
|
]=]" eval( -- )
|
||||||
] [
|
] [
|
||||||
error>> [ redefined-rule? ] [ name>> "lol" = ] bi and
|
error>> [ redefined-rule? ] [ name>> "lol" = ] bi and
|
||||||
] must-fail-with
|
] must-fail-with
|
||||||
|
@ -592,13 +592,13 @@ Tok = Spaces (Number | Special )
|
||||||
{
|
{
|
||||||
{ "a" "a" }
|
{ "a" "a" }
|
||||||
} [
|
} [
|
||||||
EBNF: foo Bar = "a":a1 "a":a2 => [[ a1 a2 2array ]] ;EBNF
|
EBNF: foo [=[ Bar = "a":a1 "a":a2 => [[ a1 a2 2array ]] ]=]
|
||||||
"aa" foo
|
"aa" foo
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
||||||
{
|
{
|
||||||
{ "a" "a" }
|
{ "a" "a" }
|
||||||
} [
|
} [
|
||||||
EBNF: foo2 Bar = "a":a-1 "a":a-2 => [[ a-1 a-2 2array ]] ;EBNF
|
EBNF: foo2 [=[ Bar = "a":a-1 "a":a-2 => [[ a-1 a-2 2array ]] ]=]
|
||||||
"aa" foo2
|
"aa" foo2
|
||||||
] unit-test
|
] unit-test
|
||||||
|
|
|
@ -537,18 +537,20 @@ ERROR: could-not-parse-ebnf ;
|
||||||
|
|
||||||
PRIVATE>
|
PRIVATE>
|
||||||
|
|
||||||
SYNTAX: <EBNF
|
|
||||||
"EBNF>"
|
|
||||||
reset-tokenizer parse-multiline-string parse-ebnf main of
|
|
||||||
suffix! reset-tokenizer ;
|
|
||||||
|
|
||||||
SYNTAX: [EBNF
|
|
||||||
"EBNF]"
|
|
||||||
reset-tokenizer parse-multiline-string ebnf>quot nip
|
|
||||||
suffix! \ call suffix! reset-tokenizer ;
|
|
||||||
|
|
||||||
SYNTAX: EBNF:
|
SYNTAX: EBNF:
|
||||||
reset-tokenizer scan-new-word dup ";EBNF" parse-multiline-string
|
reset-tokenizer scan-new-word dup scan-object
|
||||||
ebnf>quot swapd
|
ebnf>quot swapd
|
||||||
( input -- ast ) define-declared "ebnf-parser" set-word-prop
|
( input -- ast ) define-declared "ebnf-parser" set-word-prop
|
||||||
reset-tokenizer ;
|
reset-tokenizer ;
|
||||||
|
|
||||||
|
: define-inline-ebnf ( ast string -- quot )
|
||||||
|
reset-tokenizer
|
||||||
|
ebnf>quot nip
|
||||||
|
suffix! \ call suffix! reset-tokenizer ;
|
||||||
|
|
||||||
|
SYNTAX: EBNF[[ "]]" parse-multiline-string define-inline-ebnf ;
|
||||||
|
SYNTAX: EBNF[=[ "]=]" parse-multiline-string define-inline-ebnf ;
|
||||||
|
SYNTAX: EBNF[==[ "]==]" parse-multiline-string define-inline-ebnf ;
|
||||||
|
SYNTAX: EBNF[===[ "]===]" parse-multiline-string define-inline-ebnf ;
|
||||||
|
SYNTAX: EBNF[====[ "]====]" parse-multiline-string define-inline-ebnf ;
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,8 @@
|
||||||
! See http://factorcode.org/license.txt for BSD license.
|
! See http://factorcode.org/license.txt for BSD license.
|
||||||
USING: accessors arrays assocs combinators
|
USING: accessors arrays assocs combinators
|
||||||
combinators.short-circuit interval-maps kernel locals
|
combinators.short-circuit interval-maps kernel locals
|
||||||
math.parser memoize peg.ebnf regexp.ast regexp.classes sequences
|
math.parser memoize multiline peg.ebnf regexp.ast regexp.classes
|
||||||
sets splitting strings unicode unicode.data unicode.script ;
|
sequences sets splitting strings unicode unicode.data unicode.script ;
|
||||||
IN: regexp.parser
|
IN: regexp.parser
|
||||||
|
|
||||||
: allowed-char? ( ch -- ? )
|
: allowed-char? ( ch -- ? )
|
||||||
|
@ -126,7 +126,7 @@ ERROR: nonexistent-option name ;
|
||||||
! add greedy and nongreedy forms of matching
|
! add greedy and nongreedy forms of matching
|
||||||
! (once it's all implemented)
|
! (once it's all implemented)
|
||||||
|
|
||||||
EBNF: parse-regexp
|
EBNF: parse-regexp [=[
|
||||||
|
|
||||||
CharacterInBracket = !("}") Character
|
CharacterInBracket = !("}") Character
|
||||||
|
|
||||||
|
@ -217,4 +217,4 @@ Alternation = Concatenation:c ("|" Concatenation)*:a
|
||||||
End = !(.)
|
End = !(.)
|
||||||
|
|
||||||
Main = Alternation End
|
Main = Alternation End
|
||||||
;EBNF
|
]=]
|
||||||
|
|
|
@ -1,13 +1,13 @@
|
||||||
! Copyright (C) 2008, 2010 Slava Pestov
|
! Copyright (C) 2008, 2010 Slava Pestov
|
||||||
! See http://factorcode.org/license.txt for BSD license.
|
! See http://factorcode.org/license.txt for BSD license.
|
||||||
USING: peg.ebnf strings ;
|
USING: peg.ebnf multiline strings ;
|
||||||
IN: simple-tokenizer
|
IN: simple-tokenizer
|
||||||
|
|
||||||
EBNF: tokenize
|
EBNF: tokenize [=[
|
||||||
space = [ \t\n\r]
|
space = [ \t\n\r]
|
||||||
escaped-char = "\\" .:ch => [[ ch ]]
|
escaped-char = "\\" .:ch => [[ ch ]]
|
||||||
quoted = '"' (escaped-char | [^"])*:a '"' => [[ a ]]
|
quoted = '"' (escaped-char | [^"])*:a '"' => [[ a ]]
|
||||||
unquoted = (escaped-char | [^ \t\n\r"])+
|
unquoted = (escaped-char | [^ \t\n\r"])+
|
||||||
argument = (quoted | unquoted) => [[ >string ]]
|
argument = (quoted | unquoted) => [[ >string ]]
|
||||||
command = space* (argument:a space* => [[ a ]])+:c !(.) => [[ c ]]
|
command = space* (argument:a space* => [[ a ]])+:c !(.) => [[ c ]]
|
||||||
;EBNF
|
]=]
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
USING: accessors arrays ascii assocs combinators fry io.pathnames
|
USING: accessors arrays ascii assocs combinators fry io.pathnames
|
||||||
io.sockets io.sockets.secure kernel lexer linked-assocs make
|
io.sockets io.sockets.secure kernel lexer linked-assocs make
|
||||||
math.parser namespaces peg.ebnf present sequences splitting
|
math.parser namespaces peg.ebnf present sequences splitting
|
||||||
strings strings.parser urls.encoding vocabs.loader ;
|
strings strings.parser urls.encoding vocabs.loader multiline ;
|
||||||
IN: urls
|
IN: urls
|
||||||
|
|
||||||
TUPLE: url protocol username password host port path query anchor ;
|
TUPLE: url protocol username password host port path query anchor ;
|
||||||
|
@ -38,7 +38,7 @@ M: url >url ;
|
||||||
|
|
||||||
<PRIVATE
|
<PRIVATE
|
||||||
|
|
||||||
EBNF: parse-url
|
EBNF: parse-url [=[
|
||||||
|
|
||||||
protocol = [a-zA-Z0-9.+-]+ => [[ url-decode ]]
|
protocol = [a-zA-Z0-9.+-]+ => [[ url-decode ]]
|
||||||
username = [^/:@#?]+ => [[ url-decode ]]
|
username = [^/:@#?]+ => [[ url-decode ]]
|
||||||
|
@ -60,7 +60,7 @@ url = (((protocol "://") => [[ first ]] auth hostname)
|
||||||
("?" query => [[ second ]])?
|
("?" query => [[ second ]])?
|
||||||
("#" anchor => [[ second ]])?
|
("#" anchor => [[ second ]])?
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
PRIVATE>
|
PRIVATE>
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
USING: accessors assocs command-line fry io io.encodings.binary
|
USING: accessors assocs command-line fry io io.encodings.binary
|
||||||
io.files io.streams.string kernel macros math namespaces
|
io.files io.streams.string kernel macros math namespaces
|
||||||
peg.ebnf prettyprint sequences ;
|
peg.ebnf prettyprint sequences multiline ;
|
||||||
|
|
||||||
IN: brainfuck
|
IN: brainfuck
|
||||||
|
|
||||||
|
@ -49,7 +49,7 @@ TUPLE: brainfuck pointer memory ;
|
||||||
: compose-all ( seq -- quot )
|
: compose-all ( seq -- quot )
|
||||||
[ ] [ compose ] reduce ;
|
[ ] [ compose ] reduce ;
|
||||||
|
|
||||||
EBNF: parse-brainfuck
|
EBNF: parse-brainfuck [=[
|
||||||
|
|
||||||
inc-ptr = (">")+ => [[ length '[ _ (>) ] ]]
|
inc-ptr = (">")+ => [[ length '[ _ (>) ] ]]
|
||||||
dec-ptr = ("<")+ => [[ length '[ _ (<) ] ]]
|
dec-ptr = ("<")+ => [[ length '[ _ (<) ] ]]
|
||||||
|
@ -66,7 +66,7 @@ loop = "[" {loop|ops}+ "]" => [[ second compose-all '[ [ (?) ] _ while ] ]]
|
||||||
|
|
||||||
code = (loop|ops|unknown)* => [[ compose-all ]]
|
code = (loop|ops|unknown)* => [[ compose-all ]]
|
||||||
|
|
||||||
;EBNF
|
]=]
|
||||||
|
|
||||||
PRIVATE>
|
PRIVATE>
|
||||||
|
|
||||||
|
|
|
@@ -3,7 +3,7 @@
 USING: accessors arrays assocs combinators fry io
 io.encodings.binary io.files io.pathnames kernel lexer make math
 math.parser namespaces parser peg peg.ebnf peg.parsers
-quotations sequences sequences.deep words ;
+quotations sequences sequences.deep words multiline ;
 IN: cpu.8080.emulator

 TUPLE: cpu b c d e f h l a pc sp halted? last-interrupt cycles

@@ -761,15 +761,15 @@ SYMBOLS: $1 $2 $3 $4 ;
 ! is the getter word for that register with stack effect
 ! ( cpu -- value ). The second item is the setter word with
 ! stack effect ( value cpu -- ).
-<EBNF
+EBNF[=[
 main=("A" | "B" | "C" | "D" | "E" | "H" | "L") => [[ register-lookup ]]
-EBNF> ;
+]=] ;

 : all-flags ( -- parser )
 ! A parser for 16-bit flags.
-<EBNF
+EBNF[=[
 main=("NZ" | "NC" | "PO" | "PE" | "Z" | "C" | "P" | "M") => [[ flag-lookup ]]
-EBNF> ;
+]=] ;

 : 16-bit-registers ( -- parser )
 ! A parser for 16-bit registers. On a successfull parse the

@@ -777,9 +777,9 @@ SYMBOLS: $1 $2 $3 $4 ;
 ! is the getter word for that register with stack effect
 ! ( cpu -- value ). The second item is the setter word with
 ! stack effect ( value cpu -- ).
-<EBNF
+EBNF[=[
 main=("AF" | "BC" | "DE" | "HL" | "SP") => [[ register-lookup ]]
-EBNF> ;
+]=] ;

 : all-registers ( -- parser )
 ! Return a parser that can parse the format

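For orientation, the inline form introduced above behaves like the old <EBNF ... EBNF> pair: an EBNF[=[ ... ]=] block containing a main rule builds and pushes a parser object. A minimal sketch with a hypothetical word and rule (names not taken from this diff), assuming only the post-change peg.ebnf plus the multiline vocabulary:

USING: multiline peg.ebnf strings ;

! Hypothetical helper, mirroring the ( -- parser ) words above: the
! inline EBNF[=[ ]=] form pushes the parser for its main rule.
: digits-parser ( -- parser )
    EBNF[=[
main=([0-9])+ => [[ >string ]]
]=] ;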
@@ -52,7 +52,7 @@ ERROR: bad-vector-length seq n ;
 [ bad-vector-length ]
 } case ;

-EBNF: parse-gml
+EBNF: parse-gml [=[

 Letter = [a-zA-Z]
 Digit = [0-9]

@@ -124,4 +124,4 @@ Tokens = Token* => [[ [ comment? ] reject ]]

 Program = Tokens Spaces !(.) => [[ parse-proc ]]

-;EBNF
+]=]

@@ -1,10 +1,10 @@
 ! Copyright (C) 2009 Philipp Brüschweiler
 ! See http://factorcode.org/license.txt for BSD license.
 USING: infix.ast infix.tokenizer kernel math peg.ebnf sequences
-strings vectors ;
+strings vectors multiline ;
 IN: infix.parser

-EBNF: parse-infix
+EBNF: parse-infix [=[
 Number = . ?[ ast-value? ]?
 Identifier = . ?[ string? ]?
 Array = Identifier:i "[" Sum:s "]" => [[ i s ast-array boa ]]

@@ -29,7 +29,7 @@ Sum = Sum:s ("+"|"-"):op Product:p => [[ s p op ast-op boa ]]

 End = !(.)
 Expression = Sum End
-;EBNF
+]=]

 : build-infix-ast ( string -- ast )
 tokenize-infix parse-infix ;

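The two grammars converted here are meant to be chained, as build-infix-ast above shows. An illustrative way to exercise them from a listener (output elided, since it is a tree of infix.ast tuples):

USING: infix.parser infix.tokenizer prettyprint ;

! Illustrative only: tokenize an infix expression, hand the tokens to the
! EBNF-defined parser, and prettyprint the resulting AST.
"1+2*3" tokenize-infix parse-infix .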
@@ -1,10 +1,10 @@
 ! Copyright (C) 2009 Philipp Brüschweiler
 ! See http://factorcode.org/license.txt for BSD license.
 USING: infix.ast kernel peg peg.ebnf math.parser sequences
-strings ;
+strings multiline ;
 IN: infix.tokenizer

-EBNF: tokenize-infix
+EBNF: tokenize-infix [=[
 Letter = [a-zA-Z]
 Digit = [0-9]
 Digits = Digit+

@@ -22,4 +22,4 @@ Special = [+*/%(),] | "-" => [[ CHAR: - ]]
 Tok = Spaces (Name | Number | String | Special )
 End = !(.)
 Toks = Tok* Spaces End
-;EBNF
+]=]

@@ -1,5 +1,5 @@
 USING: accessors arrays combinators io kernel math.parser peg prettyprint
-sequences strings unicode peg.ebnf ;
+sequences strings unicode peg.ebnf multiline ;
 IN: llvm.examples.kaleidoscope

 TUPLE: ast-binop lhs rhs operator ;

@@ -10,7 +10,7 @@ TUPLE: ast-unop expr ;
 TUPLE: ast-call name args ;
 TUPLE: ast-if condition true false ;

-EBNF: tokenize-kaleidoscope
+EBNF: tokenize-kaleidoscope [=[
 Letter = [a-zA-Z]
 Digit = [0-9]
 Digits = Digit+

@@ -27,9 +27,9 @@ Special = "(" | ")" | "*" | "+" | "/" | "-" | "<" | ">" | ","
 Keyword = ("def" | "extern" | "if" | "then" | "else") !(NameRest)
 Tok = Spaces (Keyword | Name | Number | Special)
 Toks = Tok* Spaces
-;EBNF
+]=]

-EBNF: parse-kaleidoscope
+EBNF: parse-kaleidoscope [=[
 tokenizer = <foreign tokenize-kaleidoscope Tok>
 Name = . ?[ ast-name? ]? => [[ value>> ]]
 Number = . ?[ ast-number? ]? => [[ value>> ]]

@@ -55,4 +55,4 @@ SrcElem = "def" Name:n "(" Name*:fs ")" CondExpr:expr => [[ n fs expr
 | RelExpr
 SrcElems = SrcElem*
 TopLevel = SrcElems
-;EBNF
+]=]

@@ -1,10 +1,11 @@
 ! Copyright (C) 2008 Chris Double.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: kernel arrays strings math.parser sequences
-peg peg.ebnf peg.parsers memoize math accessors ;
+peg peg.ebnf peg.parsers memoize math accessors
+multiline ;
 IN: peg.expr

-EBNF: expr
+EBNF: expr [=[
 number = ([0-9])+ => [[ string>number ]]
 value = number
 | ("(" exp ")") => [[ second ]]

@@ -16,4 +17,4 @@ fac = fac "*" value => [[ first3 nip * ]]
 exp = exp "+" fac => [[ first3 nip + ]]
 | exp "-" fac => [[ first3 nip - ]]
 | fac
-;EBNF
+]=]

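Words defined with EBNF: keep their ( string -- ast ) shape, so the converted expr word can still be called directly. An illustrative check, assuming the grammar's actions evaluate the expression as written (2*3+4 giving 10):

USING: peg.expr tools.test ;

! Illustrative only: the actions in the grammar above compute the value
! during parsing, so parsing "2*3+4" should yield 10.
{ 10 } [ "2*3+4" expr ] unit-test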
@@ -1,6 +1,6 @@
 ! Copyright (C) 2008 Chris Double.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: kernel accessors sequences
+USING: kernel accessors sequences multiline
 peg peg.ebnf peg.javascript.ast peg.javascript.tokenizer ;
 IN: peg.javascript.parser

@@ -18,7 +18,7 @@ IN: peg.javascript.parser
 ! This operates a character at a time. Using this 'nl' in the parser
 ! allows us to detect newlines when we need to for the semicolon
 ! insertion rule, but ignore it in all other places.
-EBNF: javascript
+EBNF: javascript [=[
 tokenizer = default
 nl = "\r\n" | "\n"

@@ -197,4 +197,4 @@ SrcElem = "function" Name:n FuncRest:f => [[ n f a
 | Stmt
 SrcElems = SrcElem* => [[ ast-begin boa ]]
 TopLevel = SrcElems Spaces
-;EBNF
+]=]

@@ -1,6 +1,7 @@
 ! Copyright (C) 2008 Chris Double.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: kernel sequences strings arrays math.parser peg peg.ebnf peg.javascript.ast ;
+USING: kernel sequences strings arrays math.parser peg peg.ebnf
+peg.javascript.ast multiline ;
 IN: peg.javascript.tokenizer

 ! Grammar for JavaScript. Based on OMeta-JS example from:

@@ -8,7 +9,7 @@ IN: peg.javascript.tokenizer

 USE: prettyprint

-EBNF: tokenize-javascript
+EBNF: tokenize-javascript [=[
 Letter = [a-zA-Z]
 Digit = [0-9]
 Digits = Digit+

@@ -87,4 +88,4 @@ Special = "(" | ")" | "{" | "}" | "[" | "]" | "," |
 | "^"
 Tok = Spaces (Name | Keyword | Number | Str | RegExp | Special )
 Toks = Tok* Spaces
-;EBNF
+]=]

@@ -1,12 +1,13 @@
 ! Copyright (C) 2007 Chris Double.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: kernel arrays strings math.parser sequences
-peg peg.ebnf peg.parsers memoize namespaces math ;
+peg peg.ebnf peg.parsers memoize namespaces math
+multiline ;
 IN: peg.pl0

 ! Grammar for PL/0 based on http://en.wikipedia.org/wiki/PL/0

-EBNF: pl0
+EBNF: pl0 [=[

 block = { "CONST" ident "=" number { "," ident "=" number }* ";" }?
 { "VAR" ident { "," ident }* ";" }?

@@ -24,4 +25,4 @@ factor = ident | number | "(" expression ")"
 ident = (([a-zA-Z])+) => [[ >string ]]
 number = ([0-9])+ => [[ string>number ]]
 program = { block "." }
-;EBNF
+]=]

@@ -1,4 +1,4 @@
-USING: accessors kernel locals math math.parser peg.ebnf ;
+USING: accessors kernel locals math math.parser multiline peg.ebnf ;
 IN: rosetta-code.arithmetic-evaluation

 ! http://rosettacode.org/wiki/Arithmetic_evaluation

@@ -34,7 +34,7 @@ TUPLE: sub < operator ; C: <sub> sub
 TUPLE: mul < operator ; C: <mul> mul
 TUPLE: div < operator ; C: <div> div

-EBNF: expr-ast
+EBNF: expr-ast [=[
 spaces = [\n\t ]*
 digit = [0-9]
 number = (digit)+ => [[ string>number ]]

@@ -51,7 +51,7 @@ exp = exp:a spaces "+" fac:b => [[ a b <add> ]]
 | fac

 main = exp:e spaces !(.) => [[ e ]]
-;EBNF
+]=]

 GENERIC: eval-ast ( ast -- result )

@@ -1,5 +1,5 @@
-USING: accessors kernel peg peg.ebnf sequences sequences.deep
-strings ;
+USING: accessors kernel multiline peg peg.ebnf sequences
+sequences.deep strings ;

 IN: shell.parser

@@ -41,7 +41,7 @@ TUPLE: factor-expr expr ;
 : ast>factor-expr ( ast -- obj )
 second >string factor-expr boa ;

-EBNF: expr
+EBNF: expr [=[

 space = " "

@@ -89,4 +89,4 @@ pipeline = _ command _ (in-file)? _ "|" _ (command _ "|" _)* command _ (to-file

 submission = (pipeline | basic)

-;EBNF
+]=]

@@ -1,17 +1,16 @@
+USING: smalltalk.parser smalltalk.ast peg.ebnf tools.test accessors
+io.files io.encodings.ascii kernel multiline ;
 IN: smalltalk.parser.tests
-USING: smalltalk.parser smalltalk.ast
-peg.ebnf tools.test accessors
-io.files io.encodings.ascii kernel ;

-EBNF: test-Character
+EBNF: test-Character [=[
 test = <foreign parse-smalltalk Character>
-;EBNF
+]=]

 { CHAR: a } [ "a" test-Character ] unit-test

-EBNF: test-Comment
+EBNF: test-Comment [=[
 test = <foreign parse-smalltalk Comment>
-;EBNF
+]=]

 { T{ ast-comment f "Hello, this is a comment." } }
 [ "\"Hello, this is a comment.\"" test-Comment ]

@@ -21,15 +20,15 @@ unit-test
 [ "\"Hello, \"\"this\"\" is a comment.\"" test-Comment ]
 unit-test

-EBNF: test-Identifier
+EBNF: test-Identifier [=[
 test = <foreign parse-smalltalk Identifier>
-;EBNF
+]=]

 { "OrderedCollection" } [ "OrderedCollection" test-Identifier ] unit-test

-EBNF: test-Literal
+EBNF: test-Literal [=[
 test = <foreign parse-smalltalk Literal>
-;EBNF
+]=]

 { nil } [ "nil" test-Literal ] unit-test
 { 123 } [ "123" test-Literal ] unit-test

@@ -76,22 +75,22 @@ test = <foreign parse-smalltalk Literal>
 }
 [ "[5. self]" test-Literal ] unit-test

-EBNF: test-FormalBlockArgumentDeclarationList
+EBNF: test-FormalBlockArgumentDeclarationList [=[
 test = <foreign parse-smalltalk FormalBlockArgumentDeclarationList>
-;EBNF
+]=]

 { V{ "x" "y" "elt" } } [ ":x :y :elt" test-FormalBlockArgumentDeclarationList ] unit-test

-EBNF: test-Operand
+EBNF: test-Operand [=[
 test = <foreign parse-smalltalk Operand>
-;EBNF
+]=]

 { { 123 15.6 { t f } } } [ "#(123 15.6 (true false))" test-Operand ] unit-test
 { T{ ast-name f "x" } } [ "x" test-Operand ] unit-test

-EBNF: test-Expression
+EBNF: test-Expression [=[
 test = <foreign parse-smalltalk Expression>
-;EBNF
+]=]

 { self } [ "self" test-Expression ] unit-test
 { { 123 15.6 { t f } } } [ "#(123 15.6 (true false))" test-Expression ] unit-test

@@ -227,17 +226,17 @@ test = <foreign parse-smalltalk Expression>
 }
 [ "(#(['a']) at: 0) value" test-Expression ] unit-test

-EBNF: test-FinalStatement
+EBNF: test-FinalStatement [=[
 test = <foreign parse-smalltalk FinalStatement>
-;EBNF
+]=]

 { T{ ast-name f "value" } } [ "value" test-FinalStatement ] unit-test
 { T{ ast-return f T{ ast-name f "value" } } } [ "^value" test-FinalStatement ] unit-test
 { T{ ast-assignment f T{ ast-name f "value" } 5 } } [ "value:=5" test-FinalStatement ] unit-test

-EBNF: test-LocalVariableDeclarationList
+EBNF: test-LocalVariableDeclarationList [=[
 test = <foreign parse-smalltalk LocalVariableDeclarationList>
-;EBNF
+]=]

 { T{ ast-local-variables f { "i" "j" } } } [ " | i j |" test-LocalVariableDeclarationList ] unit-test

@@ -1,7 +1,7 @@
 ! Copyright (C) 2009 Slava Pestov.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: peg peg.ebnf smalltalk.ast sequences sequences.deep strings
-math.parser kernel arrays byte-arrays math assocs accessors ;
+math.parser multiline kernel arrays byte-arrays math assocs accessors ;
 IN: smalltalk.parser

 ! :mode=text:noTabs=true:

@@ -13,7 +13,7 @@ ERROR: bad-number str ;
 : check-number ( str -- n )
 >string dup string>number [ ] [ bad-number ] ?if ;

-EBNF: parse-smalltalk
+EBNF: parse-smalltalk [=[

 Character = .
 WhitespaceCharacter = [ \t\n\r]

@@ -225,4 +225,4 @@ End = !(.)

 Program = TopLevelForm End

-;EBNF
+]=]

@@ -3,7 +3,7 @@
 USING: accessors arrays assocs fry kernel math
 math.affine-transforms math.functions math.parser math.trig
 peg.ebnf sequences sequences.squish splitting strings xml.data
-xml.syntax ;
+xml.syntax multiline ;

 IN: svg

@@ -17,7 +17,7 @@ XML-NS: inkscape-name http://www.inkscape.org/namespaces/inkscape
 [ string>number ] [ [ string>number 10^ ] [ 1 ] if* ] bi* *
 >float ;

-EBNF: svg-transform>affine-transform
+EBNF: svg-transform>affine-transform [=[

 transforms =
 transform:m comma-wsp+ transforms:n => [[ m n a. ]]

@@ -77,7 +77,7 @@ wsp = [ \t\r\n]
 transform-list = wsp* transforms?:t wsp*
 => [[ t [ identity-transform ] unless* ]]

-;EBNF
+]=]

 : tag-transform ( tag -- transform )
 "transform" svg-name attr svg-transform>affine-transform ;

@@ -96,7 +96,7 @@ TUPLE: elliptical-arc radii x-axis-rotation large-arc? sweep? p relative? ;
 : (set-relative) ( args rel -- args )
 '[ [ _ >>relative? drop ] each ] keep ;

-EBNF: svg-path>array
+EBNF: svg-path>array [=[

 moveto-drawto-command-groups =
 moveto-drawto-command-group:first wsp* moveto-drawto-command-groups:rest

@@ -219,7 +219,7 @@ wsp = [ \t\r\n]

 svg-path = wsp* moveto-drawto-command-groups?:x wsp* => [[ x ]]

-;EBNF
+]=]

 : tag-d ( tag -- d )
 "d" svg-name attr svg-path>array ;