lexer: each-token and map-tokens did not handle EOF properly
parent 63c7513e2d
commit a566d8cc6b
@@ -86,8 +86,7 @@ M: lexer skip-word ( lexer -- )
 
 : scan ( -- str/f ) lexer get parse-token ;
 
-PREDICATE: unexpected-eof < unexpected
-    got>> not ;
+PREDICATE: unexpected-eof < unexpected got>> not ;
 
 : unexpected-eof ( word -- * ) f unexpected ;
 
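The predicate class now fits on one line: an unexpected-eof is any unexpected error whose got slot is f, which is exactly what the unexpected-eof throwing word below it produces. A quick listener illustration (not part of the patch), catching the thrown error with recover:

    ! Illustrative only: throwing ";" unexpected-eof reports that ";" was
    ! wanted but nothing was read, i.e. the got slot stays f.
    USING: continuations lexer prettyprint ;
    [ ";" unexpected-eof ] [ . ] recover
    ! prints something like T{ unexpected { want ";" } }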

@@ -97,14 +96,15 @@ PREDICATE: unexpected-eof < unexpected
     [ unexpected-eof ]
     if* ;
 
-: (each-token) ( end quot -- pred quot )
-    [ [ [ scan dup ] ] dip [ = not ] curry [ [ f ] if* ] curry compose ] dip ; inline
-
 : each-token ( ... end quot: ( ... token -- ... ) -- ... )
-    (each-token) while drop ; inline
+    [ scan ] 2dip {
+        { [ 2over = ] [ 3drop ] }
+        { [ pick not ] [ drop unexpected-eof ] }
+        [ [ nip call ] [ each-token ] 2bi ]
+    } cond ; inline recursive
 
 : map-tokens ( ... end quot: ( ... token -- ... elt ) -- ... seq )
-    (each-token) produce nip ; inline
+    collector [ each-token ] dip ; inline
 
 : parse-tokens ( end -- seq )
     [ ] map-tokens ;
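The rewritten each-token drives the loop itself instead of building a predicate for while: scan one token; if it equals the end marker, stop; if scan returned f, the input ran out, so throw unexpected-eof with the expected end marker; otherwise apply the quotation to the token and recurse. map-tokens is now just each-token wrapped in collector, and parse-tokens remains map-tokens with an empty quotation. The old (each-token) predicate treated the f from scan exactly like the end marker, so an unterminated form simply stopped. A sketch of the visible difference through eval (illustrative only, mirroring the test added further down):

    USING: eval ;
    "USING: kernel ;" eval( -- )  ! closing ; is found: parses fine
    "USING: kernel" eval( -- )    ! input ends before the closing ; and now
                                  ! throws, wrapping T{ unexpected { want ";" } }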

@@ -112,6 +112,7 @@ PREDICATE: unexpected-eof < unexpected
 TUPLE: lexer-error line column line-text parsing-words error ;
 
 M: lexer-error error-file error>> error-file ;
 
 M: lexer-error error-line [ error>> error-line ] [ line>> ] bi or ;
 
 : <lexer-error> ( msg -- error )

@@ -7,7 +7,6 @@ vocabs.parser words.symbol multiline source-files.errors
 tools.crossref grouping ;
 IN: parser.tests
 
-[
 [ 1 [ 2 [ 3 ] 4 ] 5 ]
 [ "1\n[\n2\n[\n3\n]\n4\n]\n5" eval( -- a b c ) ]
 unit-test

@@ -77,11 +76,11 @@ IN: parser.tests
 [ T{ effect f { "a" "b" } { "d" } f } ]
 [ \ effect-parsing-test "declared-effect" word-prop ] unit-test
 
+[ "IN: parser.tests : missing-- ( a b ) ;" eval( -- ) ] must-fail
+
 ! Funny bug
 [ 2 ] [ "IN: parser.tests : \0. ( -- x ) 2 ; \0." eval( -- n ) ] unit-test
 
-[ "IN: parser.tests : missing-- ( a b ) ;" eval( -- ) ] must-fail
-
 ! These should throw errors
 [ "HEX: zzz" eval( -- obj ) ] must-fail
 [ "OCT: 999" eval( -- obj ) ] must-fail

@@ -100,6 +99,10 @@ IN: parser.tests
     "foo" "parser.tests" lookup eq?
 ] unit-test
 
+! parse-tokens should do the right thing on EOF
+[ "USING: kernel" eval( -- ) ]
+[ error>> T{ unexpected { want ";" } } = ] must-fail-with
+
 ! Test smudging
 
 [ 1 ] [
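The new test pins the EOF behavior end to end: evaluating an unterminated USING: form must fail, and the lexer-error it throws must wrap exactly T{ unexpected { want ";" } } (the got slot defaults to f). A rough listener equivalent, with the printed line number only an assumption:

    ! Illustrative only: catch the wrapped lexer-error by hand and inspect it,
    ! much as the must-fail-with quotation above does with error>>.
    USING: accessors arrays continuations eval kernel prettyprint ;
    [ "USING: kernel" eval( -- ) ]
    [ [ line>> ] [ error>> ] bi 2array . ] recover
    ! prints something like { 1 T{ unexpected { want ";" } } }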

@@ -332,7 +335,6 @@ IN: parser.tests
 [
     "IN: parser.tests : foo ( x y -- z) 1 2 ; : bar ( a -- b ) ;" eval( -- )
 ] must-fail
-] with-file-vocabs
 
 [ ] [
     "IN: parser.tests USE: kernel PREDICATE: foo < object ( x -- y ) ;" eval( -- )

@@ -1,4 +1,4 @@
-! Copyright (C) 2004, 2009 Slava Pestov.
+! Copyright (C) 2004, 2010 Slava Pestov.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: accessors alien arrays byte-arrays byte-vectors definitions generic
 hashtables kernel math namespaces parser lexer sequences strings

@@ -125,7 +125,7 @@ IN: bootstrap.syntax
 ] define-core-syntax
 
 "SYMBOLS:" [
-    ";" [ create-in dup reset-generic define-symbol ] each-token
+    ";" [ create-in [ reset-generic ] [ define-symbol ] bi ] each-token
 ] define-core-syntax
 
 "SINGLETONS:" [
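The change inside the SYMBOLS: quotation is a behavior-preserving cleanup: dup reset-generic define-symbol and [ reset-generic ] [ define-symbol ] bi both apply the two words to the word that create-in just made. The same equivalence with throwaway words (illustrative only):

    USING: io kernel ;
    "x" dup print print           ! prints x twice
    "x" [ print ] [ print ] bi    ! same effect, spelled with bi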