diff --git a/extra/peg/javascript/tokenizer/tokenizer-tests.factor b/extra/peg/javascript/tokenizer/tokenizer-tests.factor
index 1e9df1cf52..5d01d2d154 100644
--- a/extra/peg/javascript/tokenizer/tokenizer-tests.factor
+++ b/extra/peg/javascript/tokenizer/tokenizer-tests.factor
@@ -31,3 +31,7 @@ IN: peg.javascript.tokenizer.tests
 
 { V{ T{ ast-string { value "\b\f\n\r\t\v'\"\\" } } } }
 [ "\"\\b\\f\\n\\r\\t\\v\\'\\\"\\\\\"" tokenize-javascript ] unit-test
+
+{
+    V{ T{ ast-string { value "abc" } } }
+} [ "\"\\x61\\u0062\\u{63}\"" tokenize-javascript ] unit-test
diff --git a/extra/peg/javascript/tokenizer/tokenizer.factor b/extra/peg/javascript/tokenizer/tokenizer.factor
index 8dcbc70e85..56afa42cad 100644
--- a/extra/peg/javascript/tokenizer/tokenizer.factor
+++ b/extra/peg/javascript/tokenizer/tokenizer.factor
@@ -48,15 +48,20 @@ Name = !(Keyword) iName => [[ ast-name boa ]]
 Number = Digits:ws '.' Digits:fs => [[ ws "." fs 3array "" concat-as string>number ast-number boa ]]
        | Digits => [[ >string string>number ast-number boa ]]
-EscapeChar = "\\b" => [[ CHAR: \b ]]
-           | "\\f" => [[ CHAR: \f ]]
-           | "\\n" => [[ CHAR: \n ]]
-           | "\\r" => [[ CHAR: \r ]]
-           | "\\t" => [[ CHAR: \t ]]
-           | "\\v" => [[ CHAR: \v ]]
-           | "\\'" => [[ CHAR: ' ]]
-           | "\\\"" => [[ CHAR: " ]]
-           | "\\\\" => [[ CHAR: \\ ]]
+SingleEscapeChar = "b" => [[ CHAR: \b ]]
+                 | "f" => [[ CHAR: \f ]]
+                 | "n" => [[ CHAR: \n ]]
+                 | "r" => [[ CHAR: \r ]]
+                 | "t" => [[ CHAR: \t ]]
+                 | "v" => [[ CHAR: \v ]]
+                 | "'" => [[ CHAR: ' ]]
+                 | "\"" => [[ CHAR: " ]]
+                 | "\\" => [[ CHAR: \\ ]]
+HexDigit = [0-9a-fA-F]
+HexEscapeChar = "x" (HexDigit HexDigit):d => [[ d hex> ]]
+UnicodeEscapeChar = "u" (HexDigit HexDigit HexDigit HexDigit):d => [[ d hex> ]]
+                  | "u{" HexDigit+:d "}" => [[ d hex> ]]
+EscapeChar = "\\" (SingleEscapeChar | HexEscapeChar | UnicodeEscapeChar):c => [[ c ]]
 StringChars1 = (EscapeChar | !('"""') .)* => [[ >string ]]
 StringChars2 = (EscapeChar | !('"') .)* => [[ >string ]]
 StringChars3 = (EscapeChar | !("'") .)* => [[ >string ]]