Adding escaping to CHAR: " -> CHAR: \"

It is not necessary to escape the " character, but doing so doesn't hurt, and it is needed for syntax highlighting (FUEL) to work correctly.
char-rename
Björn Lindqvist 2016-09-04 06:40:43 +02:00
parent a36ac6e435
commit 20711a0fd3
25 changed files with 43 additions and 43 deletions
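
To illustrate the point in the commit message, here is a minimal listener sketch (assuming the kernel and prettyprint vocabularies for = and .): both spellings read as the same code point, so the escape only matters to the highlighter.

USING: kernel prettyprint ;
CHAR: "  . ! prints 34 -- the bare quote is accepted by the parser
CHAR: \" . ! prints 34 -- the escaped form keeps FUEL's highlighter from treating the quote as a string delimiter
CHAR: " CHAR: \" = . ! prints t -- both notations denote the same character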

View File

@@ -20,7 +20,7 @@ DEFER: quoted-field,
2over stream-read1 swap over =
[ nip ] [
{
{ CHAR: " [ [ CHAR: " , ] when quoted-field, ] }
{ CHAR: \" [ [ CHAR: \" , ] when quoted-field, ] }
{ CHAR: \n [ ] } ! Error: cr inside string?
{ CHAR: \r [ ] } ! Error: lf inside string?
[ [ , drop f maybe-escaped-quote ] when* ]
@@ -45,7 +45,7 @@ DEFER: quoted-field,
swap ?trim [ drop ] 2dip ; inline
: field ( delimiter stream field-seps quote-seps -- sep/f field )
-pick stream-read-until dup CHAR: " = [
+pick stream-read-until dup CHAR: \" = [
drop [ drop quoted-field ] [ continue-field ] if-empty
] [ [ 3drop ] 2dip swap ?trim ] if ;
@@ -89,10 +89,10 @@ PRIVATE>
'[ dup "\n\"\r" member? [ drop t ] [ _ = ] if ] any? ; inline
: escape-quotes ( cell stream -- )
CHAR: " over stream-write1 swap [
CHAR: \" over stream-write1 swap [
[ over stream-write1 ]
[ dup CHAR: " = [ over stream-write1 ] [ drop ] if ] bi
] each CHAR: " swap stream-write1 ;
[ dup CHAR: \" = [ over stream-write1 ] [ drop ] if ] bi
] each CHAR: \" swap stream-write1 ;
: escape-if-required ( cell delimiter stream -- )
[ dupd needs-escaping? ] dip

View File

@@ -15,7 +15,7 @@ IN: help.html
: escape-char ( ch -- )
dup ascii? [
dup H{
{ CHAR: " "__quo__" }
{ CHAR: \" "__quo__" }
{ CHAR: * "__star__" }
{ CHAR: : "__colon__" }
{ CHAR: < "__lt__" }

View File

@@ -16,7 +16,7 @@ TUPLE: template-lexer < lexer ;
M: template-lexer skip-word
[
{
{ [ 2dup nth CHAR: " = ] [ drop 1 + ] }
{ [ 2dup nth CHAR: \" = ] [ drop 1 + ] }
{ [ 2dup swap tail-slice "%>" head? ] [ drop 2 + ] }
[ f skip ]
} cond

View File

@@ -97,7 +97,7 @@ PEG: parse-response-line ( string -- triple )
[ " \t" member? ] satisfy repeat1 ;
: qdtext-parser ( -- parser )
{ [ CHAR: " = ] [ control? ] } except-these ;
{ [ CHAR: \" = ] [ control? ] } except-these ;
: quoted-char-parser ( -- parser )
"\\" token hide any-char 2seq ;

View File

@@ -64,10 +64,10 @@ TUPLE: CreateProcess-args
[
{ [ drop CHAR: \ = ] [ nip "\\\"" member? ] } 2&&
] monotonic-split [
dup last CHAR: " = [
dup last CHAR: \" = [
dup length 1 > [
! String of backslashes + double-quote
-length 1 - 2 * CHAR: \ <repetition> "\\\"" append
+length 1 - 2 * CHAR: \\ <repetition> "\\\"" append
] [
! Single double-quote
drop "\\\""

View File

@@ -44,7 +44,7 @@ DEFER: (read-json-string)
: (read-json-escape) ( stream accum -- accum )
{ sbuf } declare
over stream-read1 {
{ CHAR: " [ CHAR: " ] }
{ CHAR: \" [ CHAR: \" ] }
{ CHAR: \\ [ CHAR: \\ ] }
{ CHAR: / [ CHAR: / ] }
{ CHAR: b [ CHAR: \b ] }

View File

@@ -59,9 +59,9 @@ M: json-null stream-json-print
PRIVATE>
M: string stream-json-print
CHAR: " over stream-write1 swap [
CHAR: \" over stream-write1 swap [
{
{ CHAR: " [ "\\\"" over stream-write ] }
{ CHAR: \" [ "\\\"" over stream-write ] }
{ CHAR: \\ [ "\\\\" over stream-write ] }
{ CHAR: / [
json-escape-slashes? get
@@ -87,7 +87,7 @@ M: string stream-json-print
] if
]
} case
] each CHAR: " swap stream-write1 ;
] each CHAR: \" swap stream-write1 ;
M: integer stream-json-print
[ number>string ] [ stream-write ] bi* ;

View File

@@ -114,7 +114,7 @@ C: <ebnf> ebnf
[
[ CHAR: \ = ] satisfy
[ "\"\\" member? ] satisfy 2seq ,
[ CHAR: " = not ] satisfy ,
[ CHAR: \" = not ] satisfy ,
] choice* repeat1 "\"" "\"" surrounded-by ,
[ CHAR: ' = not ] satisfy repeat1 "'" "'" surrounded-by ,
] choice* [ "" flatten-as unescape-string ] action ;

View File

@@ -74,9 +74,9 @@ PRIVATE>
: string-parser ( -- parser )
[
[ CHAR: " = ] satisfy hide ,
[ CHAR: " = not ] satisfy repeat0 ,
[ CHAR: " = ] satisfy hide ,
[ CHAR: \" = ] satisfy hide ,
[ CHAR: \" = not ] satisfy repeat0 ,
[ CHAR: \" = ] satisfy hide ,
] seq* [ first >string ] action ;
: (range-pattern) ( pattern -- string )

View File

@@ -24,7 +24,7 @@ UNION: dtd-acceptable
[
take-word pass-blank get-char {
{ CHAR: ' [ parse-quote ] }
{ CHAR: " [ parse-quote ] }
{ CHAR: \" [ parse-quote ] }
[ drop take-external-id close ]
} case
] dip '[ swap _ [ ?set-at ] change ] 2keep ;

View File

@@ -15,7 +15,7 @@ CONSTANT: quoted-entities-out
H{
{ CHAR: & "&amp;" }
{ CHAR: ' "&apos;" }
{ CHAR: " "&quot;" }
{ CHAR: \" "&quot;" }
{ CHAR: < "&lt;" }
}
@@ -35,7 +35,7 @@ CONSTANT: entities
{ "gt" CHAR: > }
{ "amp" CHAR: & }
{ "apos" CHAR: ' }
{ "quot" CHAR: " }
{ "quot" CHAR: \" }
}
: with-entities ( entities quot -- )

View File

@@ -48,7 +48,7 @@ PRIVATE>
<PRIVATE
: write-quoted ( string -- )
CHAR: " write1 write CHAR: " write1 ;
CHAR: \" write1 write CHAR: \" write1 ;
: print-attrs ( assoc -- )
[

View File

@@ -85,7 +85,7 @@ GENERIC: skip-word ( lexer -- )
M: lexer skip-word
[
2dup nth CHAR: " eq? [ drop 1 + ] [ f skip ] if
2dup nth CHAR: \" eq? [ drop 1 + ] [ f skip ] if
] change-lexer-column ;
: still-parsing? ( lexer -- ? )

View File

@@ -51,32 +51,32 @@ IN: c.lexer.tests
{ f }
[
"\"abc\" asdf" <sequence-parser>
[ CHAR: \ CHAR: " take-quoted-string drop ] [ "asdf" take-sequence ] bi
[ CHAR: \ CHAR: \" take-quoted-string drop ] [ "asdf" take-sequence ] bi
] unit-test
{ "abc\\\"def" }
[
"\"abc\\\"def\" asdf" <sequence-parser>
CHAR: \ CHAR: " take-quoted-string
CHAR: \ CHAR: \" take-quoted-string
] unit-test
{ "asdf" }
[
"\"abc\" asdf" <sequence-parser>
[ CHAR: \ CHAR: " take-quoted-string drop ]
[ CHAR: \ CHAR: \" take-quoted-string drop ]
[ skip-whitespace "asdf" take-sequence ] bi
] unit-test
{ f }
[
"\"abc asdf" <sequence-parser>
CHAR: \ CHAR: " take-quoted-string
CHAR: \ CHAR: \" take-quoted-string
] unit-test
{ "\"abc" }
[
"\"abc asdf" <sequence-parser>
[ CHAR: \ CHAR: " take-quoted-string drop ]
[ CHAR: \ CHAR: \" take-quoted-string drop ]
[ "\"abc" take-sequence ] bi
] unit-test
@@ -87,7 +87,7 @@ IN: c.lexer.tests
[ "" <sequence-parser> take-token ] unit-test
{ "abcd e \\\"f g" }
[ "\"abcd e \\\"f g\"" <sequence-parser> CHAR: \ CHAR: " take-token* ] unit-test
[ "\"abcd e \\\"f g\"" <sequence-parser> CHAR: \ CHAR: \" take-token* ] unit-test
{ "123" }
[ "123jjj" <sequence-parser> take-c-integer ] unit-test

View File

@@ -69,7 +69,7 @@ IN: c.lexer
} case ;
: take-token ( sequence-parser -- string/f )
CHAR: \ CHAR: " take-token* ;
CHAR: \ CHAR: \" take-token* ;
: c-identifier-begin? ( ch -- ? )
CHAR: a CHAR: z [a,b]

View File

@@ -74,7 +74,7 @@ ERROR: header-file-missing path ;
: handle-include ( preprocessor-state sequence-parser -- )
skip-whitespace/comments advance dup previous {
{ CHAR: < [ CHAR: > take-until-object read-standard-include ] }
{ CHAR: " [ CHAR: " take-until-object read-local-include ] }
{ CHAR: \" [ CHAR: \" take-until-object read-local-include ] }
[ bad-include-line ]
} case ;

View File

@@ -58,7 +58,7 @@ ERROR: unknown-syntax syntax ;
dup [ CHAR: ; = ] find drop [ head ] when* ;
: trim-quotes ( str -- str' )
[ CHAR: " = ] trim ;
[ CHAR: \" = ] trim ;
: last-track ( cuesheet -- cuesheet track )
dup files>> last tracks>> last ;

View File

@@ -32,7 +32,7 @@ TUPLE: ast-hashtable elements ;
[
{
[ blank? not ]
[ CHAR: " = not ]
[ CHAR: \" = not ]
[ CHAR: ; = not ]
[ LETTER? not ]
[ letter? not ]

View File

@@ -71,7 +71,7 @@ CONSTANT: CHARS H{
{ CHAR: 9 CHAR: 6 }
{ CHAR: & 0x214B }
{ CHAR: ! 0x00A1 }
{ CHAR: " 0x201E }
{ CHAR: \" 0x201E }
{ CHAR: . 0x02D9 }
{ CHAR: ; 0x061B }
{ CHAR: [ CHAR: ] }

View File

@@ -43,7 +43,7 @@ SYMBOL: tagstack
CHAR: ' (read-quote) ;
: read-double-quote ( sequence-parser -- string )
CHAR: " (read-quote) ;
CHAR: \" (read-quote) ;
: read-quote ( sequence-parser -- string )
dup get+increment CHAR: ' =

View File

@@ -19,7 +19,7 @@ IN: ini-file
{ CHAR: t CHAR: \t }
{ CHAR: v CHAR: \v }
{ CHAR: ' CHAR: ' }
{ CHAR: " CHAR: " }
{ CHAR: \" CHAR: \" }
{ CHAR: \\ CHAR: \\ }
{ CHAR: ? CHAR: ? }
{ CHAR: ; CHAR: ; }
@@ -50,7 +50,7 @@ USE: xml.entities
{ CHAR: \t "\\t" }
{ 0x0b "\\v" }
{ CHAR: ' "\\'" }
{ CHAR: " "\\\"" }
{ CHAR: \" "\\\"" }
{ CHAR: \\ "\\\\" }
{ CHAR: ? "\\?" }
{ CHAR: ; "\\;" }

View File

@@ -71,7 +71,7 @@ CONSTANT: morse-code-table $[
{ CHAR: + ".-.-." }
{ CHAR: - "-....-" }
{ CHAR: _ "..--.-" }
{ CHAR: " ".-..-." }
{ CHAR: \" ".-..-." }
{ CHAR: $ "...-..-" }
{ CHAR: @ ".--.-." }
{ CHAR: \s "/" }

View File

@@ -11,9 +11,9 @@ IN: parser-combinators.simple
[ digit? ] satisfy <*> [ string>number ] <@ ;
: string-parser ( -- parser )
[ CHAR: " = ] satisfy
[ CHAR: " = not ] satisfy <*> &>
[ CHAR: " = ] satisfy <& [ >string ] <@ ;
[ CHAR: \" = ] satisfy
[ CHAR: \" = not ] satisfy <*> &>
[ CHAR: \" = ] satisfy <& [ >string ] <@ ;
: bold-parser ( -- parser )
"*" token

View File

@@ -55,7 +55,7 @@ SingleEscape = "b" => [[ CHAR: \b ]]
| "t" => [[ CHAR: \t ]]
| "v" => [[ CHAR: \v ]]
| "'" => [[ CHAR: ' ]]
| "\"" => [[ CHAR: " ]]
| "\"" => [[ CHAR: \" ]]
| "\\" => [[ CHAR: \\ ]]
HexDigit = [0-9a-fA-F]
HexEscape = "x" (HexDigit HexDigit):d => [[ d hex> ]]

View File

@@ -44,7 +44,7 @@ DEFER: parse-tnetstring
: parse-tnetstring ( data -- remain value )
parse-payload {
{ CHAR: # [ string>number ] }
{ CHAR: " [ ] }
{ CHAR: \" [ ] }
{ CHAR: } [ parse-dict ] }
{ CHAR: ] [ parse-list ] }
{ CHAR: ! [ parse-bool ] }