From 9bba5d1312bd7a8eb3857f1d5544d020ac6cb1fd Mon Sep 17 00:00:00 2001
From: John Benediktsson
Date: Fri, 28 Nov 2014 08:11:21 -0800
Subject: [PATCH] json: cleanup and add more parameters for writing.

---
 basis/json/json.factor                |   3 +
 basis/json/reader/reader.factor       | 100 ++++++++++++----------
 basis/json/writer/writer-docs.factor  |  14 ++-
 basis/json/writer/writer-tests.factor |  47 +++++------
 basis/json/writer/writer.factor       | 117 ++++++++++++++++----------
 5 files changed, 163 insertions(+), 118 deletions(-)

diff --git a/basis/json/json.factor b/basis/json/json.factor
index ba1d8cf80e..b68290ba28 100644
--- a/basis/json/json.factor
+++ b/basis/json/json.factor
@@ -3,12 +3,15 @@ IN: json
 
 SINGLETON: json-null
 
+ERROR: json-error ;
+
 : if-json-null ( x if-null else -- )
     [ dup json-null? ] [ [ drop ] prepose ] [ ] tri* if ; inline
 
 : when-json-null ( x if-null -- ) [ ] if-json-null ; inline
+
 : unless-json-null ( x else -- ) [ ] swap if-json-null ; inline
 
 "json.reader" require
diff --git a/basis/json/reader/reader.factor b/basis/json/reader/reader.factor
index 84e1359a79..d6af2c49d2 100644
--- a/basis/json/reader/reader.factor
+++ b/basis/json/reader/reader.factor
@@ -1,23 +1,23 @@
 ! Copyright (C) 2008 Peter Burns, 2009 Philipp Winkler
 ! See http://factorcode.org/license.txt for BSD license.
-USING: accessors arrays assocs combinators fry hashtables io
-io.streams.string json kernel kernel.private make math
-math.parser namespaces prettyprint sequences sequences.private
-strings vectors ;
+USING: arrays assocs combinators fry hashtables io
+io.streams.string json kernel kernel.private math math.parser
+namespaces sbufs sequences sequences.private strings vectors ;
 IN: json.reader
 
 <PRIVATE
 
-: value ( char stream -- num char )
+: json-number ( char stream -- num char )
     [ 1string ] [ "\s\t\r\n,:]}" swap stream-read-until ] bi*
     [ append string>number ] dip ;
 
-DEFER: j-string%
+DEFER: (read-json-string)
 
-: j-escape% ( stream -- )
-    dup stream-read1 {
+: (read-json-escape) ( stream accum -- accum )
+    { sbuf } declare
+    over stream-read1 {
         { CHAR: " [ CHAR: " ] }
         { CHAR: \\ [ CHAR: \\ ] }
         { CHAR: / [ CHAR: / ] }
@@ -26,17 +26,18 @@ DEFER: j-string%
         { CHAR: n [ CHAR: \n ] }
         { CHAR: r [ CHAR: \r ] }
         { CHAR: t [ CHAR: \t ] }
-        { CHAR: u [ 4 over stream-read hex> ] }
+        { CHAR: u [ 4 pick stream-read hex> ] }
         [ ]
-    } case [ , j-string% ] [ drop ] if* ;
+    } case [ suffix! (read-json-string) ] [ json-error ] if* ;
 
-: j-string% ( stream -- )
-    "\\\"" over stream-read-until [ % ] dip
-    CHAR: \" = [ drop ] [ j-escape% ] if ;
+: (read-json-string) ( stream accum -- accum )
+    { sbuf } declare
+    "\\\"" pick stream-read-until [ append! ] dip
+    CHAR: \" = [ nip ] [ (read-json-escape) ] if ;
 
-: j-string ( stream -- str )
+: read-json-string ( stream -- str )
     "\\\"" over stream-read-until CHAR: \" =
-    [ nip ] [ [ % j-escape% ] "" make ] if ;
+    [ nip ] [ >sbuf (read-json-escape) { sbuf } declare "" like ] if ;
 
 : second-last-unsafe ( seq -- second-last )
     [ length 2 - ] [ nth-unsafe ] bi ; inline
@@ -44,56 +45,65 @@ DEFER: j-string%
 : pop-unsafe ( seq -- elt )
     [ length 1 - ] keep [ nth-unsafe ] [ shorten ] 2bi ; inline
 
-ERROR: json-error ;
-
 : check-length ( seq n -- seq )
-    [ dup length ] [ >= ] bi* [ json-error ] unless ;
+    [ dup length ] [ >= ] bi* [ json-error ] unless ; inline
+
+: v-over-push ( accum -- accum )
+    { vector } declare 2 check-length
+    dup [ pop-unsafe ] [ last-unsafe ] bi
+    { vector } declare push ;
+
+: v-pick-push ( accum -- accum )
+    { vector } declare 3 check-length dup
+    [ pop-unsafe ] [ second-last-unsafe ] bi
+    { vector } declare push ;
+
+: v-pop ( accum -- vector )
+    pop { vector } declare ; inline
+
+: v-close ( accum -- accum )
+    { vector } declare
+    dup last V{ } = not [ v-over-push ] when { vector } declare ; inline
 
-: v-over-push ( vec -- vec' )
-    2 check-length dup [ pop-unsafe ] [ last-unsafe ] bi
-    push ;
+: json-open-array ( accum -- accum )
+    { vector } declare V{ } clone suffix! ;
 
-: v-pick-push ( vec -- vec' )
-    3 check-length dup [ pop-unsafe ] [ second-last-unsafe ] bi
-    push ;
+: json-open-hash ( accum -- accum )
+    { vector } declare V{ } clone suffix! V{ } clone suffix! ;
 
-: (close) ( accum -- accum' )
-    { vector } declare
-    dup last V{ } = not [ v-over-push ] when ;
+: json-close-array ( accum -- accum )
+    v-close dup v-pop { } like suffix! ;
 
-: (close-array) ( accum -- accum' )
-    { vector } declare
-    (close) dup pop >array suffix! ;
+: json-close-hash ( accum -- accum )
+    v-close dup dup [ v-pop ] bi@ swap H{ } zip-as suffix! ;
 
-: (close-hash) ( accum -- accum' )
-    { vector } declare
-    (close) dup dup [ pop ] bi@ 2dup min-length
-    [ [ set-at ] curry 2each ] keep suffix! ;
+: json-expect ( token stream -- )
+    [ dup length ] [ stream-read ] bi* = [ json-error ] unless ; inline
 
 : scan ( stream accum char -- stream accum )
     ! 2dup 1string swap . . ! Great for debug...
+    { object vector object } declare
     {
-        { CHAR: \" [ over j-string suffix! ] }
-        { CHAR: [ [ V{ } clone suffix! ] }
+        { CHAR: \" [ over read-json-string suffix! ] }
+        { CHAR: [ [ json-open-array ] }
         { CHAR: , [ v-over-push ] }
-        { CHAR: ] [ (close-array) ] }
-        { CHAR: { [ 2 [ V{ } clone suffix! ] times ] }
+        { CHAR: ] [ json-close-array ] }
+        { CHAR: { [ json-open-hash ] }
        { CHAR: : [ v-pick-push ] }
-        { CHAR: } [ (close-hash) ] }
+        { CHAR: } [ json-close-hash ] }
         { CHAR: \s [ ] }
         { CHAR: \t [ ] }
         { CHAR: \r [ ] }
         { CHAR: \n [ ] }
-        { CHAR: t [ 3 pick stream-read drop t suffix! ] }
-        { CHAR: f [ 4 pick stream-read drop f suffix! ] }
-        { CHAR: n [ 3 pick stream-read drop json-null suffix! ] }
-        [ pick value [ suffix! ] dip [ scan ] when* ]
+        { CHAR: t [ "rue" pick json-expect t suffix! ] }
+        { CHAR: f [ "alse" pick json-expect f suffix! ] }
+        { CHAR: n [ "ull" pick json-expect json-null suffix! ] }
+        [ pick json-number [ suffix! ] dip [ scan ] when* ]
     } case ;
 
 : stream-json-read ( stream -- objects )
-    V{ } clone over '[ _ stream-read1 dup ]
-    [ scan ] while drop nip ;
+    V{ } clone over '[ _ stream-read1 dup ] [ scan ] while drop nip ;
 
 PRIVATE>
diff --git a/basis/json/writer/writer-docs.factor b/basis/json/writer/writer-docs.factor
index 9588a20d1c..dab13883d8 100644
--- a/basis/json/writer/writer-docs.factor
+++ b/basis/json/writer/writer-docs.factor
@@ -10,9 +10,17 @@ HELP: >json
 
 HELP: json-print
 { $values { "obj" object } }
-{ $description "Serializes the object into a JSON formatted string and outputs it to the standard output stream.
-
-By default, tuples and hashtables are serialized into Javascript-friendly JSON formatted output by converting keys containing dashes into underscores. This behaviour can be modified by setting the dynamic variable " { $strong "jsvar-encode?" } " to false." }
+{ $description "Serializes the object into a JSON formatted string and outputs it to the standard output stream."
+$nl
+"Some options can control the formatting of the result:"
+{ $table
+    { { $link json-allow-nans? } "Allow special floating-points: NaN, Infinity, -Infinity." }
+    { { $link json-friendly-keys? } "Convert - to _ in tuple slots and hashtable keys" }
+    { { $link json-coerce-keys? } "Coerce hashtable keys into strings" }
+    { { $link json-escape-slashes? } "Escape forward slashes inside strings" }
+    { { $link json-escape-unicode? } "Escape unicode values inside strings" }
+}
+}
 { $see-also >json } ;
 
 ARTICLE: "json.writer" "JSON writer"
diff --git a/basis/json/writer/writer-tests.factor b/basis/json/writer/writer-tests.factor
index 6a3c2a6dfe..de895d142b 100644
--- a/basis/json/writer/writer-tests.factor
+++ b/basis/json/writer/writer-tests.factor
@@ -9,31 +9,30 @@ IN: json.writer.tests
 { "-102" } [ -102 >json ] unit-test
 { "102.0" } [ 102.0 >json ] unit-test
 { "102.5" } [ 102.5 >json ] unit-test
+{ "0.5" } [ 1/2 >json ] unit-test
 
 { "[1,\"two\",3.0]" } [ { 1 "two" 3.0 } >json ] unit-test
 
-{ """{"US$":1.0,"EU\\u20ac\":1.5}""" } [ H{ { "US$" 1.0 } { "EU€" 1.5 } } >json ] unit-test
+{ """{"US$":1.0,"EU€":1.5}""" } [ H{ { "US$" 1.0 } { "EU€" 1.5 } } >json ] unit-test
 
-! Random symbols are written simply as strings
-SYMBOL: testSymbol
-{ """"testSymbol"""" } [ testSymbol >json ] unit-test
+{ """">json"""" } [ \ >json >json ] unit-test
 
 [ { 0.5 } ] [ { 1/2 } >json json> ] unit-test
 
-[ "{\"b-b\":\"asdf\"}" ]
-  [ f jsvar-encode? [ "asdf" "b-b" associate >json ] with-variable ] unit-test
+TUPLE: person first-name age ;
 
-[ "{\"b_b\":\"asdf\"}" ]
-  [ t jsvar-encode? [ "asdf" "b-b" associate >json ] with-variable ] unit-test
+[ "{\"first-name\":\"David\",\"age\":32}" ]
+[
+    f json-friendly-keys?
+    [ "David" 32 person boa >json ]
+    with-variable
+] unit-test
 
-TUPLE: person name age a-a ;
-[ "{\"name\":\"David-David\",\"age\":32,\"a_a\":{\"b_b\":\"asdf\"}}" ]
-  [ t jsvar-encode?
-    [ "David-David" 32 H{ { "b-b" "asdf" } } person boa >json ]
-    with-variable ] unit-test
-[ "{\"name\":\"Alpha-Beta\",\"age\":32,\"a-a\":{\"b-b\":\"asdf\"}}" ]
-  [ f jsvar-encode?
-    [ "Alpha-Beta" 32 H{ { "b-b" "asdf" } } person boa >json ]
-    with-variable ] unit-test
+[ "{\"first_name\":\"David\",\"age\":32}" ]
+[
+    t json-friendly-keys?
+    [ "David" 32 person boa >json ]
+    with-variable
+] unit-test
 
 { """{"1":2,"3":4}""" } [ H{ { "1" 2 } { "3" 4 } } >json ] unit-test
 
@@ -50,17 +49,17 @@
 { """{"3.1":3}""" } [ H{ { 3.1 3 } } >json ] unit-test
 
-{ """{"Infinity":1}""" }
-[ H{ { 1/0. 1 } } >json ] unit-test
-
-{ """{"-Infinity":1}""" }
-[ H{ { -1/0. 1 } } >json ] unit-test
-
 { """{"null":1}""" }
 [ H{ { json-null 1 } } >json ] unit-test
 
+{ """{"Infinity":1}""" }
+[ t json-allow-nans? [ H{ { 1/0. 1 } } >json ] with-variable ] unit-test
+
+{ """{"-Infinity":1}""" }
+[ t json-allow-nans? [ H{ { -1/0. 1 } } >json ] with-variable ] unit-test
+
 { """{"NaN":1}""" }
-[ H{ { NAN: 333 1 } } >json ] unit-test
+[ t json-allow-nans? [ H{ { NAN: 333 1 } } >json ] with-variable ] unit-test
 
 { "\"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f\""
diff --git a/basis/json/writer/writer.factor b/basis/json/writer/writer.factor
index 10ba9f3c65..0832af0d16 100644
--- a/basis/json/writer/writer.factor
+++ b/basis/json/writer/writer.factor
@@ -1,10 +1,25 @@
 ! Copyright (C) 2006 Chris Double.
 ! See http://factorcode.org/license.txt for BSD license.
 USING: accessors ascii assocs combinators fry hashtables io
-io.streams.string json kernel math math.parser mirrors
+io.streams.string json kernel locals math math.parser mirrors
 namespaces sequences strings tr words ;
 IN: json.writer
 
+SYMBOL: json-allow-nans?
+f json-allow-nans? set-global
+
+SYMBOL: json-friendly-keys?
+t json-friendly-keys? set-global
+
+SYMBOL: json-coerce-keys?
+t json-coerce-keys? set-global
+
+SYMBOL: json-escape-slashes?
+f json-escape-slashes? set-global
+
+SYMBOL: json-escape-unicode?
+f json-escape-unicode? set-global
+
 #! Writes the object out to a stream in JSON format
 GENERIC# stream-json-print 1 ( obj stream -- )
 
@@ -28,21 +43,32 @@ M: string stream-json-print
     CHAR: " over stream-write1 swap [
         {
            { CHAR: " [ "\\\"" over stream-write ] }
-            { CHAR: \\ [ "\\\\" over stream-write ] }
-            { CHAR: / [ "\\/" over stream-write ] }
+            { CHAR: \\ [ "\\\\" over stream-write ] }
+            { CHAR: / [
+                json-escape-slashes? get
+                [ "\\/" over stream-write ]
+                [ CHAR: / over stream-write1 ] if
+            ] }
            { CHAR: \b [ "\\b" over stream-write ] }
            { CHAR: \f [ "\\f" over stream-write ] }
            { CHAR: \n [ "\\n" over stream-write ] }
            { CHAR: \r [ "\\r" over stream-write ] }
            { CHAR: \s [ "\\s" over stream-write ] }
            { CHAR: \t [ "\\t" over stream-write ] }
+            { 0x2028 [ "\\u2028" over stream-write ] }
+            { 0x2029 [ "\\u2029" over stream-write ] }
            [
-                dup printable?
-                [ over stream-write1 ]
-                [
+                {
+                    { [ dup printable? ] [ f ] }
+                    { [ dup control? ] [ t ] }
+                    [ json-escape-unicode? get ]
+                } cond [
+                    dup 0xffff > [ json-error ] when
                     "\\u" pick stream-write
                     >hex 4 CHAR: 0 pad-head over stream-write
+                ] [
+                    over stream-write1
                 ] if
            ]
        } case
@@ -52,12 +78,16 @@ M: integer stream-json-print
     [ number>string ] [ stream-write ] bi* ;
 
 : float>json ( float -- string )
-    {
-        { [ dup fp-nan? ] [ drop "NaN" ] }
-        { [ dup 1/0. = ] [ drop "Infinity" ] }
-        { [ dup -1/0. = ] [ drop "-Infinity" ] }
-        [ number>string ]
-    } cond ;
+    dup fp-special? [
+        json-allow-nans? get [ json-error ] unless
+        {
+            { [ dup fp-nan? ] [ drop "NaN" ] }
+            { [ dup 1/0. = ] [ drop "Infinity" ] }
+            { [ dup -1/0. = ] [ drop "-Infinity" ] }
+        } cond
+    ] [
+        number>string
+    ] if ;
 
 M: float stream-json-print
     [ float>json ] [ stream-write ] bi* ;
@@ -66,44 +96,39 @@ M: real stream-json-print
     [ >float number>string ] [ stream-write ] bi* ;
 
 M: sequence stream-json-print
-    CHAR: [ over stream-write1 swap [
-        over '[ CHAR: , _ stream-write1 ]
-        pick '[ _ stream-json-print ] interleave
-    ] unless-empty CHAR: ] swap stream-write1 ;
-
-SYMBOL: jsvar-encode?
-t jsvar-encode? set-global
-TR: jsvar-encode "-" "_" ;
-
-GENERIC: >js-key ( obj -- str )
-M: boolean >js-key "true" "false" ? ;
-M: string >js-key jsvar-encode ;
-M: number >js-key number>string ;
-M: float >js-key float>json ;
-M: json-null >js-key drop "null" ;
+    CHAR: [ over stream-write1 swap
+    over '[ CHAR: , _ stream-write1 ]
+    pick '[ _ stream-json-print ] interleave
+    CHAR: ] swap stream-write1 ;
+
+<PRIVATE
 
-M: assoc stream-json-print
-    CHAR: { over stream-write1 swap >alist [
-        jsvar-encode? get [
-            over '[ CHAR: , _ stream-write1 ]
-            pick dup '[
-                first2
-                [ >js-key _ stream-json-print ]
-                [ _ CHAR: : over stream-write1 stream-json-print ]
-                bi*
-            ] interleave
+TR: json-friendly "-" "_" ;
+
+GENERIC: json-key ( obj -- str )
+M: f json-key drop "false" ;
+M: t json-key drop "true" ;
+M: json-null json-key drop "null" ;
+M: integer json-key number>string ;
+M: float json-key float>json ;
+M: real json-key >float number>string ;
+
+:: json-print-assoc ( obj stream -- )
+    CHAR: { stream stream-write1 obj >alist
+    [ CHAR: , stream stream-write1 ]
+    json-friendly-keys? get
+    json-coerce-keys? get '[
+        first2 [
+            dup string?
+            [ _ [ json-friendly ] when ]
+            [ _ [ json-key ] when ] if
+            stream stream-json-print
         ] [
-            over '[ CHAR: , _ stream-write1 ]
-            pick dup '[
-                first2
-                [ _ stream-json-print ]
-                [ _ CHAR: : over stream-write1 stream-json-print ]
-                bi*
-            ] interleave
-        ] if
-    ] unless-empty CHAR: } swap stream-write1 ;
+            CHAR: : stream stream-write1
+            stream stream-json-print
+        ] bi*
+    ] interleave
+    CHAR: } stream stream-write1 ;
 
 PRIVATE>
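
A minimal usage sketch (not part of the patch itself), assuming the variable
names and defaults introduced above: the new writer switches are ordinary
dynamic variables, so they can be toggled with set-global or scoped with
with-variable, exactly as the updated unit tests do.

    USING: io json.writer namespaces ;

    ! Dashes in string keys become underscores while
    ! json-friendly-keys? (default t) is on.
    H{ { "first-name" "David" } } >json print    ! {"first_name":"David"}

    ! Special floats raise json-error unless json-allow-nans? is switched on.
    H{ { 1/0. 1 } } t json-allow-nans? [ >json ] with-variable print    ! {"Infinity":1}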