diff --git a/basis/cairo/gadgets/gadgets.factor b/basis/cairo/gadgets/gadgets.factor index 131f7425c9..87942b4c91 100644 --- a/basis/cairo/gadgets/gadgets.factor +++ b/basis/cairo/gadgets/gadgets.factor @@ -2,19 +2,26 @@ ! See http://factorcode.org/license.txt for BSD license. USING: sequences math kernel byte-arrays cairo.ffi cairo io.backend ui.gadgets accessors opengl.gl arrays fry -classes ui.render namespaces ; - +classes ui.render namespaces destructors libc ; IN: cairo.gadgets +stride ( width -- stride ) 4 * ; + +: image-dims ( gadget -- width height stride ) + dim>> first2 over width>stride ; inline +: image-buffer ( width height stride -- alien ) + * nip malloc ; inline +PRIVATE> GENERIC: render-cairo* ( gadget -- ) -: render-cairo ( gadget -- byte-array ) - dup dim>> first2 over width>stride - [ * nip dup CAIRO_FORMAT_ARGB32 ] - [ cairo_image_surface_create_for_data ] 3bi - rot '[ _ render-cairo* ] with-cairo-from-surface ; inline +: render-cairo ( gadget -- alien ) + [ + image-dims + [ image-buffer dup CAIRO_FORMAT_ARGB32 ] + [ cairo_image_surface_create_for_data ] 3bi + ] [ '[ _ render-cairo* ] with-cairo-from-surface ] bi ; TUPLE: cairo-gadget < gadget ; @@ -23,11 +30,13 @@ TUPLE: cairo-gadget < gadget ; swap >>dim ; M: cairo-gadget draw-gadget* - [ dim>> ] [ render-cairo ] bi - origin get first2 glRasterPos2i - 1.0 -1.0 glPixelZoom - [ first2 GL_BGRA GL_UNSIGNED_BYTE ] dip - glDrawPixels ; + [ + [ dim>> ] [ render-cairo &free ] bi + origin get first2 glRasterPos2i + 1.0 -1.0 glPixelZoom + [ first2 GL_BGRA GL_UNSIGNED_BYTE ] dip + glDrawPixels + ] with-destructors ; : copy-surface ( surface -- ) cr swap 0 0 cairo_set_source_surface diff --git a/basis/http/server/server.factor b/basis/http/server/server.factor index 8a5e695a70..c9ec2c7f3e 100755 --- a/basis/http/server/server.factor +++ b/basis/http/server/server.factor @@ -42,7 +42,7 @@ ERROR: no-boundary ; ";" split1 nip "=" split1 nip [ no-boundary ] unless* ; -: read-multipart-data ( request -- 
form-variables uploaded-files ) +: read-multipart-data ( request -- mime-parts ) [ "content-type" header ] [ "content-length" header string>number ] bi unlimit-input @@ -55,7 +55,7 @@ ERROR: no-boundary ; : parse-content ( request content-type -- post-data ) [ swap ] keep { - { "multipart/form-data" [ read-multipart-data assoc-union >>params ] } + { "multipart/form-data" [ read-multipart-data >>params ] } { "application/x-www-form-urlencoded" [ read-content query>assoc >>params ] } [ drop read-content >>data ] } case ; diff --git a/basis/io/directories/search/search-tests.factor b/basis/io/directories/search/search-tests.factor index 63c9483331..a8b8bf9215 100644 --- a/basis/io/directories/search/search-tests.factor +++ b/basis/io/directories/search/search-tests.factor @@ -4,8 +4,7 @@ IN: io.directories.search.tests [ t ] [ [ - 10 [ "io.paths.test" "gogogo" make-unique-file* ] replicate - current-directory get t [ ] find-all-files - ] with-unique-directory - [ natural-sort ] bi@ = + 10 [ "io.paths.test" "gogogo" make-unique-file ] replicate + current-temporary-directory get t [ ] find-all-files + ] with-unique-directory drop [ natural-sort ] bi@ = ] unit-test diff --git a/basis/io/files/links/unix/unix-tests.factor b/basis/io/files/links/unix/unix-tests.factor index b1d2c5b8fa..dd5eb5c8d9 100644 --- a/basis/io/files/links/unix/unix-tests.factor +++ b/basis/io/files/links/unix/unix-tests.factor @@ -9,24 +9,30 @@ IN: io.files.links.unix.tests [ t ] [ [ - 5 "lol" make-test-links - "lol1" follow-links - current-directory get "lol5" append-path = - ] with-unique-directory + current-temporary-directory get [ + 5 "lol" make-test-links + "lol1" follow-links + current-temporary-directory get "lol5" append-path = + ] with-directory + ] cleanup-unique-directory ] unit-test [ [ - 100 "laf" make-test-links "laf1" follow-links + current-temporary-directory get [ + 100 "laf" make-test-links "laf1" follow-links + ] with-directory ] with-unique-directory ] [ too-many-symlinks? 
] must-fail-with [ t ] [ 110 symlink-depth [ [ - 100 "laf" make-test-links - "laf1" follow-links - current-directory get "laf100" append-path = - ] with-unique-directory + current-temporary-directory get [ + 100 "laf" make-test-links + "laf1" follow-links + current-temporary-directory get "laf100" append-path = + ] with-directory + ] cleanup-unique-directory ] with-variable ] unit-test diff --git a/basis/io/files/unique/unique-docs.factor b/basis/io/files/unique/unique-docs.factor index 08836cf497..b8a4431a73 100644 --- a/basis/io/files/unique/unique-docs.factor +++ b/basis/io/files/unique/unique-docs.factor @@ -1,8 +1,9 @@ USING: help.markup help.syntax io io.ports kernel math -io.pathnames io.directories math.parser io.files strings ; +io.pathnames io.directories math.parser io.files strings +quotations io.files.unique.private ; IN: io.files.unique -HELP: temporary-path +HELP: default-temporary-directory { $values { "path" "a pathname string" } } @@ -25,42 +26,66 @@ HELP: unique-retries HELP: make-unique-file ( prefix suffix -- path ) { $values { "prefix" "a string" } { "suffix" "a string" } { "path" "a pathname string" } } -{ $description "Creates a file that is guaranteed not to exist in a platform-specific temporary directory. The file name is composed of a prefix, a number of random digits and letters, and the suffix. Returns the full pathname." } +{ $description "Creates a file that is guaranteed not to exist in the directory stored in " { $link current-temporary-directory } ". The file name is composed of a prefix, a number of random digits and letters, and the suffix. Returns the full pathname." } { $errors "Throws an error if a new unique file cannot be created after a number of tries. The most likely error is incorrect directory permissions on the temporary directory." 
} ; -HELP: make-unique-file* -{ $values - { "prefix" string } { "suffix" string } - { "path" "a pathname string" } -} -{ $description "Creates a file that is guaranteed not to exist in the directory in the " { $link current-directory } " variable. The file name is composed of a prefix, a number of random digits and letters, and the suffix. Returns the full pathname." } ; +{ unique-file make-unique-file cleanup-unique-file } related-words -{ make-unique-file make-unique-file* with-unique-file } related-words - -HELP: with-unique-file ( prefix suffix quot: ( path -- ) -- ) +HELP: cleanup-unique-file ( prefix suffix quot: ( path -- ) -- ) { $values { "prefix" "a string" } { "suffix" "a string" } { "quot" "a quotation" } } { $description "Creates a file with " { $link make-unique-file } " and calls the quotation with the path name on the stack." } { $notes "The unique file will be deleted after calling this word." } ; -HELP: make-unique-directory ( -- path ) +HELP: unique-directory ( -- path ) { $values { "path" "a pathname string" } } -{ $description "Creates a directory that is guaranteed not to exist in a platform-specific temporary directory and returns the full pathname." } +{ $description "Creates a directory in the value in " { $link current-temporary-directory } " that is guaranteed not to exist in and returns the full pathname." } { $errors "Throws an error if the directory cannot be created after a number of tries. The most likely error is incorrect directory permissions on the temporary directory." } ; -HELP: with-unique-directory ( quot -- ) +HELP: cleanup-unique-directory ( quot -- ) { $values { "quot" "a quotation" } } -{ $description "Creates a directory with " { $link make-unique-directory } " and calls the quotation with the pathname on the stack using the " { $link with-directory } " combinator. The quotation can access the " { $link current-directory } " symbol for the name of the temporary directory." 
} -{ $notes "The directory will be deleted after calling this word, even if an error is thrown in the quotation." } ; +{ $description "Creates a directory with " { $link unique-directory } " and calls the quotation with the pathname on the stack using the " { $link with-temporary-directory } " combinator. The quotation can access the " { $link current-temporary-directory } " symbol for the name of the temporary directory. Subsequent unique files will be created in this unique directory until the combinator returns." } +{ $notes "The directory will be deleted after calling this word, even if an error is thrown in the quotation. This combinator is like " { $link with-unique-directory } " but does not delete the directory." } ; -ARTICLE: "io.files.unique" "Temporary files" -"The " { $vocab-link "io.files.unique" } " vocabulary implements cross-platform temporary file creation in a high-level and secure way." $nl -"Creating temporary files:" +HELP: with-unique-directory +{ $values + { "quot" quotation } + { "path" "a pathname string" } +} +{ $description "Creates a directory with " { $link unique-directory } " and calls the quotation with the pathname on the stack using the " { $link with-temporary-directory } " combinator. The quotation can access the " { $link current-temporary-directory } " symbol for the name of the temporary directory. Subsequent unique files will be created in this unique directory until the combinator returns." } ; + +HELP: current-temporary-directory +{ $values + { "value" "a path" } +} +{ $description "The temporary directory used for creating unique files and directories." } ; + +HELP: unique-file +{ $values + { "path" "a pathname string" } + { "path'" "a pathname string" } +} +{ $description "Creates a temporary file in the directory stored in " { $link current-temporary-directory } " and outputs the path name." 
} ; + +HELP: with-temporary-directory +{ $values + { "path" "a pathname string" } { "quot" quotation } +} +{ $description "Sets " { $link current-temporary-directory } " to " { $snippet "path" } " and calls the quotation, restoring the previous temporary path after execution completes." } ; + +ARTICLE: "io.files.unique" "Unique files" +"The " { $vocab-link "io.files.unique" } " vocabulary implements cross-platform unique file creation in temporary directories in a high-level and secure way." $nl +"Changing the temporary path:" +{ $subsection current-temporary-directory } +"Creating unique files:" +{ $subsection unique-file } +{ $subsection cleanup-unique-file } { $subsection make-unique-file } -{ $subsection make-unique-file* } -{ $subsection with-unique-file } -"Creating temporary directories:" -{ $subsection make-unique-directory } -{ $subsection with-unique-directory } ; +"Creating unique directories:" +{ $subsection unique-directory } +{ $subsection with-unique-directory } +{ $subsection cleanup-unique-directory } +"Default temporary directory:" +{ $subsection default-temporary-directory } ; ABOUT: "io.files.unique" diff --git a/basis/io/files/unique/unique-tests.factor b/basis/io/files/unique/unique-tests.factor index 8f2e32cea2..fd8cf2c69f 100644 --- a/basis/io/files/unique/unique-tests.factor +++ b/basis/io/files/unique/unique-tests.factor @@ -1,21 +1,41 @@ USING: io.encodings.ascii sequences strings io io.files accessors tools.test kernel io.files.unique namespaces continuations -io.files.info io.pathnames ; +io.files.info io.pathnames io.directories ; IN: io.files.unique.tests [ 123 ] [ "core" ".test" [ [ [ 123 CHAR: a ] dip ascii set-file-contents ] [ file-info size>> ] bi - ] with-unique-file + ] cleanup-unique-file ] unit-test [ t ] [ - [ current-directory get file-info directory? ] with-unique-directory + [ current-directory get file-info directory? 
] cleanup-unique-directory ] unit-test [ t ] [ current-directory get - [ [ "FAILDOG" throw ] with-unique-directory ] [ drop ] recover + [ [ "FAILDOG" throw ] cleanup-unique-directory ] [ drop ] recover current-directory get = ] unit-test + +[ t ] [ + [ + "asdf" unique-file drop + "asdf2" unique-file drop + current-temporary-directory get directory-files length 2 = + ] cleanup-unique-directory +] unit-test + +[ t ] [ + [ ] with-unique-directory >boolean +] unit-test + +[ t ] [ + [ + "asdf" unique-file drop + "asdf" unique-file drop + current-temporary-directory get directory-files length 2 = + ] with-unique-directory drop +] unit-test diff --git a/basis/io/files/unique/unique.factor b/basis/io/files/unique/unique.factor index 02f4d6080c..7bd96aa63b 100644 --- a/basis/io/files/unique/unique.factor +++ b/basis/io/files/unique/unique.factor @@ -6,8 +6,13 @@ kernel math math.bitwise math.parser namespaces random sequences system vocabs.loader ; IN: io.files.unique -HOOK: touch-unique-file io-backend ( path -- ) -HOOK: temporary-path io-backend ( -- path ) +HOOK: (touch-unique-file) io-backend ( path -- ) +: touch-unique-file ( path -- ) + normalize-path (touch-unique-file) ; + +HOOK: default-temporary-directory io-backend ( -- path ) + +SYMBOL: current-temporary-directory SYMBOL: unique-length SYMBOL: unique-retries @@ -15,6 +20,9 @@ SYMBOL: unique-retries 10 unique-length set-global 10 unique-retries set-global +: with-temporary-directory ( path quot -- ) + [ current-temporary-directory ] dip with-variable ; inline + +: random-name ( -- string ) + unique-length get [ random-ch ] "" replicate-as ; : (make-unique-file) ( path prefix suffix -- path ) '[ - _ _ _ unique-length get random-name glue append-path + _ _ _ random-name glue append-path dup touch-unique-file ] unique-retries get retry ; +PRIVATE> + : make-unique-file ( prefix suffix -- path ) - [ temporary-path ] 2dip (make-unique-file) ; + [ current-temporary-directory get ] 2dip (make-unique-file) ; -: 
make-unique-file* ( prefix suffix -- path ) - [ current-directory get ] 2dip (make-unique-file) ; - -: with-unique-file ( prefix suffix quot: ( path -- ) -- ) +: cleanup-unique-file ( prefix suffix quot: ( path -- ) -- ) [ make-unique-file ] dip [ delete-file ] bi ; inline -: make-unique-directory ( -- path ) +: unique-directory ( -- path ) [ - temporary-path unique-length get random-name append-path + current-temporary-directory get + random-name append-path dup make-directory ] unique-retries get retry ; -: with-unique-directory ( quot: ( -- ) -- ) - [ make-unique-directory ] dip - '[ _ with-directory ] [ delete-tree ] bi ; inline +: with-unique-directory ( quot -- path ) + [ unique-directory ] dip + [ with-temporary-directory ] [ drop ] 2bi ; inline + +: cleanup-unique-directory ( quot: ( -- ) -- ) + [ unique-directory ] dip + '[ _ with-temporary-directory ] [ delete-tree ] bi ; inline + +: unique-file ( path -- path' ) + "" make-unique-file ; { { [ os unix? ] [ "io.files.unique.unix" ] } { [ os windows? 
] [ "io.files.unique.windows" ] } } cond require + +default-temporary-directory current-temporary-directory set-global diff --git a/basis/io/files/unique/unix/unix.factor b/basis/io/files/unique/unix/unix.factor index ed4e120b79..9f35f440c7 100644 --- a/basis/io/files/unique/unix/unix.factor +++ b/basis/io/files/unique/unix/unix.factor @@ -7,7 +7,7 @@ IN: io.files.unique.unix : open-unique-flags ( -- flags ) { O_RDWR O_CREAT O_EXCL } flags ; -M: unix touch-unique-file ( path -- ) +M: unix (touch-unique-file) ( path -- ) open-unique-flags file-mode open-file close-file ; -M: unix temporary-path ( -- path ) "/tmp" ; +M: unix default-temporary-directory ( -- path ) "/tmp" ; diff --git a/basis/io/files/unique/windows/windows.factor b/basis/io/files/unique/windows/windows.factor index 47f30999c3..2c722426dc 100644 --- a/basis/io/files/unique/windows/windows.factor +++ b/basis/io/files/unique/windows/windows.factor @@ -3,8 +3,8 @@ io.files.windows io.ports windows destructors environment io.files.unique ; IN: io.files.unique.windows -M: windows touch-unique-file ( path -- ) +M: windows (touch-unique-file) ( path -- ) GENERIC_WRITE CREATE_NEW 0 open-file dispose ; -M: windows temporary-path ( -- path ) +M: windows default-temporary-directory ( -- path ) "TEMP" os-env ; diff --git a/basis/io/streams/limited/limited-docs.factor b/basis/io/streams/limited/limited-docs.factor index 90f7860672..fac1232cc0 100755 --- a/basis/io/streams/limited/limited-docs.factor +++ b/basis/io/streams/limited/limited-docs.factor @@ -81,7 +81,7 @@ ARTICLE: "io.streams.limited" "Limited input streams" "Unlimits a limited stream:" { $subsection unlimit } "Unlimits the current " { $link input-stream } ":" -{ $subsection limit-input } +{ $subsection unlimit-input } "Make a limited stream throw an exception on exhaustion:" { $subsection stream-throws } "Make a limited stream return " { $link f } " on exhaustion:" diff --git a/basis/io/streams/limited/limited-tests.factor 
b/basis/io/streams/limited/limited-tests.factor index c88d52be81..feddc130e9 100644 --- a/basis/io/streams/limited/limited-tests.factor +++ b/basis/io/streams/limited/limited-tests.factor @@ -1,6 +1,7 @@ USING: io io.streams.limited io.encodings io.encodings.string io.encodings.ascii io.encodings.binary io.streams.byte-array -namespaces tools.test strings kernel io.streams.string accessors ; +namespaces tools.test strings kernel io.streams.string accessors +io.encodings.utf8 io.files destructors ; IN: io.streams.limited.tests [ ] [ @@ -59,3 +60,19 @@ IN: io.streams.limited.tests "abc" 3 stream-eofs limit unlimit "abc" = ] unit-test + +[ t ] +[ + "abc" 3 stream-eofs limit unlimit + "abc" = +] unit-test + +[ t ] +[ + [ + "resource:license.txt" utf8 &dispose + 3 stream-eofs limit unlimit + "resource:license.txt" utf8 &dispose + [ decoder? ] both? + ] with-destructors +] unit-test diff --git a/basis/io/streams/limited/limited.factor b/basis/io/streams/limited/limited.factor index 71c6eb67d4..1237b3aba2 100755 --- a/basis/io/streams/limited/limited.factor +++ b/basis/io/streams/limited/limited.factor @@ -5,7 +5,7 @@ USING: kernel math io io.encodings destructors accessors sequences namespaces byte-vectors fry combinators ; IN: io.streams.limited -TUPLE: limited-stream stream count limit mode ; +TUPLE: limited-stream stream count limit mode stack ; SINGLETONS: stream-throws stream-eofs ; @@ -24,13 +24,24 @@ M: decoder limit ( stream limit mode -- stream' ) M: object limit ( stream limit mode -- stream' ) ; -: unlimit ( stream -- stream' ) +GENERIC: unlimit ( stream -- stream' ) + +M: decoder unlimit ( stream -- stream' ) [ stream>> ] change-stream ; +M: object unlimit ( stream -- stream' ) + stream>> stream>> ; + : limit-input ( limit mode -- ) input-stream [ -rot limit ] change ; : unlimit-input ( -- ) input-stream [ unlimit ] change ; +: with-unlimited-stream ( stream quot -- ) + [ clone unlimit ] dip call ; inline + +: with-limited-stream ( stream limit mode quot -- ) 
+ [ limit ] dip call ; inline + ERROR: limit-exceeded ; ERROR: bad-stream-mode mode ; diff --git a/basis/mime/multipart/multipart-tests.factor b/basis/mime/multipart/multipart-tests.factor index e1bf0483bc..d91e31cca2 100644 --- a/basis/mime/multipart/multipart-tests.factor +++ b/basis/mime/multipart/multipart-tests.factor @@ -2,7 +2,7 @@ ! See http://factorcode.org/license.txt for BSD license. USING: io.encodings.ascii io.files io.files.unique kernel mime.multipart tools.test io.streams.duplex io multiline -assocs ; +assocs accessors ; IN: mime.multipart.tests : upload-separator ( -- seq ) @@ -20,11 +20,16 @@ IN: mime.multipart.tests [ t ] [ mime-test-stream [ upload-separator parse-multipart ] with-input-stream - nip "\"up.txt\"" swap key? + "file1" swap key? ] unit-test [ t ] [ mime-test-stream [ upload-separator parse-multipart ] with-input-stream - drop "\"text1\"" swap key? + "file1" swap key? +] unit-test + +[ t ] [ + mime-test-stream [ upload-separator parse-multipart ] with-input-stream + "file1" swap at filename>> "up.txt" = ] unit-test diff --git a/basis/mime/multipart/multipart.factor b/basis/mime/multipart/multipart.factor index 1cea707862..fc3024bd01 100755 --- a/basis/mime/multipart/multipart.factor +++ b/basis/mime/multipart/multipart.factor @@ -3,7 +3,7 @@ USING: multiline kernel sequences io splitting fry namespaces http.parsers hashtables assocs combinators ascii io.files.unique accessors io.encodings.binary io.files byte-arrays math -io.streams.string combinators.short-circuit strings ; +io.streams.string combinators.short-circuit strings math.order ; IN: mime.multipart CONSTANT: buffer-size 65536 @@ -16,8 +16,7 @@ header content-disposition bytes filename temp-file name name-content -uploaded-files -form-variables ; +mime-parts ; TUPLE: mime-file headers filename temporary-path ; TUPLE: mime-variable headers key value ; @@ -25,8 +24,7 @@ TUPLE: mime-variable headers key value ; : ( mime-separator -- multipart ) multipart new swap 
>>mime-separator - H{ } clone >>uploaded-files - H{ } clone >>form-variables ; + H{ } clone >>mime-parts ; ERROR: bad-header bytes ; @@ -47,11 +45,7 @@ ERROR: end-of-stream multipart ; dup bytes>> [ fill-bytes ] unless ; : split-bytes ( bytes separator -- leftover-bytes safe-to-dump ) - 2dup [ length ] [ length 1- ] bi* < [ - drop f - ] [ - length 1- cut-slice swap - ] if ; + dupd [ length ] bi@ 1- - short cut-slice swap ; : dump-until-separator ( multipart -- multipart ) dup @@ -59,11 +53,10 @@ ERROR: end-of-stream multipart ; [ nip ] [ start ] 2bi [ cut-slice [ mime-write ] - [ over current-separator>> length tail-slice >>bytes ] bi* + [ over current-separator>> length short tail-slice >>bytes ] bi* ] [ drop - dup [ bytes>> ] [ current-separator>> ] bi split-bytes - [ mime-write ] when* + dup [ bytes>> ] [ current-separator>> ] bi split-bytes mime-write >>bytes fill-bytes dup end-of-stream?>> [ dump-until-separator ] unless ] if* ; @@ -72,31 +65,43 @@ ERROR: end-of-stream multipart ; [ dump-until-separator ] with-string-writer ; : read-header ( multipart -- multipart ) - "\r\n\r\n" dump-string dup "--\r" = [ - drop + dup bytes>> "--\r\n" sequence= [ + t >>end-of-stream? ] [ - parse-headers >>header + "\r\n\r\n" dump-string parse-headers >>header ] if ; : empty-name? ( string -- ? ) { "''" "\"\"" "" f } member? ; +: quote? ( ch -- ? ) "'\"" member? ; + +: quoted? ( str -- ? ) + { + [ length 1 > ] + [ first quote? ] + [ [ first ] [ peek ] bi = ] + } 1&& ; + +: unquote ( str -- newstr ) + dup quoted? [ but-last-slice rest-slice >string ] when ; + : save-uploaded-file ( multipart -- ) dup filename>> empty-name? [ drop ] [ [ [ header>> ] [ filename>> ] [ temp-file>> ] tri mime-file boa ] - [ filename>> ] - [ uploaded-files>> set-at ] tri + [ content-disposition>> "name" swap at unquote ] + [ mime-parts>> set-at ] tri ] if ; -: save-form-variable ( multipart -- ) +: save-mime-part ( multipart -- ) dup name>> empty-name? 
[ drop ] [ - [ [ header>> ] [ name>> ] [ name-content>> ] tri mime-variable boa ] - [ name>> ] - [ form-variables>> set-at ] tri + [ [ header>> ] [ name>> unquote ] [ name-content>> ] tri mime-variable boa ] + [ name>> unquote ] + [ mime-parts>> set-at ] tri ] if ; : dump-mime-file ( multipart filename -- multipart ) @@ -119,12 +124,13 @@ ERROR: unknown-content-disposition multipart ; : parse-form-data ( multipart -- multipart ) "filename" lookup-disposition [ + unquote >>filename [ dump-file ] [ save-uploaded-file ] bi ] [ "name" lookup-disposition [ [ dup mime-separator>> dump-string >>name-content ] dip - >>name dup save-form-variable + >>name dup save-mime-part ] [ unknown-content-disposition ] if* @@ -157,6 +163,6 @@ ERROR: no-content-disposition multipart ; read-header dup end-of-stream?>> [ process-header parse-multipart-loop ] unless ; -: parse-multipart ( separator -- form-variables uploaded-files ) - parse-beginning parse-multipart-loop - [ form-variables>> ] [ uploaded-files>> ] bi ; +: parse-multipart ( separator -- mime-parts ) + parse-beginning fill-bytes parse-multipart-loop + mime-parts>> ; diff --git a/basis/sorting/human/human-docs.factor b/basis/sorting/human/human-docs.factor index 5342b28317..5cd04c8090 100644 --- a/basis/sorting/human/human-docs.factor +++ b/basis/sorting/human/human-docs.factor @@ -11,19 +11,19 @@ HELP: find-numbers } { $description "Splits a string on numbers and returns a sequence of sequences and integers." } ; -HELP: human-<=> +HELP: human<=> { $values { "obj1" object } { "obj2" object } { "<=>" "an ordering specifier" } } { $description "Compares two objects after converting numbers in the string into integers." } ; -HELP: human->=< +HELP: human>=< { $values { "obj1" object } { "obj2" object } { ">=<" "an ordering specifier" } } -{ $description "Compares two objects using the " { $link human-<=> } " word and inverts the result." 
} ; +{ $description "Compares two objects using the " { $link human<=> } " word and inverts the result." } ; HELP: human-compare { $values @@ -44,22 +44,22 @@ HELP: human-sort-keys { "seq" "an alist" } { "sortedseq" "a new sorted sequence" } } -{ $description "Sorts the elements comparing first elements of pairs using the " { $link human-<=> } " word." } ; +{ $description "Sorts the elements comparing first elements of pairs using the " { $link human<=> } " word." } ; HELP: human-sort-values { $values { "seq" "an alist" } { "sortedseq" "a new sorted sequence" } } -{ $description "Sorts the elements comparing second elements of pairs using the " { $link human-<=> } " word." } ; +{ $description "Sorts the elements comparing second elements of pairs using the " { $link human<=> } " word." } ; { <=> >=< human-compare human-sort human-sort-keys human-sort-values } related-words ARTICLE: "sorting.human" "sorting.human" "The " { $vocab-link "sorting.human" } " vocabulary sorts by numbers as a human would -- by comparing their magnitudes -- rather than in a lexicographic way. For example, sorting a1, a10, a03, a2 with human sort returns a1, a2, a03, a10, while sorting with natural sort returns a03, a1, a10, a2." 
$nl "Comparing two objects:" -{ $subsection human-<=> } -{ $subsection human->=< } +{ $subsection human<=> } +{ $subsection human>=< } { $subsection human-compare } "Sort a sequence:" { $subsection human-sort } diff --git a/basis/sorting/human/human.factor b/basis/sorting/human/human.factor index 2c4d391a60..1c7392901b 100644 --- a/basis/sorting/human/human.factor +++ b/basis/sorting/human/human.factor @@ -7,13 +7,13 @@ IN: sorting.human : find-numbers ( string -- seq ) [EBNF Result = ([0-9]+ => [[ string>number ]] | (!([0-9]) .)+)* EBNF] ; -: human-<=> ( obj1 obj2 -- <=> ) [ find-numbers ] bi@ <=> ; +: human<=> ( obj1 obj2 -- <=> ) [ find-numbers ] bi@ <=> ; -: human->=< ( obj1 obj2 -- >=< ) human-<=> invert-comparison ; inline +: human>=< ( obj1 obj2 -- >=< ) human<=> invert-comparison ; inline -: human-compare ( obj1 obj2 quot -- <=> ) bi@ human-<=> ; +: human-compare ( obj1 obj2 quot -- <=> ) bi@ human<=> ; -: human-sort ( seq -- seq' ) [ human-<=> ] sort ; +: human-sort ( seq -- seq' ) [ human<=> ] sort ; : human-sort-keys ( seq -- sortedseq ) [ [ first ] human-compare ] sort ; diff --git a/basis/sorting/slots/slots-tests.factor b/basis/sorting/slots/slots-tests.factor index 7a4eeb8e75..46824c6fdb 100644 --- a/basis/sorting/slots/slots-tests.factor +++ b/basis/sorting/slots/slots-tests.factor @@ -41,7 +41,7 @@ TUPLE: tuple2 d ; T{ sort-test f 1 1 11 } T{ sort-test f 2 5 3 } T{ sort-test f 2 5 2 } - } { { a>> human-<=> } { b>> human->=< } { c>> <=> } } sort-by-slots + } { { a>> human<=> } { b>> human>=< } { c>> <=> } } sort-by-slots ] unit-test [ @@ -64,7 +64,7 @@ TUPLE: tuple2 d ; T{ sort-test f 2 5 3 } T{ sort-test f 2 5 2 } } - { { a>> human-<=> } { b>> <=> } } [ sort-by-slots ] keep + { { a>> human<=> } { b>> <=> } } [ sort-by-slots ] keep [ but-last-slice ] map split-by-slots [ >array ] map ] unit-test diff --git a/core/continuations/continuations-docs.factor b/core/continuations/continuations-docs.factor index ea3470feb3..3364f44657 100644 --- 
a/core/continuations/continuations-docs.factor +++ b/core/continuations/continuations-docs.factor @@ -245,8 +245,9 @@ HELP: retry } { $description "Tries the quotation up to " { $snippet "n" } " times until it returns true. Retries the quotation if an exception is thrown or if the quotation returns " { $link f } ". The quotation is expected to have side effects that may fail, such as generating a random name for a new file until successful." } { $examples + "Try to get a 0 as a random number:" { $unchecked-example "USING: continuations math prettyprint ;" - "[ 5 random 0 = ] retry t" + "[ 5 random 0 = ] 5 retry t" "t" } } ; diff --git a/core/syntax/syntax-docs.factor b/core/syntax/syntax-docs.factor index 6697b9ec5a..9962bcdb87 100644 --- a/core/syntax/syntax-docs.factor +++ b/core/syntax/syntax-docs.factor @@ -604,7 +604,7 @@ HELP: MIXIN: HELP: INSTANCE: { $syntax "INSTANCE: instance mixin" } -{ $values { "instance" "a class word" } { "instance" "a class word" } } +{ $values { "instance" "a class word" } { "mixin" "a mixin class word" } } { $description "Makes " { $snippet "instance" } " an instance of " { $snippet "mixin" } "." } ; HELP: PREDICATE: diff --git a/extra/literals/authors.txt b/extra/literals/authors.txt new file mode 100644 index 0000000000..f13c9c1e77 --- /dev/null +++ b/extra/literals/authors.txt @@ -0,0 +1 @@ +Joe Groff diff --git a/extra/literals/literals-docs.factor b/extra/literals/literals-docs.factor new file mode 100644 index 0000000000..ae25c75495 --- /dev/null +++ b/extra/literals/literals-docs.factor @@ -0,0 +1,61 @@ +! Copyright (C) 2008 Joe Groff. +! See http://factorcode.org/license.txt for BSD license. +USING: help.markup help.syntax multiline ; +IN: literals + +HELP: $ +{ $syntax "$ word" } +{ $description "Executes " { $snippet "word" } " at parse time and adds the result(s) to the parser accumulator." 
} +{ $notes "Since " { $snippet "word" } " is executed at parse time, " { $snippet "$" } " cannot be used with words defined in the same compilation unit." } +{ $examples + + { $example <" +USING: kernel literals prettyprint ; +IN: scratchpad + +<< : five 5 ; >> +{ $ five } . + "> "{ 5 }" } + + { $example <" +USING: kernel literals prettyprint ; +IN: scratchpad + +<< : seven-eleven 7 11 ; >> +{ $ seven-eleven } . + "> "{ 7 11 }" } + +} ; + +HELP: $[ +{ $syntax "$[ code ]" } +{ $description "Calls " { $snippet "code" } " at parse time and adds the result(s) to the parser accumulator." } +{ $notes "Since " { $snippet "code" } " is executed at parse time, it cannot reference any words defined in the same compilation unit." } +{ $examples + + { $example <" +USING: kernel literals math prettyprint ; +IN: scratchpad + +<< : five 5 ; >> +{ $[ five dup 1+ dup 2 + ] } . + "> "{ 5 6 8 }" } + +} ; + +{ POSTPONE: $ POSTPONE: $[ } related-words + +ARTICLE: "literals" "Interpolating code results into literal values" +"The " { $vocab-link "literals" } " vocabulary contains words to run code at parse time and insert the results into more complex literal values." +{ $example <" +USING: kernel literals math prettyprint ; +IN: scratchpad + +<< : five 5 ; >> +{ $ five $[ five dup 1+ dup 2 + ] } . 
+ "> "{ 5 5 6 8 }" } +{ $subsection POSTPONE: $ } +{ $subsection POSTPONE: $[ } +; + +ABOUT: "literals" diff --git a/extra/literals/literals-tests.factor b/extra/literals/literals-tests.factor index b88a286a59..185d672dd3 100644 --- a/extra/literals/literals-tests.factor +++ b/extra/literals/literals-tests.factor @@ -1,4 +1,4 @@ -USING: kernel literals tools.test ; +USING: kernel literals math tools.test ; IN: literals.tests << @@ -10,3 +10,5 @@ IN: literals.tests [ { 5 } ] [ { $ five } ] unit-test [ { 7 11 } ] [ { $ seven-eleven } ] unit-test [ { 6 6 6 } ] [ { $ six-six-six } ] unit-test + +[ { 8 8 8 } ] [ { $[ six-six-six [ 2 + ] tri@ ] } ] unit-test diff --git a/extra/literals/literals.factor b/extra/literals/literals.factor index d46f492cd4..a450c2118e 100644 --- a/extra/literals/literals.factor +++ b/extra/literals/literals.factor @@ -1,4 +1,6 @@ -USING: continuations kernel parser words ; +! (c) Joe Groff, see license for details +USING: continuations kernel parser words quotations ; IN: literals : $ scan-word [ execute ] curry with-datastack ; parsing +: $[ \ ] parse-until >quotation with-datastack ; parsing diff --git a/extra/literals/summary.txt b/extra/literals/summary.txt new file mode 100644 index 0000000000..dfeb9fe797 --- /dev/null +++ b/extra/literals/summary.txt @@ -0,0 +1 @@ +Expression interpolation into sequence literals diff --git a/extra/literals/tags.txt b/extra/literals/tags.txt new file mode 100644 index 0000000000..71c0ff7282 --- /dev/null +++ b/extra/literals/tags.txt @@ -0,0 +1 @@ +syntax diff --git a/extra/sequences/n-based/authors.txt b/extra/sequences/n-based/authors.txt new file mode 100644 index 0000000000..f13c9c1e77 --- /dev/null +++ b/extra/sequences/n-based/authors.txt @@ -0,0 +1 @@ +Joe Groff diff --git a/extra/sequences/n-based/n-based-docs.factor b/extra/sequences/n-based/n-based-docs.factor new file mode 100644 index 0000000000..ca5ac57cec --- /dev/null +++ b/extra/sequences/n-based/n-based-docs.factor @@ -0,0 +1,66 @@ +! 
(c)2008 Joe Groff, see BSD license etc. +USING: help.markup help.syntax kernel math multiline sequences ; +IN: sequences.n-based + +HELP: <n-based-assoc> +{ $values { "seq" sequence } { "base" integer } { "n-based-assoc" n-based-assoc } } +{ $description "Wraps " { $snippet "seq" } " in an " { $link n-based-assoc } " wrapper." } +{ $examples +{ $example <" +USING: assocs prettyprint kernel sequences.n-based ; +IN: scratchpad + +: months + { + "January" + "February" + "March" + "April" + "May" + "June" + "July" + "August" + "September" + "October" + "November" + "December" + } 1 <n-based-assoc> ; + +10 months at . +"> "\"October\"" } } ; + +HELP: n-based-assoc +{ $class-description "An adaptor class that allows a sequence to be treated as an assoc with non-zero-based keys." } +{ $examples +{ $example <" +USING: assocs prettyprint kernel sequences.n-based ; +IN: scratchpad + +: months + { + "January" + "February" + "March" + "April" + "May" + "June" + "July" + "August" + "September" + "October" + "November" + "December" + } 1 <n-based-assoc> ; + +10 months at . +"> "\"October\"" } } ; + +{ <n-based-assoc> n-based-assoc } related-words + +ARTICLE: "sequences.n-based" "sequences.n-based" +"The " { $vocab-link "sequences.n-based" } " vocabulary provides a sequence adaptor that allows a sequence to be treated as an assoc with non-zero-based keys." +{ $subsection n-based-assoc } +{ $subsection <n-based-assoc> } +; + +ABOUT: "sequences.n-based" diff --git a/extra/sequences/n-based/n-based-tests.factor b/extra/sequences/n-based/n-based-tests.factor new file mode 100644 index 0000000000..7ee5bd649f --- /dev/null +++ b/extra/sequences/n-based/n-based-tests.factor @@ -0,0 +1,64 @@ +! (c)2008 Joe Groff, see BSD license etc. 
+USING: kernel accessors assocs +sequences sequences.n-based tools.test ; +IN: sequences.n-based.tests + +: months + V{ + "January" + "February" + "March" + "April" + "May" + "June" + "July" + "August" + "September" + "October" + "November" + "December" + } clone 1 <n-based-assoc> ; inline + +[ "December" t ] +[ 12 months at* ] unit-test +[ f f ] +[ 13 months at* ] unit-test +[ f f ] +[ 0 months at* ] unit-test + +[ 12 ] [ months assoc-size ] unit-test + +[ { + { 1 "January" } + { 2 "February" } + { 3 "March" } + { 4 "April" } + { 5 "May" } + { 6 "June" } + { 7 "July" } + { 8 "August" } + { 9 "September" } + { 10 "October" } + { 11 "November" } + { 12 "December" } +} ] [ months >alist ] unit-test + +[ V{ + "January" + "February" + "March" + "April" + "May" + "June" + "July" + "August" + "September" + "October" + "November" + "December" + "Smarch" +} ] [ "Smarch" 13 months [ set-at ] keep seq>> ] unit-test + +[ V{ } ] [ months [ clear-assoc ] keep seq>> ] unit-test + + diff --git a/extra/sequences/n-based/n-based.factor b/extra/sequences/n-based/n-based.factor new file mode 100644 index 0000000000..78fe851389 --- /dev/null +++ b/extra/sequences/n-based/n-based.factor @@ -0,0 +1,31 @@ +! (c)2008 Joe Groff, see BSD license etc. +USING: accessors assocs kernel math math.ranges sequences +sequences.private ; +IN: sequences.n-based + +TUPLE: n-based-assoc seq base ; +C: <n-based-assoc> n-based-assoc + +<PRIVATE +: n-based@ ( key assoc -- n seq ) + [ base>> - ] [ nip seq>> ] 2bi ; +: n-based-keys ( assoc -- range ) + [ base>> ] [ assoc-size ] bi 1 <range> ; + +PRIVATE> + +INSTANCE: n-based-assoc assoc +M: n-based-assoc at* ( key assoc -- value ? ) + n-based@ 2dup bounds-check? 
+ [ nth-unsafe t ] [ 2drop f f ] if ; +M: n-based-assoc assoc-size ( assoc -- size ) + seq>> length ; +M: n-based-assoc >alist ( assoc -- alist ) + [ n-based-keys ] [ seq>> ] bi zip ; +M: n-based-assoc set-at ( value key assoc -- ) + n-based@ set-nth ; +M: n-based-assoc delete-at ( key assoc -- ) + [ f ] 2dip n-based@ set-nth ; +M: n-based-assoc clear-assoc ( assoc -- ) + seq>> delete-all ; diff --git a/extra/sequences/n-based/summary.txt b/extra/sequences/n-based/summary.txt new file mode 100644 index 0000000000..a8097a3131 --- /dev/null +++ b/extra/sequences/n-based/summary.txt @@ -0,0 +1 @@ +Sequence adaptor to treat a sequence as an n-based assoc diff --git a/extra/sequences/n-based/tags.txt b/extra/sequences/n-based/tags.txt new file mode 100644 index 0000000000..1ee19c1323 --- /dev/null +++ b/extra/sequences/n-based/tags.txt @@ -0,0 +1,2 @@ +sequences +assocs