factor/extra/http/client/client.factor

! Copyright (C) 2005, 2008 Slava Pestov.
! See http://factorcode.org/license.txt for BSD license.
USING: assocs http kernel math math.parser namespaces sequences
io io.sockets io.streams.string io.files io.timeouts strings
splitting calendar continuations accessors vectors math.order
io.encodings.8-bit io.encodings.binary io.streams.duplex
fry debugger inspector ;
IN: http.client

: max-redirects ( -- n ) 10 ;

ERROR: too-many-redirects ;

M: too-many-redirects summary
    drop
    [ "Redirection limit of " % max-redirects # " exceeded" % ] "" make ;
DEFER: http-request

<PRIVATE
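
! Split "http://host:port/resource" into the resource (with leading "/"),
! the host name, and the port. Only http:// URLs are accepted.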
: parse-url ( url -- resource host port )
    "http://" ?head [ "Only http:// supported" throw ] unless
    "/" split1 [ "/" prepend ] [ "/" ] if*
    swap parse-host ;
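
! Split a path of the form "path?query" and fill in the request's
! path and query slots.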
: store-path ( request path -- request )
    "?" split1 >r >>path r> dup [ query>assoc ] when >>query ;
: request-with-url ( request url -- request )
    parse-url >r >r store-path r> >>host r> >>port ;

SYMBOL: redirects

: absolute-url? ( url -- ? )
    [ "http://" head? ] [ "https://" head? ] bi or ;
: do-redirect ( response data -- response data )
    over code>> 300 399 between? [
        drop
        redirects inc
        redirects get max-redirects < [
            request get
            swap "location" header dup absolute-url?
            [ request-with-url ] [ store-path ] if
            "GET" >>method http-request
        ] [
            too-many-redirects
        ] if
    ] when ;

PRIVATE>
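
! Decode a chunked transfer-encoding body: each chunk is a hex length
! line followed by that many bytes; a zero length chunk ends the body.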
: read-chunks ( -- )
    read-crlf ";" split1 drop hex> dup { f 0 } member?
    [ drop ] [ read % read-crlf "" assert= read-chunks ] if ;

: read-response-body ( response -- response data )
    dup "transfer-encoding" header "chunked" =
    [ [ read-chunks ] "" make ] [ input-stream get contents ] if ;
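
! Connect to the server with a one minute timeout, write the request,
! read the response and its body, then follow any redirect. The request
! is kept in the request variable so do-redirect can re-issue it.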
: http-request ( request -- response data )
    dup request [
        dup request-addr latin1 [
            1 minutes timeouts
            write-request
            read-response
            read-response-body
        ] with-client
        do-redirect
    ] with-variable ;

: <get-request> ( url -- request )
    <request>
    swap request-with-url
    "GET" >>method ;

: http-get* ( url -- response data )
    <get-request> http-request ;

: success? ( code -- ? ) 200 = ;

ERROR: download-failed response body ;

M: download-failed error.
    "HTTP download failed:" print nl
    [
        response>>
        write-response-code
        write-response-message nl
        drop
    ]
    [ body>> write ] bi ;
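
! Signal download-failed unless the response code indicates success.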
: check-response ( response string -- string )
    over code>> success? [ nip ] [ download-failed ] if ;

: http-get ( url -- string )
    http-get* check-response ;

: download-name ( url -- name )
    file-name "?" split1 drop "/" ?tail drop ;

: download-to ( url file -- )
    #! Downloads the contents of a URL to a file.
    >r http-get r> latin1 [ write ] with-file-writer ;

: download ( url -- )
    dup download-name download-to ;

: <post-request> ( content-type content url -- request )
    <request>
    "POST" >>method
    swap request-with-url
    swap >>post-data
    swap >>post-data-type ;

: http-post ( content-type content url -- response data )
    <post-request> http-request ;
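
! Illustrative examples (URLs are placeholders, network access assumed):
!
!     "http://factorcode.org/" http-get print
!     "http://factorcode.org/logo.png" download
!     "application/x-www-form-urlencoded" "a=1&b=2"
!     "http://localhost/submit" http-post 2drop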