http.client: remove http-get* and friends; change http-request* and with-http-request* to not check response codes.

db4
John Benediktsson 2014-03-12 19:29:35 -07:00
parent 7c06afa219
commit 712be93989
22 changed files with 65 additions and 128 deletions
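The rest of the commit applies one mechanical migration: the starred words are gone, the plain words still check the response code and return both values, and callers that only want the body follow them with nip. A minimal sketch of the pattern (the URLs are placeholders):

USING: http.client kernel ;

! Before: "http://factorcode.org" http-get*  left only the body on the stack.
! After: http-get returns ( response data ) and still checks the response code;
! nip keeps just the body, as the call sites below do.
"http://factorcode.org" http-get nip

! The same replacement applies to the other removed variants, for example:
! post-data "http://example.com/submit" http-post nip   ! instead of http-post*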

View File

@ -7,7 +7,7 @@ IN: bootstrap.image.download
CONSTANT: url URL" http://downloads.factorcode.org/images/latest/"
: download-checksums ( -- alist )
url "checksums.txt" >url derive-url http-get*
url "checksums.txt" >url derive-url http-get nip
string-lines [ " " split1 ] { } map>assoc ;
: file-checksum ( image -- checksum )

View File

@ -53,7 +53,7 @@ fry http.client kernel urls ;
URL" http://www.oracle.com"
}
2 <semaphore> '[
_ [ http-get* ] with-semaphore
_ [ http-get nip ] with-semaphore
] parallel-map"""
} ;

View File

@ -59,7 +59,7 @@ M: recaptcha call-responder*
{ "privatekey" private-key }
{ "remoteip" remote-ip }
} URL" http://api-verify.recaptcha.net/verify"
http-post* parse-recaptcha-response ;
http-post nip parse-recaptcha-response ;
: validate-recaptcha-params ( -- )
{

View File

@ -55,98 +55,60 @@ HELP: http-get
{ $description "Downloads the contents of a URL." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-get*
{ $values { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-get } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-post
{ $values { "post-data" object } { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Submits an HTTP POST request." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-post*
{ $values { "post-data" object } { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-post } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-put
{ $values { "post-data" object } { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Submits an HTTP PUT request." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-put*
{ $values { "post-data" object } { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-put } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-head
{ $values { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Same as " { $link http-get } " except that the server is not supposed to return a message-body in the response, as per RFC2616. However in practise, most web servers respond to GET and HEAD method calls with identical responses." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-head*
{ $values { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-head } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-delete
{ $values { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Requests that the origin server delete the resource identified by the URL." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-delete*
{ $values { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-delete } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-options
{ $values { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Submits an HTTP OPTIONS request." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-options*
{ $values { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-options } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-trace
{ $values { "url" "a " { $link url } " or " { $link string } } { "response" response } { "data" sequence } }
{ $description "Submits an HTTP TRACE request." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-trace*
{ $values { "url" "a " { $link url } " or " { $link string } } { "data" sequence } }
{ $description "A variant of " { $link http-trace } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: with-http-get
{ $values { "url" "a " { $link url } " or " { $link string } } { "quot" { $quotation "( chunk -- )" } } { "response" response } }
{ $description "Downloads the contents of a URL. Chunks of data are passed to the quotation as they are read." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: with-http-get*
{ $values { "url" "a " { $link url } " or " { $link string } } { "quot" { $quotation "( chunk -- )" } } }
{ $description "A variant of " { $link with-http-get } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
HELP: http-request
{ $values { "request" request } { "response" response } { "data" sequence } }
{ $description "Sends an HTTP request to an HTTP server, and reads the response." }
{ $description "A variant of " { $link http-request* } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails." } ;
HELP: http-request*
{ $values { "request" request } { "data" sequence } }
{ $description "A variant of " { $link http-request } " that checks that the response was successful." }
{ $errors "Throws an error if the HTTP request fails or is not successful." } ;
{ $values { "request" request } { "response" response } { "data" sequence } }
{ $description "Sends an HTTP request to an HTTP server, and reads the response." } ;
HELP: with-http-request
{ $values { "request" request } { "quot" { $quotation "( chunk -- )" } } { "response" response } }
{ $description "A variant of " { $link with-http-request* } " that checks that the response was successful." } ;
HELP: with-http-request*
{ $values { "request" request } { "quot" { $quotation "( chunk -- )" } } { "response" response } }
{ $description "Sends an HTTP request to an HTTP server, and reads the response incrementally. Chunks of data are passed to the quotation as they are read. Does not throw an error if the HTTP request fails; to do so, call " { $link check-response } " on the " { $snippet "response" } "." } ;
{ http-request http-request* with-http-request with-http-request* } related-words
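A sketch of how the checked and unchecked layers now relate; the URL is a placeholder, and check-response is assumed to throw when the response was not successful, as described above:

USING: http.client kernel ;

! Unchecked: build a request and get ( response data ) back with no status check.
! Applying check-response under the data is exactly what http-request now does.
"http://factorcode.org" <get-request> http-request* [ check-response ] dip 2drop

! The streaming word behaves the same way: with-http-request* returns the
! response unchecked, and the caller may run check-response on it afterwards.
"http://factorcode.org" <get-request> [ drop ] with-http-request* check-response drop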
ARTICLE: "http.client.get" "GET requests with the HTTP client"
"Basic usage involves passing a " { $link url } " and getting a " { $link response } " and data back:"
{ $subsections http-get http-get* }
{ $subsections
http-get
}
"Utilities to retrieve a " { $link url } " and save the contents to a file:"
{ $subsections
download
@ -158,11 +120,10 @@ ARTICLE: "http.client.get" "GET requests with the HTTP client"
http-request
http-request*
}
"The " { $link http-get } " and " { $link http-request } " words output sequences. This is undesirable if the response data may be large. Another pair of words take a quotation instead, and pass the quotation chunks of data incrementally:"
"The " { $link http-request } " and " { $link http-request* } " words output sequences. This is undesirable if the response data may be large. Another pair of words take a quotation instead, and pass the quotation chunks of data incrementally:"
{ $subsections
with-http-get
with-http-get*
with-http-request
with-http-request*
} ;
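As a sketch of the incremental interface, the chunk quotation can write each chunk straight to a file instead of accumulating the body in memory; this mirrors the reworked download-to later in this commit (the file name and URL are placeholders):

USING: http.client io io.encodings.binary io.files kernel urls ;

! Each chunk read from the connection is passed to [ write ], so the whole
! response body never has to be held in memory at once.
"out.bin" binary [
    URL" http://factorcode.org/" <get-request> [ write ] with-http-request drop
] with-file-writer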
ARTICLE: "http.client.post-data" "HTTP client post data"
@ -185,21 +146,21 @@ ARTICLE: "http.client.post-data" "HTTP client post data"
ARTICLE: "http.client.post" "POST requests with the HTTP client"
"Basic usage involves passing post data and a " { $link url } ", and getting a " { $link response } " and data back:"
{ $subsections http-post http-post* }
{ $subsections http-post }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections <post-request> }
"Both words take a post data parameter; see " { $link "http.client.post-data" } "." ;
ARTICLE: "http.client.put" "PUT requests with the HTTP client"
"Basic usage involves passing post data and a " { $link url } ", and getting a " { $link response } " and data back:"
{ $subsections http-put http-put* }
{ $subsections http-put }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections <put-request> }
"Both words take a post data parameter; see " { $link "http.client.post-data" } "." ;
ARTICLE: "http.client.head" "HEAD requests with the HTTP client"
"Basic usage involves passing a " { $link url } " and getting a " { $link response } " and data back:"
{ $subsections http-head http-head* }
{ $subsections http-head }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections
<head-request>
@ -207,7 +168,7 @@ ARTICLE: "http.client.head" "HEAD requests with the HTTP client"
ARTICLE: "http.client.delete" "DELETE requests with the HTTP client"
"Basic usage involves passing a " { $link url } " and getting a " { $link response } " and data back:"
{ $subsections http-delete http-delete* }
{ $subsections http-delete }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections
<delete-request>
@ -215,7 +176,7 @@ ARTICLE: "http.client.delete" "DELETE requests with the HTTP client"
ARTICLE: "http.client.options" "OPTIONS requests with the HTTP client"
"Basic usage involves passing a " { $link url } " and getting a " { $link response } " and data back:"
{ $subsections http-options http-options* }
{ $subsections http-options }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections
<options-request>
@ -224,7 +185,7 @@ ARTICLE: "http.client.options" "OPTIONS requests with the HTTP client"
ARTICLE: "http.client.trace" "TRACE requests with the HTTP client"
"Basic usage involves passing a " { $link url } " and getting a " { $link response } " and data back:"
{ $subsections http-trace http-trace* }
{ $subsections http-trace }
"Advanced usage involves constructing a " { $link request } ", which allows " { $link "http.cookies" } " and " { $link "http.headers" } " to be set:"
{ $subsections
<trace-request>

View File

@ -1,14 +1,12 @@
! Copyright (C) 2005, 2010 Slava Pestov.
! See http://factorcode.org/license.txt for BSD license.
USING: assocs combinators.short-circuit kernel math math.parser
namespaces make sequences strings splitting calendar
continuations accessors vectors math.order hashtables
byte-arrays destructors io io.sockets io.streams.string io.files
io.timeouts io.pathnames io.encodings io.encodings.string
io.encodings.ascii io.encodings.utf8 io.encodings.binary
io.encodings.iana io.crlf io.streams.duplex fry ascii urls
urls.encoding present locals http http.parsers
http.client.post-data mime.types ;
USING: accessors ascii assocs calendar combinators.short-circuit
destructors fry hashtables http http.client.post-data
http.parsers io io.crlf io.encodings io.encodings.ascii
io.encodings.binary io.encodings.iana io.encodings.string
io.files io.pathnames io.sockets io.timeouts kernel locals math
math.order math.parser mime.types namespaces present sequences
splitting urls vocabs.loader ;
IN: http.client
ERROR: too-many-redirects ;
@ -163,15 +161,18 @@ ERROR: download-failed response ;
: check-response-with-body ( response body -- response body )
[ >>body check-response ] keep ;
: with-http-request ( request quot: ( chunk -- ) -- response )
: with-http-request* ( request quot: ( chunk -- ) -- response )
[ (with-http-request) ] with-destructors ; inline
: http-request ( request -- response data )
[ [ % ] with-http-request ] B{ } make
over content-encoding>> decode check-response-with-body ;
: with-http-request ( request quot: ( chunk -- ) -- response )
with-http-request* check-response ; inline
: http-request* ( request -- data )
http-request swap check-response drop ;
: http-request* ( request -- response data )
BV{ } clone [ '[ _ push-all ] with-http-request* ] keep
B{ } like over content-encoding>> decode [ >>body ] keep ;
: http-request ( request -- response data )
http-request* [ check-response ] dip ;
: <get-request> ( url -- request )
"GET" <client-request> ;
@ -179,20 +180,13 @@ ERROR: download-failed response ;
: http-get ( url -- response data )
<get-request> http-request ;
: http-get* ( url -- data )
http-get swap check-response drop ;
: with-http-get ( url quot: ( chunk -- ) -- response )
[ <get-request> ] dip with-http-request ; inline
: with-http-get* ( url quot: ( chunk -- ) -- )
with-http-get check-response drop ; inline
: download-name ( url -- name )
present file-name "?" split1 drop "/" ?tail drop ;
: download-to ( url file -- )
binary [ [ write ] with-http-get* ] with-file-writer ;
binary [
<get-request> [ write ] with-http-request drop
] with-file-writer ;
: download ( url -- )
dup download-name download-to ;
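A usage sketch for the reworked word, reusing the checksums URL from the bootstrap change at the top of this commit; the local file name is chosen for illustration:

USING: http.client urls ;

! Streams the response body to checksums.txt chunk by chunk via with-http-request.
URL" http://downloads.factorcode.org/images/latest/checksums.txt" "checksums.txt" download-to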
@ -204,9 +198,6 @@ ERROR: download-failed response ;
: http-post ( post-data url -- response data )
<post-request> http-request ;
: http-post* ( post-data url -- data )
http-post swap check-response drop ;
: <put-request> ( post-data url -- request )
"PUT" <client-request>
swap >>post-data ;
@ -214,45 +205,30 @@ ERROR: download-failed response ;
: http-put ( post-data url -- response data )
<put-request> http-request ;
: http-put* ( post-data url -- data )
http-put swap check-response drop ;
: <delete-request> ( url -- request )
"DELETE" <client-request> ;
: http-delete ( url -- response data )
<delete-request> http-request ;
: http-delete* ( url -- data )
http-delete swap check-response drop ;
: <head-request> ( url -- request )
"HEAD" <client-request> ;
: http-head ( url -- response data )
<head-request> http-request ;
: http-head* ( url -- data )
http-head swap check-response drop ;
: <options-request> ( url -- request )
"OPTIONS" <client-request> ;
: http-options ( url -- response data )
<options-request> http-request ;
: http-options* ( url -- data )
http-options swap check-response drop ;
: <trace-request> ( url -- request )
"TRACE" <client-request> ;
: http-trace ( url -- response data )
<trace-request> http-request ;
: http-trace* ( url -- data )
http-trace swap check-response drop ;
USE: vocabs.loader
{ "http.client" "debugger" } "http.client.debugger" require-when

View File

@ -108,7 +108,7 @@ M: byte-array parse-feed [ bytes>xml xml>feed ] with-html-entities ;
: download-feed ( url -- feed )
#! Retrieve a news syndication file and return it as a feed tuple.
http-get* parse-feed ;
http-get nip parse-feed ;
! Atom generation

View File

@ -23,7 +23,7 @@ ERROR: bad-response json status ;
] unless ;
: json-data ( url -- json )
http-get* json> check-status "data" of ;
http-get nip json> check-status "data" of ;
: get-short-url ( short-url path -- data )
<bitly-url> swap "shortUrl" set-query-param json-data ;

View File

@ -100,6 +100,6 @@ PRIVATE>
: chart. ( chart -- )
chart>url present dup length 2000 < [ http-image. ] [
"?" split1 swap http-post*
"?" split1 swap http-post nip
"png" (image-class) load-image* image.
] if ;

View File

@ -22,7 +22,7 @@ title content unescapedUrl url titleNoFormatting fileFormat ;
PRIVATE>
: google-search ( query -- results )
search-url http-get* json>
search-url http-get nip json>
{ "responseData" "results" } deep-at
[ \ search-result from-slots ] map ;

View File

@ -16,7 +16,7 @@ CONSTANT: maximum-translation-size 5120
] { } make ;
: assoc>query-response ( assoc -- response )
google-translate-url http-post* ;
google-translate-url http-post nip ;
ERROR: response-error response error ;

View File

@ -25,7 +25,7 @@ TUPLE: post title postedBy points id url commentCount postedAgo ;
] map ;
: hacker-news-items ( -- seq )
"http://api.ihackernews.com/page" http-get*
"http://api.ihackernews.com/page" http-get nip
json> "items" of items> ;
: write-title ( title url -- )

View File

@ -21,7 +21,7 @@ SYMBOLS: latest-sources last-built-sources ;
[ maybe-download-image drop ] [ file-checksum ] bi ;
: latest-counter ( -- counter )
counter-url get-global http-get* string>number ;
counter-url get-global http-get nip string>number ;
: update-sources ( -- )
#! Must be run from builds-dir

View File

@ -44,7 +44,7 @@ public_description subscribers title url ;
TUPLE: page url data before after ;
: json-page ( url -- page )
>url dup http-get* json> "data" of {
>url dup http-get nip json> "data" of {
[ "children" of [ parse-data ] map ]
[ "before" of [ f ] when-json-null ]
[ "after" of [ f ] when-json-null ]
@ -55,7 +55,7 @@ TUPLE: page url data before after ;
: get-user-info ( username -- user )
"http://api.reddit.com/user/%s/about" sprintf
http-get* json> parse-data ;
http-get nip json> parse-data ;
: get-url-info ( url -- page )
"http://api.reddit.com/api/info?url=%s" sprintf json-page ;

View File

@ -30,7 +30,7 @@ visit-time request-rate crawl-delay unknowns ;
>url URL" robots.txt" derive-url ;
: get-robots.txt ( url -- robots.txt )
>robots.txt-url http-get* ;
>robots.txt-url http-get nip ;
: normalize-robots.txt ( string -- sitemaps seq )
string-lines

View File

@ -15,7 +15,7 @@ IN: rosetta-code.web-scraping
! and popular such as CPAN for Perl or Boost for C++.
: web-scraping-main ( -- )
"http://tycho.usno.navy.mil/cgi-bin/timer.pl" http-get*
"http://tycho.usno.navy.mil/cgi-bin/timer.pl" http-get nip
[ "UTC" swap start [ 9 - ] [ 1 - ] bi ] keep subseq print ;
MAIN: web-scraping-main

View File

@ -5,7 +5,7 @@ IN: twitter.prettyprint
MEMO: load-http-image ( url -- image/f )
'[ _
[ http-get* ] [ image-class ] bi load-image*
[ http-get nip ] [ image-class ] bi load-image*
] [ drop f ] recover ;
: user-image ( user -- image/f )

View File

@ -34,8 +34,9 @@ TUPLE: fjsc < dispatcher ;
swap append ;
: do-compile-url ( url -- response )
[
absolute-url http-get* 'expression' parse fjsc-compile write "();" write
[
absolute-url http-get nip 'expression' parse
fjsc-compile write "();" write
] with-string-writer
"application/javascript" <content> ;

View File

@ -48,7 +48,7 @@ SYMBOL: language
[ month-name ] [ day>> ] bi "%s_%s" sprintf wikipedia-url ;
: (historical-events) ( timestamp -- seq )
historical-url http-get* string>xml "ul" deep-tags-named ;
historical-url http-get nip string>xml "ul" deep-tags-named ;
: items>sequence ( tag -- seq )
children-tags [ deep-children>string ] map ;
@ -74,7 +74,7 @@ PRIVATE>
(historical-events) "Deaths" header. fourth items. ;
: article. ( name -- )
wikipedia-url http-get* parse-html
wikipedia-url http-get nip parse-html
"content" find-by-id-between
[ html-text. ] with-string-writer string-lines
[ [ blank? ] trim ] map harvest [

View File

@ -16,7 +16,7 @@ SYMBOL: wolfram-api-id
: query ( query -- xml )
url-encode wolfram-api-id get-global
"http://api.wolframalpha.com/v2/query?input=%s&appid=%s"
sprintf http-get* string>xml
sprintf http-get nip string>xml
dup "error" tag-named [
"msg" tag-named children>string throw
] when* ;

View File

@ -11,19 +11,18 @@ IN: xkcd
<PRIVATE
: comic-image ( url -- image )
http-get*
http-get nip
R" http://imgs\.xkcd\.com/comics/[^\.]+\.(png|jpg)"
first-match >string load-http-image ;
: comic-image. ( url -- ) comic-image image. ;
: comic-string ( url -- string )
http-get* string>xml
http-get nip string>xml
"transcript" "id" deep-tag-with-attr children>string ;
: comic-text. ( url -- )
comic-string
80 wrap-lines [ print ] each ;
comic-string 80 wrap-lines [ print ] each ;
: comic. ( url -- )
ui-running? [ comic-image. ] [ comic-text. ] if ;

View File

@ -57,4 +57,4 @@ CONSTANT: factor-id "fRrVAKzV34GDyeRw6bUHDhEWHRedwfOC7e61wwXZLgGF80E67spxdQXuugB
swap >>query ;
: yahoo-search ( search -- seq )
query http-get* string>xml parse-yahoo ;
query http-get nip string>xml parse-yahoo ;

View File

@ -50,7 +50,7 @@ CONSTANT: video-info-url URL" http://www.youtube.com/get_video_info"
"detailpage" "el" set-query-param
"en_US" "hl" set-query-param
swap "video_id" set-query-param
http-get* query>assoc ;
http-get nip query>assoc ;
: video-formats ( video-info -- video-formats )
"url_encoded_fmt_stream_map" of "," split