Compare commits

...

1 Commit

Author SHA1 Message Date
Doug Coleman d49562ea1f io.pathnames: Changed the parser to allow tag"payload" form strings.
Goals:
"resource:foo" -> resource"foo"
append-path for special paths
2018-06-29 23:14:20 -05:00
98 changed files with 366 additions and 276 deletions

View File

@ -4,7 +4,7 @@ USING: assocs bootstrap.image checksums checksums.md5
http.client io.files kernel math.parser splitting urls ;
IN: bootstrap.image.download
CONSTANT: url URL" http://downloads.factorcode.org/images/master/"
CONSTANT: url url"http://downloads.factorcode.org/images/master/"
: download-checksums ( -- alist )
url "checksums.txt" >url derive-url http-get nip

View File

@ -13,8 +13,8 @@ SYMBOL: bootstrap-time
: strip-encodings ( -- )
os unix? [
[
P" resource:core/io/encodings/utf16/utf16.factor"
P" resource:core/io/encodings/utf16n/utf16n.factor" [ forget ] bi@
path"core/io/encodings/utf16/utf16.factor"
path"core/io/encodings/utf16n/utf16n.factor" [ forget ] bi@
"io.encodings.utf16"
"io.encodings.utf16n" [ loaded-child-vocab-names [ forget-vocab ] each ] bi@
] with-compilation-unit

View File

@ -86,7 +86,7 @@ unit-test
] unit-test
[ 1 ] [
SBUF" " [ 1 slot 1 [ slot ] keep ] compile-call nip
sbuf"" [ 1 slot 1 [ slot ] keep ] compile-call nip
] unit-test
! Test slow shuffles

View File

@ -402,7 +402,7 @@ ERROR: bug-in-fixnum* x y a b ;
{ 1 2 3 } 2 [ vector boa ] compile-call
] unit-test
[ SBUF" hello" ] [
[ sbuf"hello" ] [
"hello world" 5 [ sbuf boa ] compile-call
] unit-test

View File

@ -104,7 +104,7 @@ SYMBOL: person4
T{ duration f 0 0 0 12 34 56 }
f
H{ { 1 2 } { 3 4 } { 5 "lol" } }
URL" http://www.google.com/search?hl=en&q=trailer+park+boys&btnG=Google+Search"
url"http://www.google.com/search?hl=en&q=trailer+park+boys&btnG=Google+Search"
}
] [ T{ person f 4 } select-tuple ] unit-test
@ -123,7 +123,7 @@ SYMBOL: person4
"2008-11-22 00:00:00" ymdhms>timestamp
"12:34:56" hms>duration
f H{ { 1 2 } { 3 4 } { 5 "lol" } }
URL" http://www.google.com/search?hl=en&q=trailer+park+boys&btnG=Google+Search" ;
url"http://www.google.com/search?hl=en&q=trailer+park+boys&btnG=Google+Search" ;
: db-assigned-person-schema ( -- )
person "PERSON"

View File

@ -259,5 +259,5 @@ MACRO: strftime ( format-string -- quot )
'[ over @ swap push-all ]
] if
] map '[
SBUF" " clone [ _ cleave drop ] keep "" like
sbuf"" clone [ _ cleave drop ] keep "" like
] ;

View File

@ -62,7 +62,7 @@ GENERIC: login-required* ( description capabilities realm -- response )
GENERIC: user-registered ( user realm -- response )
M: object user-registered 2drop URL" $realm" <redirect> ;
M: object user-registered 2drop url"$realm" <redirect> ;
GENERIC: init-realm ( realm -- )

View File

@ -15,7 +15,7 @@ IN: furnace.auth.features.deactivate-user
1 >>deleted
t >>changed?
drop
URL" $realm" end-aside
url"$realm" end-aside
] >>submit ;
: allow-deactivation ( realm -- realm )

View File

@ -52,7 +52,7 @@ IN: furnace.auth.features.edit-profile
drop
URL" $realm" end-aside
url"$realm" end-aside
] >>submit
<protected>

View File

@ -13,7 +13,7 @@ SYMBOL: lost-password-from
url get host>> host-name or ;
: new-password-url ( user -- url )
URL" recover-3" clone
url"recover-3" clone
swap
[ username>> "username" set-query-param ]
[ ticket>> "ticket" set-query-param ]
@ -63,7 +63,7 @@ SYMBOL: lost-password-from
send-password-email
] when*
URL" $realm/recover-2" <redirect>
url"$realm/recover-2" <redirect>
] >>submit ;
: <recover-action-2> ( -- action )
@ -99,7 +99,7 @@ SYMBOL: lost-password-from
"new-password" value >>encoded-password
users update-user
URL" $realm/recover-4" <redirect>
url"$realm/recover-4" <redirect>
] [
<403>
] if*

View File

@ -47,14 +47,14 @@ M: login-realm modify-form ( responder -- xml/f )
: successful-login ( user -- response )
[ username>> make-permit permit-id set ] [ init-user ] bi
URL" $realm" end-aside
url"$realm" end-aside
put-permit-cookie ;
\ successful-login DEBUG add-input-logging
: logout ( -- response )
permit-id get [ delete-permit ] when*
URL" $realm" end-aside ;
url"$realm" end-aside ;
<PRIVATE
@ -100,10 +100,10 @@ M: login-realm login-required* ( description capabilities login -- response )
[ description cset ] [ capabilities cset ] [ secure>> ] tri*
[
url get >secure-url begin-aside
URL" $realm/login" >secure-url <continue-conversation>
url"$realm/login" >secure-url <continue-conversation>
] [
url get begin-aside
URL" $realm/login" <continue-conversation>
url"$realm/login" <continue-conversation>
] if ;
M: login-realm user-registered ( user realm -- response )

View File

@ -37,6 +37,6 @@ unit-test
{ f } [ <request> request [ referrer ] with-variable ] unit-test
{ t } [ URL" http://foo" dup url [ same-host? ] with-variable ] unit-test
{ t } [ url"http://foo" dup url [ same-host? ] with-variable ] unit-test
{ f } [ f URL" http://foo" url [ same-host? ] with-variable ] unit-test
{ f } [ f url"http://foo" url [ same-host? ] with-variable ] unit-test

View File

@ -2,8 +2,8 @@ USING: furnace.recaptcha.private tools.test urls ;
IN: furnace.recaptcha.tests
{
URL" http://www.google.com/recaptcha/api/challenge"
URL" https://www.google.com/recaptcha/api/challenge"
url"http://www.google.com/recaptcha/api/challenge"
url"https://www.google.com/recaptcha/api/challenge"
} [
f recaptcha-url
t recaptcha-url

View File

@ -57,7 +57,7 @@ M: recaptcha call-responder*
{ "response" response }
{ "privatekey" private-key }
{ "remoteip" remote-ip }
} URL" http://api-verify.recaptcha.net/verify"
} url"http://api-verify.recaptcha.net/verify"
http-post nip parse-recaptcha-response ;
: validate-recaptcha-params ( -- )

View File

@ -140,7 +140,7 @@ ERROR: unknown-chloe-tag tag ;
: with-compiler ( quot -- quot' )
[
SBUF" " string-buffer namespaces:set
sbuf"" string-buffer namespaces:set
V{ } clone tag-stack namespaces:set
call
reset-buffer
@ -152,7 +152,7 @@ ERROR: unknown-chloe-tag tag ;
: compile-quot ( quot -- )
reset-buffer
[
SBUF" " string-buffer namespaces:set
sbuf"" string-buffer namespaces:set
call
reset-buffer
] [ ] make , ; inline

View File

@ -90,7 +90,7 @@ M: f call-template* drop call-next-template ;
: with-boilerplate ( child master -- )
[
title [ [ <box> ] unless* ] change
style [ [ SBUF" " clone ] unless* ] change
style [ [ sbuf"" clone ] unless* ] change
atom-feeds [ V{ } like ] change
[

View File

@ -66,7 +66,7 @@ IN: http.client.tests
] unit-test
{ "www.google.com:8080" } [
URL" http://foo:bar@www.google.com:8080/foo?bar=baz#quux" authority-uri
url"http://foo:bar@www.google.com:8080/foo?bar=baz#quux" authority-uri
] unit-test
{ "/index.html?bar=baz" } [
@ -147,7 +147,7 @@ CONSTANT: classic-proxy-settings H{
] with-variables
] unit-test
{ URL" http://proxy.private:3128" } [
{ url"http://proxy.private:3128" } [
classic-proxy-settings [
"27.0.0.1" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
@ -165,7 +165,7 @@ CONSTANT: classic-proxy-settings H{
] with-variables
] unit-test
{ URL" http://proxy.private:3128" } [
{ url"http://proxy.private:3128" } [
classic-proxy-settings [
"a.subprivate" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
@ -183,41 +183,41 @@ CONSTANT: classic-proxy-settings H{
] with-variables
] unit-test
{ URL" http://proxy.private:3128" } [
{ url"http://proxy.private:3128" } [
classic-proxy-settings [
"bara.subprivate" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
] unit-test
{ URL" http://proxy.private:3128" } [
{ url"http://proxy.private:3128" } [
classic-proxy-settings [
"google.com" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
] unit-test
{ URL" http://localhost:3128" } [
{ url"http://localhost:3128" } [
{ { "http.proxy" "localhost:3128" } } [
"google.com" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
] unit-test
{ URL" http://localhost:3128" } [
{ url"http://localhost:3128" } [
"google.com" "GET" <client-request>
URL" localhost:3128" >>proxy-url ?default-proxy proxy-url>>
url"localhost:3128" >>proxy-url ?default-proxy proxy-url>>
] unit-test
{ URL" http://localhost:3128" } [
{ url"http://localhost:3128" } [
"google.com" "GET" <client-request>
"localhost:3128" >>proxy-url ?default-proxy proxy-url>>
] unit-test
{ URL" http://proxysec.private:3128" } [
{ url"http://proxysec.private:3128" } [
classic-proxy-settings [
"https://google.com" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables
] unit-test
{ URL" http://proxy.private:3128" } [
{ url"http://proxy.private:3128" } [
classic-proxy-settings [
"allprivate.google.com" "GET" <client-request> ?default-proxy proxy-url>>
] with-variables

View File

@ -184,7 +184,7 @@ SYMBOL: redirects
: (check-proxy) ( proxy -- ? )
{
{ [ dup URL" " = ] [ drop f ] }
{ [ dup url"" = ] [ drop f ] }
{ [ dup host>> ] [ drop t ] }
[ invalid-proxy ]
} cond ;

View File

@ -160,7 +160,7 @@ content-type: text/html; charset=UTF-8
;
${ read-response-test-1' } [
URL" http://localhost/" url set
url"http://localhost/" url set
read-response-test-1 lf>crlf
[ read-response ] with-string-reader
[ write-response ] with-string-writer
@ -257,7 +257,7 @@ USING: locals ;
"vocab:http/test" <static> >>default
"nested" add-responder
<action>
[ URL" redirect-loop" <temporary-redirect> ] >>display
[ url"redirect-loop" <temporary-redirect> ] >>display
"redirect-loop" add-responder [
[ t ] [
@ -347,7 +347,7 @@ SYMBOL: a
[ a get-global "a" set-value ] >>init
[ [ "<!DOCTYPE html><html>" write "a" <field> render "</html>" write ] "text/html" <content> ] >>display
[ { { "a" [ v-integer ] } } validate-params ] >>validate
[ "a" value a set-global URL" " <redirect> ] >>submit
[ "a" value a set-global url"" <redirect> ] >>submit
<conversations>
<sessions>
>>default

View File

@ -134,8 +134,8 @@ hello
{
T{ request
{ method "GET" }
{ url URL" /" }
{ proxy-url URL" " }
{ url url"/" }
{ proxy-url url"" }
{ version "1.0" }
{ header H{ } }
{ cookies V{ } }

View File

@ -33,16 +33,16 @@ H{ } clone params set
"rewrite" set
{ { { } "DEFAULT!" } } [
URL" http://blogs.vegan.net" url set
url"http://blogs.vegan.net" url set
{ } "rewrite" get call-responder
] unit-test
{ { { } "DEFAULT!" } } [
URL" http://www.blogs.vegan.net" url set
url"http://www.blogs.vegan.net" url set
{ } "rewrite" get call-responder
] unit-test
{ { { } "erg" } } [
URL" http://erg.blogs.vegan.net" url set
url"http://erg.blogs.vegan.net" url set
{ } "rewrite" get call-responder
] unit-test

View File

@ -50,8 +50,8 @@ io.streams.string kernel math peg sequences tools.test urls ;
{
T{ request
{ method "GET" }
{ url URL" /" }
{ proxy-url URL" " }
{ url url"/" }
{ proxy-url url"" }
{ version "1.0" }
{ header H{ } }
{ cookies V{ } }
@ -68,8 +68,8 @@ io.streams.string kernel math peg sequences tools.test urls ;
{
T{ request
{ method "GET" }
{ url URL" /" }
{ proxy-url URL" " }
{ url url"/" }
{ proxy-url url"" }
{ version "1.0" }
{ header H{ } }
{ cookies V{ } }

View File

@ -9,13 +9,13 @@ io.launcher io.pathnames kernel sequences tools.test ;
] unit-test
{ { "kernel" } } [
"resource:core" [
resource"core" [
"." directory-files [ "kernel" = ] filter
] with-directory
] unit-test
{ { "kernel" } } [
"resource:core" [
resource"core" [
[ "kernel" = ] filter
] with-directory-files
] unit-test
@ -140,7 +140,7 @@ io.launcher io.pathnames kernel sequences tools.test ;
! copy-file
{ } [
"resource:LICENSE.txt" "test" copy-file
resource"LICENSE.txt" "test" copy-file
] unit-test
! copy-file-into

View File

@ -2,19 +2,19 @@ USING: io.directories io.directories.hierarchy kernel
sequences tools.test ;
{ { "classes/tuple/tuple.factor" } } [
"resource:core" [
resource"core" [
"." directory-tree-files [ "classes/tuple/tuple.factor" = ] filter
] with-directory
] unit-test
{ { "classes/tuple" } } [
"resource:core" [
resource"core" [
"." directory-tree-files [ "classes/tuple" = ] filter
] with-directory
] unit-test
{ { "classes/tuple/tuple.factor" } } [
"resource:core" [
resource"core" [
[ "classes/tuple/tuple.factor" = ] filter
] with-directory-tree-files
] unit-test

View File

@ -38,12 +38,12 @@ tools.test ;
] unit-test
{ t } [
"resource:core/math/integers/integers.factor"
resource"core/math/integers/integers.factor"
[ "math.factor" tail? ] find-up-to-root >boolean
] unit-test
{ f } [
"resource:core/math/integers/integers.factor"
resource"core/math/integers/integers.factor"
[ drop f ] find-up-to-root
] unit-test

View File

@ -7,7 +7,7 @@ HELP: open-read
{ $examples
{ $unchecked-example
"USING: io.files.windows prettyprint ;"
"\"resource:core/kernel/kernel.factor\" absolute-path open-read ."
"resource\"core/kernel/kernel.factor\" absolute-path open-read ."
"T{ win32-file { handle ALIEN: 234 } { ptr 0 } }"
}
} ;

View File

@ -33,7 +33,7 @@ M: mock-io-backend link-info
{ } [
mock-io-backend io-backend [
"resource:core/io" <mailbox> <recursive-monitor> dispose
resource"core/io" <mailbox> <recursive-monitor> dispose
] with-variable
] unit-test
@ -56,6 +56,6 @@ M: mock-io-backend link-info
! Test that disposing twice is allowed
{ } [
"resource:core/io" <mailbox> <recursive-monitor>
resource"core/io" <mailbox> <recursive-monitor>
[ dispose ] [ dispose ] bi
] unit-test

View File

@ -3,7 +3,7 @@ kernel sequences tools.test urls windows.winsock ;
IN: io.sockets.windows.tests
: google-socket ( -- socket )
URL" http://www.google.com" url-addr resolve-host first
url"http://www.google.com" url-addr resolve-host first
SOCK_STREAM open-socket ;
{ } [

View File

@ -80,7 +80,7 @@ SYNTAX: STRING:
PRIVATE>
: parse-multiline-string ( end-text -- str )
lexer get 1 (parse-multiline-string) ;
lexer get 0 (parse-multiline-string) ;
SYNTAX: /* "*/" parse-multiline-string drop ;

View File

@ -131,10 +131,19 @@ M: string pprint*
dup "\"" "\"" pprint-string ;
M: sbuf pprint*
dup "SBUF\" " "\"" pprint-string ;
dup "sbuf\"" "\"" pprint-string ;
M: pathname pprint*
dup string>> "P\" " "\"" pprint-string ;
dup string>> "path\"" "\"" pprint-string ;
M: resource-pathname pprint*
dup string>> "resource\"" "\"" pprint-string ;
M: vocab-pathname pprint*
dup string>> "vocab-path\"" "\"" pprint-string ;
M: home-pathname pprint*
dup string>> "home-path\"" "\"" pprint-string ;
! Sequences
: nesting-limit? ( -- ? )

View File

@ -47,7 +47,7 @@ unit-test
{ "f" } [ f unparse ] unit-test
{ "t" } [ t unparse ] unit-test
{ "SBUF\" hello world\"" } [ SBUF" hello world" unparse ] unit-test
{ "sbuf\"hello world\"" } [ sbuf"hello world" unparse ] unit-test
{ "W{ \\ + }" } [ [ W{ \ + } ] first unparse ] unit-test

View File

@ -52,7 +52,7 @@ CONSTANT: objects
"test"
{ 1 2 "three" }
V{ 1 2 "three" }
SBUF" hello world"
sbuf"hello world"
"hello \u012345 unicode"
\ dup
[ \ dup dup ]

View File

@ -11,13 +11,13 @@ IN: syndication.tests
feed
f
"Meerkat"
URL" http://meerkat.oreillynet.com"
url"http://meerkat.oreillynet.com"
{
T{
entry
f
"XML: A Disruptive Technology"
URL" http://c.moreover.com/click/here.pl?r123"
url"http://c.moreover.com/click/here.pl?r123"
"\n XML is placing increasingly heavy loads on the existing technical\n infrastructure of the Internet.\n "
f
}
@ -27,13 +27,13 @@ IN: syndication.tests
feed
f
"dive into mark"
URL" http://example.org/"
url"http://example.org/"
{
T{
entry
f
"Atom draft-07 snapshot"
URL" http://example.org/2005/04/02/atom"
url"http://example.org/2005/04/02/atom"
"\n <div xmlns=\"http://www.w3.org/1999/xhtml\">\n <p><i>[Update: The Atom draft is finished.]</i></p>\n </div>\n "
T{ timestamp f 2003 12 13 8 29 29 T{ duration f 0 0 0 -4 0 0 } }

View File

@ -3,7 +3,7 @@ USING: accessors urls io.encodings.ascii io.files math.parser
io.files.temp http.client kernel ;
: deploy-test-5 ( -- )
URL" http://localhost/foo.html" clone
url"http://localhost/foo.html" clone
"port-number" temp-file ascii file-contents string>number >>port
http-get 2drop ;

View File

@ -5,5 +5,5 @@ prettyprint.backend urls ;
IN: urls.prettyprint
M: url pprint*
\ URL" record-vocab
dup present "URL\" " "\"" pprint-string ;
\ url" record-vocab
dup present "url\"" "\"" pprint-string ;

View File

@ -19,7 +19,7 @@ HELP: >url
{ $example
"USING: accessors prettyprint urls ;"
"\"http://www.apple.com\" >url ."
"URL\" http://www.apple.com/\""
"url\"http://www.apple.com/\""
}
"We can examine the URL object:"
{ $example
@ -35,13 +35,13 @@ HELP: >url
}
} ;
HELP: URL"
{ $syntax "URL\" url...\"" }
HELP: url"
{ $syntax "url\"url...\"" }
{ $description "URL literal syntax." }
{ $examples
{ $example
"USING: accessors prettyprint urls ;"
"URL\" http://factorcode.org:80\" port>> ."
"url\"http://factorcode.org:80\" port>> ."
"80"
}
} ;
@ -52,15 +52,15 @@ HELP: derive-url
{ $examples
{ $example
"USING: prettyprint urls ;"
"URL\" http://factorcode.org\""
"URL\" binaries.fhtml\" derive-url ."
"URL\" http://factorcode.org/binaries.fhtml\""
"url\"http://factorcode.org\""
"url\"binaries.fhtml\" derive-url ."
"url\"http://factorcode.org/binaries.fhtml\""
}
{ $example
"USING: prettyprint urls ;"
"URL\" http://www.truecasey.com/drinks/kombucha\""
"URL\" master-cleanser\" derive-url ."
"URL\" http://www.truecasey.com/drinks/master-cleanser\""
"url\"http://www.truecasey.com/drinks/kombucha\""
"url\"master-cleanser\" derive-url ."
"url\"http://www.truecasey.com/drinks/master-cleanser\""
}
} ;
@ -70,7 +70,7 @@ HELP: ensure-port
{ $examples
{ $example
"USING: accessors prettyprint urls ;"
"URL\" https://concatenative.org\" ensure-port port>> ."
"url\"https://concatenative.org\" ensure-port port>> ."
"443"
}
} ;
@ -95,7 +95,7 @@ HELP: query-param
{ $examples
{ $example
"USING: io urls ;"
"URL\" http://food.com/calories?item=French+Fries\""
"url\"http://food.com/calories?item=French+Fries\""
"\"item\" query-param print"
"French Fries"
}
@ -109,7 +109,7 @@ HELP: set-query-param
{ $examples
{ $code
"USING: kernel http.client urls ;
URL\" http://search.yahooapis.com/WebSearchService/V1/webSearch\" clone
url\"http://search.yahooapis.com/WebSearchService/V1/webSearch\" clone
\"concatenative programming (NSFW)\" \"query\" set-query-param
\"1\" \"adult_ok\" set-query-param
http-get"
@ -124,9 +124,9 @@ HELP: relative-url
{ $examples
{ $example
"USING: prettyprint urls ;"
"URL\" http://factorcode.org/binaries.fhtml\""
"url\"http://factorcode.org/binaries.fhtml\""
"relative-url ."
"URL\" /binaries.fhtml\""
"url\"/binaries.fhtml\""
}
} ;
@ -153,12 +153,12 @@ HELP: url-addr
{ $examples
{ $example
"USING: prettyprint urls ;"
"URL\" ftp://ftp.cdrom.com\" url-addr ."
"url\"ftp://ftp.cdrom.com\" url-addr ."
"T{ inet { host \"ftp.cdrom.com\" } { port 21 } }"
}
{ $example
"USING: io.sockets.secure prettyprint urls ;"
"URL\" https://google.com/\" url-addr ."
"url\"https://google.com/\" url-addr ."
"T{ secure\n { addrspec T{ inet { host \"google.com\" } { port 443 } } }\n { hostname \"google.com\" }\n}"
}
} ;
@ -189,7 +189,7 @@ $nl
"URLs can be converted back to strings using the " { $link present } " word."
$nl
"URL literal syntax:"
{ $subsections POSTPONE: URL" }
{ $subsections POSTPONE: url" }
"Manipulating URLs:"
{ $subsections
derive-url

View File

@ -252,26 +252,26 @@ urls [
] unit-test
! Support //foo.com, which has the same protocol as the url we derive from
{ URL" http://foo.com" }
[ URL" http://google.com" URL" //foo.com" derive-url ] unit-test
{ url"http://foo.com" }
[ url"http://google.com" url"//foo.com" derive-url ] unit-test
{ URL" https://foo.com" }
[ URL" https://google.com" URL" //foo.com" derive-url ] unit-test
{ url"https://foo.com" }
[ url"https://google.com" url"//foo.com" derive-url ] unit-test
{ "a" } [
<url> "a" "b" set-query-param "b" query-param
] unit-test
{ t } [
URL" http://www.google.com" "foo" "bar" set-query-param
url"http://www.google.com" "foo" "bar" set-query-param
query>> linked-assoc?
] unit-test
{ "foo#3" } [ URL" foo" clone 3 >>anchor present ] unit-test
{ "foo#3" } [ url"foo" clone 3 >>anchor present ] unit-test
{ "http://www.foo.com/" } [ "http://www.foo.com:80" >url present ] unit-test
{ f } [ URL" /gp/redirect.html/002-7009742-0004012?location=http://advantage.amazon.com/gp/vendor/public/join%26token%3d77E3769AB3A5B6CF611699E150DC33010761CE12" protocol>> ] unit-test
{ f } [ url"/gp/redirect.html/002-7009742-0004012?location=http://advantage.amazon.com/gp/vendor/public/join%26token%3d77E3769AB3A5B6CF611699E150DC33010761CE12" protocol>> ] unit-test
{
T{ url
@ -295,10 +295,10 @@ urls [
{ "/" } [ "http://www.jedit.org" >url path>> ] unit-test
{ "USING: urls ;\nURL\" foo\"" } [ URL" foo" unparse-use ] unit-test
{ "USING: urls ;\nURL\" foo\"" } [ url"foo" unparse-use ] unit-test
{ T{ inet { host "google.com" } { port 80 } } }
[ URL" http://google.com/" url-addr ] unit-test
[ url"http://google.com/" url-addr ] unit-test
{
T{ secure
@ -306,14 +306,14 @@ urls [
{ hostname "google.com" }
}
}
[ URL" https://google.com/" url-addr ] unit-test
[ url"https://google.com/" url-addr ] unit-test
{ "git+https" }
[ URL" git+https://google.com/git/factor.git" >url protocol>> ] unit-test
[ url"git+https://google.com/git/factor.git" >url protocol>> ] unit-test
! Params should be rendered in the order in which they are added.
{ "/?foo=foo&bar=bar&baz=baz" } [
URL" /"
url"/"
"foo" "foo" set-query-param
"bar" "bar" set-query-param
"baz" "baz" set-query-param
@ -322,6 +322,6 @@ urls [
! Scheme characters are
! case-insensitive. https://tools.ietf.org/html/rfc3986#section-3.1
{ URL" http://www.google.com/" } [
URL" http://www.google.com/"
{ url"http://www.google.com/" } [
url"http://www.google.com/"
] unit-test

View File

@ -112,7 +112,7 @@ M: pathname >url string>> >url ;
[ path>> "/" head? [ "/" % ] unless ]
} cleave ;
! URL" //foo.com" takes on the protocol of the url it's derived from
! url"//foo.com" takes on the protocol of the url it's derived from
: unparse-protocol ( url -- )
dup protocol>> [
% "://" % unparse-host-part
@ -189,6 +189,6 @@ PRIVATE>
clone dup protocol>> '[ _ protocol-port or ] change-port ;
! Literal syntax
SYNTAX: URL" lexer get skip-blank parse-string >url suffix! ;
SYNTAX: url" parse-string >url suffix! ;
{ "urls" "prettyprint" } "urls.prettyprint" require-when

View File

@ -7,4 +7,4 @@ USING: io.pathnames tools.test urls webbrowser ;
{ f } [ 123 url-string? ] unit-test
{ } [ "" absolute-path open-item ] unit-test
{ } [ URL" http://www.google.com" open-item ] unit-test
{ } [ url"http://www.google.com" open-item ] unit-test

View File

@ -43,7 +43,7 @@
</SPAN>
<SPAN TYPE="LITERAL1" NO_LINE_BREAK="TRUE"
DELEGATE="LITERAL">
<BEGIN>SBUF" </BEGIN>
<BEGIN>sbuf"</BEGIN>
<END>"</END>
</SPAN>
<SPAN TYPE="LITERAL1" NO_LINE_BREAK="TRUE"

View File

@ -32,12 +32,15 @@ IN: bootstrap.syntax
"MATH:"
"MIXIN:"
"NAN:"
"P\""
"POSTPONE:"
"PREDICATE:"
"PRIMITIVE:"
"PRIVATE>"
"SBUF\""
"path\""
"sbuf\""
"resource\""
"vocab-path\""
"home-path\""
"SINGLETON:"
"SINGLETONS:"
"BUILTIN:"

View File

@ -40,7 +40,7 @@ HELP: checksum-file
{ $examples
{ $example
"USING: checksums checksums.crc32 prettyprint ;"
"\"resource:LICENSE.txt\" crc32 checksum-file ."
"resource\"LICENSE.txt\" crc32 checksum-file ."
"B{ 125 29 106 28 }"
}
} ;

View File

@ -5,7 +5,7 @@ USING: checksums checksums.crc32 kernel tools.test ;
{ B{ 0xcb 0xf4 0x39 0x26 } } [ "123456789" crc32 checksum-bytes ] unit-test
{ t } [
"resource:LICENSE.txt" crc32
resource"LICENSE.txt" crc32
[ [ swap add-checksum-file get-checksum ] with-checksum-state ]
[ checksum-file ] 2bi =
] unit-test

View File

@ -160,7 +160,7 @@ M: circle big-mix-test drop "circle" ;
{ "complex" } [ -1 sqrt big-mix-test ] unit-test
{ "sequence" } [ B{ 1 2 3 } big-mix-test ] unit-test
{ "sequence" } [ ?{ t f t } big-mix-test ] unit-test
{ "sequence" } [ SBUF" hello world" big-mix-test ] unit-test
{ "sequence" } [ sbuf"hello world" big-mix-test ] unit-test
{ "sequence" } [ V{ "a" "b" } big-mix-test ] unit-test
{ "sequence" } [ BV{ 1 2 } big-mix-test ] unit-test
{ "sequence" } [ ?V{ t t f f } big-mix-test ] unit-test

View File

@ -83,7 +83,7 @@ HELP: file-lines
{ $examples
{ $example
"USING: io.files io.encodings.utf8 prettyprint sequences ;"
"\"resource:core/kernel/kernel.factor\" utf8 file-lines first ."
"resource\"core/kernel/kernel.factor\" utf8 file-lines first ."
"\"! Copyright (C) 2004, 2009 Slava Pestov.\""
}
}

View File

@ -241,7 +241,7 @@ CONSTANT: pt-array-1
] must-fail
{ } [
"resource:LICENSE.txt" binary [
resource"LICENSE.txt" binary [
44 read drop
tell-input 44 assert=
-44 seek-relative seek-input
@ -272,7 +272,7 @@ CONSTANT: pt-array-1
{ f t t } [
[
"resource:core" normalize-path
resource"core" normalize-path
[ cwd = ] [ cd ] [ cwd = ] tri
] cwd '[ _ dup cd cwd = ] [ ] cleanup
] unit-test

View File

@ -1,4 +1,4 @@
USING: accessors io io.streams.string kernel math namespaces
uSING: accessors io io.streams.string kernel math namespaces
parser sbufs sequences strings tools.test words ;
IN: io.tests
@ -76,7 +76,7 @@ M: dumb-writer stream-write1 vector>> push ; inline
[ vector>> ] tri
] unit-test
{ SBUF" asdf" }
{ sbuf"asdf" }
[ "asdf" <string-reader> 4 <sbuf> [ stream-copy ] keep ] unit-test
{ "asdf" }

View File

@ -112,7 +112,7 @@ SYMBOL: error-stream
stream-element-type +byte+ = B{ } "" ? ; inline
: stream-exemplar-growable ( stream -- exemplar )
stream-element-type +byte+ = BV{ } SBUF" " ? ; inline
stream-element-type +byte+ = BV{ } sbuf"" ? ; inline
: (new-sequence-for-stream) ( n stream -- seq )
stream-exemplar new-sequence ; inline

View File

@ -142,7 +142,7 @@ ARTICLE: "io.pathnames.presentations" "Pathname presentations"
<pathname>
}
"Literal pathname presentations:"
{ $subsections POSTPONE: P" }
{ $subsections POSTPONE: path" }
"Many words that accept pathname strings can also work on pathname presentations." ;
ARTICLE: "io.pathnames" "Pathnames"

View File

@ -9,6 +9,8 @@ system tools.test ;
{ "freetype6.dll" } [ "resource:freetype6.dll" file-name ] unit-test
{ "freetype6.dll" } [ "resource:/freetype6.dll" file-name ] unit-test
{ "freetype6.dll" } [ resource"freetype6.dll" file-name ] unit-test
{ "freetype6.dll" } [ resource"/freetype6.dll" file-name ] unit-test
{ "/usr/lib" } [ "/usr" "lib" append-path ] unit-test
{ "/usr/lib" } [ "/usr/" "lib" append-path ] unit-test
@ -49,7 +51,9 @@ system tools.test ;
{ "bar/foo" } [ "bar/baz" "./..//foo" append-path ] unit-test
{ "bar/foo" } [ "bar/baz" "./../././././././///foo" append-path ] unit-test
{ t } [ "resource:core" absolute-path? ] unit-test
! No longer a special-path, instead it's a file named "resource:core"
{ f } [ "resource:core" absolute-path? ] unit-test
{ t } [ resource"core" absolute-path? ] unit-test
{ f } [ "" absolute-path? ] unit-test
[| path |
@ -66,6 +70,15 @@ H{
unit-test
] with-variables
H{
{ current-directory "." }
{ "resource-path" ".." }
} [
[ "../core/bootstrap/stage2.factor" ]
[ resource"core/bootstrap/stage2.factor" absolute-path ]
unit-test
] with-variables
{ t } [ cwd "misc" resource-path [ ] with-directory cwd = ] unit-test
! Regression test for bug in file-extension

View File

@ -78,10 +78,27 @@ ERROR: no-parent-directory path ;
PRIVATE>
GENERIC: vocab-path ( path -- newpath )
GENERIC: absolute-path ( path -- path' )
TUPLE: pathname string ;
C: <pathname> pathname
M: pathname absolute-path string>> absolute-path ;
M: pathname <=> [ string>> ] compare ;
INSTANCE: pathname virtual-sequence
M: pathname length normalize-path length ;
M: pathname virtual@ normalize-path ;
M: pathname virtual-exemplar drop "" ;
M: pathname string-lines "" like string-lines ;
: absolute-path? ( path -- ? )
{
{ [ dup empty? ] [ f ] }
{ [ dup special-path? nip ] [ t ] }
{ [ os windows? ] [ windows-absolute-path? ] }
{ [ dup first path-separator? ] [ t ] }
[ f ]
@ -105,6 +122,10 @@ PRIVATE>
{ [ over absolute-path? over first path-separator? and ] [
[ 2 head ] dip append
] }
! { [ over pathname? ] [
! [ [ normalize-path ] dip append-path ] keepd like
! [ append-path ] curry change-string
! ] }
[ append-relative-path ]
} cond ;
@ -117,6 +138,7 @@ PRIVATE>
dup last-path-separator [ 1 + tail ] [
drop special-path? [ file-name ] when
] if
! dup last-path-separator [ 1 + tail ] [ drop ] if
] unless ;
: file-stem ( path -- stem )
@ -139,10 +161,6 @@ HOOK: home io-backend ( -- dir )
M: object home "" resource-path ;
GENERIC: vocab-path ( path -- newpath )
GENERIC: absolute-path ( path -- path' )
M: string absolute-path
"resource:" ?head [
trim-head-separators resource-path
@ -163,10 +181,29 @@ M: string absolute-path
M: object normalize-path ( path -- path' )
absolute-path ;
TUPLE: pathname string ;
TUPLE: resource-pathname < pathname ;
C: <pathname> pathname
: <resource-pathname> ( string -- resource-pathname )
resource-pathname new
swap trim-head-separators >>string ; inline
M: pathname absolute-path string>> absolute-path ;
M: resource-pathname absolute-path string>> resource-path absolute-path ;
! M: resource-pathname like drop normalize-path <resource-pathname> ;
M: pathname <=> [ string>> ] compare ;
TUPLE: vocab-pathname < pathname ;
: <vocab-pathname> ( string -- vocab-pathname )
vocab-pathname new
swap trim-head-separators >>string ; inline
M: vocab-pathname absolute-path string>> vocab-path absolute-path ;
! M: vocab-pathname like drop normalize-path <vocab-pathname> ;
TUPLE: home-pathname < pathname ;
: <home-pathname> ( string -- home-pathname )
home-pathname new
swap trim-head-separators >>string ; inline
M: home-pathname absolute-path string>> home prepend-path absolute-path ;
! M: home-pathname like drop normalize-path <home-pathname> ;

View File

@ -83,9 +83,25 @@ M: lexer skip-blank
GENERIC: skip-word ( lexer -- )
: find-container-delimiter ( i str -- n/f )
2dup [ "[" member? ] find-from [
[ swap subseq [ CHAR: = = ] all? ] keep and
] [
3drop f
] if ;
! Support tag"payload", tag[[payload]] soon
M: lexer skip-word
[
2dup nth CHAR: \" eq? [ drop 1 + ] [ f skip ] if
2dup [ " \"[" member? ] find-from
{
{ CHAR: \" [ 2nip 1 + ] }
{ CHAR: [ [
1 + over find-container-delimiter
dup [ 2nip 1 + ] [ drop f skip ] if
] }
[ 2drop f skip ]
} case
] change-lexer-column ;
: still-parsing? ( lexer -- ? )

View File

@ -18,7 +18,7 @@ $nl
<sbuf>
}
"If you don't care about initial capacity, a more elegant way to create a new string buffer is to write:"
{ $code "SBUF\" \" clone" } ;
{ $code "sbuf\"\" clone" } ;
ABOUT: "sbufs"

View File

@ -11,17 +11,17 @@ sequences.private strings tools.test ;
"buf" get >string
] unit-test
{ CHAR: h } [ 0 SBUF" hello world" nth ] unit-test
{ CHAR: h } [ 0 sbuf"hello world" nth ] unit-test
{ CHAR: H } [
CHAR: H 0 SBUF" hello world" [ set-nth ] keep first
CHAR: H 0 sbuf"hello world" [ set-nth ] keep first
] unit-test
{ SBUF" x" } [ 1 <sbuf> CHAR: x >bignum suffix! ] unit-test
{ sbuf"x" } [ 1 <sbuf> CHAR: x >bignum suffix! ] unit-test
{ fixnum } [ 1 >bignum SBUF" " new-sequence length class-of ] unit-test
{ fixnum } [ 1 >bignum sbuf"" new-sequence length class-of ] unit-test
{ fixnum } [ 1 >bignum <iota> [ ] SBUF" " map-as length class-of ] unit-test
{ fixnum } [ 1 >bignum <iota> [ ] sbuf"" map-as length class-of ] unit-test
[ 1.5 SBUF" " new-sequence ] must-fail
[ 1.5 sbuf"" new-sequence ] must-fail
[ CHAR: A 0.5 0.5 SBUF" a" set-nth-unsafe ] must-fail
[ CHAR: A 0.5 0.5 sbuf"a" set-nth-unsafe ] must-fail

View File

@ -17,7 +17,7 @@ M: sbuf set-nth-unsafe
M: sbuf new-sequence
drop [ 0 <string> ] [ integer>fixnum ] bi sbuf boa ; inline
: >sbuf ( seq -- sbuf ) SBUF" " clone-like ; inline
: >sbuf ( seq -- sbuf ) sbuf"" clone-like ; inline
M: sbuf contract 2drop ; inline

View File

@ -65,7 +65,7 @@ HELP: new-resizable
{ $contract "Outputs a resizable mutable sequence with an initial capacity of " { $snippet "len" } " elements and zero length, which can hold the elements of " { $snippet "seq" } "." }
{ $examples
{ $example "USING: prettyprint sequences ;" "300 V{ } new-resizable ." "V{ }" }
{ $example "USING: prettyprint sequences ;" "300 SBUF\" \" new-resizable ." "SBUF\" \"" }
{ $example "USING: prettyprint sequences ;" "300 sbuf\"\" new-resizable ." "sbuf\"\"" }
} ;
HELP: like
@ -749,8 +749,8 @@ HELP: join-as
{ $examples
"Join a list of strings as a string buffer:"
{ $example "USING: sequences prettyprint ;"
"{ \"a\" \"b\" \"c\" } \"1\" SBUF\" \"join-as ."
"SBUF\" a1b1c\""
"{ \"a\" \"b\" \"c\" } \"1\" sbuf\"\" join-as ."
"sbuf\"a1b1c\""
}
}
{ $errors "Throws an error if one of the sequences in " { $snippet "seq" } " contains elements not permitted in sequences of the same class as " { $snippet "exemplar" } "." } ;
@ -905,8 +905,8 @@ HELP: append-as
"B{ 1 2 3 4 }"
}
{ $example "USING: prettyprint sequences strings ;"
"\"go\" \"ing\" SBUF\" \" append-as ."
"SBUF\" going\""
"\"go\" \"ing\" sbuf\"\" append-as ."
"sbuf\"going\""
}
} ;
@ -936,8 +936,8 @@ HELP: prepend-as
"B{ 1 2 3 4 }"
}
{ $example "USING: prettyprint sequences strings ;"
"\"ing\" \"go\" SBUF\" \" prepend-as ."
"SBUF\" going\""
"\"ing\" \"go\" sbuf\"\" prepend-as ."
"sbuf\"going\""
}
} ;
@ -960,8 +960,8 @@ HELP: 3append-as
{ $errors "Throws an error if " { $snippet "seq1" } ", " { $snippet "seq2" } ", or " { $snippet "seq3" } " contain elements not permitted in sequences of the same class as " { $snippet "exemplar" } "." }
{ $examples
{ $example "USING: prettyprint sequences ;"
"\"a\" \"b\" \"c\" SBUF\" \" 3append-as ."
"SBUF\" abc\""
"\"a\" \"b\" \"c\" sbuf\"\" 3append-as ."
"sbuf\"abc\""
}
} ;

View File

@ -152,8 +152,8 @@ IN: sequences.tests
{ t } [ "xxfoo" 2 head-slice "xxbar" 2 head-slice = ] unit-test
{ t } [ "xxfoo" 2 head-slice "xxbar" 2 head-slice [ hashcode ] same? ] unit-test
{ t } [ "xxfoo" 2 head-slice SBUF" barxx" 2 tail-slice* = ] unit-test
{ t } [ "xxfoo" 2 head-slice SBUF" barxx" 2 tail-slice* [ hashcode ] same? ] unit-test
{ t } [ "xxfoo" 2 head-slice sbuf"barxx" 2 tail-slice* = ] unit-test
{ t } [ "xxfoo" 2 head-slice sbuf"barxx" 2 tail-slice* [ hashcode ] same? ] unit-test
{ t } [ [ 1 2 3 ] [ 1 2 3 ] sequence= ] unit-test
{ t } [ [ 1 2 3 ] { 1 2 3 } sequence= ] unit-test
@ -210,8 +210,8 @@ unit-test
{ 5 } [ 1 >bignum { 1 5 7 } nth-unsafe ] unit-test
{ 5 } [ 1 >bignum "\u000001\u000005\u000007" nth-unsafe ] unit-test
{ SBUF" before&after" } [
"&" 6 11 SBUF" before and after" replace-slice
{ sbuf"before&after" } [
"&" 6 11 sbuf"before and after" replace-slice
] unit-test
{ 3 "a" } [ { "a" "b" "c" "a" "d" } [ "a" = ] find-last ] unit-test
@ -240,11 +240,11 @@ unit-test
! Pathological case
{ "ihbye" } [ "hi" <reversed> "bye" append ] unit-test
{ t } [ "hi" <reversed> SBUF" hi" <reversed> = ] unit-test
{ t } [ "hi" <reversed> sbuf"hi" <reversed> = ] unit-test
{ t } [ "hi" <reversed> SBUF" hi" <reversed> = ] unit-test
{ t } [ "hi" <reversed> sbuf"hi" <reversed> = ] unit-test
{ t } [ "hi" <reversed> SBUF" hi" <reversed> [ hashcode ] same? ] unit-test
{ t } [ "hi" <reversed> sbuf"hi" <reversed> [ hashcode ] same? ] unit-test
[ -10 "hi" "bye" copy ] must-fail
[ 10 "hi" "bye" copy ] must-fail
@ -254,14 +254,14 @@ unit-test
] unit-test
! erg's random tester found this one
{ SBUF" 12341234" } [
{ sbuf"12341234" } [
9 <sbuf> dup "1234" swap push-all dup dup swap push-all
] unit-test
{ f } [ f V{ } like f V{ } like eq? ] unit-test
{ V{ f f f } } [ 3 V{ } new-sequence ] unit-test
{ SBUF" \0\0\0" } [ 3 SBUF" " new-sequence ] unit-test
{ sbuf"\0\0\0" } [ 3 sbuf"" new-sequence ] unit-test
{ 0 } [ f length ] unit-test
[ f first ] must-fail

View File

@ -58,18 +58,18 @@ unit-test
{ { "hello" "hi" } } [ "hello\r\nhi" string-lines ] unit-test
{ { "hello" "" "" } } [ "hello\n\n\n" string-lines ] unit-test
{ { } } [ SBUF" " string-lines ] unit-test
{ { "" } } [ SBUF" \n" string-lines ] unit-test
{ { "" } } [ SBUF" \r" string-lines ] unit-test
{ { "" } } [ SBUF" \r\n" string-lines ] unit-test
{ { "hello" } } [ SBUF" hello" string-lines ] unit-test
{ { "hello" } } [ SBUF" hello\n" string-lines ] unit-test
{ { "hello" } } [ SBUF" hello\r" string-lines ] unit-test
{ { "hello" } } [ SBUF" hello\r\n" string-lines ] unit-test
{ { "hello" "hi" } } [ SBUF" hello\nhi" string-lines ] unit-test
{ { "hello" "hi" } } [ SBUF" hello\rhi" string-lines ] unit-test
{ { "hello" "hi" } } [ SBUF" hello\r\nhi" string-lines ] unit-test
{ { "hello" "" "" } } [ SBUF" hello\n\n\n" string-lines ] unit-test
{ { } } [ sbuf"" string-lines ] unit-test
{ { "" } } [ sbuf"\n" string-lines ] unit-test
{ { "" } } [ sbuf"\r" string-lines ] unit-test
{ { "" } } [ sbuf"\r\n" string-lines ] unit-test
{ { "hello" } } [ sbuf"hello" string-lines ] unit-test
{ { "hello" } } [ sbuf"hello\n" string-lines ] unit-test
{ { "hello" } } [ sbuf"hello\r" string-lines ] unit-test
{ { "hello" } } [ sbuf"hello\r\n" string-lines ] unit-test
{ { "hello" "hi" } } [ sbuf"hello\nhi" string-lines ] unit-test
{ { "hello" "hi" } } [ sbuf"hello\rhi" string-lines ] unit-test
{ { "hello" "hi" } } [ sbuf"hello\r\nhi" string-lines ] unit-test
{ { "hello" "" "" } } [ sbuf"hello\n\n\n" string-lines ] unit-test
{ { "hey" "world" "what's" "happening" } }
[ "heyAworldBwhat'sChappening" [ LETTER? ] split-when ] unit-test

View File

@ -147,7 +147,7 @@ PRIVATE>
: parse-string ( -- str )
[
SBUF" " clone [
sbuf"" clone [
lexer get (parse-string)
] keep unescape-string
] rewind-lexer-on-error ;

View File

@ -180,7 +180,7 @@ ARTICLE: "syntax-strings" "Character and string syntax"
"Strings are documented in " { $link "strings" } "." ;
ARTICLE: "syntax-sbufs" "String buffer syntax"
{ $subsections POSTPONE: SBUF" }
{ $subsections POSTPONE: sbuf" }
"String buffers are documented in " { $link "sbufs" } "." ;
ARTICLE: "syntax-arrays" "Array syntax"
@ -218,7 +218,7 @@ ARTICLE: "syntax-byte-arrays" "Byte array syntax"
"Byte arrays are documented in " { $link "byte-arrays" } "." ;
ARTICLE: "syntax-pathnames" "Pathname syntax"
{ $subsections POSTPONE: P" }
{ $subsections POSTPONE: path" }
"Pathnames are documented in " { $link "io.pathnames" } "." ;
ARTICLE: "syntax-effects" "Stack effect syntax"
@ -620,17 +620,17 @@ HELP: "
{ $example "USE: io" "\"\\u{greek-capital-letter-sigma}\" print" "\u{greek-capital-letter-sigma}" }
} ;
HELP: SBUF"
{ $syntax "SBUF\" string... \"" }
HELP: sbuf"
{ $syntax "sbuf\" string... \"" }
{ $values { "string" "literal and escaped characters" } }
{ $description "Reads from the input string until the next occurrence of " { $link POSTPONE: " } ", converts the string to a string buffer, and appends it to the parse tree." }
{ $examples { $example "USING: io strings ;" "SBUF\" Hello world\" >string print" "Hello world" } } ;
{ $examples { $example "USING: io strings ;" "sbuf\"Hello world\" >string print" "Hello world" } } ;
HELP: P"
{ $syntax "P\" pathname\"" }
HELP: path"
{ $syntax "path\" pathname\"" }
{ $values { "pathname" "a pathname string" } }
{ $description "Reads from the input string until the next occurrence of " { $link POSTPONE: " } ", creates a new " { $link pathname } ", and appends it to the parse tree. Pathnames presented in the UI are clickable, which opens them in a text editor configured with " { $link "editor" } "." }
{ $examples { $example "USING: accessors io io.files ;" "P\" foo.txt\" string>> print" "foo.txt" } } ;
{ $examples { $example "USING: accessors io io.files ;" "path\" foo.txt\" string>> print" "foo.txt" } } ;
HELP: (
{ $syntax "( inputs -- outputs )" }

View File

@ -90,12 +90,24 @@ IN: bootstrap.syntax
"\"" [ parse-string suffix! ] define-core-syntax
"SBUF\"" [
lexer get skip-blank parse-string >sbuf suffix!
"sbuf\"" [
parse-string >sbuf suffix!
] define-core-syntax
"P\"" [
lexer get skip-blank parse-string <pathname> suffix!
"path\"" [
parse-string <pathname> suffix!
] define-core-syntax
"resource\"" [
parse-string <resource-pathname> suffix!
] define-core-syntax
"vocab-path\"" [
parse-string <vocab-pathname> suffix!
] define-core-syntax
"home-path\"" [
parse-string <home-pathname> suffix!
] define-core-syntax
"[" [ parse-quotation suffix! ] define-core-syntax

View File

@ -32,7 +32,7 @@ IN: vocabs.loader.tests
] unit-test
[
"resource:core/vocabs/loader/test/a/a.factor" forget-source
resource"core/vocabs/loader/test/a/a.factor" forget-source
"vocabs.loader.test.a" forget-vocab
] with-compilation-unit
@ -44,7 +44,7 @@ IN: vocabs.loader.tests
[ f ] [ "vocabs.loader.test.a" lookup-vocab source-loaded?>> ] unit-test
[ t ] [
"resource:core/vocabs/loader/test/a/a.factor"
resource"core/vocabs/loader/test/a/a.factor"
path>source-file definitions>>
"v-l-t-a-hello" "vocabs.loader.test.a" lookup-word dup .
swap first in?
@ -56,7 +56,7 @@ IN: vocabs.loader.tests
[
"IN: vocabs.loader.test.a v-l-t-a-hello"
<string-reader>
"resource:core/vocabs/loader/test/a/a.factor"
resource"core/vocabs/loader/test/a/a.factor"
parse-stream
] [ error>> error>> error>> no-word-error? ] must-fail-with
@ -107,10 +107,10 @@ IN: vocabs.loader.tests
{ 3 } [ "count-me" get-global ] unit-test
{ { "resource:core/kernel/kernel.factor" 1 } }
{ { resource"core/kernel/kernel.factor" 1 } }
[ "kernel" <vocab-link> where ] unit-test
{ { "resource:core/kernel/kernel.factor" 1 } }
{ { resource"core/kernel/kernel.factor" 1 } }
[ "kernel" lookup-vocab where ] unit-test
{ } [

View File

@ -9,7 +9,7 @@ IN: benchmark.dispatch2
10 >bignum <iota> ,
{ 1 2 3 } ,
"hello world" ,
SBUF" sbuf world" ,
sbuf"sbuf world" ,
V{ "a" "b" "c" } ,
double-array{ 1.0 2.0 3.0 } ,
"hello world" 4 tail-slice ,

View File

@ -25,7 +25,7 @@ M: object g drop "object" ;
10 >bignum ,
{ 1 2 3 } ,
"hello world" ,
SBUF" sbuf world" ,
sbuf"sbuf world" ,
V{ "a" "b" "c" } ,
double-array{ 1.0 2.0 3.0 } ,
"hello world" 4 tail-slice ,

View File

@ -71,7 +71,7 @@ PRIVATE>
<PRIVATE
: chart>url ( chart -- url )
[ URL" http://chart.googleapis.com/chart" clone ] dip {
[ url"http://chart.googleapis.com/chart" clone ] dip {
[ type>> "cht" set-query-param ]
[
[ width>> ] [ height>> ] bi 2dup and [

View File

@ -10,7 +10,7 @@ IN: google.search
<PRIVATE
: search-url ( query -- url )
URL" http://ajax.googleapis.com/ajax/services/search/web" clone
url"http://ajax.googleapis.com/ajax/services/search/web" clone
"1.0" "v" set-query-param
swap "q" set-query-param
"8" "rsz" set-query-param

View File

@ -40,7 +40,7 @@ ERROR: bad-location str ;
] [ drop f ] if ;
: stations-data ( -- seq )
URL" http://tgftp.nws.noaa.gov/data/nsd_cccc.txt"
url"http://tgftp.nws.noaa.gov/data/nsd_cccc.txt"
http-get nip CHAR: ; [ string>csv ] with-delimiter ;
PRIVATE>

View File

@ -5,7 +5,7 @@ strings namespaces urls ;
{ "%26&" } [ "&" f hmac-key ] unit-test
{ "B&http%3A%2F%2Ftwitter.com%2F&a%3Db" } [
URL" http://twitter.com"
url"http://twitter.com"
"B"
{ { "a" "b" } }
signature-base-string
@ -14,7 +14,7 @@ strings namespaces urls ;
{ "0EieqbHx0FJ/RtFskmRj9/TDpqo=" } [
"ABC" "DEF" <token> consumer-token set
URL" http://twitter.com"
url"http://twitter.com"
<request-token-params>
12345 >>timestamp
54321 >>nonce

View File

@ -15,7 +15,7 @@ USING: accessors calendar kernel oauth2 tools.test urls ;
! oauth2>auth-uri
{
URL" https://github.com/login/oauth/authorize?client_id=1234&scope=user&redirect_uri=test-pest&state=abcd&response_type=code&access_type=offline"
url"https://github.com/login/oauth/authorize?client_id=1234&scope=user&redirect_uri=test-pest&state=abcd&response_type=code&access_type=offline"
} [
"https://github.com/login/oauth/authorize"
"https://github.com/login/oauth/access_token"

View File

@ -23,7 +23,7 @@ foreground background page-color inset line-height metrics ;
54 54 54 54 <margin> >>margin
612 >>col-width
sans-serif-font 12 >>size >>font
SBUF" " >>stream
sbuf"" >>stream
0 >>line-height
{ 0 0 } >>inset
dup font>> font-metrics >>metrics ;

View File

@ -24,7 +24,7 @@ IN: project-euler.017
! --------
: euler017 ( -- answer )
1000 [1,b] SBUF" " clone [ number>text append! ] reduce [ Letter? ] count ;
1000 [1,b] sbuf"" clone [ number>text append! ] reduce [ Letter? ] count ;
! [ euler017 ] 100 ave-time
! 15 ms ave run time - 1.71 SD (100 trials)

View File

@ -34,7 +34,7 @@ IN: project-euler.040
] if ;
: concat-upto ( n -- str )
SBUF" " clone 1 -rot (concat-upto) ;
sbuf"" clone 1 -rot (concat-upto) ;
: nth-integer ( n str -- m )
[ 1 - ] dip nth digit> ;

View File

@ -11,16 +11,16 @@ urls ;
{ allows V{ } }
{ disallows
V{
URL" /cgi-bin/"
URL" /scripts/"
URL" /ChipList2/scripts/"
URL" /ChipList2/styles/"
URL" /ads/"
URL" /ChipList2/ads/"
URL" /advertisements/"
URL" /ChipList2/advertisements/"
URL" /graphics/"
URL" /ChipList2/graphics/"
url"/cgi-bin/"
url"/scripts/"
url"/ChipList2/scripts/"
url"/ChipList2/styles/"
url"/ads/"
url"/ChipList2/ads/"
url"/advertisements/"
url"/ChipList2/advertisements/"
url"/graphics/"
url"/ChipList2/graphics/"
}
}
{ visit-time
@ -36,163 +36,163 @@ urls ;
T{ rules
{ user-agents V{ "UbiCrawler" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "DOC" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Zao" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "sitecheck.internetseer.com" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Zealbot" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "MSIECrawler" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "SiteSnagger" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "WebStripper" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "WebCopier" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Fetch" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Offline Explorer" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Teleport" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "TeleportPro" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "WebZIP" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "linko" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "HTTrack" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Microsoft.URL.Control" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Xenu" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "larbin" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "libwww" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "ZyBORG" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "Download Ninja" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "wget" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "grub-client" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "k2spider" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "NPBot" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
{ user-agents V{ "WebReaper" } }
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
T{ rules
@ -327,7 +327,7 @@ urls ;
}
}
{ allows V{ } }
{ disallows V{ URL" /" } }
{ disallows V{ url"/" } }
{ unknowns H{ } }
}
}

View File

@ -26,7 +26,7 @@ visit-time request-rate crawl-delay unknowns ;
<PRIVATE
: >robots.txt-url ( url -- url' )
>url URL" robots.txt" derive-url ;
>url url"robots.txt" derive-url ;
: get-robots.txt ( url -- robots.txt )
>robots.txt-url http-get nip ;

View File

@ -42,7 +42,7 @@ IN: rosettacode.anagrams-deranged
deranged-anagrams [ first length ] sort-with last ;
: default-word-list ( -- path )
URL" http://puzzlers.org/pub/wordlists/unixdict.txt"
url"http://puzzlers.org/pub/wordlists/unixdict.txt"
"unixdict.txt" temp-file [ ?download-to ] keep ;
: longest-deranged-anagrams ( -- anagrams )

View File

@ -18,7 +18,7 @@ IN: rosetta-code.ordered-words
! this page.
MEMO: word-list ( -- seq )
URL" http://puzzlers.org/pub/wordlists/unixdict.txt"
url"http://puzzlers.org/pub/wordlists/unixdict.txt"
"unixdict.txt" temp-file
[ ?download-to ] [ utf8 file-lines ] bi ;

View File

@ -63,7 +63,7 @@ TUPLE: maxlicense max-count current-count times ;
MEMO: mlijobs ( -- lines )
"mlijobs.txt" temp-file dup exists? [
URL" http://rosettacode.org/resources/mlijobs.txt"
url"http://rosettacode.org/resources/mlijobs.txt"
over download-to
] unless ascii file-lines ;

View File

@ -59,7 +59,7 @@ CONSTANT: ALPHABET "abcdefghijklmnopqrstuvwxyz"
ascii file-contents words histogram ;
MEMO: default-dictionary ( -- counts )
URL" http://norvig.com/big.txt" "big.txt" temp-file
url"http://norvig.com/big.txt" "big.txt" temp-file
[ ?download-to ] [ load-dictionary ] bi ;
: (correct) ( word dictionary -- word/f )

View File

@ -296,8 +296,8 @@ CONSTANT: google-slides
"Libraries can define new parsing words"
}
{ $slide "The parser"
{ "Example: URLs define a " { $link POSTPONE: URL" } " word" }
{ $code "URL\" http://paste.factorcode.org/paste?id=81\"" }
{ "Example: URLs define a " { $link POSTPONE: url" } " word" }
{ $code "url\" http://paste.factorcode.org/paste?id=81\"" }
}
{ $slide "Example: memoization"
{ "Memoization with " { $link POSTPONE: MEMO: } }

View File

@ -23,7 +23,7 @@ TUPLE: calculator < dispatcher ;
{ "y" [ v-number ] }
} validate-params
URL" $calculator" clone "x" value "y" value + "z" set-query-param
url"$calculator" clone "x" value "y" value + "z" set-query-param
<redirect>
] >>submit ;

View File

@ -15,7 +15,7 @@ M: counter-app init-session* drop 0 count sset ;
<action>
swap '[
count _ schange
URL" $counter-app" <redirect>
url"$counter-app" <redirect>
] >>submit ;
: <display-action> ( -- action )

View File

@ -61,6 +61,6 @@ build-engineer? define-capability
"dashboard" add-responder
<action>
[ URL" $mason-app/dashboard" <redirect> ] >>display
[ url"$mason-app/dashboard" <redirect> ] >>display
"" add-responder
;

View File

@ -17,7 +17,7 @@ IN: webapps.mason.report
[ build-report ] >>display ;
: report-link ( builder -- xml )
[ URL" report" clone ] dip
[ url"report" clone ] dip
[ os>> "os" set-query-param ]
[ cpu>> "cpu" set-query-param ] bi
[XML <a href=<->>Latest build report</a> XML] ;

View File

@ -6,7 +6,7 @@ IN: webapps.mason.utils.tests
{
"http://builds.factorcode.org/report?os=the-os&cpu=the-cpu"
} [
URL" /" url set
url"/" url set
builder new "the-os" >>os "the-cpu" >>cpu report-url
present
] unit-test

View File

@ -46,15 +46,15 @@ IN: webapps.mason.utils
adjust-url ;
: package-url ( builder -- url )
[ URL" http://builds.factorcode.org/package" clone ] dip
[ url"http://builds.factorcode.org/package" clone ] dip
platform-url ;
: report-url ( builder -- url )
[ URL" http://builds.factorcode.org/report" clone ] dip
[ url"http://builds.factorcode.org/report" clone ] dip
platform-url ;
: release-url ( builder -- url )
[ URL" http://builds.factorcode.org/release" clone ] dip
[ url"http://builds.factorcode.org/release" clone ] dip
platform-url ;
: validate-secret ( -- )

View File

@ -74,7 +74,7 @@ TUPLE: annotation < entity parent ;
! LINKS, ETC
! ! !
CONSTANT: pastebin-url URL" $pastebin/"
CONSTANT: pastebin-url url"$pastebin/"
: paste-url ( id -- url )
"$pastebin/paste" >url swap "id" set-query-param ;

View File

@ -80,7 +80,7 @@ posting "POSTINGS"
: <planet-feed-action> ( -- action )
<feed-action>
[ "Planet Factor" ] >>title
[ URL" $planet" ] >>url
[ url"$planet" ] >>url
[ postings ] >>entries ;
:: <posting> ( entry name -- entry' )
@ -112,7 +112,7 @@ posting "POSTINGS"
<action>
[
update-cached-postings
URL" $planet/admin" <redirect>
url"$planet/admin" <redirect>
] >>submit ;
: <delete-blog-action> ( -- action )
@ -121,7 +121,7 @@ posting "POSTINGS"
[
"id" value <blog> delete-tuples
URL" $planet/admin" <redirect>
url"$planet/admin" <redirect>
] >>submit ;
: validate-blog ( -- )
@ -146,7 +146,7 @@ posting "POSTINGS"
[ deposit-blog-slots ]
[ insert-tuple ]
bi
URL" $planet/admin" <redirect>
url"$planet/admin" <redirect>
] >>submit ;
: <edit-blog-action> ( -- action )

View File

@ -5,7 +5,7 @@ furnace.redirection html.forms validators webapps.site-watcher.common
site-watcher.db site-watcher.spider kernel urls sequences ;
IN: webapps.site-watcher.spidering
CONSTANT: site-list-url URL" $site-watcher-app/spider-list"
CONSTANT: site-list-url url"$site-watcher-app/spider-list"
: <spider-list-action> ( -- action )
<page-action>

View File

@ -5,7 +5,7 @@ furnace.redirection html.forms site-watcher site-watcher.db
validators webapps.site-watcher.common urls ;
IN: webapps.site-watcher.watching
CONSTANT: site-list-url URL" $site-watcher-app/watch-list"
CONSTANT: site-list-url url"$site-watcher-app/watch-list"
: <watch-list-action> ( -- action )
<page-action>

View File

@ -79,7 +79,7 @@ todo "TODO"
] >>submit ;
: todo-list-url ( -- url )
URL" $todo-list/list" ;
url"$todo-list/list" ;
: <delete-action> ( -- action )
<action>
@ -98,7 +98,7 @@ todo "TODO"
: <todo-list> ( -- responder )
todo-list new-dispatcher
<list-action> "list" add-responder
URL" /list" <redirect-responder> "" add-responder
url"/list" <redirect-responder> "" add-responder
<view-action> "view" add-responder
<new-action> "new" add-responder
<edit-action> "edit" add-responder

View File

@ -77,7 +77,7 @@ TUPLE: user-admin < dispatcher ;
insert-tuple
URL" $user-admin" <redirect>
url"$user-admin" <redirect>
] >>submit ;
: validate-username ( -- )
@ -131,7 +131,7 @@ TUPLE: user-admin < dispatcher ;
update-tuple
URL" $user-admin" <redirect>
url"$user-admin" <redirect>
] >>submit ;
: <delete-user-action> ( -- action )
@ -139,7 +139,7 @@ TUPLE: user-admin < dispatcher ;
[
validate-username
"username" value <user> delete-tuples
URL" $user-admin" <redirect>
url"$user-admin" <redirect>
] >>submit ;
SYMBOL: can-administer-users?

View File

@ -261,7 +261,7 @@ M: revision feed-entry-url id>> revision-url ;
: <list-changes-feed-action> ( -- action )
<feed-action>
[ URL" $wiki/changes" ] >>url
[ url"$wiki/changes" ] >>url
[ "All changes" ] >>title
[ list-changes ] >>entries ;
@ -273,7 +273,7 @@ M: revision feed-entry-url id>> revision-url ;
[
"title" value <article> delete-tuples
f <revision> "title" value >>title delete-tuples
URL" $wiki" <redirect>
url"$wiki" <redirect>
] >>submit
<protected>
@ -347,7 +347,7 @@ M: revision feed-entry-url id>> revision-url ;
"Footer" latest-revision [ "footer" [ from-object ] nest-form ] when* ;
: init-relative-link-prefix ( -- )
URL" $wiki/view/" adjust-url present relative-link-prefix set ;
url"$wiki/view/" adjust-url present relative-link-prefix set ;
: <wiki> ( -- dispatcher )
wiki new-dispatcher

View File

@ -69,7 +69,7 @@ SYMBOLS: factor-recaptcha-public-key factor-recaptcha-private-key ;
: <concatenative-website> ( -- responder )
concatenative-website new-dispatcher
URL" /wiki/view/Front Page" <redirect-responder> "" add-responder ;
url"/wiki/view/Front Page" <redirect-responder> "" add-responder ;
SYMBOLS: key-password key-file dh-file ;
@ -95,7 +95,7 @@ SYMBOLS: key-password key-file dh-file ;
: <gitweb> ( path -- responder )
<dispatcher>
swap <static> enable-cgi >>default
URL" /gitweb.cgi" <redirect-responder> "" add-responder ;
url"/gitweb.cgi" <redirect-responder> "" add-responder ;
: init-production ( -- )
common-configuration

View File

@ -1,7 +1,7 @@
USING: calendar tools.test urls wikipedia.private ;
{
URL" http://en.wikipedia.org/wiki/October_10"
url"http://en.wikipedia.org/wiki/October_10"
} [
2010 10 10 <date> historical-url
] unit-test

View File

@ -9,7 +9,7 @@ USING: tools.test yahoo kernel io.files xml sequences accessors urls ;
} } [ "resource:extra/yahoo/test-results.xml" file>xml parse-yahoo first ] unit-test
{
URL" http://search.yahooapis.com/WebSearchService/V1/webSearch?appid=Factor-search&query=hi&results=2&similar_ok=1"
url"http://search.yahooapis.com/WebSearchService/V1/webSearch?appid=Factor-search&query=hi&results=2&similar_ok=1"
} [
"hi" <search> "Factor-search" >>appid 2 >>results t >>similar-ok query
] unit-test

View File

@ -18,7 +18,7 @@ format similar-ok language country site subscription license ;
first3 <result>
] map ;
CONSTANT: yahoo-url URL" http://search.yahooapis.com/WebSearchService/V1/webSearch"
CONSTANT: yahoo-url url"http://search.yahooapis.com/WebSearchService/V1/webSearch"
:: param ( search url name quot -- search url )
search url search quot call

View File

@ -42,7 +42,7 @@ CONSTANT: encodings H{
{ 102 T{ encoding f "webm" "720p" "VP8" "3D" f "Vorbis" 192 } }
}
CONSTANT: video-info-url URL" http://www.youtube.com/get_video_info"
CONSTANT: video-info-url url"http://www.youtube.com/get_video_info"
: get-video-info ( video-id -- video-info )
video-info-url clone

View File

@ -533,7 +533,7 @@ these lines in your .emacs:
(,factor-setter-regex . 'factor-font-lock-setter-word)
(,factor-getter-regex . 'factor-font-lock-getter-word)
(,factor-bad-string-regex . 'factor-font-lock-invalid-syntax)
("\\_<\\(P\\|SBUF\\|DLL\\)\"" 1 'factor-font-lock-parsing-word)
("\\_<\\(p\\|sbuf\\|dll\\)\"" 1 'factor-font-lock-parsing-word)
(,factor-constant-words-regex . 'factor-font-lock-constant)
,(factor-syntax factor-parsing-words-regex '("P"))
(,"\t" . 'whitespace-highlight-face)))