Merge branch 'master' into smalltalk

db4
Slava Pestov 2009-03-30 21:42:54 -05:00
commit 4a0ef8d0bc
12 changed files with 137 additions and 52 deletions

View File

@@ -659,13 +659,40 @@ M: ppc %callback-value ( ctype -- )
 M: ppc small-enough? ( n -- ? ) -32768 32767 between? ;
 
-M: ppc return-struct-in-registers? ( c-type -- ? ) drop f ;
+M: ppc return-struct-in-registers? ( c-type -- ? )
+    c-type return-in-registers?>> ;
 
-M: ppc %box-small-struct
-    drop "No small structs" throw ;
+M: ppc %box-small-struct ( c-type -- )
+    #! Box a <= 16-byte struct returned in r3:r4:r5:r6
+    heap-size 7 LI
+    "box_medium_struct" f %alien-invoke ;
 
-M: ppc %unbox-small-struct
-    drop "No small structs" throw ;
+: %unbox-struct-1 ( -- )
+    ! Alien must be in r3.
+    "alien_offset" f %alien-invoke
+    3 3 0 LWZ ;
+
+: %unbox-struct-2 ( -- )
+    ! Alien must be in r3.
+    "alien_offset" f %alien-invoke
+    4 3 4 LWZ
+    3 3 0 LWZ ;
+
+: %unbox-struct-4 ( -- )
+    ! Alien must be in r3.
+    "alien_offset" f %alien-invoke
+    6 3 12 LWZ
+    5 3 8 LWZ
+    4 3 4 LWZ
+    3 3 0 LWZ ;
+
+M: ppc %unbox-small-struct ( size -- )
+    #! Alien must be in EAX.
+    heap-size cell align cell /i {
+        { 1 [ %unbox-struct-1 ] }
+        { 2 [ %unbox-struct-2 ] }
+        { 4 [ %unbox-struct-4 ] }
+    } case ;
 
 USE: vocabs.loader
@@ -673,3 +700,5 @@ USE: vocabs.loader
 { [ os macosx? ] [ "cpu.ppc.macosx" require ] }
 { [ os linux? ] [ "cpu.ppc.linux" require ] }
 } cond
+
+"complex-double" c-type t >>return-in-registers? drop

View File

@@ -3,7 +3,7 @@
 !
 USING: kernel tools.test peg peg.ebnf words math math.parser
 sequences accessors peg.parsers parser namespaces arrays
-strings eval ;
+strings eval unicode.data multiline ;
 IN: peg.ebnf.tests
 
 { T{ ebnf-non-terminal f "abc" } } [
@@ -520,3 +520,13 @@ Tok = Spaces (Number | Special )
 { "\\" } [
     "\\" [EBNF foo="\\" EBNF]
 ] unit-test
+
+[ "USE: peg.ebnf [EBNF EBNF]" eval ] must-fail
+
+[ <" USE: peg.ebnf [EBNF
+lol = a
+lol = b
+EBNF] "> eval
+] [
+    error>> [ redefined-rule? ] [ name>> "lol" = ] bi and
+] must-fail-with

View File

@@ -5,13 +5,18 @@ sequences quotations vectors namespaces make math assocs
 continuations peg peg.parsers unicode.categories multiline
 splitting accessors effects sequences.deep peg.search
 combinators.short-circuit lexer io.streams.string stack-checker
-io combinators parser ;
+io combinators parser summary ;
 IN: peg.ebnf
 
 : rule ( name word -- parser )
     #! Given an EBNF word produced from EBNF: return the EBNF rule
     "ebnf-parser" word-prop at ;
 
+ERROR: no-rule rule parser ;
+
+: lookup-rule ( rule parser -- rule' )
+    2dup rule [ 2nip ] [ no-rule ] if* ;
+
 TUPLE: tokenizer any one many ;
 
 : default-tokenizer ( -- tokenizer )
@@ -34,8 +39,13 @@ TUPLE: tokenizer any one many ;
 : reset-tokenizer ( -- )
     default-tokenizer \ tokenizer set-global ;
 
+ERROR: no-tokenizer name ;
+
+M: no-tokenizer summary
+    drop "Tokenizer not found" ;
+
 SYNTAX: TOKENIZER:
-    scan search [ "Tokenizer not found" throw ] unless*
+    scan dup search [ nip ] [ no-tokenizer ] if*
     execute( -- tokenizer ) \ tokenizer set-global ;
 
 TUPLE: ebnf-non-terminal symbol ;
@@ -258,7 +268,7 @@ DEFER: 'choice'
         "]]" token ensure-not ,
         "]?" token ensure-not ,
         [ drop t ] satisfy ,
-    ] seq* [ first ] action repeat0 [ >string ] action ;
+    ] seq* repeat0 [ concat >string ] action ;
 
 : 'ensure-not' ( -- parser )
     #! Parses the '!' syntax to ensure that
@@ -368,14 +378,15 @@ M: ebnf-tokenizer (transform) ( ast -- parser )
         dup parser-tokenizer \ tokenizer set-global
     ] if ;
 
+ERROR: redefined-rule name ;
+
+M: redefined-rule summary
+    name>> "Rule '" "' defined more than once" surround ;
+
 M: ebnf-rule (transform) ( ast -- parser )
     dup elements>>
     (transform) [
-        swap symbol>> dup get parser? [
-            "Rule '" over append "' defined more than once" append throw
-        ] [
-            set
-        ] if
+        swap symbol>> dup get parser? [ redefined-rule ] [ set ] if
     ] keep ;
 
 M: ebnf-sequence (transform) ( ast -- parser )
@@ -467,13 +478,17 @@ ERROR: bad-effect quot effect ;
         [ bad-effect ]
     } cond ;
 
+: ebnf-transform ( ast -- parser quot )
+    [ parser>> (transform) ]
+    [ code>> insert-escapes ]
+    [ parser>> ] tri build-locals
+    [ string-lines parse-lines ] call( string -- quot ) ;
+
 M: ebnf-action (transform) ( ast -- parser )
-    [ parser>> (transform) ] [ code>> insert-escapes ] [ parser>> ] tri build-locals
-    [ string-lines parse-lines ] call( string -- quot ) check-action-effect action ;
+    ebnf-transform check-action-effect action ;
 
 M: ebnf-semantic (transform) ( ast -- parser )
-    [ parser>> (transform) ] [ code>> insert-escapes ] [ parser>> ] tri build-locals
-    [ string-lines parse-lines ] call( string -- quot ) semantic ;
+    ebnf-transform semantic ;
 
 M: ebnf-var (transform) ( ast -- parser )
     parser>> (transform) ;
@@ -481,19 +496,20 @@ M: ebnf-var (transform) ( ast -- parser )
 M: ebnf-terminal (transform) ( ast -- parser )
     symbol>> tokenizer one>> call( symbol -- parser ) ;
 
+ERROR: ebnf-foreign-not-found name ;
+
+M: ebnf-foreign-not-found summary
+    name>> "Foreign word '" "' not found" surround ;
+
 M: ebnf-foreign (transform) ( ast -- parser )
-    dup word>> search
-    [ "Foreign word '" swap word>> append "' not found" append throw ] unless*
+    dup word>> search [ word>> ebnf-foreign-not-found ] unless*
     swap rule>> [ main ] unless* over rule [
         nip
     ] [
         execute( -- parser )
     ] if* ;
 
-: parser-not-found ( name -- * )
-    [
-        "Parser '" % % "' not found." %
-    ] "" make throw ;
+ERROR: parser-not-found name ;
 
 M: ebnf-non-terminal (transform) ( ast -- parser )
     symbol>> [
@@ -504,16 +520,16 @@ M: ebnf-non-terminal (transform) ( ast -- parser )
     'ebnf' parse transform ;
 
 : check-parse-result ( result -- result )
-    dup [
-        dup remaining>> [ blank? ] trim empty? [
+    [
+        dup remaining>> [ blank? ] trim [
             [
                 "Unable to fully parse EBNF. Left to parse was: " %
                 remaining>> %
             ] "" make throw
-        ] unless
+        ] unless-empty
     ] [
         "Could not parse EBNF" throw
-    ] if ;
+    ] if* ;
 
 : parse-ebnf ( string -- hashtable )
     'ebnf' (parse) check-parse-result ast>> transform ;
@@ -522,14 +538,18 @@ M: ebnf-non-terminal (transform) ( ast -- parser )
     parse-ebnf dup dup parser [ main swap at compile ] with-variable
     [ compiled-parse ] curry [ with-scope ast>> ] curry ;
 
-SYNTAX: <EBNF "EBNF>" reset-tokenizer parse-multiline-string parse-ebnf main swap at
+SYNTAX: <EBNF
+    "EBNF>"
+    reset-tokenizer parse-multiline-string parse-ebnf main swap at
     parsed reset-tokenizer ;
 
-SYNTAX: [EBNF "EBNF]" reset-tokenizer parse-multiline-string ebnf>quot nip
+SYNTAX: [EBNF
+    "EBNF]"
+    reset-tokenizer parse-multiline-string ebnf>quot nip
     parsed \ call parsed reset-tokenizer ;
 
 SYNTAX: EBNF:
     reset-tokenizer CREATE-WORD dup ";EBNF" parse-multiline-string
-    ebnf>quot swapd (( input -- ast )) define-declared "ebnf-parser" set-word-prop
+    ebnf>quot swapd
+    (( input -- ast )) define-declared "ebnf-parser" set-word-prop
     reset-tokenizer ;

View File

@@ -0,0 +1,11 @@
+! Copyright (C) 2009 Doug Coleman.
+! See http://factorcode.org/license.txt for BSD license.
+USING: tools.test quoting ;
+IN: quoting.tests
+
+[ f ] [ "" quoted? ] unit-test
+[ t ] [ "''" quoted? ] unit-test
+[ t ] [ "\"\"" quoted? ] unit-test
+[ t ] [ "\"Circus Maximus\"" quoted? ] unit-test
+[ t ] [ "'Circus Maximus'" quoted? ] unit-test
+[ f ] [ "Circus Maximus" quoted? ] unit-test

View File

@@ -11,7 +11,7 @@ IN: sorting.human
 : human>=< ( obj1 obj2 -- >=< ) human<=> invert-comparison ; inline
 
-: human-compare ( obj1 obj2 quot -- <=> ) bi@ human<=> ;
+: human-compare ( obj1 obj2 quot -- <=> ) bi@ human<=> ; inline
 
 : human-sort ( seq -- seq' ) [ human<=> ] sort ;

View File

@@ -35,9 +35,9 @@ HELP: download-feed
 { $values { "url" url } { "feed" feed } }
 { $description "Downloads a feed from a URL using the " { $link "http.client" } "." } ;
 
-HELP: string>feed
-{ $values { "string" string } { "feed" feed } }
-{ $description "Parses a feed in string form." } ;
+HELP: parse-feed
+{ $values { "sequence" "a string or a byte array" } { "feed" feed } }
+{ $description "Parses a feed." } ;
 
 HELP: xml>feed
 { $values { "xml" xml } { "feed" feed } }
@@ -58,7 +58,7 @@
 { $subsection <entry> }
 "Reading feeds:"
 { $subsection download-feed }
-{ $subsection string>feed }
+{ $subsection parse-feed }
 { $subsection xml>feed }
 "Writing feeds:"
 { $subsection feed>xml }

View File

@@ -1,4 +1,4 @@
-USING: syndication io kernel io.files tools.test io.encodings.utf8
+USING: syndication io kernel io.files tools.test io.encodings.binary
 calendar urls xml.writer ;
 IN: syndication.tests
 
@@ -8,7 +8,7 @@ IN: syndication.tests
 : load-news-file ( filename -- feed )
     #! Load an news syndication file and process it, returning
     #! it as an feed tuple.
-    utf8 file-contents string>feed ;
+    binary file-contents parse-feed ;
 
 [ T{
     feed

View File

@@ -1,11 +1,11 @@
 ! Copyright (C) 2006 Chris Double, Daniel Ehrenberg.
-! Portions copyright (C) 2008 Slava Pestov.
+! Portions copyright (C) 2008, 2009 Slava Pestov.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: xml.traversal kernel assocs math.order
-strings sequences xml.data xml.writer
-io.streams.string combinators xml xml.entities.html io.files io
-http.client namespaces make xml.syntax hashtables
-calendar.format accessors continuations urls present ;
+USING: xml.traversal kernel assocs math.order strings sequences
+xml.data xml.writer io.streams.string combinators xml
+xml.entities.html io.files io http.client namespaces make
+xml.syntax hashtables calendar.format accessors continuations
+urls present byte-arrays ;
 IN: syndication
 
 : any-tag-named ( tag names -- tag-inside )
@@ -106,12 +106,15 @@ TUPLE: entry title url description date ;
         { "feed" [ atom1.0 ] }
     } case ;
 
-: string>feed ( string -- feed )
-    [ string>xml xml>feed ] with-html-entities ;
+GENERIC: parse-feed ( string -- feed )
+
+M: string parse-feed [ string>xml xml>feed ] with-html-entities ;
+
+M: byte-array parse-feed [ bytes>xml xml>feed ] with-html-entities ;
 
 : download-feed ( url -- feed )
     #! Retrieve an news syndication file, return as a feed tuple.
-    http-get nip string>feed ;
+    http-get nip parse-feed ;
 
 ! Atom generation
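
Since parse-feed is now a generic word, callers can hand it either raw bytes or already-decoded text and dispatch picks the right method. A hedged usage sketch (the file name is hypothetical):

    USING: io.encodings.binary io.encodings.utf8 io.files syndication ;

    ! byte-array method: raw bytes go straight to the XML parser
    "feed.xml" binary file-contents parse-feed

    ! string method: decoded text behaves like the old string>feed
    "feed.xml" utf8 file-contents parse-feed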

View File

@@ -59,11 +59,11 @@ C: <transaction> transaction
         [ dup [ over [ swap call ] dip ] dip 1 days time+ ] dip each-day
     ] [
         3drop
-    ] if ;
+    ] if ; inline recursive
 
 : process-to-date ( account date -- account )
     over interest-last-paid>> 1 days time+
-    [ dupd process-day ] spin each-day ;
+    [ dupd process-day ] spin each-day ; inline
 
 : inserting-transactions ( account transactions -- account )
     [ [ date>> process-to-date ] keep >>transaction ] each ;

View File

@@ -165,7 +165,7 @@ M: irc-chat to-chat in-messages>> mailbox-put ;
     " hostname servername :irc.factor" irc-print ;
 
 : /CONNECT ( server port -- stream )
-    irc> connect>> call drop ;
+    irc> connect>> call drop ; inline
 
 : /JOIN ( channel password -- )
     "JOIN " irc-write

View File

@@ -160,7 +160,7 @@ void box_value_struct(void *src, CELL size)
 	dpush(tag_object(array));
 }
 
-/* On OS X, structs <= 8 bytes are returned in registers. */
+/* On some x86 OSes, structs <= 8 bytes are returned in registers. */
 void box_small_struct(CELL x, CELL y, CELL size)
 {
 	CELL data[2];
@@ -169,6 +169,17 @@ void box_small_struct(CELL x, CELL y, CELL size)
 	box_value_struct(data,size);
 }
 
+/* On OS X/PPC, complex numbers are returned in registers. */
+void box_medium_struct(CELL x1, CELL x2, CELL x3, CELL x4, CELL size)
+{
+	CELL data[4];
+	data[0] = x1;
+	data[1] = x2;
+	data[2] = x3;
+	data[3] = x4;
+	box_value_struct(data,size);
+}
+
 /* open a native library and push a handle */
 void primitive_dlopen(void)
 {

View File

@@ -40,6 +40,7 @@ void primitive_set_alien_cell(void);
 DLLEXPORT void to_value_struct(CELL src, void *dest, CELL size);
 DLLEXPORT void box_value_struct(void *src, CELL size);
 DLLEXPORT void box_small_struct(CELL x, CELL y, CELL size);
+void box_medium_struct(CELL x1, CELL x2, CELL x3, CELL x4, CELL size);
 
 DEFINE_UNTAG(F_DLL,DLL_TYPE,dll)