factor: Change all RENAME:, FROM:, and EXCLUDE: to use \foo word names.
Grab bag of other cleanups. Tests and docs parse!

branch modern-harvey2
parent 13d9a78ec6
commit baa6af4831
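The gist of the change, as a quick orientation before the hunks: word names on the right-hand side of RENAME:, FROM:, and EXCLUDE: (and similar word lists) are now written with a leading backslash, so names that themselves look like parsing words can be referenced literally. The lines below are illustrative only; they are copied from hunks further down in this diff:

FROM: alien.syntax => \pointer: ;
RENAME: \GENERIC: multi-methods => \multi-methods:GENERIC:
ALIAS: \MY-H{ \H{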
@@ -121,6 +121,7 @@ TYPEDEF: int alien-parser-test-int ! reasonably unique name...
] unit-test

! Redefinitions
{ } [
    [ C-TYPE: hi TYPEDEF: void* hi ] with-compilation-unit
] unit-test
<<
C-TYPE: hi
TYPEDEF: void* hi
>>
@@ -583,17 +583,22 @@ FUNCTION: test_struct_14 ffi_test_44 ( ) inline
[ ] [ stack-frame-bustage 2drop ] unit-test

! C99 tests
os windows? [

FUNCTION: complex-float ffi_test_45 ( int x )
FUNCTION: complex-double ffi_test_46 ( int x )
FUNCTION: complex-float ffi_test_47 ( complex-float x, complex-double y )
STRUCT: bool-field-test
    { name c-string }
    { on bool }
    { parents short } ;

FUNCTION: complex-float ffi_test_45 ( int x )
FUNCTION: short ffi_test_48 ( bool-field-test x )

os windows? [

[ C{ 3.0 0.0 } ] [ 3 ffi_test_45 ] unit-test

FUNCTION: complex-double ffi_test_46 ( int x )

[ C{ 3.0 0.0 } ] [ 3 ffi_test_46 ] unit-test

FUNCTION: complex-float ffi_test_47 ( complex-float x, complex-double y )

[ C{ 4.0 4.0 } ] [
C{ 1.0 2.0 }
@@ -601,13 +606,6 @@ os windows? [
] unit-test

! Reported by jedahu
STRUCT: bool-field-test
    { name c-string }
    { on bool }
    { parents short } ;

FUNCTION: short ffi_test_48 ( bool-field-test x )

[ 123 ] [
    bool-field-test <struct>
    123 >>parents
@@ -868,14 +866,15 @@ TUPLE: some-tuple x ;
[ ] [ anton's-regression ] unit-test

STRUCT: bool-and-ptr
    { b bool }
    { ptr void* } ;

FUNCTION: bool-and-ptr ffi_test_61 ( )

os windows? [

STRUCT: bool-and-ptr
    { b bool }
    { ptr void* } ;

FUNCTION: bool-and-ptr ffi_test_61 ( )

! { S{ bool-and-ptr { b t } { ptr f } } } [ ffi_test_61 ] unit-test
{ t } [ ffi_test_61 bool-and-ptr? ] unit-test
{ { t f } } [ ffi_test_61 [ b>> ] [ ptr>> ] bi 2array ] unit-test
@@ -1,10 +1,12 @@
USING: kernel tools.test definitions compiler.units ;
IN: compiler.tests.redefine21

[ ] [ : a ( -- ) ; << : b ( quot -- ) call a ; inline >> [ ] b ] unit-test
: a ( -- ) ;
<< : b ( quot -- ) call a ; inline >>
{ } [ [ ] b ] unit-test
{ } [ [ { a b } forget-all ] with-compilation-unit ] unit-test

[ ] [ [ { a b } forget-all ] with-compilation-unit ] unit-test

[ ] [ : A ( -- ) ; << : B ( -- ) A ; inline >> B ] unit-test

[ ] [ [ { A B } forget-all ] with-compilation-unit ] unit-test
: A ( -- ) ;
<< : B ( -- ) A ; inline >>
{ } [ B ] unit-test
{ } [ [ { A B } forget-all ] with-compilation-unit ] unit-test
@@ -62,7 +62,7 @@ cpu x86.64? [
! %clear
{ t } [
    [ D: 0 %clear ] B{ } make
    [ d: 0 %clear ] B{ } make
    cpu x86.32? B{ 199 6 144 18 0 0 } B{ 73 199 6 144 18 0 0 } ? =
] unit-test
@@ -115,6 +115,6 @@ cpu x86.64? [
    B{ 73 199 6 0 0 0 0 }
}
[
    init-relocation [ 34.0 D: 0 %replace-imm ] B{ } make
    init-relocation [ 34.0 d: 0 %replace-imm ] B{ } make
] unit-test
] when
@@ -6,14 +6,14 @@ IN: furnace.utilities.tests
CONSTANT: dummy-vocab [[
IN: dummy-vocab

: dummy-word ( -- ) ;
]]
: dummy-word ( -- ) ;]]

dummy-vocab "dummy.factor" temp-file [ utf8 set-file-contents ] keep run-file
>>

USE: dummy-vocab

{ t } [
    USE: dummy-vocab
    { dummy-word "index" } resolve-template-path "index" temp-file =
] unit-test
@@ -89,6 +89,6 @@ MACRO: interpolate-locals ( str -- quot )
    parse-multiline-string
    interpolate-locals-quot append! ;

SYNTAX: I[[ "]]" define-interpolate-syntax ;
SYNTAX: I[=[ "]=]" define-interpolate-syntax ;
SYNTAX: I[==[ "]==]" define-interpolate-syntax ;
SYNTAX: \I[[ "]]" define-interpolate-syntax ;
SYNTAX: \I[=[ "]=]" define-interpolate-syntax ;
SYNTAX: \I[==[ "]==]" define-interpolate-syntax ;
@@ -1,50 +1,12 @@
USING: help.markup help.syntax strings ;
IN: multiline

HELP: \STRING:
{ $syntax "STRING: name\nfoo\n;" }
{ $description "Forms a multiline string literal, or 'here document', stored in the word called name. A semicolon is used to signify the end, and that semicolon must be on a line by itself, not preceded or followed by any whitespace. The string will have newlines in between lines but not at the end, unless there is a blank line before the semicolon." } ;

HELP: /*
{ $syntax "/* comment */" }
{ $description "Provides C-like comments that can span multiple lines. One caveat is that " { $snippet "/*" } " and " { $snippet "*/" } " are still tokens and must not appear in the comment text itself." }
{ $examples
    { $example "USING: multiline ;"
        "/* I think that I shall never see"
        " A poem lovely as a tree. */"
        ""
    }
} ;

HELP: \HEREDOC:
{ $syntax "HEREDOC: marker\n...text...\nmarker" }
{ $values { "marker" "a word (token)" } { "text" "arbitrary text" } { "value" string } }
{ $description "Returns a string delimited by an arbitrary user-defined token. This delimiter must be exactly the text beginning at the first non-blank character after " { $link postpone: \HEREDOC: } " until the end of the line containing " { $link postpone: \HEREDOC: } ". Text is captured until a line is found containing exactly this delimiter string." }
{ $warning "Whitespace is significant." }
{ $examples
    { $example "USING: multiline prettyprint ;"
        "HEREDOC: END\nx\nEND\n."
        "\"x\\n\""
    }
    { $example "USING: multiline prettyprint sequences ;"
        "2 5 HEREDOC: zap\nfoo\nbar\nzap\nsubseq ."
        "\"o\\nb\""
    }
} ;

HELP: parse-multiline-string
{ $values { "end-text" "a string delineating the end" } { "str" "the parsed string" } }
{ $description "Parses the input stream until the " { $snippet "end-text" } " is reached and returns the parsed text as a string." }
{ $notes "Used to implement " { $link postpone: /* } "." } ;
{ $notes "Used to implement " { $link postpone: \[[ } "." } ;

ARTICLE: "multiline" "Multiline"
"Multiline strings:"
{ $subsections
    postpone: \STRING:
    postpone: \HEREDOC:
}
"Multiline comments:"
{ $subsections postpone: /* }
"Writing new multiline parsing words:"
{ $subsections parse-multiline-string }
;
@@ -8,87 +8,3 @@ bar
]]

{ "foo\nbar\n" } [ test-it ] unit-test

! HEREDOC:

{ "foo\nbar\n" } [ HEREDOC: END
foo
bar
END
] unit-test

{ "" } [ HEREDOC: END
END
] unit-test

{ " END\n" } [ HEREDOC: END
 END
END
] unit-test

{ "\n" } [ HEREDOC: END

END
] unit-test

{ "x\n" } [ HEREDOC: END
x
END
] unit-test

{ "x\n" } [ HEREDOC: END
x
END
] unit-test

! there's a space after xyz
{ "xyz \n" } [ HEREDOC: END
xyz 
END
] unit-test

{ "} ! * # \" «\n" } [ HEREDOC: END
} ! * # " «
END
] unit-test

{ 21 "foo\nbar\n" " HEREDOC: FOO\n FOO\n" 22 } [ 21 HEREDOC: X
foo
bar
X
HEREDOC: END
 HEREDOC: FOO
 FOO
END
22 ] unit-test

{ "lol\n xyz\n" }
[
HEREDOC: xyz
lol
 xyz
xyz
] unit-test

![[
<<
SYNTAX: \MULTILINE-LITERAL: parse-here suffix! ;
>>

{ { "bar" } }
[
CONSTANT: foo { MULTILINE-LITERAL:
bar
;
} foo
] unit-test

! Make sure parse-here fails if extra crap appears on the first line
[
"CONSTANT: foo { MULTILINE-LITERAL: asdfasfdasdfas
bar
;
}" eval
] must-fail
]]
@@ -1,42 +1,9 @@
! Copyright (C) 2007 Daniel Ehrenberg
! See http://factorcode.org/license.txt for BSD license.
USING: accessors combinators kernel lexer locals make math
namespaces parser quotations sequences words ;
USING: accessors kernel lexer locals make math namespaces
sequences ;
IN: multiline

ERROR: bad-heredoc identifier ;

<PRIVATE

: rest-of-line ( lexer -- seq )
    [ line-text>> ] [ column>> ] bi tail ;

: next-line-text ( lexer -- str ? )
    [ next-line ] [ line-text>> ] [ still-parsing? ] tri ;

: (parse-here) ( lexer -- )
    dup next-line-text [
        dup ";" =
        [ drop next-line ]
        [ % char: \n , (parse-here) ] if
    ] [ ";" throw-unexpected-eof ] if ;

PRIVATE>

ERROR: text-found-before-eol string ;

: parse-here ( -- str )
    [
        lexer get
        dup rest-of-line [ text-found-before-eol ] unless-empty
        (parse-here)
    ] "" make but-last ;

SYNTAX: \STRING:
    scan-new-word
    parse-here 1quotation
    ( -- string ) define-inline ;

<PRIVATE

:: (scan-multiline-string) ( i end lexer -- j )
@@ -58,40 +25,11 @@ SYNTAX: \STRING:
    change-column drop
    ] "" make ;

: advance-same-line ( lexer text -- )
    length [ + ] curry change-column drop ;

:: (parse-til-line-begins) ( begin-text lexer -- )
    lexer still-parsing? [
        lexer line-text>> begin-text sequence= [
            lexer begin-text advance-same-line
        ] [
            lexer line-text>> % char: \n ,
            lexer next-line
            begin-text lexer (parse-til-line-begins)
        ] if
    ] [
        begin-text bad-heredoc
    ] if ;

: parse-til-line-begins ( begin-text lexer -- seq )
    [ (parse-til-line-begins) ] "" make ;

PRIVATE>

: parse-multiline-string ( end-text -- str )
    lexer get 1 (parse-multiline-string) ;

SYNTAX: /* "*/" parse-multiline-string drop ;

SYNTAX: \HEREDOC:
    lexer get {
        [ skip-blank ]
        [ rest-of-line ]
        [ next-line ]
        [ parse-til-line-begins ]
    } cleave suffix! ;

SYNTAX: \[[ "]]" parse-multiline-string suffix! ;
SYNTAX: \[=[ "]=]" parse-multiline-string suffix! ;
SYNTAX: \[==[ "]==]" parse-multiline-string suffix! ;
@@ -589,16 +589,16 @@ Tok = Spaces (Number | Special )
    error>> [ redefined-rule? ] [ name>> "lol" = ] bi and
] must-fail-with

EBNF: foo [=[ Bar = "a":a1 "a":a2 => [[ a1 a2 2array ]] ]=]
{
    { "a" "a" }
} [
    EBNF: foo [=[ Bar = "a":a1 "a":a2 => [[ a1 a2 2array ]] ]=]
    "aa" foo
] unit-test

EBNF: foo2 [=[ Bar = "a":a-1 "a":a-2 => [[ a-1 a-2 2array ]] ]=]
{
    { "a" "a" }
} [
    EBNF: foo2 [=[ Bar = "a":a-1 "a":a-2 => [[ a-1 a-2 2array ]] ]=]
    "aa" foo2
] unit-test
@@ -92,11 +92,11 @@ HELP: roman/mod
{ roman* roman/i roman/mod } related-words

HELP: \ROMAN:
HELP: \roman:
{ $description "A parsing word that reads the next token and converts it to an integer." }
{ $examples
    { $example "USING: prettyprint roman ;"
        "ROMAN: v ."
        "roman: v ."
        "5"
    }
} ;
@@ -104,7 +104,7 @@ HELP: \ROMAN:
ARTICLE: "roman" "Roman numerals"
"The " { $vocab-link "roman" } " vocabulary can convert numbers to and from the Roman numeral system and can perform arithmetic given Roman numerals as input." $nl
"A parsing word for literal Roman numerals:"
{ $subsections postpone: \ROMAN: }
{ $subsections postpone: \roman: }
"Converting to Roman numerals:"
{ $subsections
    >roman
@@ -37,7 +37,7 @@ USING: arrays kernel math roman roman.private sequences tools.test ;
{ "i" "ii" } [ "v" "iii" roman/mod ] unit-test
[ "iii" "iii" roman- ] must-fail

{ 30 } [ ROMAN: xxx ] unit-test
{ 30 } [ roman: xxx ] unit-test

[ roman+ ] must-infer
[ roman- ] must-infer
@@ -69,4 +69,4 @@ ROMAN-OP: * ( x y -- z )
ROMAN-OP: /i ( x y -- z )
ROMAN-OP: /mod ( x y -- z w )

SYNTAX: \ROMAN: scan-token roman> suffix! ;
SYNTAX: \roman: scan-token roman> suffix! ;
@@ -2,7 +2,7 @@
! See http://factorcode.org/license.txt for BSD license.
USING: accessors arrays combinators continuations io kernel
kernel.private math math.parser namespaces sequences
sequences.private source-files.errors strings vectors ;
sequences.private source-files.errors splitting strings vectors ;
IN: lexer

TUPLE: lexer
@@ -143,6 +143,12 @@ PREDICATE: unexpected-eof < unexpected got>> not ;
: parse-tokens ( end -- seq )
    [ ] map-tokens ;

: unescape-token ( string -- string' )
    "\\" ?head drop ;

: unescape-tokens ( seq -- seq' )
    [ unescape-token ] map ;

TUPLE: lexer-error line column line-text parsing-words error ;

M: lexer-error error-file error>> error-file ;
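For context on the two new lexer words above: unescape-token strips a single leading backslash when present and otherwise leaves the token unchanged, and unescape-tokens maps it over a token sequence. A minimal sketch in the unit-test style used elsewhere in this diff, assuming both words are public in the lexer vocabulary as the hunk suggests:

USING: lexer tools.test ;

{ "pointer:" } [ "\\pointer:" unescape-token ] unit-test
{ "dup" } [ "dup" unescape-token ] unit-test
{ { "dup" "over" } } [ { "\\dup" "over" } unescape-tokens ] unit-test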
@@ -65,15 +65,19 @@ IN: bootstrap.syntax
"QUALIFIED-WITH:" [ scan-token scan-token add-qualified ] define-core-syntax

"FROM:" [
    scan-token "=>" expect ";" parse-tokens add-words-from
    scan-token unescape-token
    "=>" expect ";" parse-tokens unescape-tokens add-words-from
] define-core-syntax

"EXCLUDE:" [
    scan-token "=>" expect ";" parse-tokens add-words-excluding
    scan-token unescape-token
    "=>" expect ";" parse-tokens unescape-tokens add-words-excluding
] define-core-syntax

"RENAME:" [
    scan-token scan-token "=>" expect scan-token add-renamed-word
    scan-token unescape-token
    scan-token
    "=>" expect scan-token unescape-token add-renamed-word
] define-core-syntax

"nan:" [ 16 scan-base <fp-nan> suffix! ] define-core-syntax
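One consequence of routing the FROM:, EXCLUDE:, and RENAME: tokens through unescape-token above: the backslash is stripped only when present, so escaped and unescaped names resolve to the same word. A hedged sketch of the user-visible effect (kernel and dup are chosen purely for illustration):

FROM: kernel => dup ;
FROM: kernel => \dup ;
! Both lines import the same word; the second, escaped spelling is the form
! this commit standardizes on throughout the tree.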
@@ -149,7 +153,7 @@ IN: bootstrap.syntax
] define-core-syntax

"ALIAS:" [
    scan-new-word scan-word define-alias
    scan-new-syntax-word scan-syntax-word define-alias
] define-core-syntax

"CONSTANT:" [
@@ -5,7 +5,7 @@ ALIAS: foo +
{ } [ "IN: words.alias.tests CONSTANT: foo 5" eval( -- ) ] unit-test
{ ( -- value ) } [ \ foo stack-effect ] unit-test

ALIAS: MY-H{ H{
ALIAS: \MY-H{ \H{
{ H{ { 1 2 } } } [
    "IN: words.alias.tests MY-H{ { 1 2 } }" eval( -- x )
] unit-test
@@ -4,7 +4,7 @@ alien.data alien.fortran alien.fortran.private alien.strings
classes.struct arrays assocs byte-arrays combinators fry
generalizations io.encodings.ascii kernel macros
macros.expander namespaces sequences shuffle tools.test vocabs.parser ;
FROM: alien.syntax => pointer: ;
FROM: alien.syntax => \pointer: ;
QUALIFIED-WITH: alien.c-types c
IN: alien.fortran.tests
@@ -1,7 +1,7 @@
! Copyright (C) 2009 Jose Antonio Ortega Ruiz.
! See http://factorcode.org/license.txt for BSD license.
USING: continuations fuel fuel.eval io.streams.string kernel math
namespaces random.data sequences tools.test vocabs.parser ;
USING: continuations fuel fuel.eval io.streams.string kernel
math namespaces random.data sequences tools.test vocabs.parser ;
IN: fuel.eval.tests

! pop-restarts
@@ -40,7 +40,6 @@ IN: fuel.eval.tests
{
    "(nil \"IN: http.server : <500> ( error -- response )\" \"\")\n<~FUEL~>\n"
} [
    USE: http.server
    [
        [
            V{ "\"<500>\"" "fuel-word-synopsis" }
@@ -2,11 +2,11 @@ USING: eval kernel math tools.test ;
IN: literate

{ 2 3 t } [
<LITERATE
LITERATE[[
1
> 2
> 3
blah
> 2dup 1 - =
LITERATE>
]]
] unit-test
@@ -19,7 +19,7 @@ M: literate-lexer skip-blank
    ] [ drop ] if*
] [ call-next-method ] if ;

SYNTAX: \<LITERATE
    "LITERATE>" parse-multiline-string string-lines [
SYNTAX: \LITERATE[[
    "]]" parse-multiline-string string-lines [
        <literate-lexer> (parse-lines) append!
    ] with-nested-compilation-unit ;
@@ -163,7 +163,14 @@ ERROR: unexpected-terminator n string slice ;
] dip swap 2array ;

: strict-upper? ( string -- ? )
    [ { [ char: A char: Z between? ] [ ":-" member? ] } 1|| ] all? ;
    {
        [
            [
                { [ char: A char: Z between? ] [ ":-" member? ] } 1||
            ] all?
        ]
        [ [ char: A char: Z between? ] any? ]
    } 1&& ;

! <a <a: but not <a>
: section-open? ( string -- ? )
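The strict-upper? change above tightens the predicate: besides every character being A-Z, ":", or "-", the added 1&& clause also requires at least one A-Z character, so strings made only of punctuation no longer qualify. A standalone restatement under assumed imports (the word name strict-upper-like? and the USING: list are mine, not the vocabulary's; char: is the spelling used on this branch):

USING: combinators.short-circuit math.order sequences ;

: strict-upper-like? ( string -- ? )
    {
        [ [ { [ char: A char: Z between? ] [ ":-" member? ] } 1|| ] all? ]
        [ [ char: A char: Z between? ] any? ]
    } 1&& ;

! "FROM:" strict-upper-like?  =>  t
! ":-"    strict-upper-like?  =>  f  (no uppercase letter)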
@@ -309,4 +316,5 @@ ERROR: mismatched-terminator n string slice ;
: lex-extra ( -- assoc ) extra-vocabs lex-vocabs ;
: lex-all ( -- assoc ) lex-core lex-basis lex-extra 3append ;

: lex-docs ( -- assoc ) all-docs-paths lex-paths ;
: lex-tests ( -- assoc ) all-tests-paths lex-paths ;
@@ -91,8 +91,6 @@ ERROR: not-a-source-path path ;
    "resource:core/vocabs/loader/test/n/n.factor"
    "resource:core/vocabs/loader/test/o/o.factor"
    "resource:core/vocabs/loader/test/p/p.factor"
    "resource:extra/math/blas/vectors/vectors.factor" ! need .modern file
    "resource:extra/math/blas/matrices/matrices.factor" ! need .modern file
} diff
! Don't parse .modern files yet
[ ".modern" tail? ] reject ;
@@ -6,6 +6,7 @@ DEFER: fake
\ fake H{ } clone "multi-methods" set-word-prop
<< ( -- ) \ fake set-stack-effect >>

DEFER: testing

[
[ "fake-{ }" ] [ { } \ fake method-word-name ] unit-test
@@ -22,8 +23,6 @@ DEFER: fake
[ ] [ \ fake update-generic ] unit-test

DEFER: testing

[ ] [ \ testing ( -- ) define-generic ] unit-test

[ t ] [ \ testing generic? ] unit-test
@@ -1,7 +1,7 @@
USING: multi-methods tools.test math sequences namespaces system
kernel strings definitions prettyprint debugger arrays
hashtables continuations classes assocs accessors see ;
RENAME: GENERIC: multi-methods => multi-methods:GENERIC:
RENAME: \GENERIC: multi-methods => \multi-methods:GENERIC:
IN: multi-methods.tests

multi-methods:GENERIC: first-test ( -- )