extra changes:

index* -> index-from
last-index* -> last-index-from
1 tail -> rest
1 tail-slice -> rest-slice
subset -> filter
prepose
find* -> find-from
find-last* -> find-last-from
before, after generic, < for integers
make between? work for timestamps

db4
parent 15402ed1b4
commit b7c1f9dbe8
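Illustration only, not part of the patch: a minimal before/after sketch of the central rename, with made-up example data. filter keeps the old subset stack effect, ( seq quot -- newseq ); the other renames (index* -> index-from, find* -> find-from, and so on) are likewise pure renames.

    USING: math prettyprint sequences ;

    ! before this patch
    { 1 2 3 4 5 } [ even? ] subset .    ! expected: { 2 4 }

    ! after this patch
    { 1 2 3 4 5 } [ even? ] filter .    ! expected: { 2 4 }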
@@ -2,7 +2,7 @@
 ! See http://factorcode.org/license.txt for BSD license.
 USING: arrays calendar combinators generic init kernel math
 namespaces sequences heaps boxes threads debugger quotations
-assocs ;
+assocs math.order ;
 IN: alarms
 
 TUPLE: alarm quot time interval entry ;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
IN: ascii.tests
|
||||
USING: ascii tools.test sequences kernel math ;
|
||||
IN: ascii.tests
|
||||
|
||||
[ t ] [ CHAR: a letter? ] unit-test
|
||||
[ f ] [ CHAR: A letter? ] unit-test
|
||||
|
|
@ -8,7 +8,6 @@ USING: ascii tools.test sequences kernel math ;
|
|||
[ t ] [ CHAR: 0 digit? ] unit-test
|
||||
[ f ] [ CHAR: x digit? ] unit-test
|
||||
|
||||
|
||||
[ 4 ] [
|
||||
0 "There are Four Upper Case characters"
|
||||
[ LETTER? [ 1+ ] when ] each
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
! Copyright (C) 2005, 2008 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: sequences math kernel ;
|
||||
USING: kernel math math.order sequences ;
|
||||
IN: ascii
|
||||
|
||||
: blank? ( ch -- ? ) " \t\n\r" member? ; inline
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ TUPLE: x30 ;
|
|||
M: x30 g ;
|
||||
|
||||
: my-classes ( -- seq )
|
||||
"benchmark.dispatch1" words [ tuple-class? ] subset ;
|
||||
"benchmark.dispatch1" words [ tuple-class? ] filter ;
|
||||
|
||||
: a-bunch-of-objects ( -- seq )
|
||||
my-classes [ new ] map ;
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ TUPLE: x30 ;
|
|||
INSTANCE: x30 g
|
||||
|
||||
: my-classes ( -- seq )
|
||||
"benchmark.dispatch5" words [ tuple-class? ] subset ;
|
||||
"benchmark.dispatch5" words [ tuple-class? ] filter ;
|
||||
|
||||
: a-bunch-of-objects ( -- seq )
|
||||
my-classes [ new ] map ;
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@ M: check< summary drop "Number exceeds upper bound" ;
|
|||
>r keys r> define-slots ;
|
||||
|
||||
: filter-pad ( slots -- slots )
|
||||
[ drop padding-name? not ] assoc-subset ;
|
||||
[ drop padding-name? not ] assoc-filter ;
|
||||
|
||||
: define-bitfield ( classname slots -- )
|
||||
[
|
||||
|
|
|
|||
|
|
@ -116,7 +116,7 @@ over boid-vel -rot relative-position angle-between ;
|
|||
<--&& ;
|
||||
|
||||
: cohesion-neighborhood ( self -- boids )
|
||||
boids> [ within-cohesion-neighborhood? ] with subset ;
|
||||
boids> [ within-cohesion-neighborhood? ] with filter ;
|
||||
|
||||
: cohesion-force ( self -- force )
|
||||
dup cohesion-neighborhood
|
||||
|
|
@ -136,7 +136,7 @@ over boid-vel -rot relative-position angle-between ;
|
|||
<--&& ;
|
||||
|
||||
: separation-neighborhood ( self -- boids )
|
||||
boids> [ within-separation-neighborhood? ] with subset ;
|
||||
boids> [ within-separation-neighborhood? ] with filter ;
|
||||
|
||||
: separation-force ( self -- force )
|
||||
dup separation-neighborhood
|
||||
|
|
@ -156,7 +156,7 @@ over boid-vel -rot relative-position angle-between ;
|
|||
<--&& ;
|
||||
|
||||
: alignment-neighborhood ( self -- boids )
|
||||
boids> [ within-alignment-neighborhood? ] with subset ;
|
||||
boids> [ within-alignment-neighborhood? ] with filter ;
|
||||
|
||||
: alignment-force ( self -- force )
|
||||
alignment-neighborhood
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ IN: bootstrap.help
|
|||
|
||||
[ drop ] load-vocab-hook [
|
||||
vocabs
|
||||
[ vocab-docs-loaded? not ] subset
|
||||
[ vocab-docs-loaded? not ] filter
|
||||
[ load-docs ] each
|
||||
] with-variable ;
|
||||
|
||||
|
|
|
|||
|
|
@ -5,9 +5,9 @@ USING: kernel continuations arrays assocs sequences sorting math
|
|||
IN: builder.benchmark
|
||||
|
||||
! : passing-benchmarks ( table -- table )
|
||||
! [ second first2 number? swap number? and ] subset ;
|
||||
! [ second first2 number? swap number? and ] filter ;
|
||||
|
||||
: passing-benchmarks ( table -- table ) [ second number? ] subset ;
|
||||
: passing-benchmarks ( table -- table ) [ second number? ] filter ;
|
||||
|
||||
! : simplify-table ( table -- table ) [ first2 second 2array ] map ;
|
||||
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ M: bunny-gadget graft* ( gadget -- )
|
|||
[ <bunny-fixed-pipeline> ]
|
||||
[ <bunny-cel-shaded> ]
|
||||
[ <bunny-outlined> ]
|
||||
} map-call-with [ ] subset
|
||||
} map-call-with [ ] filter
|
||||
0
|
||||
roll {
|
||||
set-bunny-gadget-geom
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ float-arrays continuations namespaces sequences.lib ;
|
|||
IN: bunny.model
|
||||
|
||||
: numbers ( str -- seq )
|
||||
" " split [ string>number ] map [ ] subset ;
|
||||
" " split [ string>number ] map [ ] filter ;
|
||||
|
||||
: (parse-model) ( vs is -- vs is )
|
||||
readln [
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
USING: arrays kernel math math.functions namespaces sequences
|
||||
strings system vocabs.loader calendar.backend threads
|
||||
accessors combinators locals classes.tuple ;
|
||||
accessors combinators locals classes.tuple math.order ;
|
||||
IN: calendar
|
||||
|
||||
TUPLE: timestamp year month day hour minute second gmt-offset ;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
USING: math math.parser kernel sequences io calendar
|
||||
accessors arrays io.streams.string splitting
|
||||
accessors arrays io.streams.string splitting math.order
|
||||
combinators accessors debugger ;
|
||||
IN: calendar.format
|
||||
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ MACRO: >tuple< ( class -- )
|
|||
|
||||
MACRO: >tuple*< ( class -- )
|
||||
all-slots
|
||||
[ slot-spec-name "*" tail? ] subset
|
||||
[ slot-spec-name "*" tail? ] filter
|
||||
reader-slots ;
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -142,7 +142,7 @@ H{
|
|||
} assoc-union alien>objc-types set-global
|
||||
|
||||
: objc-struct-type ( i string -- ctype )
|
||||
2dup CHAR: = -rot index* swap subseq
|
||||
2dup CHAR: = -rot index-from swap subseq
|
||||
dup c-types get key? [
|
||||
"Warning: no such C type: " write dup print
|
||||
drop "void*"
|
||||
|
|
|
|||
|
|
@@ -11,15 +11,15 @@ HELP: parallel-each
 { $description "Spawns a new thread for applying " { $snippet "quot" } " to every element of " { $snippet "seq" } ", blocking until all quotations complete." }
 { $errors "Throws an error if one of the iterations throws an error." } ;
 
-HELP: parallel-subset
+HELP: parallel-filter
 { $values { "seq" sequence } { "quot" "a quotation with stack effect " { $snippet "( elt -- ? )" } } { "newseq" sequence } }
 { $description "Spawns a new thread for applying " { $snippet "quot" } " to every element of " { $snippet "seq" } ", collecting the elements for which the quotation yielded a true value." }
 { $errors "Throws an error if one of the iterations throws an error." } ;
 
 ARTICLE: "concurrency.combinators" "Concurrent combinators"
-"The " { $vocab-link "concurrency.combinators" } " vocabulary provides concurrent variants of " { $link each } ", " { $link map } " and " { $link subset } ":"
+"The " { $vocab-link "concurrency.combinators" } " vocabulary provides concurrent variants of " { $link each } ", " { $link map } " and " { $link filter } ":"
 { $subsection parallel-each }
 { $subsection parallel-map }
-{ $subsection parallel-subset } ;
+{ $subsection parallel-filter } ;
 
 ABOUT: "concurrency.combinators"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ concurrency.mailboxes threads sequences accessors ;
|
|||
|
||||
[ [ drop ] parallel-each ] must-infer
|
||||
[ [ ] parallel-map ] must-infer
|
||||
[ [ ] parallel-subset ] must-infer
|
||||
[ [ ] parallel-filter ] must-infer
|
||||
|
||||
[ { 1 4 9 } ] [ { 1 2 3 } [ sq ] parallel-map ] unit-test
|
||||
|
||||
|
|
@ -14,7 +14,7 @@ concurrency.mailboxes threads sequences accessors ;
|
|||
[ error>> "Even" = ] must-fail-with
|
||||
|
||||
[ V{ 0 3 6 9 } ]
|
||||
[ 10 [ 3 mod zero? ] parallel-subset ] unit-test
|
||||
[ 10 [ 3 mod zero? ] parallel-filter ] unit-test
|
||||
|
||||
[ 10 ]
|
||||
[
|
||||
|
|
|
|||
|
|
@ -13,5 +13,5 @@ IN: concurrency.combinators
|
|||
[ [ >r curry r> spawn-stage ] 2curry each ] keep await ;
|
||||
inline
|
||||
|
||||
: parallel-subset ( seq quot -- newseq )
|
||||
: parallel-filter ( seq quot -- newseq )
|
||||
over >r pusher >r each r> r> like ; inline
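Illustration only, not part of the patch: a hedged usage sketch of the renamed combinator defined above, with made-up data. Per the help entry, parallel-filter collects the elements for which the quotation yields a true value; the result is coerced with like, so an array input should come back as an array.

    USING: concurrency.combinators math prettyprint ;
    { 1 2 3 4 5 6 } [ even? ] parallel-filter .    ! expected: { 2 4 6 }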
|
||||
|
|
|
|||
|
|
@ -153,7 +153,7 @@ SYMBOL: event-stream-callbacks
|
|||
|
||||
[
|
||||
event-stream-callbacks global
|
||||
[ [ drop expired? not ] assoc-subset H{ } assoc-like ] change-at
|
||||
[ [ drop expired? not ] assoc-filter H{ } assoc-like ] change-at
|
||||
] "core-foundation" add-init-hook
|
||||
|
||||
: add-event-source-callback ( quot -- id )
|
||||
|
|
|
|||
|
|
@ -130,7 +130,7 @@ M: nonthrowable execute-statement* ( statement type -- )
|
|||
|
||||
: with-db ( db seq quot -- )
|
||||
>r make-db db-open db r>
|
||||
[ db get swap [ drop ] swap compose with-disposal ] curry with-variable ;
|
||||
[ db get swap [ drop ] prepose with-disposal ] curry with-variable ;
|
||||
inline
|
||||
|
||||
: default-query ( query -- result-set )
|
||||
|
|
|
|||
|
|
@ -95,6 +95,6 @@ M: db <select-by-slots-statement> ( tuple class -- statement )
|
|||
|
||||
" from " 0% 0%
|
||||
dupd
|
||||
[ slot-name>> swap get-slot-named ] with subset
|
||||
[ slot-name>> swap get-slot-named ] with filter
|
||||
dup empty? [ 2drop ] [ where-clause ] if ";" 0%
|
||||
] query-make ;
|
||||
|
|
|
|||
|
|
@ -83,13 +83,13 @@ FACTOR-BLOB NULL ;
|
|||
dup number? [ number>string ] when ;
|
||||
|
||||
: maybe-remove-id ( specs -- obj )
|
||||
[ +native-id+? not ] subset ;
|
||||
[ +native-id+? not ] filter ;
|
||||
|
||||
: remove-relations ( specs -- newcolumns )
|
||||
[ relation? not ] subset ;
|
||||
[ relation? not ] filter ;
|
||||
|
||||
: remove-id ( specs -- obj )
|
||||
[ primary-key>> not ] subset ;
|
||||
[ primary-key>> not ] filter ;
|
||||
|
||||
! SQLite Types: http://www.sqlite.org/datatype3.html
|
||||
! NULL INTEGER REAL TEXT BLOB
|
||||
|
|
@ -152,7 +152,7 @@ HOOK: bind# db ( spec obj -- )
|
|||
tuck offset-of-slot set-slot ;
|
||||
|
||||
: tuple>filled-slots ( tuple -- alist )
|
||||
<mirror> [ nip ] assoc-subset ;
|
||||
<mirror> [ nip ] assoc-filter ;
|
||||
|
||||
: tuple>params ( specs tuple -- obj )
|
||||
[
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2006, 2007 Slava Pestov
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: arrays io kernel math models namespaces sequences strings
|
||||
splitting combinators unicode.categories ;
|
||||
splitting combinators unicode.categories math.order ;
|
||||
IN: documents
|
||||
|
||||
: +col ( loc n -- newloc ) >r first2 r> + 2array ;
|
||||
|
|
@ -184,10 +184,10 @@ M: one-char-elt next-elt 2drop ;
|
|||
[ >r blank? r> xor ] curry ; inline
|
||||
|
||||
: (prev-word) ( ? col str -- col )
|
||||
rot break-detector find-last* drop ?1+ ;
|
||||
rot break-detector find-last-from drop ?1+ ;
|
||||
|
||||
: (next-word) ( ? col str -- col )
|
||||
[ rot break-detector find* drop ] keep
|
||||
[ rot break-detector find-from drop ] keep
|
||||
over not [ nip length ] [ drop ] if ;
|
||||
|
||||
TUPLE: one-word-elt ;
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ pointer-window up-till-frame dup <wm-frame> is? [ ] [ drop f ] if ;
|
|||
|
||||
wm-root>
|
||||
<- children
|
||||
[ <- mapped? ] subset
|
||||
[ <- mapped? ] filter
|
||||
[ check-window-table ] map
|
||||
reverse
|
||||
|
||||
|
|
@ -64,7 +64,7 @@ drop
|
|||
|
||||
! wm-root>
|
||||
! <- children
|
||||
! [ <- mapped? ] subset
|
||||
! [ <- mapped? ] filter
|
||||
! [ check-window-table ] map
|
||||
! reverse
|
||||
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ IN: factory
|
|||
! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
: manage-windows ( -- )
|
||||
dpy get $default-root <- children [ <- mapped? ] subset
|
||||
dpy get $default-root <- children [ <- mapped? ] filter
|
||||
[ $id <wm-frame> new* drop ] each ;
|
||||
|
||||
! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ TUPLE: q/a question answer ;
|
|||
C: <q/a> q/a
|
||||
|
||||
: li>q/a ( li -- q/a )
|
||||
[ "br" tag-named*? not ] subset
|
||||
[ "br" tag-named*? not ] filter
|
||||
[ "strong" tag-named*? ] find-after
|
||||
>r tag-children r> <q/a> ;
|
||||
|
||||
|
|
@ -39,7 +39,7 @@ C: <question-list> question-list
|
|||
|
||||
: xml>question-list ( list -- question-list )
|
||||
[ "title" swap at ] keep
|
||||
tag-children [ tag? ] subset [ xml>q/a ] map
|
||||
tag-children [ tag? ] filter [ xml>q/a ] map
|
||||
<question-list> ;
|
||||
|
||||
: question-list>xml ( question-list -- list )
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ DEFER: (shallow-fry)
|
|||
: ((shallow-fry)) ( accum quot adder -- result )
|
||||
>r [ ] swap (shallow-fry) r>
|
||||
append swap dup empty? [ drop ] [
|
||||
[ swap compose ] curry append
|
||||
[ prepose ] curry append
|
||||
] if ; inline
|
||||
|
||||
: (shallow-fry) ( accum quot -- result )
|
||||
|
|
@ -51,7 +51,7 @@ DEFER: (shallow-fry)
|
|||
[
|
||||
dup callable? [
|
||||
[
|
||||
[ { , namespaces:, @ } member? ] subset length
|
||||
[ { , namespaces:, @ } member? ] filter length
|
||||
\ , <repetition> %
|
||||
]
|
||||
[ deep-fry % ] bi
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ IN: hardware-info.linux
|
|||
|
||||
: uname ( -- seq )
|
||||
65536 "char" <c-array> [ (uname) io-error ] keep
|
||||
"\0" split [ empty? not ] subset [ >string ] map
|
||||
"\0" split [ empty? not ] filter [ >string ] map
|
||||
6 "" pad-right ;
|
||||
|
||||
: sysname ( -- string ) uname first ;
|
||||
|
|
@ -18,4 +18,4 @@ IN: hardware-info.linux
|
|||
: domainname ( -- string ) uname 5 swap nth ;
|
||||
|
||||
: kernel-version ( -- seq )
|
||||
release ".-" split [ ] subset 5 "" pad-right ;
|
||||
release ".-" split [ ] filter 5 "" pad-right ;
|
||||
|
|
|
|||
|
|
@@ -111,7 +111,7 @@ $nl
 "You can create a new array, only containing elements which satisfy some condition:"
 { $example
 ": negative? ( n -- ? ) 0 < ;"
-"{ -12 10 16 0 -1 -3 -9 } [ negative? ] subset ."
+"{ -12 10 16 0 -1 -3 -9 } [ negative? ] filter ."
 "{ -12 -1 -3 -9 }"
 }
 { $references
|
||||
|
|
|
|||
|
|
@ -228,13 +228,13 @@ ARTICLE: "article-index" "Article index"
|
|||
{ $index [ articles get keys ] } ;
|
||||
|
||||
ARTICLE: "primitive-index" "Primitive index"
|
||||
{ $index [ all-words [ primitive? ] subset ] } ;
|
||||
{ $index [ all-words [ primitive? ] filter ] } ;
|
||||
|
||||
ARTICLE: "error-index" "Error index"
|
||||
{ $index [ all-errors ] } ;
|
||||
|
||||
ARTICLE: "type-index" "Type index"
|
||||
{ $index [ builtins get [ ] subset ] } ;
|
||||
{ $index [ builtins get [ ] filter ] } ;
|
||||
|
||||
ARTICLE: "class-index" "Class index"
|
||||
{ $index [ classes ] } ;
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ M: predicate word-help* drop \ $predicate ;
|
|||
|
||||
: all-articles ( -- seq )
|
||||
articles get keys
|
||||
all-words [ word-help ] subset append ;
|
||||
all-words [ word-help ] filter append ;
|
||||
|
||||
: xref-help ( -- )
|
||||
all-articles [ xref-article ] each ;
|
||||
|
|
@ -41,7 +41,7 @@ M: predicate word-help* drop \ $predicate ;
|
|||
[ dup article-title ] { } map>assoc sort-values keys ;
|
||||
|
||||
: all-errors ( -- seq )
|
||||
all-words [ error? ] subset sort-articles ;
|
||||
all-words [ error? ] filter sort-articles ;
|
||||
|
||||
M: word article-name word-name ;
|
||||
|
||||
|
|
@ -135,7 +135,7 @@ M: word set-article-parent swap "help-parent" set-word-prop ;
|
|||
":vars - list all variables at error time" print ;
|
||||
|
||||
: :help ( -- )
|
||||
error get delegates [ error-help ] map [ ] subset
|
||||
error get delegates [ error-help ] map [ ] filter
|
||||
{
|
||||
{ [ dup empty? ] [ (:help-none) ] }
|
||||
{ [ dup length 1 = ] [ first help ] }
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ IN: help.lint
|
|||
[ help ] with-string-writer drop ;
|
||||
|
||||
: all-word-help ( words -- seq )
|
||||
[ word-help ] subset ;
|
||||
[ word-help ] filter ;
|
||||
|
||||
TUPLE: help-error topic ;
|
||||
|
||||
|
|
@ -131,7 +131,7 @@ M: help-error error.
|
|||
articles get keys "group-articles" set
|
||||
child-vocabs
|
||||
[ dup check-vocab ] { } map>assoc
|
||||
[ nip empty? not ] assoc-subset
|
||||
[ nip empty? not ] assoc-filter
|
||||
] with-scope ;
|
||||
|
||||
: typos. ( assoc -- )
|
||||
|
|
@ -150,12 +150,12 @@ M: help-error error.
|
|||
: help-lint-all ( -- ) "" help-lint ;
|
||||
|
||||
: unlinked-words ( words -- seq )
|
||||
all-word-help [ article-parent not ] subset ;
|
||||
all-word-help [ article-parent not ] filter ;
|
||||
|
||||
: linked-undocumented-words ( -- seq )
|
||||
all-words
|
||||
[ word-help not ] subset
|
||||
[ article-parent ] subset
|
||||
[ "predicating" word-prop not ] subset ;
|
||||
[ word-help not ] filter
|
||||
[ article-parent ] filter
|
||||
[ "predicating" word-prop not ] filter ;
|
||||
|
||||
MAIN: help-lint
|
||||
|
|
|
|||
|
|
@@ -123,8 +123,8 @@ $nl
 { $code "\"A man, a plan, a canal: Panama.\"" }
 "Now, place a quotation containing " { $link Letter? } " on the stack; quoting code places it on the stack instead of executing it immediately:"
 { $code "[ Letter? ]" }
-"Finally, pass the string and the quotation to the " { $link subset } " word:"
-{ $code "subset" }
+"Finally, pass the string and the quotation to the " { $link filter } " word:"
+{ $code "filter" }
 "Now the stack should contain the following string:"
 { "\"AmanaplanacanalPanama\"" }
 "This is almost what we want; we just need to convert the string to lower case now. This can be done by calling " { $link >lower } "; the " { $snippet ">" } " prefix is a naming convention for conversion operations, and should be read as ``to'':"
@@ -132,9 +132,9 @@ $nl
 "Finally, let's print the top of the stack and discard it:"
 { $code "." }
 "This will output " { $snippet "amanaplanacanalpanama" } ". This string is in the form that we want, and we evaluated the following code to get it into this form:"
-{ $code "[ Letter? ] subset >lower" }
+{ $code "[ Letter? ] filter >lower" }
 "This code starts with a string on the stack, removes non-alphabetical characters, and converts the result to lower case, leaving a new string on the stack. We put this code in a new word, and add the new word to " { $snippet "palindrome.factor" } ":"
-{ $code ": normalize ( str -- newstr ) [ Letter? ] subset >lower ;" }
+{ $code ": normalize ( str -- newstr ) [ Letter? ] filter >lower ;" }
 "You will need to add " { $vocab-link "unicode.categories" } " to the vocabulary search path, so that " { $link Letter? } " can be used in the source file."
 $nl
 "We modify " { $snippet "palindrome?" } " to first apply " { $snippet "normalize" } " to its input:"
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ IN: html.parser.analyzer
|
|||
(find-relative) ;
|
||||
|
||||
: (find-all) ( n seq quot -- )
|
||||
2dup >r >r find* [
|
||||
2dup >r >r find-from [
|
||||
dupd 2array , 1+ r> r> (find-all)
|
||||
] [
|
||||
r> r> 3drop
|
||||
|
|
@ -21,7 +21,7 @@ IN: html.parser.analyzer
|
|||
[ 0 -rot (find-all) ] { } make ;
|
||||
|
||||
: (find-nth) ( offset seq quot n count -- obj )
|
||||
>r >r [ find* ] 2keep 4 npick [
|
||||
>r >r [ find-from ] 2keep 4 npick [
|
||||
r> r> 1+ 2dup <= [
|
||||
4drop
|
||||
] [
|
||||
|
|
@ -46,7 +46,7 @@ IN: html.parser.analyzer
|
|||
] [
|
||||
drop t
|
||||
] if
|
||||
] subset ;
|
||||
] filter ;
|
||||
|
||||
: trim-text ( vector -- vector' )
|
||||
[
|
||||
|
|
@ -57,14 +57,14 @@ IN: html.parser.analyzer
|
|||
] map ;
|
||||
|
||||
: find-by-id ( id vector -- vector )
|
||||
[ tag-attributes "id" swap at = ] with subset ;
|
||||
[ tag-attributes "id" swap at = ] with filter ;
|
||||
|
||||
: find-by-class ( id vector -- vector )
|
||||
[ tag-attributes "class" swap at = ] with subset ;
|
||||
[ tag-attributes "class" swap at = ] with filter ;
|
||||
|
||||
: find-by-name ( str vector -- vector )
|
||||
>r >lower r>
|
||||
[ tag-name = ] with subset ;
|
||||
[ tag-name = ] with filter ;
|
||||
|
||||
: find-first-name ( str vector -- i/f tag/f )
|
||||
>r >lower r>
|
||||
|
|
@ -76,13 +76,13 @@ IN: html.parser.analyzer
|
|||
|
||||
: find-by-attribute-key ( key vector -- vector )
|
||||
>r >lower r>
|
||||
[ tag-attributes at ] with subset
|
||||
[ ] subset ;
|
||||
[ tag-attributes at ] with filter
|
||||
[ ] filter ;
|
||||
|
||||
: find-by-attribute-key-value ( value key vector -- vector )
|
||||
>r >lower r>
|
||||
[ tag-attributes at over = ] with subset nip
|
||||
[ ] subset ;
|
||||
[ tag-attributes at over = ] with filter nip
|
||||
[ ] filter ;
|
||||
|
||||
: find-first-attribute-key-value ( value key vector -- i/f tag/f )
|
||||
>r >lower r>
|
||||
|
|
@ -109,12 +109,12 @@ IN: html.parser.analyzer
|
|||
tag-attributes [ "href" swap at ] [ f ] if* ;
|
||||
|
||||
: find-links ( vector -- vector )
|
||||
[ tag-name "a" = ] subset
|
||||
[ tag-link ] subset ;
|
||||
[ tag-name "a" = ] filter
|
||||
[ tag-link ] filter ;
|
||||
|
||||
|
||||
: find-by-text ( seq quot -- tag )
|
||||
[ dup tag-name text = ] swap compose find drop ;
|
||||
[ dup tag-name text = ] prepose find drop ;
|
||||
|
||||
: find-opening-tags-by-name ( name seq -- seq )
|
||||
[ [ tag-name = ] keep tag-closing? not and ] with find-all ;
|
||||
|
|
@ -125,11 +125,11 @@ IN: html.parser.analyzer
|
|||
: query>assoc* ( str -- hash )
|
||||
"?" split1 nip query>assoc ;
|
||||
|
||||
! clear "http://fark.com" http-get parse-html find-links [ "go.pl" swap start ] subset [ "=" split peek ] map
|
||||
! clear "http://fark.com" http-get parse-html find-links [ "go.pl" swap start ] filter [ "=" split peek ] map
|
||||
|
||||
! clear "http://www.sailwx.info/shiptrack/cruiseships.phtml" http-get parse-html remove-blank-text
|
||||
! "a" over find-opening-tags-by-name
|
||||
! [ nip "shipposition.phtml?call=GBTT" swap href-contains? ] assoc-subset
|
||||
! [ nip "shipposition.phtml?call=GBTT" swap href-contains? ] assoc-filter
|
||||
! first first 8 + over nth
|
||||
! tag-attributes "href" swap at query>assoc*
|
||||
! "lat" over at "lon" rot at
|
||||
|
|
|
|||
|
|
@ -221,7 +221,7 @@ SYMBOL: exit-continuation
|
|||
'[ exit-continuation set @ ] callcc1 exit-continuation off ;
|
||||
|
||||
: split-path ( string -- path )
|
||||
"/" split [ empty? not ] subset ;
|
||||
"/" split [ empty? not ] filter ;
|
||||
|
||||
: do-request ( request -- response )
|
||||
[
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ must-fail-with
|
|||
] unit-test
|
||||
|
||||
: run-template
|
||||
with-string-writer [ "\r\n\t" member? not ] subset
|
||||
with-string-writer [ "\r\n\t" member? not ] filter
|
||||
"?>" split1 nip ; inline
|
||||
|
||||
: test-template ( name -- template )
|
||||
|
|
|
|||
|
|
@ -105,7 +105,7 @@ SYMBOL: tags
|
|||
<form
|
||||
"POST" =method
|
||||
[ "action" required-attr resolve-base-path =action ]
|
||||
[ tag-attrs [ drop name-tag "action" = not ] assoc-subset print-attrs ] bi
|
||||
[ tag-attrs [ drop name-tag "action" = not ] assoc-filter print-attrs ] bi
|
||||
form>
|
||||
hidden-form-field ;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
! Copyright (C) 2004, 2005 Mackenzie Straight.
|
||||
! Copyright (C) 2006, 2008 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
IN: io.buffers
|
||||
USING: alien alien.accessors alien.c-types alien.syntax kernel
|
||||
kernel.private libc math sequences byte-arrays strings hints
|
||||
accessors ;
|
||||
accessors math.order ;
|
||||
IN: io.buffers
|
||||
|
||||
TUPLE: buffer size ptr fill pos ;
|
||||
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ IN: io.encodings.8-bit
|
|||
|
||||
: process-contents ( lines -- assoc )
|
||||
[ "#" split1 drop ] map
|
||||
[ empty? not ] subset
|
||||
[ empty? not ] filter
|
||||
[ "\t" split 2 head [ 2 tail-if hex> ] map ] map ;
|
||||
|
||||
: byte>ch ( assoc -- array )
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2005, 2008 Slava Pestov, Doug Coleman
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: math kernel io sequences io.buffers io.timeouts generic
|
||||
byte-vectors system io.streams.duplex io.encodings
|
||||
byte-vectors system io.streams.duplex io.encodings math.order
|
||||
io.backend continuations debugger classes byte-arrays namespaces
|
||||
splitting dlists assocs io.encodings.binary inspector accessors ;
|
||||
IN: io.nonblocking
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ M: f parse-sockaddr nip ;
|
|||
: parse-addrinfo-list ( addrinfo -- seq )
|
||||
[ addrinfo-next ] follow
|
||||
[ addrinfo>addrspec ] map
|
||||
[ ] subset ;
|
||||
[ ] filter ;
|
||||
|
||||
: prepare-resolve-host ( host serv passive? -- host' serv' flags )
|
||||
#! If the port is a number, we resolve for 'http' then
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: alien.c-types kernel io.nonblocking io.unix.backend
|
||||
bit-arrays sequences assocs unix math namespaces structs
|
||||
accessors ;
|
||||
accessors math.order ;
|
||||
IN: io.unix.select
|
||||
|
||||
TUPLE: select-mx < mx read-fdset write-fdset ;
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ IN: koszul
|
|||
} cond ;
|
||||
|
||||
: canonicalize
|
||||
[ nip zero? not ] assoc-subset ;
|
||||
[ nip zero? not ] assoc-filter ;
|
||||
|
||||
SYMBOL: terms
|
||||
|
||||
|
|
@ -71,7 +71,7 @@ SYMBOL: terms
|
|||
[ natural-sort ] keep [ index ] curry map ;
|
||||
|
||||
: (inversions) ( n seq -- n )
|
||||
[ > ] with subset length ;
|
||||
[ > ] with filter length ;
|
||||
|
||||
: inversions ( seq -- n )
|
||||
0 swap [ length ] keep [
|
||||
|
|
@ -148,7 +148,7 @@ DEFER: (d)
|
|||
: nth-basis-elt ( generators n -- elt )
|
||||
over length [
|
||||
3dup bit? [ nth ] [ 2drop f ] if
|
||||
] map [ ] subset 2nip ;
|
||||
] map [ ] filter 2nip ;
|
||||
|
||||
: basis ( generators -- seq )
|
||||
natural-sort dup length 2^ [ nth-basis-elt ] with map ;
|
||||
|
|
@ -279,7 +279,7 @@ DEFER: (d)
|
|||
|
||||
: bigraded-laplacian ( u-generators z-generators quot -- seq )
|
||||
>r [ basis graded ] bi@ tensor bigraded-triples r>
|
||||
[ [ first3 ] swap compose map ] curry map ; inline
|
||||
[ [ first3 ] prepose map ] curry map ; inline
|
||||
|
||||
: bigraded-laplacian-betti ( u-generators z-generators -- seq )
|
||||
[ laplacian-betti ] bigraded-laplacian ;
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ IN: lazy-lists.examples
|
|||
: naturals 0 lfrom ;
|
||||
: positives 1 lfrom ;
|
||||
: evens 0 [ 2 + ] lfrom-by ;
|
||||
: odds 1 lfrom [ 2 mod 1 = ] lsubset ;
|
||||
: odds 1 lfrom [ 2 mod 1 = ] lfilter ;
|
||||
: powers-of-2 1 [ 2 * ] lfrom-by ;
|
||||
: ones 1 [ ] lfrom-by ;
|
||||
: squares naturals [ dup * ] lmap ;
|
||||
|
|
|
|||
|
|
@@ -82,7 +82,7 @@ HELP: uncons
 { $values { "cons" "a cons object" } { "car" "the head of the list" } { "cdr" "the tail of the list" } }
 { $description "Put the head and tail of the list on the stack." } ;
 
-{ leach lreduce lmap lmap-with ltake lsubset lappend lfrom lfrom-by lconcat lcartesian-product lcartesian-product* lcomp lcomp* lmerge lreduce lwhile luntil } related-words
+{ leach lreduce lmap lmap-with ltake lfilter lappend lfrom lfrom-by lconcat lcartesian-product lcartesian-product* lcomp lcomp* lmerge lreduce lwhile luntil } related-words
 
 HELP: leach
 { $values { "list" "a cons object" } { "quot" "a quotation with stack effect ( obj -- )" } }
@@ -104,9 +104,9 @@ HELP: ltake
 { $values { "n" "a non negative integer" } { "list" "a cons object" } { "result" "resulting cons object" } }
 { $description "Outputs a lazy list containing the first n items in the list. This is done a lazy manner. No evaluation of the list elements occurs initially but a " { $link <lazy-take> } " object is returned which conforms to the list protocol. Calling " { $link car } ", " { $link cdr } " or " { $link nil? } " on this will evaluate elements as required." } ;
 
-HELP: lsubset
+HELP: lfilter
 { $values { "list" "a cons object" } { "quot" "a quotation with stack effect ( -- X )" } { "result" "resulting cons object" } }
-{ $description "Perform a similar functionality to that of the " { $link subset } " word, but in a lazy manner. No evaluation of the list elements occurs initially but a " { $link <lazy-subset> } " object is returned which conforms to the list protocol. Calling " { $link car } ", " { $link cdr } " or " { $link nil? } " on this will evaluate elements as required." } ;
+{ $description "Perform a similar functionality to that of the " { $link filter } " word, but in a lazy manner. No evaluation of the list elements occurs initially but a " { $link <lazy-filter> } " object is returned which conforms to the list protocol. Calling " { $link car } ", " { $link cdr } " or " { $link nil? } " on this will evaluate elements as required." } ;
 
 HELP: lwhile
 { $values { "list" "a cons object" } { "quot" "a quotation with stack effect ( X -- bool )" } { "result" "resulting cons object" } }
|
||||
|
|
|
|||
|
|
@ -201,37 +201,37 @@ M: lazy-while cdr ( lazy-while -- cdr )
|
|||
M: lazy-while nil? ( lazy-while -- bool )
|
||||
[ car ] keep lazy-while-quot call not ;
|
||||
|
||||
TUPLE: lazy-subset cons quot ;
|
||||
TUPLE: lazy-filter cons quot ;
|
||||
|
||||
C: <lazy-subset> lazy-subset
|
||||
C: <lazy-filter> lazy-filter
|
||||
|
||||
: lsubset ( list quot -- result )
|
||||
over nil? [ 2drop nil ] [ <lazy-subset> <memoized-cons> ] if ;
|
||||
: lfilter ( list quot -- result )
|
||||
over nil? [ 2drop nil ] [ <lazy-filter> <memoized-cons> ] if ;
|
||||
|
||||
: car-subset? ( lazy-subset -- ? )
|
||||
[ lazy-subset-cons car ] keep
|
||||
lazy-subset-quot call ;
|
||||
: car-filter? ( lazy-filter -- ? )
|
||||
[ lazy-filter-cons car ] keep
|
||||
lazy-filter-quot call ;
|
||||
|
||||
: skip ( lazy-subset -- )
|
||||
[ lazy-subset-cons cdr ] keep
|
||||
set-lazy-subset-cons ;
|
||||
: skip ( lazy-filter -- )
|
||||
[ lazy-filter-cons cdr ] keep
|
||||
set-lazy-filter-cons ;
|
||||
|
||||
M: lazy-subset car ( lazy-subset -- car )
|
||||
dup car-subset? [ lazy-subset-cons ] [ dup skip ] if car ;
|
||||
M: lazy-filter car ( lazy-filter -- car )
|
||||
dup car-filter? [ lazy-filter-cons ] [ dup skip ] if car ;
|
||||
|
||||
M: lazy-subset cdr ( lazy-subset -- cdr )
|
||||
dup car-subset? [
|
||||
[ lazy-subset-cons cdr ] keep
|
||||
lazy-subset-quot lsubset
|
||||
M: lazy-filter cdr ( lazy-filter -- cdr )
|
||||
dup car-filter? [
|
||||
[ lazy-filter-cons cdr ] keep
|
||||
lazy-filter-quot lfilter
|
||||
] [
|
||||
dup skip cdr
|
||||
] if ;
|
||||
|
||||
M: lazy-subset nil? ( lazy-subset -- bool )
|
||||
dup lazy-subset-cons nil? [
|
||||
M: lazy-filter nil? ( lazy-filter -- bool )
|
||||
dup lazy-filter-cons nil? [
|
||||
drop t
|
||||
] [
|
||||
dup car-subset? [
|
||||
dup car-filter? [
|
||||
drop f
|
||||
] [
|
||||
dup skip nil?
|
||||
|
|
@ -373,7 +373,7 @@ M: lazy-concat nil? ( lazy-concat -- bool )
|
|||
[ lcartesian-product* ] dip lmap ;
|
||||
|
||||
: lcomp* ( list guards quot -- result )
|
||||
[ [ lcartesian-product* ] dip [ lsubset ] each ] dip lmap ;
|
||||
[ [ lcartesian-product* ] dip [ lfilter ] each ] dip lmap ;
|
||||
|
||||
DEFER: lmerge
|
||||
|
||||
|
|
@ -442,4 +442,4 @@ INSTANCE: lazy-from-by list
|
|||
INSTANCE: lazy-zip list
|
||||
INSTANCE: lazy-while list
|
||||
INSTANCE: lazy-until list
|
||||
INSTANCE: lazy-subset list
|
||||
INSTANCE: lazy-filter list
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ PRIVATE>
|
|||
>r >r dup r> r> 2curry annotate ;
|
||||
|
||||
: call-logging-quot ( quot word level -- quot' )
|
||||
"called" -rot [ log-message ] 3curry swap compose ;
|
||||
"called" -rot [ log-message ] 3curry prepose ;
|
||||
|
||||
: add-logging ( word level -- )
|
||||
[ call-logging-quot ] (define-logging) ;
|
||||
|
|
@ -88,7 +88,7 @@ PRIVATE>
|
|||
: input# stack-effect effect-in length ;
|
||||
|
||||
: input-logging-quot ( quot word level -- quot' )
|
||||
over input# -rot [ log-stack ] 3curry swap compose ;
|
||||
over input# -rot [ log-stack ] 3curry prepose ;
|
||||
|
||||
: add-input-logging ( word level -- )
|
||||
[ input-logging-quot ] (define-logging) ;
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ SYMBOL: log-files
|
|||
write bl write ": " write print ;
|
||||
|
||||
: write-message ( msg word-name level -- )
|
||||
rot [ empty? not ] subset {
|
||||
rot [ empty? not ] filter {
|
||||
{ [ dup empty? ] [ 3drop ] }
|
||||
{ [ dup length 1 = ] [ first -rot f (write-message) ] }
|
||||
[
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
USING: kernel math math.constants math.functions tools.test
|
||||
prettyprint ;
|
||||
USING: kernel math math.constants math.functions math.order
|
||||
tools.test prettyprint ;
|
||||
IN: math.complex.tests
|
||||
|
||||
[ 1 C{ 0 1 } rect> ] must-fail
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
USING: help.markup help.syntax kernel math
|
||||
USING: help.markup help.syntax kernel math math.order
|
||||
sequences quotations math.functions.private ;
|
||||
IN: math.functions
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
USING: kernel math math.constants math.functions math.private
|
||||
math.libm tools.test ;
|
||||
USING: kernel math math.constants math.functions math.order
|
||||
math.private math.libm tools.test ;
|
||||
IN: math.functions.tests
|
||||
|
||||
[ t ] [ 4 4 .00000001 ~ ] unit-test
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2004, 2007 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: math kernel math.constants math.private
|
||||
math.libm combinators ;
|
||||
math.libm combinators math.order ;
|
||||
IN: math.functions
|
||||
|
||||
<PRIVATE
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ SYMBOL: matrix
|
|||
: cols ( -- n ) 0 nth-row length ;
|
||||
|
||||
: skip ( i seq quot -- n )
|
||||
over >r find* drop r> length or ; inline
|
||||
over >r find-from drop r> length or ; inline
|
||||
|
||||
: first-col ( row# -- n )
|
||||
#! First non-zero column
|
||||
|
|
@ -69,7 +69,7 @@ SYMBOL: matrix
|
|||
: echelon ( matrix -- matrix' )
|
||||
[ 0 0 (echelon) ] with-matrix ;
|
||||
|
||||
: nonzero-rows [ [ zero? ] all? not ] subset ;
|
||||
: nonzero-rows [ [ zero? ] all? not ] filter ;
|
||||
|
||||
: null/rank ( matrix -- null rank )
|
||||
echelon dup length swap nonzero-rows length [ - ] keep ;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
USING: kernel layouts math namespaces sequences
|
||||
USING: kernel layouts math math.order namespaces sequences
|
||||
sequences.private accessors ;
|
||||
IN: math.ranges
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
USING: kernel math math.parser math.ratios math.functions
|
||||
tools.test ;
|
||||
USING: kernel math math.order math.parser math.ratios
|
||||
math.functions tools.test ;
|
||||
IN: math.ratios.tests
|
||||
|
||||
[ 1 2 ] [ 1/2 >fraction ] unit-test
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2005, 2007 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: arrays kernel sequences math math.functions hints
|
||||
float-arrays ;
|
||||
float-arrays math.order ;
|
||||
IN: math.vectors
|
||||
|
||||
: vneg ( u -- v ) [ neg ] map ;
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ SYMBOL: visited
|
|||
: choices ( cell -- seq )
|
||||
{ { -1 0 } { 1 0 } { 0 -1 } { 0 1 } }
|
||||
[ v+ ] with map
|
||||
[ unvisited? ] subset ;
|
||||
[ unvisited? ] filter ;
|
||||
|
||||
: random-neighbour ( cell -- newcell ) choices random ;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
! Copyright (C) 2007 Slava Pestov, Daniel Ehrenberg.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: math kernel memoize tools.test parser ;
|
||||
IN: memoize.tests
|
||||
|
||||
MEMO: fib ( m -- n )
|
||||
dup 1 <= [ drop 1 ] [ dup 1 - fib swap 2 - fib + ] if ;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2006, 2008 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: generic kernel math sequences arrays assocs alarms
|
||||
calendar ;
|
||||
calendar math.order ;
|
||||
IN: models
|
||||
|
||||
TUPLE: model < identity-tuple
|
||||
|
|
|
|||
|
|
@ -19,12 +19,12 @@ SYMBOL: total
|
|||
|
||||
: canonicalize-specializer-1 ( specializer -- specializer' )
|
||||
[
|
||||
[ class? ] subset
|
||||
[ class? ] filter
|
||||
[ length <reversed> [ 1+ neg ] map ] keep zip
|
||||
[ length args [ max ] change ] keep
|
||||
]
|
||||
[
|
||||
[ pair? ] subset
|
||||
[ pair? ] filter
|
||||
[ keys [ hooks get push-new ] each ] keep
|
||||
] bi append ;
|
||||
|
||||
|
|
@ -73,7 +73,7 @@ SYMBOL: total
|
|||
! Part II: Topologically sorting specializers
|
||||
: maximal-element ( seq quot -- n elt )
|
||||
dupd [
|
||||
swapd [ call 0 < ] 2curry subset empty?
|
||||
swapd [ call 0 < ] 2curry filter empty?
|
||||
] 2curry find [ "Topological sort failed" throw ] unless* ;
|
||||
inline
|
||||
|
||||
|
|
@ -111,7 +111,7 @@ SYMBOL: total
|
|||
: multi-predicate ( classes -- quot )
|
||||
dup length <reversed>
|
||||
[ picker 2array ] 2map
|
||||
[ drop object eq? not ] assoc-subset
|
||||
[ drop object eq? not ] assoc-filter
|
||||
dup empty? [ drop [ t ] ] [
|
||||
[ (multi-predicate) ] { } assoc>map
|
||||
unclip [ swap [ f ] \ if 3array append [ ] like ] reduce
|
||||
|
|
|
|||
|
|
@ -143,7 +143,7 @@ METHOD: as-mutate { object object assoc } set-at ;
|
|||
|
||||
! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
: subset-of ( quot seq -- seq ) swap subset ;
|
||||
: filter-of ( quot seq -- seq ) swap filter ;
|
||||
|
||||
! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
|
|
|
|||
|
|
@ -43,6 +43,6 @@ reset-gl-function-number-counter
|
|||
scan drop "}" parse-tokens swap prefix
|
||||
gl-function-number
|
||||
[ gl-function-pointer ] 2curry swap
|
||||
";" parse-tokens [ "()" subseq? not ] subset
|
||||
";" parse-tokens [ "()" subseq? not ] filter
|
||||
define-indirect
|
||||
; parsing
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ verify-load-locations ] unit-test
|
|||
! SYMBOL: ssl
|
||||
!
|
||||
! : is-set ( seq -- newseq )
|
||||
! <enum> >alist [ nip ] assoc-subset >hashtable keys ;
|
||||
! <enum> >alist [ nip ] assoc-filter >hashtable keys ;
|
||||
!
|
||||
! ! 1234 server-socket sock set
|
||||
! "127.0.0.1" 1234 <inet4> SOCK_STREAM server-fd sock set
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ optimizer math ;
|
|||
[ r> 1+ count-optimization-passes ] [ drop r> ] if ;
|
||||
|
||||
: results
|
||||
[ [ second ] swap compose compare ] curry sort 20 tail*
|
||||
[ [ second ] prepose compare ] curry sort 20 tail*
|
||||
print
|
||||
standard-table-style
|
||||
[
|
||||
|
|
@ -16,7 +16,7 @@ optimizer math ;
|
|||
] tabular-output ;
|
||||
|
||||
: optimizer-report
|
||||
all-words [ compiled? ] subset
|
||||
all-words [ compiled? ] filter
|
||||
[
|
||||
dup [
|
||||
word-dataflow nip 1 count-optimization-passes
|
||||
|
|
|
|||
|
|
@ -200,7 +200,7 @@ M: just-parser parse ( input parser -- result )
|
|||
#! from the results anything where the remaining
|
||||
#! input to be parsed is not empty. So ensures a
|
||||
#! fully parsed input string.
|
||||
just-parser-p1 parse [ parse-result-unparsed empty? ] lsubset ;
|
||||
just-parser-p1 parse [ parse-result-unparsed empty? ] lfilter ;
|
||||
|
||||
TUPLE: apply-parser p1 quot ;
|
||||
|
||||
|
|
|
|||
|
|
@ -285,7 +285,7 @@ M: ebnf-optional (transform) ( ast -- parser )
|
|||
GENERIC: build-locals ( code ast -- code )
|
||||
|
||||
M: ebnf-sequence build-locals ( code ast -- code )
|
||||
elements>> dup [ ebnf-var? ] subset empty? [
|
||||
elements>> dup [ ebnf-var? ] filter empty? [
|
||||
drop
|
||||
] [
|
||||
[
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
! Copyright (C) 2007, 2008 Chris Double.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
USING: kernel sequences strings fry namespaces math assocs shuffle
|
||||
vectors arrays math.parser
|
||||
vectors arrays math.parser math.order
|
||||
unicode.categories compiler.units parser
|
||||
words quotations effects memoize accessors locals effects splitting ;
|
||||
IN: peg
|
||||
|
|
|
|||
|
|
@ -17,14 +17,14 @@ MEMO: any-char-parser ( -- parser )
|
|||
|
||||
: search ( string parser -- seq )
|
||||
any-char-parser [ drop f ] action 2array choice repeat0 parse dup [
|
||||
parse-result-ast [ ] subset
|
||||
parse-result-ast [ ] filter
|
||||
] [
|
||||
drop { }
|
||||
] if ;
|
||||
|
||||
|
||||
: (replace) ( string parser -- seq )
|
||||
any-char-parser 2array choice repeat0 parse parse-result-ast [ ] subset ;
|
||||
any-char-parser 2array choice repeat0 parse parse-result-ast [ ] filter ;
|
||||
|
||||
: replace ( string parser -- result )
|
||||
[ (replace) [ tree-write ] each ] with-string-writer ;
|
||||
|
|
|
|||
|
|
@ -62,5 +62,5 @@ io.files io.encodings.utf8 ;
|
|||
"extra/porter-stemmer/test/voc.txt" resource-lines
|
||||
[ stem ] map
|
||||
"extra/porter-stemmer/test/output.txt" resource-lines
|
||||
[ 2array ] 2map [ first2 = not ] subset
|
||||
[ 2array ] 2map [ first2 = not ] filter
|
||||
] unit-test
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ IN: project-euler.001
|
|||
! -------------------
|
||||
|
||||
: euler001a ( -- answer )
|
||||
1000 [ dup 5 mod swap 3 mod [ zero? ] either? ] subset sum ;
|
||||
1000 [ dup 5 mod swap 3 mod [ zero? ] either? ] filter sum ;
|
||||
|
||||
! [ euler001a ] 100 ave-time
|
||||
! 0 ms run / 0 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ PRIVATE>
|
|||
V{ 0 } clone 1 rot (fib-upto) ;
|
||||
|
||||
: euler002 ( -- answer )
|
||||
1000000 fib-upto [ even? ] subset sum ;
|
||||
1000000 fib-upto [ even? ] filter sum ;
|
||||
|
||||
! [ euler002 ] 100 ave-time
|
||||
! 0 ms run / 0 ms GC ave time - 100 trials
|
||||
|
|
@ -44,7 +44,7 @@ PRIVATE>
|
|||
1 head-slice* { 0 1 } prepend ;
|
||||
|
||||
: euler002a ( -- answer )
|
||||
1000000 fib-upto* [ even? ] subset sum ;
|
||||
1000000 fib-upto* [ even? ] filter sum ;
|
||||
|
||||
! [ euler002a ] 100 ave-time
|
||||
! 0 ms run / 0 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ IN: project-euler.004
|
|||
<PRIVATE
|
||||
|
||||
: source-004 ( -- seq )
|
||||
100 999 [a,b] [ 10 mod zero? not ] subset ;
|
||||
100 999 [a,b] [ 10 mod zero? not ] filter ;
|
||||
|
||||
: max-palindrome ( seq -- palindrome )
|
||||
natural-sort [ palindrome? ] find-last nip ;
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ IN: project-euler.022
|
|||
|
||||
: source-022 ( -- seq )
|
||||
"extra/project-euler/022/names.txt" resource-path
|
||||
ascii file-contents [ quotable? ] subset "," split ;
|
||||
ascii file-contents [ quotable? ] filter "," split ;
|
||||
|
||||
: name-scores ( seq -- seq )
|
||||
[ 1+ swap alpha-value * ] map-index ;
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ IN: project-euler.023
|
|||
46 [1,b] 47 20161 2 <range> append ;
|
||||
|
||||
: abundants-upto ( n -- seq )
|
||||
[1,b] [ abundant? ] subset ;
|
||||
[1,b] [ abundant? ] filter ;
|
||||
|
||||
: possible-sums ( seq -- seq )
|
||||
dup { } -rot [
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ IN: project-euler.026
|
|||
<PRIVATE
|
||||
|
||||
: source-026 ( -- seq )
|
||||
1 1000 (a,b) [ prime? ] subset [ 1 swap / ] map ;
|
||||
1 1000 (a,b) [ prime? ] filter [ 1 swap / ] map ;
|
||||
|
||||
: (mult-order) ( n a m -- k )
|
||||
3dup ^ swap mod 1 = [ 2nip ] [ 1+ (mult-order) ] if ;
|
||||
|
|
|
|||
|
|
@ -46,8 +46,8 @@ IN: project-euler.027
|
|||
<PRIVATE
|
||||
|
||||
: source-027 ( -- seq )
|
||||
1000 [ prime? ] subset [ dup [ neg ] map append ] keep
|
||||
cartesian-product [ first2 < ] subset ;
|
||||
1000 [ prime? ] filter [ dup [ neg ] map append ] keep
|
||||
cartesian-product [ first2 < ] filter ;
|
||||
|
||||
: quadratic ( b a n -- m )
|
||||
dup sq -rot * + + ;
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ IN: project-euler.030
|
|||
PRIVATE>
|
||||
|
||||
: euler030 ( -- answer )
|
||||
325537 [ dup sum-fifth-powers = ] subset sum 1- ;
|
||||
325537 [ dup sum-fifth-powers = ] filter sum 1- ;
|
||||
|
||||
! [ euler030 ] 100 ave-time
|
||||
! 2537 ms run / 125 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ IN: project-euler.032
|
|||
PRIVATE>
|
||||
|
||||
: euler032 ( -- answer )
|
||||
source-032 [ valid? ] subset products prune sum ;
|
||||
source-032 [ valid? ] filter products prune sum ;
|
||||
|
||||
! [ euler032 ] 10 ave-time
|
||||
! 23922 ms run / 1505 ms GC ave time - 10 trials
|
||||
|
|
@ -70,7 +70,7 @@ PRIVATE>
|
|||
PRIVATE>
|
||||
|
||||
: euler032a ( -- answer )
|
||||
source-032a [ mmp ] map [ pandigital? ] subset products prune sum ;
|
||||
source-032a [ mmp ] map [ pandigital? ] filter products prune sum ;
|
||||
|
||||
! [ euler032a ] 100 ave-time
|
||||
! 5978 ms run / 327 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ IN: project-euler.033
|
|||
<PRIVATE
|
||||
|
||||
: source-033 ( -- seq )
|
||||
10 99 [a,b] dup cartesian-product [ first2 < ] subset ;
|
||||
10 99 [a,b] dup cartesian-product [ first2 < ] filter ;
|
||||
|
||||
: safe? ( ax xb -- ? )
|
||||
[ 10 /mod ] bi@ -roll = rot zero? not and nip ;
|
||||
|
|
@ -42,7 +42,7 @@ IN: project-euler.033
|
|||
2dup / [ ax/xb ] dip = ;
|
||||
|
||||
: curious-fractions ( seq -- seq )
|
||||
[ first2 curious? ] subset [ first2 / ] map ;
|
||||
[ first2 curious? ] filter [ first2 / ] map ;
|
||||
|
||||
PRIVATE>
|
||||
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ IN: project-euler.034
|
|||
PRIVATE>
|
||||
|
||||
: euler034 ( -- answer )
|
||||
3 2000000 [a,b] [ factorion? ] subset sum ;
|
||||
3 2000000 [a,b] [ factorion? ] filter sum ;
|
||||
|
||||
! [ euler034 ] 10 ave-time
|
||||
! 15089 ms run / 725 ms GC ave time - 10 trials
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ IN: project-euler.035
|
|||
PRIVATE>
|
||||
|
||||
: euler035 ( -- answer )
|
||||
source-035 [ possible? ] subset [ circular? ] count ;
|
||||
source-035 [ possible? ] filter [ circular? ] count ;
|
||||
|
||||
! [ euler035 ] 100 ave-time
|
||||
! 904 ms run / 86 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ IN: project-euler.036
|
|||
PRIVATE>
|
||||
|
||||
: euler036 ( -- answer )
|
||||
1 1000000 2 <range> [ both-bases? ] subset sum ;
|
||||
1 1000000 2 <range> [ both-bases? ] filter sum ;
|
||||
|
||||
! [ euler036 ] 100 ave-time
|
||||
! 3891 ms run / 173 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ IN: project-euler.037
|
|||
PRIVATE>
|
||||
|
||||
: euler037 ( -- answer )
|
||||
23 1000000 primes-between [ r-trunc? ] subset [ l-trunc? ] subset sum ;
|
||||
23 1000000 primes-between [ r-trunc? ] filter [ l-trunc? ] filter sum ;
|
||||
|
||||
! [ euler037 ] 100 ave-time
|
||||
! 768 ms run / 9 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ IN: project-euler.038
|
|||
PRIVATE>
|
||||
|
||||
: euler038 ( -- answer )
|
||||
9123 9876 [a,b] [ concat-product ] map [ pandigital? ] subset supremum ;
|
||||
9123 9876 [a,b] [ concat-product ] map [ pandigital? ] filter supremum ;
|
||||
|
||||
! [ euler038 ] 100 ave-time
|
||||
! 37 ms run / 1 ms GC ave time - 100 trials
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ IN: project-euler.042
|
|||
|
||||
: source-042 ( -- seq )
|
||||
"extra/project-euler/042/words.txt" resource-path
|
||||
ascii file-contents [ quotable? ] subset "," split ;
|
||||
ascii file-contents [ quotable? ] filter "," split ;
|
||||
|
||||
: (triangle-upto) ( limit n -- )
|
||||
2dup nth-triangle > [
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ PRIVATE>
|
|||
|
||||
: euler043 ( -- answer )
|
||||
1234567890 number>digits all-permutations
|
||||
[ interesting? ] subset [ 10 digits>integer ] map sum ;
|
||||
[ interesting? ] filter [ 10 digits>integer ] map sum ;
|
||||
|
||||
! [ euler043 ] time
|
||||
! 125196 ms run / 19548 ms GC time
|
||||
|
|
@ -70,20 +70,20 @@ PRIVATE>
|
|||
<PRIVATE
|
||||
|
||||
: candidates ( n -- seq )
|
||||
1000 over <range> [ number>digits 3 0 pad-left ] map [ all-unique? ] subset ;
|
||||
1000 over <range> [ number>digits 3 0 pad-left ] map [ all-unique? ] filter ;
|
||||
|
||||
: overlap? ( seq -- ? )
|
||||
dup first 2 tail* swap second 2 head = ;
|
||||
|
||||
: clean ( seq -- seq )
|
||||
[ unclip 1 head prefix concat ] map [ all-unique? ] subset ;
|
||||
[ unclip 1 head prefix concat ] map [ all-unique? ] filter ;
|
||||
|
||||
: add-missing-digit ( seq -- seq )
|
||||
dup natural-sort 10 diff first prefix ;
|
||||
|
||||
: interesting-pandigitals ( -- seq )
|
||||
17 candidates { 13 11 7 5 3 2 } [
|
||||
candidates swap cartesian-product [ overlap? ] subset clean
|
||||
candidates swap cartesian-product [ overlap? ] filter clean
|
||||
] each [ add-missing-digit ] map ;
|
||||
|
||||
PRIVATE>
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ PRIVATE>
|
|||
|
||||
: euler044 ( -- answer )
|
||||
2500 [1,b] [ nth-pentagonal ] map dup cartesian-product
|
||||
[ first2 sum-and-diff? ] subset [ first2 - abs ] map infimum ;
|
||||
[ first2 sum-and-diff? ] filter [ first2 - abs ] map infimum ;
|
||||
|
||||
! [ euler044 ] 10 ave-time
|
||||
! 8924 ms run / 2872 ms GC ave time - 10 trials
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ IN: project-euler.079
|
|||
dup empty? [ "Topological sort failed" throw ] [ first ] if ;
|
||||
|
||||
: remove-source ( seq elt -- seq )
|
||||
[ swap member? not ] curry subset ;
|
||||
[ swap member? not ] curry filter ;
|
||||
|
||||
: (topological-sort) ( seq -- )
|
||||
dup length 1 > [
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ IN: project-euler.150
|
|||
0 0 rot [ (partial-sum-infimum) ] each drop ; inline
|
||||
|
||||
: generate ( n quot -- seq )
|
||||
[ drop ] swap compose map ; inline
|
||||
[ drop ] prepose map ; inline
|
||||
|
||||
: map-infimum ( seq quot -- min )
|
||||
[ min ] compose 0 swap reduce ; inline
|
||||
|
|
|
|||
|
|
@ -291,7 +291,7 @@ TUPLE: regexp source parser ignore-case? ;
|
|||
|
||||
: parse-regexp ( accum end -- accum )
|
||||
lexer get dup skip-blank
|
||||
[ [ index* dup 1+ swap ] 2keep swapd subseq swap ] change-lexer-column
|
||||
[ [ index-from dup 1+ swap ] 2keep swapd subseq swap ] change-lexer-column
|
||||
lexer get dup still-parsing-line?
|
||||
[ (parse-token) parse-options ] [ drop f ] if
|
||||
<regexp> parsed ;
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ IN: report.optimizer
|
|||
[ r> 1+ count-optimization-passes ] [ drop r> ] if ;
|
||||
|
||||
: results
|
||||
[ [ second ] swap compose compare ] curry sort 20 tail*
|
||||
[ [ second ] prepose compare ] curry sort 20 tail*
|
||||
print
|
||||
standard-table-style
|
||||
[
|
||||
|
|
@ -16,7 +16,7 @@ IN: report.optimizer
|
|||
] tabular-output ; inline
|
||||
|
||||
: optimizer-measurements ( -- alist )
|
||||
all-words [ compiled? ] subset
|
||||
all-words [ compiled? ] filter
|
||||
[
|
||||
dup [
|
||||
word-dataflow nip 1 count-optimization-passes
|
||||
|
|
|
|||
|
|
@@ -9,7 +9,7 @@ HELP: deep-map
 { $values { "obj" "an object" } { "quot" "a quotation ( elt -- newelt )" } { "newobj" "the mapped object" } }
 { $description "Execute a quotation on each nested element of an object and its children, in preorder. That is, the result of the execution of the quotation on the outer is used to map the inner elements." } ;
 
-HELP: deep-subset
+HELP: deep-filter
 { $values { "obj" "an object" } { "quot" "a quotation ( elt -- ? )" } { "seq" "a sequence" } }
 { $description "Creates a sequence of sub-nodes in the object which satisfy the given quotation, in preorder. This includes the object itself, if it passes the quotation." } ;
 
|
||||
|
||||
|
|
|
|||
|
|
@ -4,11 +4,11 @@ IN: sequences.deep.tests
|
|||
|
||||
[ [ "hello" 3 4 swap ] ] [ [ { "hello" V{ 3 4 } } swap ] flatten ] unit-test
|
||||
|
||||
[ "foo" t ] [ { { "foo" } "bar" } [ string? ] deep-find* ] unit-test
|
||||
[ "foo" t ] [ { { "foo" } "bar" } [ string? ] deep-find-from ] unit-test
|
||||
|
||||
[ f f ] [ { { "foo" } "bar" } [ number? ] deep-find* ] unit-test
|
||||
[ f f ] [ { { "foo" } "bar" } [ number? ] deep-find-from ] unit-test
|
||||
|
||||
[ { { "foo" } "bar" } t ] [ { { "foo" } "bar" } [ array? ] deep-find* ] unit-test
|
||||
[ { { "foo" } "bar" } t ] [ { { "foo" } "bar" } [ array? ] deep-find-from ] unit-test
|
||||
|
||||
: change-something ( seq -- newseq )
|
||||
dup array? [ "hi" suffix ] [ "hello" append ] if ;
|
||||
|
|
|
|||
|
|
@ -18,21 +18,21 @@ IN: sequences.deep
|
|||
[ call ] keep over branch?
|
||||
[ [ deep-map ] curry map ] [ drop ] if ; inline
|
||||
|
||||
: deep-subset ( obj quot -- seq )
|
||||
: deep-filter ( obj quot -- seq )
|
||||
over >r
|
||||
pusher >r deep-each r>
|
||||
r> dup branch? [ like ] [ drop ] if ; inline
|
||||
|
||||
: deep-find* ( obj quot -- elt ? )
|
||||
: deep-find-from ( obj quot -- elt ? )
|
||||
[ call ] 2keep rot [ drop t ] [
|
||||
over branch? [
|
||||
f -rot [ >r nip r> deep-find* ] curry find drop >boolean
|
||||
f -rot [ >r nip r> deep-find-from ] curry find drop >boolean
|
||||
] [ 2drop f f ] if
|
||||
] if ; inline
|
||||
|
||||
: deep-find ( obj quot -- elt ) deep-find* drop ; inline
|
||||
: deep-find ( obj quot -- elt ) deep-find-from drop ; inline
|
||||
|
||||
: deep-contains? ( obj quot -- ? ) deep-find* nip ; inline
|
||||
: deep-contains? ( obj quot -- ? ) deep-find-from nip ; inline
|
||||
|
||||
: deep-all? ( obj quot -- ? )
|
||||
[ not ] compose deep-contains? not ; inline
|
||||
|
|
@ -43,4 +43,4 @@ IN: sequences.deep
|
|||
] curry change-each ] [ 2drop ] if ; inline
|
||||
|
||||
: flatten ( obj -- seq )
|
||||
[ branch? not ] deep-subset ;
|
||||
[ branch? not ] deep-filter ;
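Illustration only, not part of the patch: a hedged sketch of the renamed deep-filter word used by flatten above, with made-up data. Per the help entry it collects, in preorder, the sub-nodes satisfying the quotation; strings are leaves here (see the flatten unit test), so their characters are not visited.

    USING: prettyprint sequences.deep strings ;
    { { 3 "a" } "b" 7 } [ string? ] deep-filter .    ! expected: { "a" "b" }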
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
USING: combinators.lib kernel sequences math namespaces assocs
|
||||
random sequences.private shuffle math.functions mirrors
|
||||
arrays math.parser math.private sorting strings ascii macros
|
||||
assocs.lib quotations hashtables ;
|
||||
assocs.lib quotations hashtables math.order ;
|
||||
IN: sequences.lib
|
||||
|
||||
: each-withn ( seq quot n -- ) nwith each ; inline
|
||||
|
|
@ -45,7 +45,7 @@ MACRO: firstn ( n -- )
|
|||
>r
|
||||
dup length
|
||||
dup [ / ] curry
|
||||
[ 1+ ] swap compose
|
||||
[ 1+ ] prepose
|
||||
r> compose
|
||||
2each ; inline
|
||||
|
||||
|
|
@ -129,11 +129,11 @@ MACRO: firstn ( n -- )
|
|||
: take-while ( seq quot -- newseq )
|
||||
[ not ] compose
|
||||
[ find drop [ head-slice ] when* ] curry
|
||||
[ dup ] swap compose keep like ;
|
||||
[ dup ] prepose keep like ;
|
||||
|
||||
: replicate ( seq quot -- newseq )
|
||||
#! quot: ( -- obj )
|
||||
[ drop ] swap compose map ; inline
|
||||
[ drop ] prepose map ; inline
|
||||
|
||||
! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
|
|
@ -159,7 +159,7 @@ PRIVATE>
|
|||
|
||||
: switches ( seq1 seq -- subseq )
|
||||
! seq1 is a sequence of ones and zeroes
|
||||
>r [ length ] keep [ nth 1 = ] curry subset r>
|
||||
>r [ length ] keep [ nth 1 = ] curry filter r>
|
||||
[ nth ] curry { } map-as ;
|
||||
|
||||
: power-set ( seq -- subsets )
|
||||
|
|
@ -216,7 +216,7 @@ USE: continuations
|
|||
>r dup length swap r>
|
||||
[ = [ ] [ drop f ] if ] curry
|
||||
2map
|
||||
[ ] subset ;
|
||||
[ ] filter ;
|
||||
|
||||
<PRIVATE
|
||||
: (attempt-each-integer) ( i n quot -- result )
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
USING: shufflers tools.test ;
|
||||
IN: shufflers.tests
|
||||
|
||||
SHUFFLE: abcd 4
|
||||
[ ] [ 1 2 3 4 abcd- ] unit-test
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@ IN: smtp.tests
|
|||
prepare
|
||||
dup headers>> >alist sort-keys [
|
||||
drop { "Date" "Message-Id" } member? not
|
||||
] assoc-subset
|
||||
] assoc-filter
|
||||
over to>>
|
||||
rot from>>
|
||||
] unit-test
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ TUPLE: board width height rows ;
|
|||
] if ;
|
||||
|
||||
: remove-full-rows ( board -- )
|
||||
dup board-rows [ row-not-full? ] subset swap set-board-rows ;
|
||||
dup board-rows [ row-not-full? ] filter swap set-board-rows ;
|
||||
|
||||
: check-rows ( board -- n )
|
||||
#! remove full rows, then add blank ones at the top, returning the number
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ M: word reset
|
|||
"--- Entering: " write swap .
|
||||
"--- Variable values:" print
|
||||
[ dup get ] H{ } map>assoc describe
|
||||
] 2curry swap compose ;
|
||||
] 2curry prepose ;
|
||||
|
||||
: watch-vars ( word vars -- )
|
||||
dupd [ (watch-vars) ] 2curry annotate ;
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
! Copyright (C) 2005, 2008 Slava Pestov.
|
||||
! See http://factorcode.org/license.txt for BSD license.
|
||||
IN: tools.completion
|
||||
USING: kernel arrays sequences math namespaces strings io
|
||||
vectors words assocs combinators sorting unicode.case
|
||||
unicode.categories ;
|
||||
unicode.categories math.order ;
|
||||
IN: tools.completion
|
||||
|
||||
: (fuzzy) ( accum ch i full -- accum i ? )
|
||||
index*
|
||||
index-from
|
||||
[
|
||||
[ swap push ] 2keep 1+ t
|
||||
] [
|
||||
|
|
@ -52,7 +52,7 @@ unicode.categories ;
|
|||
: rank-completions ( results -- newresults )
|
||||
sort-keys <reversed>
|
||||
[ 0 [ first max ] reduce 3 /f ] keep
|
||||
[ first < ] with subset
|
||||
[ first < ] with filter
|
||||
[ second ] map ;
|
||||
|
||||
: complete ( full short -- score )
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.