factor: fry '[ -> $[

Rename don't etc. to dont to free up the ' character.

Branch: locals-and-roots
Doug Coleman 2016-06-25 12:04:03 -07:00
parent 8771a75f35
commit 3dba6e3607
768 changed files with 2219 additions and 2219 deletions
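The mechanical change throughout: a fry quotation previously written with '[ is now written with $[ ; the fry holes _ and @ behave exactly as before. A minimal sketch of the before/after, using a hypothetical word that is not taken from this commit:

    ! before:  : add-n ( seq n -- seq' ) '[ _ + ] map ;
    ! after:   : add-n ( seq n -- seq' ) $[ _ + ] map ;
    USING: fry math prettyprint sequences ;
    : add-n ( seq n -- seq' ) $[ _ + ] map ;
    { 1 2 3 } 10 add-n .    ! prints { 11 12 13 }

Freeing ' from word names is the companion change (don't-flush-me becomes dont-flush-me, slices-don't-touch becomes slices-dont-touch), and the hunks below suggest the old compile-time $[ form now appears as $$[ (see the CONSTANT: test-sets hunk).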

@@ -25,13 +25,13 @@ PRIVATE<
     [ hacker-news-recent-ids ] dip head hacker-news-items ;
 : write-title ( title url -- )
-    '[
+    $[
         _ presented ,,
         ui-running? color: black color: white ? foreground ,,
     ] H{ } make format ;
 : write-link ( title url -- )
-    '[
+    $[
         _ presented ,,
         hexcolor: 888888 foreground ,,
     ] H{ } make format ;

@@ -78,13 +78,13 @@ PRIVATE>
 PRIVATE<
 : write-title ( title url -- )
-    '[
+    $[
         _ presented ,,
         color: blue foreground ,,
     ] H{ } make format ;
 : write-link ( title url -- )
-    '[
+    $[
         _ presented ,,
         hexcolor: 888888 foreground ,,
     ] H{ } make format ;

@@ -6,10 +6,10 @@ tools.profiler.sampling tools.test tools.time vocabs.hierarchy vocabs.loader ;
 IN: benchmark
 : run-timing-benchmark ( vocab -- time )
-    5 swap '[ gc [ _ run ] benchmark ] replicate infimum ;
+    5 swap $[ gc [ _ run ] benchmark ] replicate infimum ;
 : run-profile-benchmark ( vocab -- profile )
-    compact-gc '[ _ run ] profile most-recent-profile-data ;
+    compact-gc $[ _ run ] profile most-recent-profile-data ;
 : find-benchmark-vocabs ( -- seq )
     "benchmark" disk-child-vocab-names [ find-vocab-root ] filter ;
@@ -20,14 +20,14 @@ PRIVATE<
     "=== %s\n" printf ;
 : run-benchmark ( vocab quot: ( vocab -- res ) -- result ok? )
-    over write-header '[ _ @ t ] [
+    over write-header $[ _ @ t ] [
         f f f <test-failure> f
     ] recover ; inline
 PRIVATE>
 : run-benchmarks ( benchmarks quot: ( vocab -- res ) -- results errors )
-    '[ dup _ run-benchmark 3array ] map
+    $[ dup _ run-benchmark 3array ] map
     [ third ] partition [ [ 2 head ] map ] bi@ ; inline
 : run-profile-benchmarks ( -- results errors )

@@ -50,7 +50,7 @@ TUPLE: meeting-place count mailbox ;
 : print-color-table ( -- )
     { blue red yellow } dup
-    '[ _ '[ color-string print ] with each ] each ;
+    $[ _ $[ color-string print ] with each ] each ;
 : try-meet ( meeting-place creature -- )
     over count>> 0 < [
@@ -75,7 +75,7 @@ TUPLE: meeting-place count mailbox ;
         mailbox>> mailbox-get-all
         [ f swap mailbox>> mailbox-put ] each
     ] [
-        [ mailbox>> 2 swap '[ _ mailbox-get ] replicate creature-meeting ]
+        [ mailbox>> 2 swap $[ _ mailbox-get ] replicate creature-meeting ]
        [ run-meeting-place ] bi
     ] if ;
@@ -86,7 +86,7 @@ TUPLE: meeting-place count mailbox ;
     [ <meeting-place> ] [ make-creatures ] bi*
     {
         [ nip nl bl [ bl ] [ color>> name>> write ] interleave nl ]
-        [ [ '[ _ _ try-meet ] in-thread ] with each ]
+        [ [ $[ _ _ try-meet ] in-thread ] with each ]
         [ drop run-meeting-place ]
         [ nip [ [ count>> number>string write bl ] [ self-count>> number>text write nl ] bi ] each ]

@@ -11,6 +11,6 @@ IN: benchmark.dawes
     120000 iota [ 255 bitand ] int-array{ } map-as ; inline
 : dawes-benchmark ( -- )
-    200 make-int-array '[ _ count-ones ] replicate drop ;
+    200 make-int-array $[ _ count-ones ] replicate drop ;
 MAIN: dawes-benchmark

@@ -23,10 +23,10 @@ IN: benchmark.dispatch2
         1 [ + ] curry ,
     ] { } make ;
-: don't-flush-me ( obj -- ) drop ;
+: dont-flush-me ( obj -- ) drop ;
 : dispatch2-benchmark ( -- )
     1000000 sequences
-    [ [ first don't-flush-me ] each ] curry times ;
+    [ [ first dont-flush-me ] each ] curry times ;
 MAIN: dispatch2-benchmark

@@ -7,10 +7,10 @@ IN: benchmark.fannkuch
 : count ( quot: ( -- ? ) -- n )
     ! Call quot until it returns false, return number of times
     ! it was true
-    [ 0 ] dip '[ _ dip swap [ [ 1 + ] when ] keep ] loop ; inline
+    [ 0 ] dip $[ _ dip swap [ [ 1 + ] when ] keep ] loop ; inline
 : count-flips ( perm -- flip# )
-    '[
+    $[
         _ dup first dup 1 =
         [ 2drop f ] [ head-slice reverse! drop t ] if
     ] count ; inline

@@ -53,7 +53,7 @@ TYPED: make-cumulative ( freq -- chars: byte-array floats: double-array )
     seed next-fasta-random floats [ <= ] with find drop chars nth-unsafe ; inline
 TYPED: make-random-fasta ( seed: float len: fixnum chars: byte-array floats: double-array -- seed: float )
-    '[ _ _ select-random ] "" replicate-as print ;
+    $[ _ _ select-random ] "" replicate-as print ;
 : write-description ( desc id -- )
     ">" write write bl print ;
@@ -65,7 +65,7 @@ TYPED: make-random-fasta ( seed: float len: fixnum chars: byte-array floats: dou
 TYPED: write-random-fasta ( seed: float n: fixnum chars: byte-array floats: double-array desc id -- seed: float )
     write-description
-    '[ _ _ make-random-fasta ] split-lines ;
+    $[ _ _ make-random-fasta ] split-lines ;
 TYPED:: make-repeat-fasta ( k: fixnum len: fixnum alu: string -- k': fixnum )
     alu length :> kn

@@ -6,6 +6,6 @@ IN: benchmark.gc3
 : gc3-benchmark ( -- )
     1000000 iota
     1000000 <hashtable>
-    '[ [ number>string ] keep _ set-at ] each ;
+    $[ [ number>string ] keep _ set-at ] each ;
 MAIN: gc3-benchmark

@@ -10,7 +10,7 @@ CONSTANT: test-sets $$[
 ] ;
 : do-times ( n quot: ( set1 set2 -- set' ) -- )
-    '[ 2dup @ drop ] times 2drop ; inline
+    $[ 2dup @ drop ] times 2drop ; inline
 : bench-sets ( seq -- )
     2 [

@@ -9,6 +9,6 @@ IN: benchmark.interval-sets
 : interval-sets-benchmark ( -- )
     10,000 [ random-32 ] replicate natural-sort
     2 <groups> <interval-set>
-    3,000,000 swap '[ random-32 _ in? drop ] times ;
+    3,000,000 swap $[ random-32 _ in? drop ] times ;
 MAIN: interval-sets-benchmark

@@ -20,7 +20,7 @@ CONSTANT: knucleotide-in "vocab:benchmark/knucleotide/knucleotide-input.txt" ;
 : handle-table ( inputs n -- )
     clump
     [ histogram sort-values reverse ] [ length ] bi
-    '[
+    $[
         [ first write bl ]
         [ second 100 * _ /f "%.3f" printf nl ] bi
     ] each ;

@@ -5,11 +5,11 @@ IN: benchmark.linked-assocs
 : (linked-assocs-benchmark) ( -- )
     10,000 iota <linked-hash> {
-        [ '[ 0 swap _ set-at ] each ]
-        [ '[ _ at ] map-sum 0 assert= ]
-        [ '[ dup _ set-at ] each ]
-        [ '[ _ at ] map-sum 49995000 assert= ]
-        [ '[ _ delete-at ] each ]
+        [ $[ 0 swap _ set-at ] each ]
+        [ $[ _ at ] map-sum 0 assert= ]
+        [ $[ dup _ set-at ] each ]
+        [ $[ _ at ] map-sum 49995000 assert= ]
+        [ $[ _ delete-at ] each ]
         [ nip assoc-size 0 assert= ]
     } 2cleave ;

@@ -11,7 +11,7 @@ CONSTANT: sat 0.85 ;
 CONSTANT: val 0.85 ;
 : <color-map> ( nb-cols -- map )
-    [ iota ] keep '[
+    [ iota ] keep $[
         360 * _ 1 + / sat val
         1 <hsva> >rgba scale-rgb
     ] map ;

@@ -14,17 +14,17 @@ IN: benchmark.mandel
 : c ( i j -- c ) scale center width height scale 2 / - + ; inline
 : count-iterations ( z max-iterations step-quot test-quot -- #iters )
-    '[ drop @ dup @ ] find-last-integer nip ; inline
+    $[ drop @ dup @ ] find-last-integer nip ; inline
 : pixel ( c -- iterations )
     [ C{ 0.0 0.0 } max-iterations ] dip
-    '[ sq _ + ] [ absq 4.0 >= ] count-iterations ; inline
+    $[ sq _ + ] [ absq 4.0 >= ] count-iterations ; inline
 : color ( iterations -- color )
     [ color-map [ length mod ] keep nth ] [ B{ 0 0 0 } ] if* ; inline
 : render ( -- )
-    height iota [ width iota swap '[ _ c pixel color write ] each ] each ; inline
+    height iota [ width iota swap $[ _ c pixel color write ] each ] each ; inline
 : ppm-header ( -- )
     ascii encode-output

@@ -4,7 +4,7 @@ USING: fry kernel math random random.mersenne-twister ;
 IN: benchmark.mt
 : mt-bench ( n -- )
-    >fixnum 0x533d <mersenne-twister> '[ _ random-32* drop ] times ;
+    >fixnum 0x533d <mersenne-twister> $[ _ random-32* drop ] times ;
 : mt-benchmark ( -- ) 10000000 mt-bench ;

@@ -67,19 +67,19 @@ SPECIALIZED-ARRAY: body
     ] each-index ; inline
 : update-position ( body dt -- )
-    [ dup velocity>> ] dip '[ _ _ v*n v+ ] change-location drop ; inline
+    [ dup velocity>> ] dip $[ _ _ v*n v+ ] change-location drop ; inline
 : mag ( dt body other-body -- mag d )
     [ location>> ] bi@ v- [ norm-sq dup sqrt * / ] keep ; inline
 :: update-velocity ( other-body body dt -- )
     dt body other-body mag
-    [ [ body ] 2dip '[ other-body mass>> _ * _ n*v v- ] change-velocity drop ]
-    [ [ other-body ] 2dip '[ body mass>> _ * _ n*v v+ ] change-velocity drop ] 2bi ; inline
+    [ [ body ] 2dip $[ other-body mass>> _ * _ n*v v- ] change-velocity drop ]
+    [ [ other-body ] 2dip $[ body mass>> _ * _ n*v v+ ] change-velocity drop ] 2bi ; inline
 : advance ( system dt -- )
-    [ '[ _ update-velocity ] [ drop ] each-pair ]
-    [ '[ _ update-position ] each ]
+    [ $[ _ update-velocity ] [ drop ] each-pair ]
+    [ $[ _ update-position ] each ]
     2bi ; inline
 : inertia ( body -- e )
@@ -95,7 +95,7 @@ SPECIALIZED-ARRAY: body
     >fixnum
     <nbody-system>
     [ energy number>string print ]
-    [ '[ _ 0.01 advance ] times ]
+    [ $[ _ 0.01 advance ] times ]
     [ energy number>string print ] tri ;
 : nbody-simd-benchmark ( -- ) 1000000 nbody ;

@@ -67,20 +67,20 @@ TUPLE: nbody-system { bodies array read-only } ;
     ] each-index ; inline
 : update-position ( body dt -- )
-    [ dup velocity>> ] dip '[ _ _ v*n v+ ] change-location drop ;
+    [ dup velocity>> ] dip $[ _ _ v*n v+ ] change-location drop ;
 : mag ( dt body other-body -- mag d )
     [ location>> ] bi@ v- [ norm-sq dup sqrt * / ] keep ; inline
 :: update-velocity ( other-body body dt -- )
     dt body other-body mag
-    [ [ body ] 2dip '[ other-body mass>> _ * _ n*v v- ] change-velocity drop ]
-    [ [ other-body ] 2dip '[ body mass>> _ * _ n*v v+ ] change-velocity drop ] 2bi ;
+    [ [ body ] 2dip $[ other-body mass>> _ * _ n*v v- ] change-velocity drop ]
+    [ [ other-body ] 2dip $[ body mass>> _ * _ n*v v+ ] change-velocity drop ] 2bi ;
 : advance ( system dt -- )
     [ bodies>> ] dip
-    [ '[ _ update-velocity ] [ drop ] each-pair ]
-    [ '[ _ update-position ] each ]
+    [ $[ _ update-velocity ] [ drop ] each-pair ]
+    [ $[ _ update-position ] each ]
     2bi ; inline
 : inertia ( body -- e )
@@ -94,7 +94,7 @@ TUPLE: nbody-system { bodies array read-only } ;
 : nbody ( n -- )
     <nbody-system>
-    [ energy . ] [ '[ _ 0.01 advance ] times ] [ energy . ] tri ;
+    [ energy . ] [ $[ _ 0.01 advance ] times ] [ energy . ] tri ;
 HINTS: update-position body float ;
 HINTS: update-velocity body body float ;

@@ -5,8 +5,8 @@ combinators hints fry sequences ;
 IN: benchmark.partial-sums
 ! Helper words
-: summing-integers ( n quot -- y ) [ 0.0 ] [ iota ] [ ] tri* '[ 1 + @ + ] each ; inline
-: summing-floats ( n quot -- y ) '[ >float @ ] summing-integers ; inline
+: summing-integers ( n quot -- y ) [ 0.0 ] [ iota ] [ ] tri* $[ 1 + @ + ] each ; inline
+: summing-floats ( n quot -- y ) $[ >float @ ] summing-integers ; inline
 : cube ( x -- y ) dup dup * * ; inline
 : -1^ ( n -- -1/1 ) 2 mod 2 * 1 - ; inline

@@ -7,7 +7,7 @@ math.vectors prettyprint sequences sequences.deep ;
 IN: benchmark.pidigits
 : extract ( z x -- n )
-    [ first2 ] dip '[ first2 [ _ * ] [ + ] bi* ] bi@ /i ;
+    [ first2 ] dip $[ first2 [ _ * ] [ + ] bi* ] bi@ /i ;
 : next ( z -- n )
     3 extract ;
@@ -32,7 +32,7 @@ IN: benchmark.pidigits
     10 col - number>string glue ;
 : padded-total ( row col -- )
-    (padded-total) '[ _ printf ] call( str n -- ) ;
+    (padded-total) $[ _ printf ] call( str n -- ) ;
 :: (pidigits) ( k z n row col -- )
     n 0 > [

@@ -8,7 +8,7 @@ IN: benchmark.regexp
     200
     20,000 iota [ number>string ] map
     200 iota [ 1 + char: a <string> ] map
-    '[
+    $[
         _ R[[ \d+]] [ matches? ] curry all? t assert=
         _ R[[ [a]+]] [ matches? ] curry all? t assert=
     ] times ;

@@ -4,7 +4,7 @@ USING: fry kernel math random random.sfmt ;
 IN: benchmark.sfmt
 : sfmt-bench ( n -- )
-    >fixnum 0x533d <sfmt-19937> '[ _ random-32* drop ] times ;
+    >fixnum 0x533d <sfmt-19937> $[ _ random-32* drop ] times ;
 : sfmt-benchmark ( -- ) 10000000 sfmt-bench ;

@@ -33,7 +33,7 @@ ERROR: incorrect-#bytes ;
 M: tcp-echo handle-client*
     [ #times>> ] [ #bytes>> ] bi
-    '[ _ [ _ test-bytes write-read ] times ] call ;
+    $[ _ [ _ test-bytes write-read ] times ] call ;
 : server>address ( server -- port )
     servers>> first addr>> port>> local-server ;

@@ -10,7 +10,7 @@ IN: benchmark.udp-echo0
     [ 2dup addr>> ] [ send ] bi* receive drop assert= ;
 : udp-echo ( #times #bytes -- )
-    '[
+    $[
         _ iota [ _ >be ] map
         "127.0.0.1" 0 <inet4> <datagram> &dispose
         "127.0.0.1" 0 <inet4> <datagram> &dispose

@@ -217,10 +217,10 @@ ERROR: 2d-expected shaped ;
 ! [
 ! [ underlying>> [ length iota ] keep zip ]
 ! [ ] bi
-! ] dip '[ _ [ _ set- ] @ ] assoc-each ; inline
+! ] dip $[ _ [ _ set- ] @ ] assoc-each ; inline
 : shaped-map! ( .. sa quot -- sa )
-    '[ _ map ] change-underlying ; inline
+    $[ _ map ] change-underlying ; inline
 : shaped-map ( .. sa quot -- sa' )
     [ [ underlying>> ] dip map ]
@@ -228,7 +228,7 @@ ERROR: 2d-expected shaped ;
 : pad-shapes ( sa0 sa1 -- sa0' sa1' )
     2dup [ shape>> ] bi@
-    2dup longer length '[ _ 1 pad-head ] bi@
+    2dup longer length $[ _ 1 pad-head ] bi@
     [ shaped-like ] bi-curry@ bi* ;
 : output-shape ( sa0 sa1 -- shape )
@@ -243,7 +243,7 @@ ERROR: 2d-expected shaped ;
 : broadcastable? ( sa0 sa1 -- ? )
     pad-shapes
     [ [ shape>> ] bi@ ] [ output-shape ] 2bi
-    '[ _ broadcast-shape-matches? ] both? ;
+    $[ _ broadcast-shape-matches? ] both? ;
 TUPLE: block-array shaped shape ;
@@ -266,32 +266,32 @@ TUPLE: block-array shaped shape ;
 : map-strict-lower ( shaped quot -- shaped )
     [ check-2d ] dip
-    '[ first2 first2 > _ when ] map-shaped-index ; inline
+    $[ first2 first2 > _ when ] map-shaped-index ; inline
 : map-lower ( shaped quot -- shaped )
     [ check-2d ] dip
-    '[ first2 first2 >= _ when ] map-shaped-index ; inline
+    $[ first2 first2 >= _ when ] map-shaped-index ; inline
 : map-strict-upper ( shaped quot -- shaped )
     [ check-2d ] dip
-    '[ first2 first2 < _ when ] map-shaped-index ; inline
+    $[ first2 first2 < _ when ] map-shaped-index ; inline
 : map-upper ( shaped quot -- shaped )
     [ check-2d ] dip
-    '[ first2 first2 <= _ when ] map-shaped-index ; inline
+    $[ first2 first2 <= _ when ] map-shaped-index ; inline
 : map-diagonal ( shaped quot -- shaped )
     [ check-2d ] dip
-    '[ first2 first2 = _ when ] map-shaped-index ; inline
+    $[ first2 first2 = _ when ] map-shaped-index ; inline
 : upper ( shape obj -- shaped )
-    [ zeros check-2d ] dip '[ drop _ ] map-upper ;
+    [ zeros check-2d ] dip $[ drop _ ] map-upper ;
 : strict-upper ( shape obj -- shaped )
-    [ zeros check-2d ] dip '[ drop _ ] map-strict-upper ;
+    [ zeros check-2d ] dip $[ drop _ ] map-strict-upper ;
 : lower ( shape obj -- shaped )
-    [ zeros check-2d ] dip '[ drop _ ] map-lower ;
+    [ zeros check-2d ] dip $[ drop _ ] map-lower ;
 : strict-lower ( shape obj -- shaped )
-    [ zeros check-2d ] dip '[ drop _ ] map-strict-lower ;
+    [ zeros check-2d ] dip $[ drop _ ] map-strict-lower ;

@@ -25,7 +25,7 @@ PRIVATE<
 : (set-bits) ( bit-array n -- )
     [ [ length bits>cells ] keep ] dip swap underlying>>
-    '[ [ _ _ ] dip 4 * set-alien-unsigned-4 ] each-integer ; inline
+    $[ [ _ _ ] dip 4 * set-alien-unsigned-4 ] each-integer ; inline
 : zero-end-bits ( bit-array -- bit-array )
     ! Zero bits after the end.

@@ -60,7 +60,7 @@ M: bit-set subset?
     [ intersect ] keep = ;
 M: bit-set members
-    table>> [ length iota ] keep '[ _ nth-unsafe ] filter ;
+    table>> [ length iota ] keep $[ _ nth-unsafe ] filter ;
 PRIVATE<

@@ -182,6 +182,6 @@ M: msb0-bit-reader peek ( n bs -- bits )
 :: byte-array-n>sequence ( byte-array n -- seq )
     byte-array length 8 * n / iota
-    byte-array <msb0-bit-reader> '[
+    byte-array <msb0-bit-reader> $[
         drop n _ read
     ] { } map-as ;

@@ -84,7 +84,7 @@ PRIVATE<
 ! tradeoff to support it, and I haven't done my own, but we'll
 ! go with it anyway.
 : size-bloom-filter ( error-rate number-objects -- number-hashes number-bits )
-    [ #hashes-range identity-configuration ] 2dip '[
+    [ #hashes-range identity-configuration ] 2dip $[
         dup _ _ bits-to-satisfy-error-rate
         2array smaller-second
     ] reduce check-hashes first2 ;
@@ -127,7 +127,7 @@ PRIVATE<
 : relevant-indices ( object bloom-filter -- n quot: ( elt -- n ) )
     [ double-hashcodes ] [ #hashes-and-length ] bi*
-    [ -rot ] dip '[ _ _ combine-hashcodes _ mod ] ; inline
+    [ -rot ] dip $[ _ _ combine-hashcodes _ mod ] ; inline
 PRIVATE>

@@ -39,7 +39,7 @@ PRIVATE>
 : purge-cache ( cache -- )
     [ assoc>> ] [ max-age>> ] bi V{ } clone [
-        '[
+        $[
             nip dup age>> 1 + [ >>age ] keep
             _ < [ drop t ] [ _ dispose-to f ] if
         ] assoc-filter! drop

@@ -79,4 +79,4 @@ PRIVATE>
     [ clone ] dip [ <circular-iterator> ] dip (circular-while) ; inline
 : circular-loop ( ... circular quot: ( ... obj -- ... ? ) -- ... )
-    [ clone ] dip '[ [ first @ ] [ rotate-circular ] bi ] curry loop ; inline
+    [ clone ] dip $[ [ first @ ] [ rotate-circular ] bi ] curry loop ; inline

@@ -132,17 +132,17 @@ M: end-of-stream cursor-stream-ended? drop t ; inline
 !
 : -each ( ... begin end quot: ( ... cursor -- ... ) -- ... )
-    [ '[ dup _ cursor>= ] ]
-    [ '[ _ keep inc-cursor ] ] bi* until drop ; inline
+    [ $[ dup _ cursor>= ] ]
+    [ $[ _ keep inc-cursor ] ] bi* until drop ; inline
 : -find ( ... begin end quot: ( ... cursor -- ... ? ) -- ... cursor )
-    '[ dup _ cursor>= [ t ] [ dup @ ] if ] [ inc-cursor ] until ; inline
+    $[ dup _ cursor>= [ t ] [ dup @ ] if ] [ inc-cursor ] until ; inline
 : -in- ( quot -- quot' )
-    '[ cursor-value-unsafe @ ] ; inline
+    $[ cursor-value-unsafe @ ] ; inline
 : -out- ( quot -- quot' )
-    '[ _ keep set-cursor-value-unsafe ] ; inline
+    $[ _ keep set-cursor-value-unsafe ] ; inline
 : -out ( ... begin end quot: ( ... cursor -- ... value ) -- ... )
     -out- -each ; inline
@@ -366,7 +366,7 @@ M: map-cursor set-cursor-value-unsafe to>> set-cursor-value-unsafe ; inline
 M: map-cursor set-cursor-value to>> set-cursor-value ; inline
 : -map- ( begin end quot to -- begin' end' quot' )
-    swap [ '[ _ <map-cursor> ] bi@ ] dip '[ from>> @ ] -out- ; inline
+    swap [ $[ _ <map-cursor> ] bi@ ] dip $[ from>> @ ] -out- ; inline
 : -map ( begin end quot to -- begin' end' quot' )
     -map- -each ; inline
@@ -424,7 +424,7 @@ M: forward-cursor new-sequence-cursor
 !
 : -assoc- ( quot -- quot' )
-    '[ cursor-key-value @ ] ; inline
+    $[ cursor-key-value @ ] ; inline
 : assoc- ( assoc quot -- begin end quot' )
     all- -assoc- ; inline
@@ -549,7 +549,7 @@ ALIAS: -2in- -assoc- ;
 !
 : -unzip- ( quot -- quot' )
-    '[ [ keys>> cursor-value-unsafe ] [ values>> ] bi @ ] ; inline
+    $[ [ keys>> cursor-value-unsafe ] [ values>> ] bi @ ] ; inline
 MACRO: nzip-cursors ( n -- quot ) 1 - [ zip-cursors ] n*quot ;
@@ -573,7 +573,7 @@ MACRO: -nin- ( n -- quot )
 !
 : -with- ( invariant begin end quot -- begin end quot' )
-    [ rot ] dip '[ [ _ ] dip @ ] ; inline
+    [ rot ] dip $[ [ _ ] dip @ ] ; inline
 : -2with- ( invariant invariant begin end quot -- begin end quot' )
     -with- -with- ; inline

@@ -33,13 +33,13 @@ ERROR: empty-deque ;
     push-front* drop ; inline
 : push-all-front ( seq deque -- )
-    '[ _ push-front ] each ;
+    $[ _ push-front ] each ;
 : push-back ( obj deque -- )
     push-back* drop ; inline
 : push-all-back ( seq deque -- )
-    '[ _ push-back ] each ;
+    $[ _ push-back ] each ;
 : pop-front ( deque -- obj )
     [ peek-front ] [ pop-front* ] bi ; inline
@@ -48,8 +48,8 @@ ERROR: empty-deque ;
     [ peek-back ] [ pop-back* ] bi ; inline
 : slurp-deque ( ... deque quot: ( ... obj -- ... ) -- ... )
-    [ drop '[ _ deque-empty? ] ]
-    [ '[ _ pop-back @ ] ]
+    [ drop $[ _ deque-empty? ] ]
+    [ $[ _ pop-back @ ] ]
     2bi until ; inline
 MIXIN: deque

@@ -12,7 +12,7 @@ TUPLE: disjoint-set
 PRIVATE<
 : add-count ( p a disjoint-set -- )
-    counts>> [ at '[ _ + ] ] [ swap change-at ] bi ; inline
+    counts>> [ at $[ _ + ] ] [ swap change-at ] bi ; inline
 : set-parent ( p a disjoint-set -- )
     parents>> set-at ; inline
@@ -38,10 +38,10 @@ M:: disjoint-set representative ( a disjoint-set -- p )
 PRIVATE<
 : representatives ( a b disjoint-set -- r r )
-    '[ _ representative ] bi@ ; inline
+    $[ _ representative ] bi@ ; inline
 : ranks ( a b disjoint-set -- r r )
-    '[ _ ranks>> at ] bi@ ; inline
+    $[ _ ranks>> at ] bi@ ; inline
 :: branch ( a b neg zero pos -- )
     a b = zero [ a b < neg pos if ] if ; inline
@@ -59,7 +59,7 @@ M: disjoint-set add-atom
     [ [ 1 ] 2dip counts>> set-at ]
     2tri ;
-: add-atoms ( seq disjoint-set -- ) '[ _ add-atom ] each ;
+: add-atoms ( seq disjoint-set -- ) $[ _ add-atom ] each ;
 GENERIC: disjoint-set-member? ( a disjoint-set -- ? ) ;
@@ -89,7 +89,7 @@ M:: disjoint-set equate ( a b disjoint-set -- )
     ] if ;
 : equate-all-with ( seq a disjoint-set -- )
-    '[ _ _ equate ] each ;
+    $[ _ _ equate ] each ;
 : equate-all ( seq disjoint-set -- )
     over empty? [ 2drop ] [
@@ -102,6 +102,6 @@ M: disjoint-set clone
 : assoc>disjoint-set ( assoc -- disjoint-set )
     <disjoint-set> [
-        [ '[ drop _ add-atom ] assoc-each ]
-        [ '[ _ equate ] assoc-each ] 2bi
+        [ $[ drop _ add-atom ] assoc-each ]
+        [ $[ _ equate ] assoc-each ] 2bi
     ] keep ;

@@ -84,7 +84,7 @@ M: dlist equal?
     dlist-find-node [ prev>> ] [ f ] if* ; inline
 : dlist-each-node ( ... dlist quot: ( ... node -- ... ) -- ... )
-    '[ @ f ] dlist-find-node drop ; inline
+    $[ @ f ] dlist-find-node drop ; inline
 : unlink-node ( dlist-node -- )
     [ prev>> ] [ next>> ] bi
@@ -131,7 +131,7 @@ M: dlist pop-back*
     ] change-back normalize-front ;
 : dlist-find ( ... dlist quot: ( ... value -- ... ? ) -- ... obj/f ? )
-    '[ obj>> @ ] dlist-find-node [ obj>> t ] [ f f ] if* ; inline
+    $[ obj>> @ ] dlist-find-node [ obj>> t ] [ f f ] if* ; inline
 : dlist-any? ( ... dlist quot: ( ... value -- ... ? ) -- ... ? )
     dlist-find nip ; inline
@@ -160,7 +160,7 @@ M: dlist delete-node
     ] if* ; inline
 : delete-node-if ( ... dlist quot: ( ... value -- ... ? ) -- ... obj/f )
-    '[ obj>> @ ] delete-node-if* drop ; inline
+    $[ obj>> @ ] delete-node-if* drop ; inline
 M: dlist clear-deque
     f >>front f >>back drop ;
@@ -171,23 +171,23 @@ M: dlist clear-deque
     ] dlist-find-node drop ; flushable
 : dlist-each ( ... dlist quot: ( ... value -- ... ) -- ... )
-    '[ obj>> @ ] dlist-each-node ; inline
+    $[ obj>> @ ] dlist-each-node ; inline
 : dlist>sequence ( dlist -- seq )
     [ ] collector [ dlist-each ] dip ;
 : >dlist ( seq -- dlist )
-    <dlist> [ '[ _ push-back ] each ] keep ;
+    <dlist> [ $[ _ push-back ] each ] keep ;
 : 1dlist ( obj -- dlist ) <dlist> [ push-front ] keep ;
 : dlist-filter ( ... dlist quot: ( ... value -- ... ? ) -- ... dlist' )
     <dlist> [
-        '[ _ keep swap [ _ push-back ] [ drop ] if ] dlist-each
+        $[ _ keep swap [ _ push-back ] [ drop ] if ] dlist-each
     ] keep ; inline
 M: dlist clone
-    <dlist> [ '[ _ push-back ] dlist-each ] keep ;
+    <dlist> [ $[ _ push-back ] dlist-each ] keep ;
 PRIVATE<

@@ -29,7 +29,7 @@ IN: documents.tests
     [
         { 1 10 } { 2 11 }
         t f
-        '[ [ _ _ ] dip 3array , ] each-line
+        $[ [ _ _ ] dip 3array , ] each-line
     ] { } make
 ] unit-test

@@ -152,7 +152,7 @@ PRIVATE>
     ] unless ;
 : change-doc-range ( from to document quot -- )
-    '[ doc-range @ ] 3keep set-doc-range ; inline
+    $[ doc-range @ ] 3keep set-doc-range ; inline
 : remove-doc-range ( from to document -- )
     [ "" ] 3dip set-doc-range ;
@@ -189,7 +189,7 @@ PRIVATE>
 PRIVATE<
 : undo/redo-edit ( edit document string-quot to-quot -- )
-    '[ [ _ [ from>> ] _ tri ] dip set-doc-range ] with-undo ; inline
+    $[ [ _ [ from>> ] _ tri ] dip set-doc-range ] with-undo ; inline
 : undo-edit ( edit document -- )
     [ old-string>> ] [ new-to>> ] undo/redo-edit ;
@@ -199,7 +199,7 @@ PRIVATE<
 : undo/redo ( document source-quot dest-quot do-quot -- )
     [ dupd call [ drop ] ] 2dip
-    '[ pop swap [ @ push ] _ 2bi ] if-empty ; inline
+    $[ pop swap [ @ push ] _ 2bi ] if-empty ; inline
 PRIVATE>

@@ -59,7 +59,7 @@ PRIVATE<
     2dup ?nth blank? ;
 : break-detector ( ? -- quot )
-    '[ blank? _ xor ] ; inline
+    $[ blank? _ xor ] ; inline
 : prev-word ( col str ? -- col )
     break-detector find-last-from drop ?1+ ;

@@ -11,7 +11,7 @@ TUPLE: grid-mesh dim buffer row-length ;
 PRIVATE<
 : vertex-array-row ( range z0 z1 -- vertices )
-    '[ _ _ [ 0.0 swap 1.0 float-4-boa ] bi-curry@ bi ]
+    $[ _ _ [ 0.0 swap 1.0 float-4-boa ] bi-curry@ bi ]
     data-map( object -- float-4[2] ) ; inline
 : vertex-array ( dim -- vertices )

@@ -20,7 +20,7 @@ IN: grouping.extras
 MACRO: nclump-map-as ( seq quot exemplar n -- result )
     [ nip [1,b) [ [ short tail-slice ] curry ] map swap ] 2keep
-    '[ _ dup _ cleave _ _ _ nmap-as ] ;
+    $[ _ dup _ cleave _ _ _ nmap-as ] ;
 : nclump-map ( seq quot n -- result )
     { } swap nclump-map-as ; inline
@@ -61,4 +61,4 @@ PRIVATE<
 PRIVATE>
 : group-by ( seq quot: ( elt -- key ) -- groups )
-    '[ dup _ call( x -- y ) (group-by) ] V{ } clone swap reduce ;
+    $[ dup _ call( x -- y ) (group-by) ] V{ } clone swap reduce ;

@@ -80,7 +80,7 @@ M: max-heap heap-compare
     drop { entry entry } declare [ key>> ] bi@ before? ; inline
 : data-compare ( m n heap -- ? )
-    [ '[ _ data-nth ] bi@ ] [ heap-compare ] bi ; inline
+    [ $[ _ data-nth ] bi@ ] [ heap-compare ] bi ; inline
 PRIVATE>
@@ -116,7 +116,7 @@ M: heap heap-push*
     heap-push* drop ;
 : heap-push-all ( assoc heap -- )
-    '[ swap _ heap-push ] assoc-each ;
+    $[ swap _ heap-push ] assoc-each ;
 PRIVATE<
@@ -149,8 +149,8 @@ M: heap heap-pop
 : slurp-heap ( ... heap quot: ( ... value key -- ... ) -- ... )
     [ check-heap ] dip
-    [ drop '[ _ heap-empty? ] ]
-    [ '[ _ heap-pop @ ] until ] 2bi ; inline
+    [ drop $[ _ heap-empty? ] ]
+    [ $[ _ heap-pop @ ] until ] 2bi ; inline
 : heap-pop-all ( heap -- alist )
     [ heap-size <vector> ] keep

@@ -107,5 +107,5 @@ PRIVATE>
 : <interval-and> ( set1 set2 -- set )
     2dup interval-max
-    [ '[ _ <interval-not> ] bi@ <interval-or> ] keep
+    [ $[ _ <interval-not> ] bi@ <interval-or> ] keep
     <interval-not> ;

@@ -24,8 +24,8 @@ COMPILE<
     1 - 8 * 0 swap 8 <range> ; inline
 : reassemble-bytes ( range -- quot )
-    [ [ [ ] ] [ '[ _ shift ] ] if-zero ] map
-    '[ [ _ spread ] [ bitor ] reduce-outputs ] ; inline
+    [ [ [ ] ] [ $[ _ shift ] ] if-zero ] map
+    $[ [ _ spread ] [ bitor ] reduce-outputs ] ; inline
 MACRO: reassemble-be ( n -- quot ) be-range reassemble-bytes ;

@@ -16,7 +16,7 @@ PRIVATE<
     char: > over index [ 1 + head-slice ] when* >string ;
 : prolog-encoding ( string -- iana-encoding )
-    '[
+    $[
         _ "encoding=" over start
         10 + swap [ [ 1 - ] dip nth ] [ index-from ] [ swapd subseq ] 2tri
     ] [ drop "UTF-8" ] recover ;

@@ -9,7 +9,7 @@ IN: io.random
 PRIVATE<
 : each-numbered-line ( ... quot: ( ... line number -- ... ) -- ... )
-    [ 1 ] dip '[ swap [ @ ] [ 1 + ] bi ] each-line drop ; inline
+    [ 1 ] dip $[ swap [ @ ] [ 1 + ] bi ] each-line drop ; inline
 PRIVATE>

@@ -57,7 +57,7 @@ intensities |[ r i |
     [ 255 * round >integer ] tri@ 3array ;
 : color>256color ( color -- 256color )
-    color>rgb '[ _ distance ]
+    color>rgb $[ _ distance ]
     256colors [ keys swap infimum-by ] [ at ] bi ;
 : color>foreground ( color -- string )

@@ -37,7 +37,7 @@ CONSTANT: colors H{
     [ 255 * round >integer ] tri@ 3array ;
 : color>ansi ( color -- ansi bold? )
-    color>rgb '[ _ distance ]
+    color>rgb $[ _ distance ]
     colors [ keys swap infimum-by ] [ at ] bi
     dup 8 >= [ 8 - t ] [ f ] if ;

@@ -20,7 +20,7 @@ M: linked-assoc at*
 PRIVATE<
 : (delete-at) ( key assoc dlist -- )
-    '[ at [ _ delete-node ] when* ] [ delete-at ] 2bi ; inline
+    $[ at [ _ delete-node ] when* ] [ delete-at ] 2bi ; inline
 PRIVATE>
@@ -36,7 +36,7 @@ PRIVATE>
 M: linked-assoc set-at
     [ assoc>> ] [ dlist>> ] bi
-    '[ _ 2over key? [ 3dup (delete-at) ] when nip add-to-dlist ]
+    $[ _ 2over key? [ 3dup (delete-at) ] when nip add-to-dlist ]
     [ set-at ] 2bi ;
 M: linked-assoc >alist

@@ -17,7 +17,7 @@ M: linked-set clear-set
 PRIVATE<
 : (delete-at) ( key assoc dlist -- )
-    '[ at [ _ delete-node ] when* ] [ delete-at ] 2bi ; inline
+    $[ at [ _ delete-node ] when* ] [ delete-at ] 2bi ; inline
 PRIVATE>
@@ -28,7 +28,7 @@ M: linked-set cardinality assoc>> assoc-size ;
 M: linked-set adjoin
     [ assoc>> ] [ dlist>> ] bi
-    '[ _ 2over key? [ 3dup (delete-at) ] when nip push-back* ]
+    $[ _ 2over key? [ 3dup (delete-at) ] when nip push-back* ]
     [ set-at ] 2bi ;
 M: linked-set members

@@ -10,7 +10,7 @@ TUPLE: pool
 : <pool> ( size class -- pool )
     [ nip new ]
-    [ '[ _ new ] V{ } replicate-as ] 2bi
+    [ $[ _ new ] V{ } replicate-as ] 2bi
     pool boa ;
 : pool-size ( pool -- size )
@@ -50,4 +50,4 @@ PRIVATE>
     dup class-of class-pool pool-free ;
 SYNTAX: \ POOL:
-    scan-word scan-word ";" expect '[ _ swap <pool> ] [ swap set-class-pool ] bi ;
+    scan-word scan-word ";" expect $[ _ swap <pool> ] [ swap set-class-pool ] bi ;

@@ -86,7 +86,7 @@ COMPILE>
 : random-assocs ( n -- hash phash )
     [ random-string ] replicate
-    [ H{ } clone [ '[ swap _ set-at ] each-index ] keep ]
+    [ H{ } clone [ $[ swap _ set-at ] each-index ] keep ]
     [ PH{ } clone swap |[ ph elt i | i elt ph new-at ] each-index ]
     bi ;

@@ -8,7 +8,7 @@ persistent.hashtables.nodes.leaf ;
 IN: persistent.hashtables.nodes.collision
 : find-index ( key hashcode collision-node -- n leaf-node )
-    leaves>> -rot '[ [ _ _ ] dip matching-key? ] find ; inline
+    leaves>> -rot $[ [ _ _ ] dip matching-key? ] find ; inline
 M:: collision-node (entry-at) ( key hashcode collision-node -- leaf-node )
     key hashcode collision-node find-index nip ;

@@ -195,4 +195,4 @@ M: quadtree clear-assoc ( assoc -- )
 : swizzle ( sequence quot -- sequence' )
     [ dup ] dip map
     [ zip ] [ rect-containing <quadtree> ] bi
-    [ '[ first2 _ set-at ] each ] [ values ] bi ; inline
+    [ $[ first2 _ set-at ] each ] [ values ] bi ; inline

@@ -9,10 +9,10 @@ PRIVATE<
     dup length [1,b] [ head ] with map ;
 : (abbrev) ( seq -- assoc )
-    [ prefixes ] keep 1array '[ _ ] H{ } map>assoc ;
+    [ prefixes ] keep 1array $[ _ ] H{ } map>assoc ;
 : assoc-merge ( assoc1 assoc2 -- assoc3 )
-    [ '[ over _ at [ append ] when* ] assoc-map ] keep swap assoc-union ;
+    [ $[ over _ at [ append ] when* ] assoc-map ] keep swap assoc-union ;
 PRIVATE>

@@ -93,8 +93,8 @@ M: cord vandn [ vandn ] [ call-next-method ] cord-2map ;
 M: cord vor [ vor ] [ call-next-method ] cord-2map ; inline
 M: cord vxor [ vxor ] [ call-next-method ] cord-2map ; inline
 M: cord vnot [ vnot ] cord-map ; inline
-M: cord vlshift '[ _ vlshift ] cord-map ; inline
-M: cord vrshift '[ _ vrshift ] cord-map ; inline
+M: cord vlshift $[ _ vlshift ] cord-map ; inline
+M: cord vrshift $[ _ vrshift ] cord-map ; inline
 M: cord (vmerge-head) [ head>> ] bi@ (vmerge) cord-append ; inline
 M: cord (vmerge-tail) [ tail>> ] bi@ (vmerge) cord-append ; inline
 M: cord v<= [ v<= ] [ call-next-method ] cord-2map ; inline
@@ -114,10 +114,10 @@ M: cord n+v [ n+v ] with cord-map ; inline
 M: cord n-v [ n-v ] with cord-map ; inline
 M: cord n*v [ n*v ] with cord-map ; inline
 M: cord n/v [ n/v ] with cord-map ; inline
-M: cord v+n '[ _ v+n ] cord-map ; inline
-M: cord v-n '[ _ v-n ] cord-map ; inline
-M: cord v*n '[ _ v*n ] cord-map ; inline
-M: cord v/n '[ _ v/n ] cord-map ; inline
+M: cord v+n $[ _ v+n ] cord-map ; inline
+M: cord v-n $[ _ v-n ] cord-map ; inline
+M: cord v*n $[ _ v*n ] cord-map ; inline
+M: cord v/n $[ _ v/n ] cord-map ; inline
 M: cord norm-sq [ norm-sq ] cord-both + ; inline
 M: cord distance v- norm ; inline

@@ -14,14 +14,14 @@ M: object branch? drop f ;
 : deep-each ( ... obj quot: ( ... elt -- ... ) -- ... )
     [ call ] 2keep over branch?
-    [ '[ _ deep-each ] each ] [ 2drop ] if ; inline recursive
+    [ $[ _ deep-each ] each ] [ 2drop ] if ; inline recursive
 : deep-reduce ( ... obj identity quot: ( ... prev elt -- ... next ) -- ... result )
     swapd deep-each ; inline
 : deep-map ( ... obj quot: ( ... elt -- ... elt' ) -- ... newobj )
     [ call ] keep over branch?
-    [ '[ _ deep-map ] map ] [ drop ] if ; inline recursive
+    [ $[ _ deep-map ] map ] [ drop ] if ; inline recursive
 : deep-filter-as ( ... obj quot: ( ... elt -- ... ? ) exemplar -- ... seq )
     [ selector [ deep-each ] dip ] dip [ like ] when* ; inline recursive
@@ -32,7 +32,7 @@ M: object branch? drop f ;
 : (deep-find) ( ... obj quot: ( ... elt -- ... ? ) -- ... elt ? )
     [ call ] 2keep rot [ drop t ] [
         over branch? [
-            [ f ] 2dip '[ nip _ (deep-find) ] any?
+            [ f ] 2dip $[ nip _ (deep-find) ] any?
         ] [ 2drop f f ] if
     ] if ; inline recursive
@@ -41,21 +41,21 @@ M: object branch? drop f ;
 : deep-any? ( ... obj quot: ( ... elt -- ... ? ) -- ... ? ) (deep-find) nip ; inline
 : deep-all? ( ... obj quot: ( ... elt -- ... ? ) -- ... ? )
-    '[ @ not ] deep-any? not ; inline
+    $[ @ not ] deep-any? not ; inline
 : deep-member? ( obj seq -- ? )
-    swap '[
+    swap $[
         _ swap dup branch? [ member? ] [ 2drop f ] if
     ] deep-find >boolean ;
 : deep-subseq? ( subseq seq -- ? )
-    swap '[
+    swap $[
         _ swap dup branch? [ subseq? ] [ 2drop f ] if
     ] deep-find >boolean ;
 : deep-map! ( ... obj quot: ( ... elt -- ... elt' ) -- ... obj )
     over branch? [
-        '[ _ [ call ] keep over [ deep-map! drop ] dip ] map!
+        $[ _ [ call ] keep over [ deep-map! drop ] dip ] map!
     ] [ drop ] if ; inline recursive
 : flatten ( obj -- seq )

@@ -36,7 +36,7 @@ IN: sequences.extras
     [ 0 len clamp ] bi@ dupd max seq subseq ;
 : safe-subseq ( from to seq -- subseq )
-    [ length '[ 0 _ clamp ] bi@ ] keep subseq ;
+    [ length $[ 0 _ clamp ] bi@ ] keep subseq ;
 : all-subseqs ( seq -- seqs )
     dup length [1,b] [ clump ] with map concat ;
@@ -52,11 +52,11 @@ IN: sequences.extras
     ] each ; inline
 : map-like ( seq exemplar -- seq' )
-    '[ _ like ] map ; inline
+    $[ _ like ] map ; inline
 : filter-all-subseqs-range ( ... seq range quot: ( ... x -- ... ) -- seq )
     [
-        '[ <clumps> _ filter ] with map concat
+        $[ <clumps> _ filter ] with map concat
     ] 3keep 2drop map-like ; inline
 : filter-all-subseqs ( ... seq quot: ( ... x -- ... ) -- seq )
@@ -184,14 +184,14 @@ PRIVATE>
 : slices-touch? ( slice1 slice2 -- ? )
     unordered-slices-touch? ;
-ERROR: slices-don't-touch slice1 slice2 ;
+ERROR: slices-dont-touch slice1 slice2 ;
 : merge-slices ( slice1 slice2 -- slice/* )
     slice-order-by-from
     2dup ordered-slices-touch? [
         [ from>> ] [ [ to>> ] [ seq>> ] bi ] bi* <slice>
     ] [
-        slices-don't-touch
+        slices-dont-touch
     ] if ;
 : rotate ( seq n -- seq' )
@@ -265,7 +265,7 @@ PRIVATE>
 PRIVATE<
 : (setup-each-from) ( i seq -- n quot )
-    [ length over [-] swap ] keep '[ _ + _ nth-unsafe ] ; inline
+    [ length over [-] swap ] keep $[ _ + _ nth-unsafe ] ; inline
 : setup-each-from ( i seq quot -- n quot' )
     [ (setup-each-from) ] dip compose ; inline
@@ -452,7 +452,7 @@ PRIVATE>
 : nth? ( ... n seq quot: ( ... elt -- ... ? ) -- ... ? ) [ nth ] dip call ; inline
 : loop>sequence ( quot exemplar -- seq )
-    [ '[ [ @ [ [ , ] when* ] keep ] loop ] ] dip make ; inline
+    [ $[ [ @ [ [ , ] when* ] keep ] loop ] ] dip make ; inline
 : loop>array ( quot -- seq )
     { } loop>sequence ; inline
@@ -473,14 +473,14 @@ PRIVATE>
 : insert-nth! ( elt n seq -- )
     [ length ] keep ensure swap pick (a,b]
-    over '[ [ 1 + ] keep _ move-unsafe ] each
+    over $[ [ 1 + ] keep _ move-unsafe ] each
     set-nth-unsafe ;
 : set-nths ( value indices seq -- )
-    swapd '[ _ swap _ set-nth ] each ; inline
+    swapd $[ _ swap _ set-nth ] each ; inline
 : set-nths-unsafe ( value indices seq -- )
-    swapd '[ _ swap _ set-nth-unsafe ] each ; inline
+    swapd $[ _ swap _ set-nth-unsafe ] each ; inline
 : flatten1 ( obj -- seq )
     [
@@ -502,7 +502,7 @@ PRIVATE>
 : map-find-index ( ... seq quot: ( ... elt index -- ... result/f ) -- ... result i elt )
     [ find-index ] (map-find-index) ; inline
-: filter-length ( seq n -- seq' ) '[ length _ = ] filter ;
+: filter-length ( seq n -- seq' ) $[ length _ = ] filter ;
 : all-shortest ( seqs -- seqs' ) dup shortest length filter-length ;
@@ -589,7 +589,7 @@ PRIVATE>
     [ drop length 1 - ] [ change-nth-unsafe ] 2bi ; inline
 : replicate-into ( ... seq quot: ( ... -- ... newelt ) -- ... )
-    over [ length ] 2dip '[ _ dip _ set-nth-unsafe ] each-integer ; inline
+    over [ length ] 2dip $[ _ dip _ set-nth-unsafe ] each-integer ; inline
 : count* ( ... seq quot: ( ... elt -- ... ? ) -- ... % )
     over [ count ] [ length ] bi* / ; inline
@@ -618,10 +618,10 @@ PRIVATE>
     start-all* length ; inline
 : map-zip ( quot: ( x -- y ) -- alist )
-    '[ _ keep swap ] map>alist ; inline
+    $[ _ keep swap ] map>alist ; inline
 : map-keys ( assoc quot: ( key -- key' ) -- assoc )
-    '[ _ dip ] assoc-map ; inline
+    $[ _ dip ] assoc-map ; inline
 : map-values ( assoc quot: ( value -- value' ) -- assoc )
-    '[ swap _ dip swap ] assoc-map ; inline
+    $[ swap _ dip swap ] assoc-map ; inline

@@ -116,7 +116,7 @@ TUPLE: sequence-parser sequence n ;
     [ take-rest-slice ] [ sequence>> like ] bi f like ;
 : take-until-object ( sequence-parser obj -- sequence )
-    '[ current _ = ] take-until ;
+    $[ current _ = ] take-until ;
 : parse-sequence ( sequence quot -- )
     [ <sequence-parser> ] dip call ; inline
@@ -139,7 +139,7 @@ COMPILE< "length" [ length ] define-sorting COMPILE>
 : take-first-matching ( sequence-parser seq -- seq )
     swap
-    '[ _ [ swap take-sequence ] with-sequence-parser ] find nip ;
+    $[ _ [ swap take-sequence ] with-sequence-parser ] find nip ;
 : take-longest ( sequence-parser seq -- seq )
     sort-tokens take-first-matching ;

@@ -3,7 +3,7 @@ USING: combinators.short-circuit fry make math kernel sequences ;
 IN: sequences.squish
 : (squish) ( seq quot: ( obj -- ? ) -- )
-    2dup call [ '[ _ (squish) ] each ] [ drop , ] if ; inline recursive
+    2dup call [ $[ _ (squish) ] each ] [ drop , ] if ; inline recursive
 : squish ( seq quot exemplar -- seq' )
     [ [ (squish) ] ] dip make ; inline

@@ -6,14 +6,14 @@ IN: sequences.unrolled
 PRIVATE<
 : (unrolled-each-integer) ( quot n -- )
-    swap '[ _ call( i -- ) ] each-integer ;
+    swap $[ _ call( i -- ) ] each-integer ;
 COMPILE< \ (unrolled-each-integer) [
-    iota [ '[ _ swap call( i -- ) ] ] [ ] map-as '[ _ cleave ]
+    iota [ $[ _ swap call( i -- ) ] ] [ ] map-as $[ _ cleave ]
 ] 1 define-partial-eval COMPILE>
 : (unrolled-collect) ( quot into -- quot' )
-    '[ dup @ swap _ set-nth-unsafe ] ; inline
+    $[ dup @ swap _ set-nth-unsafe ] ; inline
 PRIVATE>
@@ -42,13 +42,13 @@ PRIVATE<
     [ xseq yseq len quot ] if ; inline
 : (unrolled-each) ( seq len quot -- len quot )
-    swapd '[ _ nth-unsafe @ ] ; inline
+    swapd $[ _ nth-unsafe @ ] ; inline
 : (unrolled-each-index) ( seq len quot -- len quot )
-    swapd '[ dup _ nth-unsafe swap @ ] ; inline
+    swapd $[ dup _ nth-unsafe swap @ ] ; inline
 : (unrolled-2each) ( xseq yseq len quot -- len quot )
-    [ '[ _ ] 2dip ] dip (2each) nip ; inline
+    [ $[ _ ] 2dip ] dip (2each) nip ; inline
 : unrolled-each-unsafe ( seq len quot: ( x -- ) -- )
     (unrolled-each) unrolled-each-integer ; inline

@@ -45,4 +45,4 @@ M: windowed-sequence length
     [ infimum ] rolling-map ;
 : rolling-count ( ... u n quot: ( ... elt -- ... ? ) -- ... v )
-    '[ _ count ] rolling-map ; inline
+    $[ _ count ] rolling-map ; inline

@@ -128,7 +128,7 @@ PRIVATE>
 : define-array-vocab ( type -- vocab )
     underlying-type
-    [ specialized-array-vocab ] [ '[ _ define-array ] ] bi
+    [ specialized-array-vocab ] [ $[ _ define-array ] ] bi
     generate-vocab ;
 ERROR: specialized-array-vocab-not-loaded c-type ;

@@ -73,7 +73,7 @@ PRIVATE>
 : define-vector-vocab ( type -- vocab )
     underlying-type
-    [ specialized-vector-vocab ] [ '[ _ define-vector ] ] bi
+    [ specialized-vector-vocab ] [ $[ _ define-vector ] ] bi
     generate-vocab ;
 SYNTAX: \ SPECIALIZED-VECTORS:

@@ -6,10 +6,10 @@ IN: strings.tables
 PRIVATE<
 : format-row ( seq -- seq )
-    dup longest length '[ _ "" pad-tail ] map! ;
+    dup longest length $[ _ "" pad-tail ] map! ;
 : format-column ( seq -- seq )
-    dup longest length '[ _ char: \s pad-tail ] map! ;
+    dup longest length $[ _ char: \s pad-tail ] map! ;
 PRIVATE>

@@ -16,10 +16,10 @@ PRIVATE<
     [ prefix<=> ] with search drop ;
 : query-from ( index begin suffix-array -- from )
-    swap '[ _ head? not ] find-last-from drop [ 1 + ] [ 0 ] if* ;
+    swap $[ _ head? not ] find-last-from drop [ 1 + ] [ 0 ] if* ;
 : query-to ( index begin suffix-array -- to )
-    [ swap '[ _ head? not ] find-from drop ] [ length or ] bi ;
+    [ swap $[ _ head? not ] find-from drop ] [ length or ] bi ;
 : query-range ( index begin suffix-array -- from to )
     [ query-from ] [ query-to ] 3bi [ min ] keep ;
@@ -35,4 +35,4 @@ PRIVATE>
 SYNTAX: \ SA{ \ } [ >suffix-array ] parse-literal ;
 : query ( begin suffix-array -- matches )
-    [ find-index ] 2keep '[ _ _ (query) ] [ { } ] if* ;
+    [ find-index ] 2keep $[ _ _ (query) ] [ { } ] if* ;

@@ -20,7 +20,7 @@ TUPLE: avl-node < node balance ;
     0 >>balance ; inline
 : increase-balance ( node amount -- node )
-    '[ _ + ] change-balance ;
+    $[ _ + ] change-balance ;
 : rotate ( node -- node )
     dup

@@ -89,7 +89,7 @@ f initialize-test set-global
 ! Generate callbacks until the whole callback-heap is full, then free
 ! them. Do it ten times in a row for good measure.
 : produce-until-error ( quot -- error seq )
-    '[ [ @ t ] [ f ] recover ] [ ] produce ; inline
+    $[ [ @ t ] [ f ] recover ] [ ] produce ; inline
 SYMBOL: foo

View File

@ -124,7 +124,7 @@ MACRO: alien-copy-value ( c-type -- quot: ( c-ptr offset -- value ) )
[ c-type-getter ] [ c-type-copier ] [ c-type-boxer-quot ] tri 3append ; [ c-type-getter ] [ c-type-copier ] [ c-type-boxer-quot ] tri 3append ;
MACRO: set-alien-value ( c-type -- quot: ( value c-ptr offset -- ) ) MACRO: set-alien-value ( c-type -- quot: ( value c-ptr offset -- ) )
[ c-type-unboxer-quot [ [ ] ] [ '[ _ 2dip ] ] if-empty ] [ c-type-unboxer-quot [ [ ] ] [ $[ _ 2dip ] ] if-empty ]
[ c-type-setter ] [ c-type-setter ]
bi append ; bi append ;
@ -213,7 +213,7 @@ PRIVATE<
] [ drop t ] if ; ] [ drop t ] if ;
: (pointer-c-type) ( void* type -- void*' ) : (pointer-c-type) ( void* type -- void*' )
[ clone ] dip c-type-boxer-quot '[ _ [ f ] if* ] >>boxer-quot ; [ clone ] dip c-type-boxer-quot $[ _ [ f ] if* ] >>boxer-quot ;
PRIVATE> PRIVATE>

View File

@ -58,7 +58,7 @@ M: library dispose dll>> [ dispose ] when* ;
[ swap path>> = ] [ swap abi>> = ] bi-curry* bi and ; [ swap path>> = ] [ swap abi>> = ] bi-curry* bi and ;
: add-library? ( name path abi -- ? ) : add-library? ( name path abi -- ? )
[ lookup-library ] 2dip '[ _ _ same-library? not ] [ t ] if* ; [ lookup-library ] 2dip $[ _ _ same-library? not ] [ t ] if* ;
: add-library ( name path abi -- ) : add-library ( name path abi -- )
3dup add-library? [ 3dup add-library? [

View File

@ -108,7 +108,7 @@ IN: bootstrap.syntax
"::" "M::" "MEMO:" "MEMO::" "MACRO:" "MACRO::" "IDENTITY-MEMO:" "IDENTITY-MEMO::" "TYPED:" "TYPED::" "::" "M::" "MEMO:" "MEMO::" "MACRO:" "MACRO::" "IDENTITY-MEMO:" "IDENTITY-MEMO::" "TYPED:" "TYPED::"
":>" "|[" "let[" "MEMO[" ":>" "|[" "let[" "MEMO["
"'[" "$["
"_" "_"
"@" "@"
"IH{" "IH{"

View File

@ -6,18 +6,18 @@ PRIVATE<
MACRO: keeping ( n quot -- quot' ) MACRO: keeping ( n quot -- quot' )
swap dup 1 + swap dup 1 +
'[ _ _ nkeep _ nrot ] ; $[ _ _ nkeep _ nrot ] ;
PRIVATE> PRIVATE>
MACRO: n&& ( quots n -- quot ) MACRO: n&& ( quots n -- quot )
[ [
[ [ f ] ] 2dip swap [ [ [ f ] ] 2dip swap [
[ '[ drop _ _ keeping dup not ] ] [ $[ drop _ _ keeping dup not ] ]
[ drop '[ drop _ ndrop f ] ] [ drop $[ drop _ ndrop f ] ]
2bi 2array 2bi 2array
] with map ] with map
] [ '[ _ nnip ] suffix 1array ] bi ] [ $[ _ nnip ] suffix 1array ] bi
[ cond ] 3append ; [ cond ] 3append ;
PRIVATE< PRIVATE<
@ -35,11 +35,11 @@ PRIVATE>
MACRO: n|| ( quots n -- quot ) MACRO: n|| ( quots n -- quot )
[ [
[ [ f ] ] 2dip swap [ [ [ f ] ] 2dip swap [
[ '[ drop _ _ keeping dup ] ] [ $[ drop _ _ keeping dup ] ]
[ drop '[ _ nnip ] ] [ drop $[ _ nnip ] ]
2bi 2array 2bi 2array
] with map ] with map
] [ '[ drop _ ndrop t ] [ f ] 2array suffix 1array ] bi ] [ $[ drop _ ndrop t ] [ f ] 2array suffix 1array ] bi
[ cond ] 3append ; [ cond ] 3append ;
PRIVATE< PRIVATE<

View File

@ -13,6 +13,6 @@ ERROR: cannot-determine-arity ;
PRIVATE> PRIVATE>
MACRO: && ( quots -- quot ) dup arity '[ _ _ n&& ] ; MACRO: && ( quots -- quot ) dup arity $[ _ _ n&& ] ;
MACRO: || ( quots -- quot ) dup arity '[ _ _ n|| ] ; MACRO: || ( quots -- quot ) dup arity $[ _ _ n|| ] ;
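For context, not part of the patch: a usage sketch of the short-circuit combinators whose expansions are fried above, assuming the stock combinators.short-circuit vocabulary:

USING: combinators.short-circuit math prettyprint ;

5 { [ 0 > ] [ 10 < ] } && .    ! t -- both predicates hold for 5
5 { [ 0 < ] [ 10 < ] } || .    ! t -- at least one predicate holds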

View File

@ -29,7 +29,7 @@ SYMBOL: command-line
home prepend-path ; home prepend-path ;
: try-user-init ( file -- ) : try-user-init ( file -- )
"user-init" get swap '[ "user-init" get swap $[
_ [ ?run-file ] [ _ [ ?run-file ] [
<user-init-error> <user-init-error>
swap user-init-errors get set-at swap user-init-errors get set-at

View File

@ -45,7 +45,7 @@ T{ error-type-holder
\ linkage-error <definition-error> ; \ linkage-error <definition-error> ;
: set-linkage-error ( name message word class -- ) : set-linkage-error ( name message word class -- )
'[ _ boa ] dip <linkage-error> dup asset>> linkage-errors get set-at ; inline $[ _ boa ] dip <linkage-error> dup asset>> linkage-errors get set-at ; inline
T{ error-type-holder T{ error-type-holder
{ type +linkage-error+ } { type +linkage-error+ }

View File

@ -18,7 +18,7 @@ IN: compiler.units.tests
gensym "b" set gensym "b" set
[ [
"a" get [ "A" ] define "a" get [ "A" ] define
"b" get "a" get '[ _ execute ] define "b" get "a" get $[ _ execute ] define
] with-compilation-unit ] with-compilation-unit
"b" get execute "b" get execute
[ [

View File

@ -50,11 +50,11 @@ IN: continuations.tests
gc gc
] unit-test ] unit-test
: don't-compile-me ( -- ) ; : dont-compile-me ( -- ) ;
: foo ( -- ) get-callstack "c" set don't-compile-me ; : foo ( -- ) get-callstack "c" set dont-compile-me ;
: bar ( -- a b ) 1 foo 2 ; : bar ( -- a b ) 1 foo 2 ;
COMPILE< { don't-compile-me foo bar } [ t "no-compile" set-word-prop ] each COMPILE> COMPILE< { dont-compile-me foo bar } [ t "no-compile" set-word-prop ] each COMPILE>
{ 1 2 } [ bar ] unit-test { 1 2 } [ bar ] unit-test

View File

@ -65,10 +65,10 @@ M: consult-method reset-word
GENERIC#: (consult-method-quot) 2 ( consultation quot word -- object ) ; GENERIC#: (consult-method-quot) 2 ( consultation quot word -- object ) ;
M: consultation (consult-method-quot) M: consultation (consult-method-quot)
'[ _ call _ execute ] nip ; $[ _ call _ execute ] nip ;
M: broadcast (consult-method-quot) M: broadcast (consult-method-quot)
'[ _ call [ _ execute ] each ] nip ; $[ _ call [ _ execute ] each ] nip ;
: consult-method-quot ( consultation word -- object ) : consult-method-quot ( consultation word -- object )
[ dup quot>> ] dip [ dup quot>> ] dip
@ -85,7 +85,7 @@ M: broadcast (consult-method-quot)
: register-consult ( consultation -- ) : register-consult ( consultation -- )
[ group>> "protocol-consult" ] [ ] [ class>> ] tri [ group>> "protocol-consult" ] [ ] [ class>> ] tri
'[ [ _ _ ] dip ?set-at ] change-word-prop ; $[ [ _ _ ] dip ?set-at ] change-word-prop ;
: consult-methods ( consultation -- ) : consult-methods ( consultation -- )
[ define-consult-method ] each-generic ; [ define-consult-method ] each-generic ;

View File

@ -48,7 +48,7 @@ IN: effects.tests
[ error>> invalid-row-variable? ] must-fail-with [ error>> invalid-row-variable? ] must-fail-with
[ "( ..a: integer b c -- d )" eval( -- effect ) ] [ "( ..a: integer b c -- d )" eval( -- effect ) ]
[ error>> row-variable-can't-have-type? ] must-fail-with [ error>> row-variable-cant-have-type? ] must-fail-with
! test curry-effect ! test curry-effect
{ ( -- x ) } [ ( c -- d ) curry-effect ] unit-test { ( -- x ) } [ ( c -- d ) curry-effect ] unit-test

View File

@ -9,7 +9,7 @@ DEFER: parse-effect
ERROR: bad-effect ; ERROR: bad-effect ;
ERROR: invalid-row-variable ; ERROR: invalid-row-variable ;
ERROR: row-variable-can't-have-type ; ERROR: row-variable-cant-have-type ;
ERROR: stack-effect-omits-dashes ; ERROR: stack-effect-omits-dashes ;
SYMBOL: effect-var SYMBOL: effect-var
@ -23,7 +23,7 @@ PRIVATE<
: parse-effect-var ( first? var name -- var ) : parse-effect-var ( first? var name -- var )
nip nip
[ ":" ?tail [ row-variable-can't-have-type ] when ] curry [ ":" ?tail [ row-variable-cant-have-type ] when ] curry
[ invalid-row-variable ] if ; [ invalid-row-variable ] if ;
: parse-effect-value ( token -- value ) : parse-effect-value ( token -- value )
@ -67,11 +67,11 @@ PRIVATE>
CONSTANT: in-definition HS{ } ; CONSTANT: in-definition HS{ } ;
ERROR: can't-nest-definitions word ; ERROR: cant-nest-definitions word ;
: set-in-definition ( -- ) : set-in-definition ( -- )
manifest get current-vocab>> t or in-definition ?adjoin manifest get current-vocab>> t or in-definition ?adjoin
[ last-word can't-nest-definitions ] unless ; [ last-word cant-nest-definitions ] unless ;
: unset-in-definition ( -- ) : unset-in-definition ( -- )
manifest get current-vocab>> t or in-definition delete ; manifest get current-vocab>> t or in-definition delete ;

View File

@ -12,67 +12,67 @@ HELP: @
HELP: fry HELP: fry
{ $values { "quot" quotation } { "quot'" quotation } } { $values { "quot" quotation } { "quot'" quotation } }
{ $description "Outputs a quotation that when called, fries " { $snippet "quot" } " by taking values from the stack and substituting them in." } { $description "Outputs a quotation that when called, fries " { $snippet "quot" } " by taking values from the stack and substituting them in." }
{ $notes "This word is used to implement " { $link postpone\ '[ } "; the following two lines are equivalent:" { $notes "This word is used to implement " { $link postpone\ $[ } "; the following two lines are equivalent:"
{ $code "[ X ] fry call" "'[ X ]" } { $code "[ X ] fry call" "$[ X ]" }
} }
{ $examples "See " { $link "fry.examples" } "." } ; { $examples "See " { $link "fry.examples" } "." } ;
HELP: \ '[ HELP: \ $[
{ $syntax "'[ code... ]" } { $syntax "$[ code... ]" }
{ $description "Literal fried quotation. Expands into code which takes values from the stack and substitutes them in place of the fry specifiers " { $link _ } " and " { $link @ } "." } { $description "Literal fried quotation. Expands into code which takes values from the stack and substitutes them in place of the fry specifiers " { $link _ } " and " { $link @ } "." }
{ $examples "See " { $link "fry.examples" } "." } ; { $examples "See " { $link "fry.examples" } "." } ;
HELP: >r/r>-in-fry-error HELP: >r/r>-in-fry-error
{ $error-description "Thrown by " { $link postpone\ '[ } " if the fried quotation contains calls to retain stack manipulation primitives." } ; { $error-description "Thrown by " { $link postpone\ $[ } " if the fried quotation contains calls to retain stack manipulation primitives." } ;
ARTICLE: "fry.examples" "Examples of fried quotations" ARTICLE: "fry.examples" "Examples of fried quotations"
"The easiest way to understand fried quotations is to look at some examples." "The easiest way to understand fried quotations is to look at some examples."
$nl $nl
"If a quotation does not contain any fry specifiers, then " { $link postpone\ '[ } " behaves just like " { $link postpone\ [ } ":" "If a quotation does not contain any fry specifiers, then " { $link postpone\ $[ } " behaves just like " { $link postpone\ [ } ":"
{ $code "{ 10 20 30 } '[ . ] each" } { $code "{ 10 20 30 } $[ . ] each" }
"Occurrences of " { $link _ } " on the left map directly to " { $link curry } ". That is, the following three lines are equivalent:" "Occurrences of " { $link _ } " on the left map directly to " { $link curry } ". That is, the following three lines are equivalent:"
{ $code { $code
"{ 10 20 30 } 5 '[ _ + ] map" "{ 10 20 30 } 5 $[ _ + ] map"
"{ 10 20 30 } 5 [ + ] curry map" "{ 10 20 30 } 5 [ + ] curry map"
"{ 10 20 30 } [ 5 + ] map" "{ 10 20 30 } [ 5 + ] map"
} }
"Occurrences of " { $link _ } " in the middle of a quotation map to more complex quotation composition patterns. The following three lines are equivalent:" "Occurrences of " { $link _ } " in the middle of a quotation map to more complex quotation composition patterns. The following three lines are equivalent:"
{ $code { $code
"{ 10 20 30 } 5 '[ 3 _ / ] map" "{ 10 20 30 } 5 $[ 3 _ / ] map"
"{ 10 20 30 } 5 [ 3 ] swap [ / ] curry compose map" "{ 10 20 30 } 5 [ 3 ] swap [ / ] curry compose map"
"{ 10 20 30 } [ 3 5 / ] map" "{ 10 20 30 } [ 3 5 / ] map"
} }
"Occurrences of " { $link @ } " are simply syntax sugar for " { $snippet "_ call" } ". The following four lines are equivalent:" "Occurrences of " { $link @ } " are simply syntax sugar for " { $snippet "_ call" } ". The following four lines are equivalent:"
{ $code { $code
"{ 10 20 30 } [ sq ] '[ @ . ] each" "{ 10 20 30 } [ sq ] $[ @ . ] each"
"{ 10 20 30 } [ sq ] [ call . ] curry each" "{ 10 20 30 } [ sq ] [ call . ] curry each"
"{ 10 20 30 } [ sq ] [ . ] compose each" "{ 10 20 30 } [ sq ] [ . ] compose each"
"{ 10 20 30 } [ sq . ] each" "{ 10 20 30 } [ sq . ] each"
} }
"The " { $link _ } " and " { $link @ } " specifiers may be freely mixed, and the result is considerably more concise and readable than the version using " { $link curry } " and " { $link compose } " directly:" "The " { $link _ } " and " { $link @ } " specifiers may be freely mixed, and the result is considerably more concise and readable than the version using " { $link curry } " and " { $link compose } " directly:"
{ $code { $code
"{ 8 13 14 27 } [ even? ] 5 '[ @ dup _ ? ] map" "{ 8 13 14 27 } [ even? ] 5 $[ @ dup _ ? ] map"
"{ 8 13 14 27 } [ even? ] 5 [ dup ] swap [ ? ] curry compose compose map" "{ 8 13 14 27 } [ even? ] 5 [ dup ] swap [ ? ] curry compose compose map"
"{ 8 13 14 27 } [ even? dup 5 ? ] map" "{ 8 13 14 27 } [ even? dup 5 ? ] map"
} }
"The following is a no-op:" "The following is a no-op:"
{ $code "'[ @ ]" } { $code "$[ @ ]" }
"Here are some built-in combinators rewritten in terms of fried quotations:" "Here are some built-in combinators rewritten in terms of fried quotations:"
{ $table { $table
{ { $link literalize } { $snippet ": literalize '[ _ ] ;" } } { { $link literalize } { $snippet ": literalize $[ _ ] ;" } }
{ { $link curry } { $snippet ": curry '[ _ @ ] ;" } } { { $link curry } { $snippet ": curry $[ _ @ ] ;" } }
{ { $link compose } { $snippet ": compose '[ @ @ ] ;" } } { { $link compose } { $snippet ": compose $[ @ @ ] ;" } }
} ; } ;
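A quick, hedged check of the first equivalence listed above, runnable in a listener once the renamed syntax is in place:

USING: fry kernel math prettyprint sequences ;

{ 10 20 30 } 5 $[ _ + ] map .       ! { 15 25 35 }
{ 10 20 30 } 5 [ + ] curry map .    ! { 15 25 35 }
{ 10 20 30 } [ 5 + ] map .          ! { 15 25 35 }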
ARTICLE: "fry.philosophy" "Fried quotation philosophy" ARTICLE: "fry.philosophy" "Fried quotation philosophy"
"Fried quotations generalize quotation-building words such as " { $link curry } " and " { $link compose } ". They can clean up code with lots of currying and composition, particularly when quotations are nested:" "Fried quotations generalize quotation-building words such as " { $link curry } " and " { $link compose } ". They can clean up code with lots of currying and composition, particularly when quotations are nested:"
{ $code { $code
"'[ [ _ key? ] all? ] filter" "$[ [ _ key? ] all? ] filter"
"[ [ key? ] curry all? ] curry filter" "[ [ key? ] curry all? ] curry filter"
} }
"There is a mapping from fried quotations to lexical closures as defined in the " { $vocab-link "locals" } " vocabulary. Namely, a fried quotation is equivalent to a " { $snippet "|[ | ]" } " form where each local binding is only used once, and bindings are used in the same order in which they are defined. The following two lines are equivalent:" "There is a mapping from fried quotations to lexical closures as defined in the " { $vocab-link "locals" } " vocabulary. Namely, a fried quotation is equivalent to a " { $snippet "|[ | ]" } " form where each local binding is only used once, and bindings are used in the same order in which they are defined. The following two lines are equivalent:"
{ $code { $code
"'[ 3 _ + 4 _ / ]" "$[ 3 _ + 4 _ / ]"
"|[ a b | 3 a + 4 b / ]" "|[ a b | 3 a + 4 b / ]"
} ; } ;
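The fry-to-locals mapping described above can be checked directly; a small sketch, assuming the locals vocabulary behaves as in mainline Factor:

USING: fry kernel locals math prettyprint ;

2 10 $[ _ sq _ + ] call .          ! 2 sq 10 + -> 14
2 10 |[ a b | a sq b + ] call .    ! same: each hole becomes one binding, in order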
@ -80,7 +80,7 @@ ARTICLE: "fry" "Fried quotations"
"The " { $vocab-link "fry" } " vocabulary implements " { $emphasis "fried quotation" } ". Conceptually, fried quotations are quotations with “holes” (more formally, " { $emphasis "fry specifiers" } "), and the holes are filled in when the fried quotation is pushed on the stack." "The " { $vocab-link "fry" } " vocabulary implements " { $emphasis "fried quotation" } ". Conceptually, fried quotations are quotations with “holes” (more formally, " { $emphasis "fry specifiers" } "), and the holes are filled in when the fried quotation is pushed on the stack."
$nl $nl
"Fried quotations are started by a special parsing word:" "Fried quotations are started by a special parsing word:"
{ $subsections postpone\ '[ } { $subsections postpone\ $[ }
"There are two types of fry specifiers; the first can hold a value, and the second “splices” a quotation, as if it were inserted without surrounding brackets:" "There are two types of fry specifiers; the first can hold a value, and the second “splices” a quotation, as if it were inserted without surrounding brackets:"
{ $subsections { $subsections
_ _

View File

@ -6,85 +6,85 @@ IN: fry.tests
SYMBOLS: a b c d e f g h ; SYMBOLS: a b c d e f g h ;
{ [ ] } [ '[ ] ] unit-test { [ ] } [ $[ ] ] unit-test
{ [ + ] } [ '[ + ] ] unit-test { [ + ] } [ $[ + ] ] unit-test
{ [ 1 ] } [ 1 '[ _ ] ] unit-test { [ 1 ] } [ 1 $[ _ ] ] unit-test
{ [ 1 ] } [ [ 1 ] '[ @ ] ] unit-test { [ 1 ] } [ [ 1 ] $[ @ ] ] unit-test
{ [ 1 2 ] } [ [ 1 ] [ 2 ] '[ @ @ ] ] unit-test { [ 1 2 ] } [ [ 1 ] [ 2 ] $[ @ @ ] ] unit-test
{ [ 1 2 a ] } [ 1 2 '[ _ _ a ] ] unit-test { [ 1 2 a ] } [ 1 2 $[ _ _ a ] ] unit-test
{ [ 1 2 ] } [ 1 2 '[ _ _ ] ] unit-test { [ 1 2 ] } [ 1 2 $[ _ _ ] ] unit-test
{ [ a 1 2 ] } [ 1 2 '[ a _ _ ] ] unit-test { [ a 1 2 ] } [ 1 2 $[ a _ _ ] ] unit-test
{ [ 1 2 a ] } [ [ 1 ] [ 2 ] '[ @ @ a ] ] unit-test { [ 1 2 a ] } [ [ 1 ] [ 2 ] $[ @ @ a ] ] unit-test
{ [ 1 a 2 b ] } [ 1 2 '[ _ a _ b ] ] unit-test { [ 1 a 2 b ] } [ 1 2 $[ _ a _ b ] ] unit-test
{ [ 1 a 2 b ] } [ 1 [ 2 ] '[ _ a @ b ] ] unit-test { [ 1 a 2 b ] } [ 1 [ 2 ] $[ _ a @ b ] ] unit-test
{ [ a 1 b ] } [ 1 '[ a _ b ] ] unit-test { [ a 1 b ] } [ 1 $[ a _ b ] ] unit-test
{ [ a 1 b ] } [ [ 1 ] '[ a @ b ] ] unit-test { [ a 1 b ] } [ [ 1 ] $[ a @ b ] ] unit-test
{ [ a 1 2 ] } [ [ 1 ] [ 2 ] '[ a @ @ ] ] unit-test { [ a 1 2 ] } [ [ 1 ] [ 2 ] $[ a @ @ ] ] unit-test
{ [ a [ 1 ] b ] } [ 1 '[ a [ _ ] b ] ] unit-test { [ a [ 1 ] b ] } [ 1 $[ a [ _ ] b ] ] unit-test
{ [ a 1 b [ c 2 d ] e 3 f ] } [ 1 2 3 '[ a _ b [ c _ d ] e _ f ] ] unit-test { [ a 1 b [ c 2 d ] e 3 f ] } [ 1 2 3 $[ a _ b [ c _ d ] e _ f ] ] unit-test
{ [ a 1 b [ c 2 d [ e 3 f ] ] g 4 h ] } [ 1 2 3 4 '[ a _ b [ c _ d [ e _ f ] ] g _ h ] ] unit-test { [ a 1 b [ c 2 d [ e 3 f ] ] g 4 h ] } [ 1 2 3 4 $[ a _ b [ c _ d [ e _ f ] ] g _ h ] ] unit-test
{ [ a 1 b [ [ c 2 d ] e 3 f ] g 4 h ] } [ 1 2 3 4 '[ a _ b [ [ c _ d ] e _ f ] g _ h ] ] unit-test { [ a 1 b [ [ c 2 d ] e 3 f ] g 4 h ] } [ 1 2 3 4 $[ a _ b [ [ c _ d ] e _ f ] g _ h ] ] unit-test
{ [ 3 + ] } [ 3 '[ _ + ] ] unit-test { [ 3 + ] } [ 3 $[ _ + ] ] unit-test
{ [ 1 3 + ] } [ 1 3 '[ _ _ + ] ] unit-test { [ 1 3 + ] } [ 1 3 $[ _ _ + ] ] unit-test
{ [ 1 + ] } [ 1 [ + ] '[ _ @ ] ] unit-test { [ 1 + ] } [ 1 [ + ] $[ _ @ ] ] unit-test
{ [ 1 + . ] } [ 1 [ + ] '[ _ @ . ] ] unit-test { [ 1 + . ] } [ 1 [ + ] $[ _ @ . ] ] unit-test
{ [ + - ] } [ [ + ] [ - ] '[ @ @ ] ] unit-test { [ + - ] } [ [ + ] [ - ] $[ @ @ ] ] unit-test
{ [ "a" write "b" print ] } { [ "a" write "b" print ] }
[ "a" "b" '[ _ write _ print ] ] unit-test [ "a" "b" $[ _ write _ print ] ] unit-test
{ 1/2 } [ { 1/2 } [
1 '[ [ _ ] dip / ] 2 swap call 1 $[ [ _ ] dip / ] 2 swap call
] unit-test ] unit-test
{ { { 1 "a" "A" } { 1 "b" "B" } { 1 "c" "C" } } } [ { { { 1 "a" "A" } { 1 "b" "B" } { 1 "c" "C" } } } [
1 '[ [ _ ] 2dip 3array ] 1 $[ [ _ ] 2dip 3array ]
{ "a" "b" "c" } { "A" "B" "C" } rot 2map { "a" "b" "c" } { "A" "B" "C" } rot 2map
] unit-test ] unit-test
{ { { 1 "a" } { 1 "b" } { 1 "c" } } } [ { { { 1 "a" } { 1 "b" } { 1 "c" } } } [
'[ [ 1 ] dip 2array ] $[ [ 1 ] dip 2array ]
{ "a" "b" "c" } swap map { "a" "b" "c" } swap map
] unit-test ] unit-test
{ { { 1 "a" 2 } { 1 "b" 2 } { 1 "c" 2 } } } [ { { { 1 "a" 2 } { 1 "b" 2 } { 1 "c" 2 } } } [
1 2 '[ [ _ ] dip _ 3array ] 1 2 $[ [ _ ] dip _ 3array ]
{ "a" "b" "c" } swap map { "a" "b" "c" } swap map
] unit-test ] unit-test
: funny-dip ( obj quot -- ) '[ [ @ ] dip ] call ; inline : funny-dip ( obj quot -- ) $[ [ @ ] dip ] call ; inline
{ "hi" 3 } [ "h" "i" 3 [ append ] funny-dip ] unit-test { "hi" 3 } [ "h" "i" 3 [ append ] funny-dip ] unit-test
{ { 1 2 3 } } [ { { 1 2 3 } } [
3 1 '[ _ iota [ _ + ] map ] call 3 1 $[ _ iota [ _ + ] map ] call
] unit-test ] unit-test
{ { 1 { 2 { 3 } } } } [ { { 1 { 2 { 3 } } } } [
1 2 3 '[ _ [ _ [ _ 1array ] call 2array ] call 2array ] call 1 2 3 $[ _ [ _ [ _ 1array ] call 2array ] call 2array ] call
] unit-test ] unit-test
{ 1 1 } [ '[ [ [ _ ] ] ] ] must-infer-as { 1 1 } [ $[ [ [ _ ] ] ] ] must-infer-as
{ { { { 3 } } } } [ { { { { 3 } } } } [
3 '[ [ [ _ 1array ] call 1array ] call 1array ] call 3 $[ [ [ _ 1array ] call 1array ] call 1array ] call
] unit-test ] unit-test
{ { { { 3 } } } } [ { { { { 3 } } } } [
3 '[ [ [ _ 1array ] call 1array ] call 1array ] call 3 $[ [ [ _ 1array ] call 1array ] call 1array ] call
] unit-test ] unit-test
[ "USING: fry locals.backend ; f '[ load-local _ ]" eval( -- quot ) ] [ "USING: fry locals.backend ; f $[ load-local _ ]" eval( -- quot ) ]
[ error>> >r/r>-in-fry-error? ] must-fail-with [ error>> >r/r>-in-fry-error? ] must-fail-with
{ { { "a" 1 } { "b" 2 } { "c" 3 } { "d" 4 } } } [ { { { "a" 1 } { "b" 2 } { "c" 3 } { "d" 4 } } } [
1 2 3 4 '[ "a" _ 2array "b" _ 2array "c" _ 2array "d" _ 2array 4array ] call 1 2 3 4 $[ "a" _ 2array "b" _ 2array "c" _ 2array "d" _ 2array 4array ] call
] unit-test ] unit-test

View File

@ -30,9 +30,9 @@ IN: generalizations.tests
{ 1 2 3 4 } [ 2 3 4 [ 1 ] 3 ndip ] unit-test { 1 2 3 4 } [ 2 3 4 [ 1 ] 3 ndip ] unit-test
[ 1 2 3 4 5 [ drop drop drop drop drop 2 ] 5 nkeep ] must-infer [ 1 2 3 4 5 [ drop drop drop drop drop 2 ] 5 nkeep ] must-infer
[ 1 2 3 4 5 2 '[ drop drop drop drop drop _ ] 5 nkeep ] must-infer [ 1 2 3 4 5 2 $[ drop drop drop drop drop _ ] 5 nkeep ] must-infer
{ 2 1 2 3 4 5 } [ 1 2 3 4 5 [ drop drop drop drop drop 2 ] 5 nkeep ] unit-test { 2 1 2 3 4 5 } [ 1 2 3 4 5 [ drop drop drop drop drop 2 ] 5 nkeep ] unit-test
{ 2 1 2 3 4 5 } [ 1 2 3 4 5 2 '[ drop drop drop drop drop _ ] 5 nkeep ] unit-test { 2 1 2 3 4 5 } [ 1 2 3 4 5 2 $[ drop drop drop drop drop _ ] 5 nkeep ] unit-test
{ [ 1 2 3 + ] } [ 1 2 3 [ + ] 3 ncurry ] unit-test { [ 1 2 3 + ] } [ 1 2 3 [ + ] 3 ncurry ] unit-test
{ "HELLO" } [ "hello" [ >upper ] 1 napply ] unit-test { "HELLO" } [ "hello" [ >upper ] 1 napply ] unit-test
@ -66,7 +66,7 @@ IN: generalizations.tests
{ 1 2 3 4 1 2 3 } [ nover-test ] unit-test { 1 2 3 4 1 2 3 } [ nover-test ] unit-test
[ '[ number>string _ append ] 4 napply ] must-infer [ $[ number>string _ append ] 4 napply ] must-infer
{ 6 8 10 12 } [ { 6 8 10 12 } [
1 2 3 4 1 2 3 4

View File

@ -15,7 +15,7 @@ COMPILE<
ALIAS: n*quot (n*quot) ; ALIAS: n*quot (n*quot) ;
MACRO: call-n ( n -- quot ) MACRO: call-n ( n -- quot )
[ call ] <repetition> '[ _ cleave ] ; [ call ] <repetition> $[ _ cleave ] ;
: repeat ( n obj quot -- ) swapd times ; inline : repeat ( n obj quot -- ) swapd times ; inline
@ -30,36 +30,36 @@ MACRO: npick ( n -- quot )
{ {
{ [ dup 0 <= ] [ nonpositive-npick ] } { [ dup 0 <= ] [ nonpositive-npick ] }
{ [ dup 1 = ] [ drop [ dup ] ] } { [ dup 1 = ] [ drop [ dup ] ] }
[ 1 - [ dup ] [ '[ _ dip swap ] ] repeat ] [ 1 - [ dup ] [ $[ _ dip swap ] ] repeat ]
} cond ; } cond ;
MACRO: nover ( n -- quot ) MACRO: nover ( n -- quot )
dup 1 + '[ _ npick ] n*quot ; dup 1 + $[ _ npick ] n*quot ;
: ndup ( n -- ) : ndup ( n -- )
[ '[ _ npick ] ] keep call-n ; inline [ $[ _ npick ] ] keep call-n ; inline
MACRO: dupn ( n -- quot ) MACRO: dupn ( n -- quot )
[ [ drop ] ] [ [ drop ] ]
[ 1 - [ dup ] n*quot ] if-zero ; [ 1 - [ dup ] n*quot ] if-zero ;
MACRO: nrot ( n -- quot ) MACRO: nrot ( n -- quot )
1 - [ ] [ '[ _ dip swap ] ] repeat ; 1 - [ ] [ $[ _ dip swap ] ] repeat ;
MACRO: -nrot ( n -- quot ) MACRO: -nrot ( n -- quot )
1 - [ ] [ '[ swap _ dip ] ] repeat ; 1 - [ ] [ $[ swap _ dip ] ] repeat ;
: ndrop ( n -- ) : ndrop ( n -- )
[ drop ] swap call-n ; inline [ drop ] swap call-n ; inline
: nnip ( n -- ) : nnip ( n -- )
'[ _ ndrop ] dip ; inline $[ _ ndrop ] dip ; inline
: ndip ( n -- ) : ndip ( n -- )
[ [ dip ] curry ] swap call-n call ; inline [ [ dip ] curry ] swap call-n call ; inline
: nkeep ( n -- ) : nkeep ( n -- )
dup '[ [ _ ndup ] dip _ ndip ] call ; inline dup $[ [ _ ndup ] dip _ ndip ] call ; inline
: ncurry ( n -- ) : ncurry ( n -- )
[ curry ] swap call-n ; inline [ curry ] swap call-n ; inline
@ -71,7 +71,7 @@ MACRO: -nrot ( n -- quot )
[ nip nkeep ] [ drop nip call ] 3bi ; inline [ nip nkeep ] [ drop nip call ] 3bi ; inline
MACRO: ncleave ( quots n -- quot ) MACRO: ncleave ( quots n -- quot )
[ '[ _ '[ _ _ nkeep ] ] map [ ] join ] [ '[ _ ndrop ] ] bi [ $[ _ $[ _ _ nkeep ] ] map [ ] join ] [ $[ _ ndrop ] ] bi
compose ; compose ;
MACRO: nspread ( quots n -- quot ) MACRO: nspread ( quots n -- quot )
@ -79,12 +79,12 @@ MACRO: nspread ( quots n -- quot )
[ [ but-last ] dip ] [ [ but-last ] dip ]
[ [ last ] dip ] 2bi [ [ last ] dip ] 2bi
swap swap
'[ [ _ _ nspread ] _ ndip @ ] $[ [ _ _ nspread ] _ ndip @ ]
] if ; ] if ;
MACRO: spread* ( n -- quot ) MACRO: spread* ( n -- quot )
[ [ ] ] [ [ [ ] ] [
[1,b) [ '[ [ [ _ ndip ] curry ] dip compose ] ] map [ ] concat-as [1,b) [ $[ [ [ _ ndip ] curry ] dip compose ] ] map [ ] concat-as
[ call ] compose [ call ] compose
] if-zero ; ] if-zero ;
@ -93,7 +93,7 @@ MACRO: nspread* ( m n -- quot )
[ * 0 ] [ drop neg ] 2bi [ * 0 ] [ drop neg ] 2bi
<range> rest >array dup length iota <reversed> <range> rest >array dup length iota <reversed>
[ [
'[ [ [ _ ndip ] curry ] _ ndip ] $[ [ [ _ ndip ] curry ] _ ndip ]
] 2map dup rest-slice [ [ compose ] compose ] map! drop ] 2map dup rest-slice [ [ compose ] compose ] map! drop
[ ] concat-as [ call ] compose [ ] concat-as [ call ] compose
] if-zero ; ] if-zero ;
@ -119,14 +119,14 @@ MACRO: cleave* ( n -- quot )
[ [curry] ] swap [ napply ] [ spread* ] bi ; inline [ [curry] ] swap [ napply ] [ spread* ] bi ; inline
MACRO: mnswap ( m n -- quot ) MACRO: mnswap ( m n -- quot )
1 + '[ _ -nrot ] swap '[ _ _ napply ] ; 1 + $[ _ -nrot ] swap $[ _ _ napply ] ;
MACRO: nweave ( n -- quot ) MACRO: nweave ( n -- quot )
[ dup iota <reversed> [ '[ _ _ mnswap ] ] with map ] keep [ dup iota <reversed> [ $[ _ _ mnswap ] ] with map ] keep
'[ _ _ ncleave ] ; $[ _ _ ncleave ] ;
: nbi-curry ( n -- ) : nbi-curry ( n -- )
[ bi-curry ] swap call-n ; inline [ bi-curry ] swap call-n ; inline
MACRO: map-compose ( quots quot -- quot' ) MACRO: map-compose ( quots quot -- quot' )
'[ _ compose ] map '[ _ ] ; $[ _ compose ] map $[ _ ] ;
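The stack-shuffling macros in this file build their expansions out of fried quotations; their user-facing behavior is unchanged by the rename. A small usage sketch, assuming the stock arrays and generalizations vocabularies:

USING: arrays generalizations prettyprint ;

1 2 3 4 4 nrot 4array .      ! { 2 3 4 1 } -- rotate the top four items left
1 2 3 4 4 npick 5 narray .   ! { 1 2 3 4 1 } -- copy the fourth item down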

View File

@ -63,7 +63,7 @@ M: clumps group@
PRIVATE< PRIVATE<
: map-like ( seq n quot -- seq ) : map-like ( seq n quot -- seq )
2keep drop '[ _ like ] map ; inline 2keep drop $[ _ like ] map ; inline
PRIVATE> PRIVATE>
@ -77,7 +77,7 @@ PRIVATE>
[ first2-unsafe ] dip call [ first2-unsafe ] dip call
] [ ] [
[ [ first-unsafe 1 ] [ (setup-each) ] bi ] dip [ [ first-unsafe 1 ] [ (setup-each) ] bi ] dip
'[ @ _ keep swap ] (all-integers?) nip $[ @ _ keep swap ] (all-integers?) nip
] if ] if
] if ; inline ] if ; inline

View File

@ -49,7 +49,7 @@ sets sorting tools.test ;
{ t } [ { t } [
100 iota 100 iota
[ [ <hash-set> ] map ] [ [ <hash-set> ] map ]
[ [ HS{ } clone [ '[ _ adjoin ] each-integer ] keep ] map ] bi [ [ HS{ } clone [ $[ _ adjoin ] each-integer ] keep ] map ] bi
[ [ array>> length ] bi@ = ] 2all? [ [ array>> length ] bi@ = ] 2all?
] unit-test ] unit-test

View File

@ -146,7 +146,7 @@ H{ } "x" set
{ t } [ { t } [
100 iota 100 iota
[ [ <hashtable> ] map ] [ [ <hashtable> ] map ]
[ [ H{ } clone [ '[ dup _ set-at ] each-integer ] keep ] map ] bi [ [ H{ } clone [ $[ dup _ set-at ] each-integer ] keep ] map ] bi
[ [ array>> length ] bi@ = ] 2all? [ [ array>> length ] bi@ = ] 2all?
] unit-test ] unit-test

View File

@ -12,7 +12,7 @@ GENERIC: specializer-predicate ( spec -- quot ) ;
M: class specializer-predicate predicate-def ; M: class specializer-predicate predicate-def ;
M: object specializer-predicate '[ _ eq? ] ; M: object specializer-predicate $[ _ eq? ] ;
GENERIC: specializer-declaration ( spec -- class ) ; GENERIC: specializer-declaration ( spec -- class ) ;
@ -35,7 +35,7 @@ M: object specializer-declaration class-of ;
: specializer-cases ( quot specializer -- alist ) : specializer-cases ( quot specializer -- alist )
dup [ array? ] all? [ 1array ] unless [ dup [ array? ] all? [ 1array ] unless [
[ nip make-specializer ] [ nip make-specializer ]
[ [ specializer-declaration ] map swap '[ _ declare @ ] ] 2bi [ [ specializer-declaration ] map swap $[ _ declare @ ] ] 2bi
] with { } map>assoc ; ] with { } map>assoc ;
: specialize-quot ( quot specializer -- quot' ) : specialize-quot ( quot specializer -- quot' )

View File

@ -274,7 +274,7 @@ CONSTANT: pt-array-1
[ [
"resource:core" normalize-path "resource:core" normalize-path
[ cwd = ] [ cd ] [ cwd = ] tri [ cwd = ] [ cd ] [ cwd = ] tri
] cwd '[ _ dup cd cwd = ] [ ] cleanup ] cwd $[ _ dup cd cwd = ] [ ] cleanup
] unit-test ] unit-test
{ t } [ { t } [

View File

@ -11,7 +11,7 @@ M: lambda count-inputs body>> count-inputs ;
M: lambda fry M: lambda fry
clone [ [ count-inputs ] [ fry ] bi ] change-body clone [ [ count-inputs ] [ fry ] bi ] change-body
[ [ vars>> length ] keep '[ _ _ mnswap _ call ] ] [ [ vars>> length ] keep $[ _ _ mnswap _ call ] ]
[ drop [ncurry] curry [ call ] compose ] 2bi ; [ drop [ncurry] curry [ call ] compose ] 2bi ;
M: let fry M: let fry

View File

@ -217,7 +217,7 @@ ARTICLE: "locals-fry" "Lexical variables and fry"
"Lexical variables integrate with " { $link "fry" } " so that mixing variables with fried quotations gives intuitive results." "Lexical variables integrate with " { $link "fry" } " so that mixing variables with fried quotations gives intuitive results."
$nl $nl
"The following two code snippets are equivalent:" "The following two code snippets are equivalent:"
{ $code "'[ sq _ + ]" } { $code "$[ sq _ + ]" }
{ $code "[ [ sq ] dip + ] curry" } { $code "[ [ sq ] dip + ] curry" }
"The semantics of " { $link dip } " and " { $link curry } " are such that the first example behaves as if the top of the stack as “inserted” in the “hole” in the quotation's second element." "The semantics of " { $link dip } " and " { $link curry } " are such that the first example behaves as if the top of the stack as “inserted” in the “hole” in the quotation's second element."
$nl $nl
@ -228,8 +228,8 @@ $nl
{ $code "3 |[ a b | a b - ] curry" } { $code "3 |[ a b | a b - ] curry" }
{ $code "|[ a | a 3 - ]" } { $code "|[ a | a 3 - ]" }
"Because of this, the behavior of " { $snippet "fry" } " changes when applied to such a quotation to ensure that fry conceptually behaves the same as with normal quotations, placing the fried values “underneath” the variable bindings. Thus, the following snippets are no longer equivalent:" "Because of this, the behavior of " { $snippet "fry" } " changes when applied to such a quotation to ensure that fry conceptually behaves the same as with normal quotations, placing the fried values “underneath” the variable bindings. Thus, the following snippets are no longer equivalent:"
{ $code "'[ |[ a | _ a - ] ]" } { $code "$[ |[ a | _ a - ] ]" }
{ $code "'[ |[ a | a - ] curry ] call" } { $code "$[ |[ a | a - ] curry ] call" }
"Instead, the first line above expands into something like the following:" "Instead, the first line above expands into something like the following:"
{ $code "[ [ swap |[ a | a - ] ] curry call ]" } { $code "[ [ swap |[ a | a - ] ] curry call ]" }
$nl $nl
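A hedged check of the first pair of snippets above, in a listener:

USING: fry kernel math prettyprint ;

7 3 $[ sq _ + ] call .             ! 7 sq 3 + -> 52
7 3 [ [ sq ] dip + ] curry call .  ! 52 as well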
@ -241,7 +241,7 @@ ARTICLE: "locals-limitations" "Limitations of lexical variables"
"The expansion of a macro cannot reference lexical variables bound in the outer scope. For example, the following macro is invalid:" "The expansion of a macro cannot reference lexical variables bound in the outer scope. For example, the following macro is invalid:"
{ $code "MACRO:: twice ( quot -- ) [ quot call quot call ] ;" } { $code "MACRO:: twice ( quot -- ) [ quot call quot call ] ;" }
"The following is fine, though:" "The following is fine, though:"
{ $code "MACRO:: twice ( quot -- ) quot quot '[ @ @ ] ;" } { $code "MACRO:: twice ( quot -- ) quot quot $[ @ @ ] ;" }
{ $heading "Static stack effect inference and macros" } { $heading "Static stack effect inference and macros" }
"A macro will only expand at compile-time if all of its inputs are literal. Likewise, the word containing the macro will only have a static stack effect and compile successfully if the macro's inputs are literal. When lexical variables are used in a macro's literal arguments, there is an additional restriction: The literals must immediately precede the macro call lexically." "A macro will only expand at compile-time if all of its inputs are literal. Likewise, the word containing the macro will only have a static stack effect and compile successfully if the macro's inputs are literal. When lexical variables are used in a macro's literal arguments, there is an additional restriction: The literals must immediately precede the macro call lexically."
$nl $nl

View File

@ -388,7 +388,7 @@ M:: integer lambda-method-forget-test ( a -- b ) a ;
{ 10 } [ 10 |[ A | { [ A ] } ] call first call ] unit-test { 10 } [ 10 |[ A | { [ A ] } ] call first call ] unit-test
[ [
"USING: locals fry math ; 1 '[ let[ 10 :> A A _ + ] ]" "USING: locals fry math ; 1 $[ let[ 10 :> A A _ + ] ]"
eval( -- ) call eval( -- ) call
] [ error>> >r/r>-in-fry-error? ] must-fail-with ] [ error>> >r/r>-in-fry-error? ] must-fail-with
@ -431,31 +431,31 @@ M:: integer lambda-method-forget-test ( a -- b ) a ;
{ 3 } [ 2 |[ | :> a! a 1 + a! a ] call ] unit-test { 3 } [ 2 |[ | :> a! a 1 + a! a ] call ] unit-test
: fry-locals-test-1 ( -- n ) : fry-locals-test-1 ( -- n )
let[ 6 '[ let[ 4 :> A A _ + ] ] call ] ; let[ 6 $[ let[ 4 :> A A _ + ] ] call ] ;
\ fry-locals-test-1 def>> must-infer \ fry-locals-test-1 def>> must-infer
{ 10 } [ fry-locals-test-1 ] unit-test { 10 } [ fry-locals-test-1 ] unit-test
:: fry-locals-test-2 ( -- n ) :: fry-locals-test-2 ( -- n )
let[ 6 '[ let[ 4 :> A A _ + ] ] call ] ; let[ 6 $[ let[ 4 :> A A _ + ] ] call ] ;
\ fry-locals-test-2 def>> must-infer \ fry-locals-test-2 def>> must-infer
{ 10 } [ fry-locals-test-2 ] unit-test { 10 } [ fry-locals-test-2 ] unit-test
{ 1 } [ 3 4 |[ | '[ [ _ swap - ] call ] call ] call ] unit-test { 1 } [ 3 4 |[ | $[ [ _ swap - ] call ] call ] call ] unit-test
{ -1 } [ 3 4 |[ | |[ a | a - ] call ] call ] unit-test { -1 } [ 3 4 |[ | |[ a | a - ] call ] call ] unit-test
{ -1 } [ 3 4 |[ | |[ a | a - ] curry call ] call ] unit-test { -1 } [ 3 4 |[ | |[ a | a - ] curry call ] call ] unit-test
{ -1 } [ 3 4 |[ a | a - ] curry call ] unit-test { -1 } [ 3 4 |[ a | a - ] curry call ] unit-test
{ 1 } [ 3 4 |[ | '[ |[ a | _ a - ] call ] call ] call ] unit-test { 1 } [ 3 4 |[ | $[ |[ a | _ a - ] call ] call ] call ] unit-test
{ -1 } [ 3 4 |[ | '[ |[ a | a _ - ] call ] call ] call ] unit-test { -1 } [ 3 4 |[ | $[ |[ a | a _ - ] call ] call ] call ] unit-test
{ { 1 2 3 4 } } [ { { 1 2 3 4 } } [
1 3 2 4 1 3 2 4
|[ | '[ |[ a b | a _ b _ 4array ] call ] call ] call |[ | $[ |[ a b | a _ b _ 4array ] call ] call ] call
] unit-test ] unit-test
{ 10 } [ { 10 } [
|[ | 0 '[ let[ 10 :> A A _ + ] ] call ] call |[ | 0 $[ let[ 10 :> A A _ + ] ] call ] call
] unit-test ] unit-test
! littledan found this problem ! littledan found this problem
@ -475,7 +475,7 @@ M:: integer lambda-method-forget-test ( a -- b ) a ;
{ 3 } [ 3 t erg's-:>-bug ] unit-test { 3 } [ 3 t erg's-:>-bug ] unit-test
:: erg's-:>-bug-2 ( n ? -- n ) ? n '[ _ :> n n ] [ n :> b b ] if ; :: erg's-:>-bug-2 ( n ? -- n ) ? n $[ _ :> n n ] [ n :> b b ] if ;
{ 3 } [ 3 f erg's-:>-bug-2 ] unit-test { 3 } [ 3 f erg's-:>-bug-2 ] unit-test

View File

@ -32,7 +32,7 @@ ERROR: invalid-local-name name ;
SINGLETON: lambda-parser SINGLETON: lambda-parser
: with-lambda-scope ( assoc reader-quot: ( -- quot ) -- quot ) : with-lambda-scope ( assoc reader-quot: ( -- quot ) -- quot )
'[ $[
in-lambda? on in-lambda? on
lambda-parser quotation-parser set lambda-parser quotation-parser set
use-words @ use-words @

View File

@ -9,7 +9,7 @@ IN: locals.rewrite.point-free
! retain stack manipulation ! retain stack manipulation
: local-index ( args obj -- n ) : local-index ( args obj -- n )
2dup '[ unquote _ eq? ] find drop 2dup $[ unquote _ eq? ] find drop
[ 2nip ] [ bad-local ] if* ; [ 2nip ] [ bad-local ] if* ;
: read-local-quot ( args obj -- quot ) : read-local-quot ( args obj -- quot )

View File

@ -53,7 +53,7 @@ GENERIC: rewrite-element ( obj -- ) ;
[ rewrite-element ] each ; [ rewrite-element ] each ;
: rewrite-sequence ( seq -- ) : rewrite-sequence ( seq -- )
[ rewrite-elements ] [ length ] [ 0 head ] tri '[ _ _ nsequence ] % ; [ rewrite-elements ] [ length ] [ 0 head ] tri $[ _ _ nsequence ] % ;
M: array rewrite-element M: array rewrite-element
dup rewrite-literal? [ rewrite-sequence ] [ , ] if ; dup rewrite-literal? [ rewrite-sequence ] [ , ] if ;
@ -66,7 +66,7 @@ M: hashtable rewrite-element
M: tuple rewrite-element M: tuple rewrite-element
dup rewrite-literal? [ dup rewrite-literal? [
[ tuple-slots rewrite-elements ] [ class-of ] bi '[ _ boa ] % [ tuple-slots rewrite-elements ] [ class-of ] bi $[ _ boa ] %
] [ , ] if ; ] [ , ] if ;
M: quotation rewrite-element rewrite-sugar* ; M: quotation rewrite-element rewrite-sugar* ;

View File

@ -40,16 +40,16 @@ M: wrapper expand-macros* wrapped>> literal ;
: expand-dispatch ( -- ) : expand-dispatch ( -- )
stack get pop end stack get pop end
[ [ expand-macros ] [ ] map-as '[ _ dip ] % ] [ [ expand-macros ] [ ] map-as $[ _ dip ] % ]
[ [
length iota [ <reversed> ] keep length iota [ <reversed> ] keep
[ '[ _ ndrop _ nnip call ] [ ] like ] 2map , \ dispatch , [ $[ _ ndrop _ nnip call ] [ ] like ] 2map , \ dispatch ,
] bi ; ] bi ;
: word, ( word -- ) end , ; : word, ( word -- ) end , ;
: expand-macro ( word quot -- ) : expand-macro ( word quot -- )
'[ $[
drop drop
stack [ _ with-datastack >vector ] change stack [ _ with-datastack >vector ] change
stack get pop >quotation end stack get pop >quotation end

View File

@ -18,7 +18,7 @@ HELP: \ MACRO:
{ $code { $code
"USING: fry generalizations kernel macros stack-checker ;" "USING: fry generalizations kernel macros stack-checker ;"
"MACRO: preserving ( quot -- quot' )" "MACRO: preserving ( quot -- quot' )"
" [ inputs ] keep '[ _ ndup @ ] ;" " [ inputs ] keep $[ _ ndup @ ] ;"
} }
"Using this macro, we can define a variant of " { $link if } " which takes a predicate quotation instead of a boolean; any values consumed by the predicate quotation are restored immediately after:" "Using this macro, we can define a variant of " { $link if } " which takes a predicate quotation instead of a boolean; any values consumed by the predicate quotation are restored immediately after:"
{ $code { $code

View File

@ -19,8 +19,8 @@ PRIVATE>
{ {
[ nip check-macro-effect ] [ nip check-macro-effect ]
[ [
[ '[ _ _ call-effect ] ] keep [ $[ _ _ call-effect ] ] keep
[ memoize-quot '[ @ call ] ] keep [ memoize-quot $[ @ call ] ] keep
define-declared define-declared
] ]
[ drop "macro" set-word-prop ] [ drop "macro" set-word-prop ]

View File

@ -34,7 +34,7 @@ $nl
"For example," "For example,"
{ $code "[ 2 , , \\ + , ] [ ] make" } { $code "[ 2 , , \\ + , ] [ ] make" }
"is better expressed as" "is better expressed as"
{ $code "'[ 2 _ + ]" } ; { $code "$[ 2 _ + ]" } ;
ARTICLE: "namespaces-make" "Making sequences with variables" ARTICLE: "namespaces-make" "Making sequences with variables"
"The " { $vocab-link "make" } " vocabulary implements a facility for constructing " { $link sequence } "s and " { $link assoc } "s by holding a collector object in a variable. Storing the collector object in a variable rather than the stack may allow code to be written with less stack manipulation." "The " { $vocab-link "make" } " vocabulary implements a facility for constructing " { $link sequence } "s and " { $link assoc } "s by holding a collector object in a variable. Storing the collector object in a variable rather than the stack may allow code to be written with less stack manipulation."

View File

@ -13,7 +13,7 @@ PRIVATE<
<repetition> [ ] concat-as ; <repetition> [ ] concat-as ;
: [nsequence] ( length exemplar -- quot ) : [nsequence] ( length exemplar -- quot )
[ [ [ 1 - ] keep ] dip '[ _ _ _ new-sequence ] ] [ [ [ 1 - ] keep ] dip $[ _ _ _ new-sequence ] ]
[ drop [ [ set-nth-unsafe ] 2keep [ 1 - ] dip ] (n*quot) ] 2bi [ drop [ [ set-nth-unsafe ] 2keep [ 1 - ] dip ] (n*quot) ] 2bi
[ nip ] 3append ; [ nip ] 3append ;
@ -40,11 +40,11 @@ PRIVATE<
[ in>> unpacker ] [ out>> packer ] bi surround ; [ in>> unpacker ] [ out>> packer ] bi surround ;
: make/n ( table quot effect -- quot ) : make/n ( table quot effect -- quot )
[ unpack/pack '[ _ _ cache ] ] keep pack/unpack ; [ unpack/pack $[ _ _ cache ] ] keep pack/unpack ;
: make/0 ( table quot effect -- quot ) : make/0 ( table quot effect -- quot )
out>> [ out>> [
packer '[ packer $[
_ dup first-unsafe _ dup first-unsafe
[ nip ] [ @ @ [ 0 rot set-nth-unsafe ] keep ] if* [ nip ] [ @ @ [ 0 rot set-nth-unsafe ] keep ] if*
] ]
@ -57,7 +57,7 @@ PRIVATE>
: (define-memoized) ( word quot effect hashtable -- ) : (define-memoized) ( word quot effect hashtable -- )
[ [ drop "memo-quot" set-word-prop ] ] dip [ [ drop "memo-quot" set-word-prop ] ] dip
'[ 2drop _ "memoize" set-word-prop ] $[ 2drop _ "memoize" set-word-prop ]
[ [ [ dup "memoize" word-prop ] 2dip make-memoizer ] keep define-declared ] [ [ [ dup "memoize" word-prop ] 2dip make-memoizer ] keep define-declared ]
3tri ; 3tri ;
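For context: these internals back the user-facing MEMO: definer, whose usage is unchanged by the rename. A minimal sketch, assuming stock kernel, math, and memoize:

USING: kernel math memoize prettyprint ;

MEMO: fib ( n -- m )
    dup 2 < [ drop 1 ] [ [ 1 - fib ] [ 2 - fib ] bi + ] if ;

10 fib .    ! 89, each fib n computed only once thanks to the memo table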

View File

@ -41,7 +41,7 @@ ERROR: unexpected-end n string ;
:: slice-til-either ( n string tokens -- n'/f string slice/f ch/f ) :: slice-til-either ( n string tokens -- n'/f string slice/f ch/f )
n [ n [
n string '[ tokens member? ] find-from n string $[ tokens member? ] find-from
dup "\s\r\n" member? [ dup "\s\r\n" member? [
:> ( n' ch ) :> ( n' ch )
n' string n' string
@ -74,7 +74,7 @@ ERROR: unexpected-end n string ;
:: slice-til-separator-inclusive ( n string tokens -- n' string slice/f ch/f ) :: slice-til-separator-inclusive ( n string tokens -- n' string slice/f ch/f )
n string '[ tokens member? ] find-from [ dup [ 1 + ] when ] dip :> ( n' ch ) n string $[ tokens member? ] find-from [ dup [ 1 + ] when ] dip :> ( n' ch )
n' string n' string
n n' string ?<slice> n n' string ?<slice>
ch ; inline ch ; inline
@ -126,11 +126,11 @@ ERROR: unexpected-end n string ;
[ lex-til-whitespace drop 2nip ] dip merge-slices ; [ lex-til-whitespace drop 2nip ] dip merge-slices ;
: peek-merge-til-whitespace ( lexer slice -- slice' ) : peek-merge-til-whitespace ( lexer slice -- slice' )
'[ _ merge-lex-til-whitespace ] with-lexer-rollback ; $[ _ merge-lex-til-whitespace ] with-lexer-rollback ;
:: slice-til-eol ( n string -- n'/f string slice/f ch/f ) :: slice-til-eol ( n string -- n'/f string slice/f ch/f )
n [ n [
n string '[ "\r\n" member? ] find-from :> ( n' ch ) n string $[ "\r\n" member? ] find-from :> ( n' ch )
n' string n' string
n n' string ?<slice> n n' string ?<slice>
ch ch

Some files were not shown because too many files have changed in this diff.