! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.
USING: accessors fry html.parser html.parser.analyzer
http.client kernel tools.time sets assocs sequences
concurrency.combinators io threads namespaces math multiline
math.parser inspector urls logging combinators.short-circuit
continuations calendar prettyprint dlists deques locals
spider.unique-deque combinators concurrency.semaphores ;
IN: spider
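! A spider crawls outward from a base URL. It keeps a queue of
! same-host URLs still to visit (todo), a queue of off-site
! links it found but will not fetch (nonmatching), and a table
! of pages already fetched, keyed by URL (spidered).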
TUPLE: spider base count max-count sleep max-depth initial-links
filters spidered todo nonmatching quiet currently-spidering
#threads semaphore follow-robots? robots ;
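! One spider-result is filled in per fetched page: response
! headers, parsed HTML, outgoing links, and timings for the
! fetch and parse steps.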
TUPLE: spider-result url depth headers
fetched-in parsed-html links processed-in fetched-at ;
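! Make a spider rooted at the given base URL. Defaults: depth
! limit 0, no page limit, one fetch thread.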
: <spider> ( base -- spider )
>url
spider new
over >>base
over >>currently-spidering
swap 0 <unique-deque> [ push-url ] keep >>todo
<unique-deque> >>nonmatching
0 >>max-depth
0 >>count
1/0. >>max-count
H{ } clone >>spidered
1 [ >>#threads ] [ <semaphore> >>semaphore ] bi ;
: <spider-result> ( url depth -- spider-result )
spider-result new
swap >>depth
swap >>url ;
<PRIVATE
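! Keep only links passing every user-supplied filter quotation;
! with no filters set, links pass through unchanged.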
: apply-filters ( links spider -- links' )
filters>> [ '[ [ _ 1&& ] filter ] call( seq -- seq' ) ] when* ;
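! Queue each link at the given depth; the unique-deque drops
! URLs it has seen before.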
: push-links ( links level unique-deque -- )
'[ _ _ push-url ] each ;
: add-todo ( links level spider -- )
todo>> push-links ;
: add-nonmatching ( links level spider -- )
nonmatching>> push-links ;
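! Split a page's links (duplicates pruned) into those on the
! spider's base host and everything else.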
: filter-base-links ( spider spider-result -- base-links nonmatching-links )
[ base>> host>> ] [ links>> prune ] bi*
[ host>> = ] with partition ;
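! Record a finished page: bump the count, store the result by
! URL, then queue same-host links (after filtering) on todo and
! off-site links on nonmatching, one level deeper.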
: add-spidered ( spider spider-result -- )
[ [ 1 + ] change-count ] dip
2dup [ spidered>> ] [ dup url>> ] bi* rot set-at
[ filter-base-links ] 2keep
depth>> 1 + swap
[ add-nonmatching ]
[ dup '[ _ apply-filters ] 2dip add-todo ] 2bi ;
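! Resolve each possibly-relative link against the given base URL.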
: normalize-hrefs ( base links -- links' )
[ derive-url ] with map ;
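! Progress output: depth and URL of the page about to be fetched.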
: print-spidering ( spider-result -- )
[ url>> ] [ depth>> ] bi
"depth: " write number>string write
", spidering: " write . yield ;
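! Fetch and parse one page, timing both steps, and fill in the
! result's headers, parsed HTML and normalized links. The URL is
! first marked in the spidered table with a placeholder.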
:: fill-spidered-result ( spider spider-result -- )
f spider-result url>> spider spidered>> set-at
[ spider-result url>> http-get ] benchmark :> ( headers html fetched-in )
[
html parse-html
spider currently-spidering>>
over find-all-links normalize-hrefs
] benchmark :> ( parsed-html links processed-in )
spider-result
headers >>headers
fetched-in >>fetched-in
parsed-html >>parsed-html
links >>links
processed-in >>processed-in
now >>fetched-at drop ;
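! Spider a single page and fold its links back into the queues,
! printing progress and the result unless quiet.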
:: spider-page ( spider spider-result -- )
spider quiet>> [ spider-result print-spidering ] unless
spider spider-result fill-spidered-result
spider quiet>> [ spider-result describe ] unless
spider spider-result add-spidered ;
\ spider-page ERROR add-error-logging
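! Optional politeness delay between fetches.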
: spider-sleep ( spider -- ) sleep>> [ sleep ] when* ;
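! Seed the todo queue with the user-supplied initial links at
! depth 0, resolved against the current base.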
: queue-initial-links ( spider -- )
[
[ currently-spidering>> ] [ initial-links>> ] bi normalize-hrefs 0
] keep add-todo ;
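! Keep crawling while work is queued, the next URL is within
! max-depth, and fewer than max-count pages have been fetched.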
: spider-page? ( spider -- ? )
{
[ todo>> deque>> deque-empty? not ]
[ [ todo>> peek-url depth>> ] [ max-depth>> ] bi < ]
[ [ count>> ] [ max-count>> ] bi < ]
} 1&& ;
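! Pop the next URL, note it as the one currently being
! spidered, and wrap it in a fresh spider-result.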
: setup-next-url ( spider -- spider spider-result )
dup todo>> peek-url url>> >>currently-spidering
dup todo>> pop-url [ url>> ] [ depth>> ] bi <spider-result> ;
: spider-next-page ( spider -- )
setup-next-url spider-page ;
PRIVATE>
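! Fetch pages one at a time, sleeping between fetches, until
! spider-page? says to stop.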
: run-spider-loop ( spider -- )
dup spider-page? [
[ spider-next-page ] [ spider-sleep ] [ run-spider-loop ] tri
] [
drop
] if ;
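! Main entry point: queue the initial links and crawl, logging
! under the "spider" log service.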
: run-spider ( spider -- spider )
"spider" [
dup queue-initial-links [ run-spider-loop ] keep
] with-logging ;
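
! Example usage (a sketch; the URL and limits are placeholders):
!
!     USING: spider ;
!     "http://example.com/" <spider>
!         2 >>max-depth
!         100 >>max-count
!     run-spider
!     spidered>> keys .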