pub struct Crawler<S: Spider, C> { /* private fields */ }

Implementations§
Source§impl<S, C> Crawler<S, C>
impl<S, C> Crawler<S, C>
Sourcepub async fn start_crawl(self) -> Result<(), SpiderError>
pub async fn start_crawl(self) -> Result<(), SpiderError>
Starts the crawl.
Examples found in repository:
examples/quotes_scraper.rs (line 115)
83async fn main() -> Result<(), SpiderError> {
84 tracing_subscriber::fmt()
85 .with_env_filter("info,spider_lib=debug")
86 .without_time()
87 .init();
88
89 let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(QuotesSpider)
90 .add_pipeline(DeduplicationPipeline::new(&["text"]))
91 .add_pipeline(CsvExporterPipeline::<QuoteItem>::new("output/quotes.csv")?)
92 .add_middleware(HttpCacheMiddleware::builder().build()?)
93 .add_middleware(
94 RateLimitMiddleware::builder()
95 .use_token_bucket_limiter(5)
96 .build(),
97 )
98 .add_middleware(RetryMiddleware::default().max_retries(2))
99 .add_middleware(UserAgentMiddleware::builder().build()?)
100 .add_middleware(RobotsTxtMiddleware::new())
101 .add_middleware(
102 RefererMiddleware::new()
103 .same_origin_only(true)
104 .max_chain_length(100)
105 .include_fragment(false),
106 )
107 .with_checkpoint_path("output/quotes.bin")
108 .with_checkpoint_interval(Duration::from_secs(15))
109 .max_concurrent_downloads(5)
110 .max_parser_workers(2)
111 .max_concurrent_pipelines(2)
112 .build()
113 .await?;
114
115 crawler.start_crawl().await?;
116
117 Ok(())
118}

Auto Trait Implementations§
impl<S, C> Freeze for Crawler<S, C>
impl<S, C> !RefUnwindSafe for Crawler<S, C>
impl<S, C> Send for Crawler<S, C>
impl<S, C> Sync for Crawler<S, C>
impl<S, C> Unpin for Crawler<S, C>
impl<S, C> !UnwindSafe for Crawler<S, C>
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more