pub struct CrawlerBuilder<S: Spider, D = ReqwestClientDownloader>
where
    D: Downloader,
{ /* private fields */ }

Implementations

impl<S: Spider, D: Downloader> CrawlerBuilder<S, D>
pub fn new(spider: S) -> Self
where
    D: Default,
Creates a new CrawlerBuilder for a given spider.
Example (from examples/quotes_scraper.rs):

async fn main() -> Result<(), SpiderError> {
    tracing_subscriber::fmt()
        .with_env_filter("info,spider_lib=debug")
        .without_time()
        .init();

    let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(QuotesSpider)
        .add_pipeline(DeduplicationPipeline::new(&["text"]))
        .add_pipeline(CsvExporterPipeline::<QuoteItem>::new("output/quotes.csv")?)
        .add_middleware(HttpCacheMiddleware::builder().build()?)
        .add_middleware(UserAgentMiddleware::builder().build()?)
        .add_middleware(RobotsTxtMiddleware::new())
        .add_middleware(
            RefererMiddleware::new()
                .same_origin_only(true)
                .max_chain_length(100)
                .include_fragment(false),
        )
        .with_checkpoint_path("output/quotes.bin")
        .with_checkpoint_interval(Duration::from_secs(15))
        .max_concurrent_downloads(5)
        .max_parser_workers(2)
        .max_concurrent_pipelines(2)
        .build()
        .await?;

    crawler.start_crawl().await?;

    Ok(())
}

pub fn max_concurrent_downloads(self, limit: usize) -> Self
Sets the maximum number of concurrent downloads.
pub fn max_parser_workers(self, limit: usize) -> Self
Sets the maximum number of concurrent parser workers.
pub fn max_concurrent_pipelines(self, limit: usize) -> Self
Sets the maximum number of concurrent pipelines.
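Taken together, the three limits cap the download, parse, and item-pipeline stages separately. A minimal sketch of tuning them in one chain; MySpider is a hypothetical Spider implementation, the values are illustrative, and an async context returning Result<(), SpiderError> is assumed:

let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(MySpider)
    .max_concurrent_downloads(16) // cap simultaneous fetches
    .max_parser_workers(4)        // cap concurrent parse tasks
    .max_concurrent_pipelines(2)  // cap concurrent pipeline runs
    .build()
    .await?;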
pub fn downloader(self, downloader: D) -> Self
Sets a custom downloader for the crawler.
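A minimal sketch of supplying your own downloader instance instead of relying on the D: Default path taken by new. MySpider is a hypothetical spider, and constructing ReqwestClientDownloader via Default is an assumption (implied by the D: Default bound that the repository example exercises):

let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(MySpider)
    .downloader(ReqwestClientDownloader::default()) // replace the builder's downloader (Default impl assumed)
    .build()
    .await?;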
pub fn add_middleware<M>(self, middleware: M) -> Self
Adds a middleware to the crawler.
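Calls chain, one add_middleware per middleware. A focused sketch reusing two middleware types from the repository example above; MySpider is a hypothetical spider:

let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(MySpider)
    .add_middleware(UserAgentMiddleware::builder().build()?) // middleware types taken
    .add_middleware(RobotsTxtMiddleware::new())              // from the example above
    .build()
    .await?;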
pub fn add_pipeline<P>(self, pipeline: P) -> Self
Adds an item pipeline to the crawler.
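Pipelines chain the same way and run in the order they are likely added. A focused sketch reusing the pipeline types from the repository example; MySpider and MyItem are hypothetical, and the CSV path is illustrative:

let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(MySpider)
    .add_pipeline(DeduplicationPipeline::new(&["text"]))                   // dedupe on the "text" field
    .add_pipeline(CsvExporterPipeline::<MyItem>::new("output/items.csv")?) // export items to CSV
    .build()
    .await?;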
pub fn with_checkpoint_path<P: AsRef<Path>>(self, path: P) -> Self
Enables checkpointing and sets the path for the checkpoint file.
pub fn with_checkpoint_interval(self, interval: Duration) -> Self
Sets the interval for periodic checkpointing.
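The two checkpoint settings are normally paired: the path enables checkpointing, and the interval controls how often state is written. A sketch, with MySpider, the path, and the interval all illustrative:

let crawler = CrawlerBuilder::<_, ReqwestClientDownloader>::new(MySpider)
    .with_checkpoint_path("output/state.bin")          // enable checkpointing to this file
    .with_checkpoint_interval(Duration::from_secs(60)) // write state roughly once a minute
    .build()
    .await?;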
pub async fn build(self) -> Result<Crawler<S, D::Client>, SpiderError>
Builds the Crawler instance.
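build is async and fallible, so it is typically awaited with ? inside a function returning Result<_, SpiderError>, and the crawl is then started on the returned Crawler, as in the repository example:

let crawler = builder.build().await?; // builder: a fully configured CrawlerBuilder
crawler.start_crawl().await?;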
Trait Implementations
Auto Trait Implementations
impl<S, D> Freeze for CrawlerBuilder<S, D>
impl<S, D> !RefUnwindSafe for CrawlerBuilder<S, D>
impl<S, D> Send for CrawlerBuilder<S, D>
impl<S, D> Sync for CrawlerBuilder<S, D>
impl<S, D> Unpin for CrawlerBuilder<S, D>
impl<S, D> !UnwindSafe for CrawlerBuilder<S, D>
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.