Expand description
§scrapfly-sdk
Async Rust client for the Scrapfly API. See the crate-level
Client and the examples/ directory for usage.
use scrapfly_sdk::{Client, ScrapeConfig};
let client = Client::builder().api_key("scp-...").build()?;
let result = client
.scrape(&ScrapeConfig::builder("https://httpbin.dev/html").build()?)
.await?;
println!("{}", result.result.status_code);

Re-exports§
pub use client::Client;
pub use client::ClientBuilder;
pub use client::OnRequest;
pub use cloud_browser::BrowserConfig;
pub use cloud_browser::UnblockConfig;
pub use cloud_browser::UnblockResult;
pub use config::crawler::CrawlerConfig;
pub use config::extraction::ExtractionConfig;
pub use config::scrape::ScrapeConfig;
pub use config::screenshot::ScreenshotConfig;
pub use crawler::Crawl;
pub use crawler::WaitOptions;
pub use error::ApiError;
pub use error::ScrapflyError;
pub use monitoring::CloudBrowserMonitoringOptions;
pub use monitoring::MonitoringAggregation;
pub use monitoring::MonitoringDataFormat;
pub use monitoring::MonitoringMetricsOptions;
pub use monitoring::MonitoringPeriod;
pub use monitoring::MonitoringTargetMetricsOptions;
pub use result::account::AccountData;
pub use result::account::VerifyApiKeyResult;
pub use result::crawler::CrawlContent;
pub use result::crawler::CrawlerArtifact;
pub use result::crawler::CrawlerArtifactType;
pub use result::crawler::CrawlerContents;
pub use result::crawler::CrawlerStartResponse;
pub use result::crawler::CrawlerStatus;
pub use result::crawler::CrawlerUrlEntry;
pub use result::crawler::CrawlerUrls;
pub use result::extraction::ExtractionResult;
pub use result::scrape::ScrapeResult;
pub use result::screenshot::ScreenshotMetadata;
pub use result::screenshot::ScreenshotResult;
pub use schedule::CreateScheduleRequest;
pub use schedule::ListSchedulesOptions;
pub use schedule::Schedule;
pub use schedule::ScheduleEnd;
pub use schedule::ScheduleRecurrence;
pub use schedule::UpdateScheduleRequest;
pub use enums::*;
Modules§
- batch
- Streaming multipart/mixed parser for POST /scrape/batch.
- client
- HTTP client for the Scrapfly API.
- cloud_browser
- Cloud Browser API — port of sdk/go/cloud_browser.go.
- config
- Typed config builders for every Scrapfly endpoint.
- crawler
- High-level Crawl wrapper — port of sdk/go/crawl.go.
- enums
- Strongly-typed enums mirroring sdk/go/enums.go.
- error
- Error types — 1:1 port of sdk/go/errors.go + crawler.go::parseAPIError.
- monitoring
- Monitoring API — aggregated + per-target metrics.
- result
- Strongly-typed result objects for every Scrapfly endpoint.
- schedule
- Public schedule client — wraps /scrape/schedules, /screenshot/schedules, /crawl/schedules and the cross-kind /schedules endpoints.