#![doc = include_str!("../README.md")]
#[doc(inline)]
pub use nutrition_information::NutritionInformation;
#[doc(inline)]
pub use recipe_information_provider::RecipeInformationProvider;
#[doc(inline)]
pub use restricted_diet::RestrictedDiet;
use schema_scraper::SchemaScraper;
use std::error::Error;

pub mod nutrition_information;
pub mod recipe_information_provider;
pub mod restricted_diet;
mod schema_scraper;

/// Extracts recipe information from an HTML document.
///
/// Implement this trait to plug a custom extraction strategy into
/// [`custom_scrape_recipe`] and the `custom_scrape_recipe_from_url*` helpers.
pub trait RecipeScraper {
    /// Parses `html` and returns the recipe information it contains.
    fn scrape_recipe(
        &self,
        html: &str,
    ) -> Result<Box<dyn RecipeInformationProvider>, serde_json::Error>;
}

/// Scrapes a recipe from `html` using the crate's built-in schema scraper.
pub fn scrape_recipe(html: &str) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    let scraper = SchemaScraper {};
    custom_scrape_recipe(html, &scraper)
}

/// Scrapes a recipe from `html` using the supplied [`RecipeScraper`] implementation.
pub fn custom_scrape_recipe(
    html: &str,
    scraper: &impl RecipeScraper,
) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    Ok(scraper.scrape_recipe(html)?)
}

41#[cfg(feature = "blocking")]
43pub fn scrape_recipe_from_url_blocking(
44 url: &str,
45) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
46 let res = ureq::get(url).call()?.into_string()?;
47 scrape_recipe(&res)
48}
49
50#[cfg(feature = "blocking")]
52pub fn custom_scrape_recipe_from_url_blocking(
53 url: &str,
54 scraper: &impl RecipeScraper,
55) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
56 let res = ureq::get(url).call()?.into_string()?;
57 custom_scrape_recipe(&res, scraper)
58}
59
60#[cfg(feature = "async")]
62pub async fn scrape_recipe_from_url(
63 url: &str,
64) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
65 let res = reqwest::get(url).await?.text().await?;
66 scrape_recipe(&res)
67}
68
69#[cfg(feature = "async")]
71pub async fn custom_scrape_recipe_from_url(
72 url: &str,
73 scraper: &impl RecipeScraper,
74) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
75 let res = reqwest::get(url).await?.text().await?;
76 custom_scrape_recipe(&res, scraper)
77}
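
#[cfg(test)]
mod tests {
    // Minimal sketch of how a custom `RecipeScraper` plugs into
    // `custom_scrape_recipe`. `FailingScraper` is a hypothetical example
    // type, not part of the crate's API; a real implementation would parse
    // the HTML instead of failing unconditionally.
    use super::*;

    struct FailingScraper;

    impl RecipeScraper for FailingScraper {
        fn scrape_recipe(
            &self,
            _html: &str,
        ) -> Result<Box<dyn RecipeInformationProvider>, serde_json::Error> {
            // Build a `serde_json::Error` from a deliberately invalid parse.
            Err(serde_json::from_str::<serde_json::Value>("not json").unwrap_err())
        }
    }

    #[test]
    fn custom_scraper_errors_are_propagated() {
        let scraper = FailingScraper;
        assert!(custom_scrape_recipe("<html></html>", &scraper).is_err());
    }
}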