#![doc = include_str!("../README.md")]
#[doc(inline)]
pub use nutrition_information::NutritionInformation;
#[doc(inline)]
pub use recipe_information_provider::RecipeInformationProvider;
#[doc(inline)]
pub use restricted_diet::RestrictedDiet;
use schema_scraper::SchemaScraper;
use std::error::Error;

pub mod nutrition_information;
pub mod recipe_information_provider;
pub mod restricted_diet;
mod schema_scraper;

/// A scraper that can extract recipe information from HTML.
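///
/// # Examples
///
/// A sketch of a custom scraper; `MyRecipe` is a hypothetical type assumed to
/// implement [`RecipeInformationProvider`] and `serde::Deserialize`.
///
/// ```ignore
/// struct MyScraper;
///
/// impl RecipeScraper for MyScraper {
///     fn scrape_recipe(
///         self,
///         html: &str,
///     ) -> Result<Box<dyn RecipeInformationProvider>, serde_json::Error> {
///         // Assumes the page body is JSON that deserializes into `MyRecipe`.
///         let recipe: MyRecipe = serde_json::from_str(html)?;
///         Ok(Box::new(recipe))
///     }
/// }
/// ```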
pub trait RecipeScraper {
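    /// Attempts to parse recipe information from the given HTML.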
    fn scrape_recipe(
        self,
        html: &str,
    ) -> Result<Box<dyn RecipeInformationProvider>, serde_json::Error>;
}

/// Takes some HTML and attempts to parse a recipe into a [`RecipeInformationProvider`].
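///
/// # Examples
///
/// A minimal sketch; `recipe.html` stands in for a saved copy of a recipe page.
///
/// ```ignore
/// let html = std::fs::read_to_string("recipe.html")?;
/// let recipe = scrape_recipe(&html)?;
/// ```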
pub fn scrape_recipe(html: &str) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    let scraper = SchemaScraper {};
    custom_scrape_recipe(html, scraper)
}

/// Scrapes a recipe using a custom [`RecipeScraper`] implementation.
///
/// # Arguments
/// - `html` - The HTML of the recipe page.
/// - `scraper` - The custom scraper to use.
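///
/// # Examples
///
/// A sketch using a hypothetical `MyScraper` that implements [`RecipeScraper`]
/// (see the trait documentation above).
///
/// ```ignore
/// let recipe = custom_scrape_recipe(html, MyScraper)?;
/// ```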
pub fn custom_scrape_recipe(
    html: &str,
    scraper: impl RecipeScraper,
) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    Ok(scraper.scrape_recipe(html)?)
}

/// Fetches the page at `url` with `ureq` and scrapes a recipe from it (blocking).
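///
/// # Examples
///
/// A sketch; the URL is a placeholder.
///
/// ```ignore
/// let recipe = scrape_recipe_from_url_blocking("https://example.com/some-recipe")?;
/// ```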
#[cfg(feature = "blocking")]
pub fn scrape_recipe_from_url_blocking(
    url: &str,
) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    let res = ureq::get(url).call()?.into_string()?;
    scrape_recipe(&res)
}

/// Fetches the page at `url` with `reqwest` and scrapes a recipe from it (async).
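///
/// # Examples
///
/// A sketch; the URL is a placeholder, and the call must run inside an async
/// runtime such as tokio.
///
/// ```ignore
/// let recipe = scrape_recipe_from_url("https://example.com/some-recipe").await?;
/// ```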
#[cfg(feature = "async")]
pub async fn scrape_recipe_from_url(
    url: &str,
) -> Result<Box<dyn RecipeInformationProvider>, Box<dyn Error>> {
    let res = reqwest::get(url).await?.text().await?;
    scrape_recipe(&res)
}