use knee_scraper::{recursive_scrape, fetch_robots_txt, check_open_directories, fetch_with_cookies};
use reqwest::Client;
use std::collections::HashSet;
use tokio::time::sleep;
use std::time::Duration;
/// Entry point: runs a small sequential scraping pipeline against one target.
///
/// Pipeline order: robots.txt fetch → open-directory probe → cookie-aware
/// page fetch → recursive scrape, with progress printed before each stage.
///
/// The target URL is taken from the first command-line argument when one is
/// supplied, falling back to `https://example.com` — this generalizes the
/// previously hard-coded target while keeping zero-argument behavior
/// identical.
#[tokio::main]
async fn main() {
    // First CLI arg overrides the default target; no arg → original default.
    let url = std::env::args()
        .nth(1)
        .unwrap_or_else(|| "https://example.com".to_string());
    let client = Client::new();
    // Set of already-visited URLs shared with the recursive scraper so it
    // does not revisit pages (and cannot loop on cyclic links).
    let mut visited = HashSet::new();

    println!("Fetching robots.txt...");
    fetch_robots_txt(&url, &client).await;

    println!("Checking open directories...");
    check_open_directories(&url, &client).await;

    println!("Fetching page with cookies...");
    fetch_with_cookies(&url, &client).await;

    println!("Starting recursive scrape...");
    recursive_scrape(&url, &client, &mut visited).await;

    println!("Delaying to mimic human behavior...");
    // NOTE(review): this delay runs AFTER all scraping has finished, so it
    // cannot pace requests; if the intent is rate-limiting, it likely
    // belongs between the individual fetches — confirm with the author.
    sleep(Duration::from_secs(3)).await;

    println!("Scraping complete.");
}