use crate::site::SiteConfig;
use std::fs::File;
use std::io::Write;
use std::path::Path;
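
/// Writes a `robots.txt` file into `output_dir` based on the site configuration.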
pub fn generate_robots(
    site_config: &SiteConfig,
    output_dir: &Path,
) -> Result<(), Box<dyn std::error::Error>> {
    let robots_content = generate_robots_content(site_config);
    let robots_path = output_dir.join("robots.txt");
    let mut file = File::create(&robots_path)?;
    file.write_all(robots_content.as_bytes())?;
    Ok(())
}
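
/// Builds the `robots.txt` contents: a permissive default policy, disallow
/// rules for common non-content paths, and a sitemap reference.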
pub fn generate_robots_content(site_config: &SiteConfig) -> String {
    let mut robots = String::new();
    robots.push_str("User-agent: *\n");
    robots.push_str("Allow: /\n");
    robots.push_str("\n# Disallow common non-content directories\n");
    robots.push_str("Disallow: /.*\n");
    robots.push_str("Disallow: /_*\n");
robots.push_str("\n# Disallow temporary and system files\n");
robots.push_str("Disallow: /*.tmp$\n");
robots.push_str("Disallow: /*.bak$\n");
robots.push_str("Disallow: /*.log$\n");
robots.push_str("\n# Sitemap location\n");
if let Some(ref base_url) = site_config.base_url {
robots.push_str(&format!(
"Sitemap: {}/sitemap.xml\n",
base_url.trim_end_matches('/')
));
} else {
robots.push_str("Sitemap: /sitemap.xml\n");
}
robots.push_str("\n# Crawl delay (optional - be nice to servers)\n");
robots.push_str("Crawl-delay: 1\n");
robots.push_str("\n# Generated by Krik Static Site Generator\n");
robots.push_str("# https://github.com/mcaserta/krik\n");
robots
}
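
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of how the generated content can be checked. It assumes
    // `SiteConfig` exposes a public `base_url: Option<String>` field and
    // implements `Default`; adjust the setup if the real type differs.
    #[test]
    fn sitemap_uses_trimmed_base_url() {
        let config = SiteConfig {
            base_url: Some("https://example.com/".to_string()),
            ..Default::default()
        };
        let robots = generate_robots_content(&config);
        // The trailing slash on base_url is trimmed before joining.
        assert!(robots.contains("Sitemap: https://example.com/sitemap.xml"));
        assert!(robots.starts_with("User-agent: *\n"));
    }
}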