typstify_generator/robots.rs

//! Robots.txt generation.
//!
//! Generates the robots.txt file for search engine crawlers.
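//!
//! # Examples
//!
//! A minimal usage sketch (illustrative: it assumes this module is exposed as
//! `typstify_generator::robots` and that `Config` implements `Default`;
//! construct the config however your site actually loads it):
//!
//! ```ignore
//! use typstify_core::Config;
//! use typstify_generator::robots::RobotsGenerator;
//!
//! let generator = RobotsGenerator::new(Config::default());
//! generator
//!     .generate(std::path::Path::new("public"))
//!     .expect("write robots.txt");
//! ```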

use std::{fs::File, io::Write, path::Path};

use thiserror::Error;
use tracing::info;
use typstify_core::Config;

/// Robots generation errors.
#[derive(Debug, Error)]
pub enum RobotsError {
    /// IO error.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}

/// Result type for robots generation.
pub type Result<T> = std::result::Result<T, RobotsError>;

/// Robots.txt generator.
#[derive(Debug)]
pub struct RobotsGenerator {
    config: Config,
}

impl RobotsGenerator {
    /// Create a new robots generator.
    #[must_use]
    pub fn new(config: Config) -> Self {
        Self { config }
    }

    /// Generate robots.txt.
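    ///
    /// Writes a single `robots.txt` into `output_dir`. The output has the
    /// form below (rule values are illustrative):
    ///
    /// ```text
    /// User-agent: *
    /// Disallow: /admin/
    /// Allow: /public/
    /// Sitemap: https://example.com/sitemap.xml
    /// ```
    ///
    /// Returns early without writing anything when robots generation is
    /// disabled in the config.
    ///
    /// # Errors
    ///
    /// Returns [`RobotsError::Io`] if the file cannot be created or written.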
    pub fn generate(&self, output_dir: &Path) -> Result<()> {
        if !self.config.robots.enabled {
            return Ok(());
        }

        info!("generating robots.txt");

        let path = output_dir.join("robots.txt");
        let mut file = File::create(path)?;

        writeln!(file, "User-agent: *")?;

        for rule in &self.config.robots.disallow {
            writeln!(file, "Disallow: {rule}")?;
        }

        for rule in &self.config.robots.allow {
            writeln!(file, "Allow: {rule}")?;
        }

        // Reference the sitemap at the site root. Trim any trailing slash
        // from the base URL so we don't emit a double slash.
        let base_url = self.config.site.base_url.trim_end_matches('/');
        writeln!(file, "Sitemap: {base_url}/sitemap.xml")?;

        Ok(())
    }
}
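
// A minimal test sketch, not part of the original file: it assumes
// `typstify_core::Config` implements `Default` and exposes the public fields
// used above (`robots.enabled` as `bool`, `robots.disallow` as `Vec<String>`,
// `site.base_url` as `String`). Adapt the construction to the real API.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn writes_rules_and_sitemap_reference() -> Result<()> {
        // Hypothetical config setup; see the note above.
        let mut config = Config::default();
        config.robots.enabled = true;
        config.robots.disallow = vec!["/admin/".into()];
        config.site.base_url = "https://example.com".into();

        let dir = std::env::temp_dir().join("typstify_robots_test");
        std::fs::create_dir_all(&dir)?;
        RobotsGenerator::new(config).generate(&dir)?;

        let body = std::fs::read_to_string(dir.join("robots.txt"))?;
        assert!(body.contains("User-agent: *"));
        assert!(body.contains("Disallow: /admin/"));
        assert!(body.contains("Sitemap: https://example.com/sitemap.xml"));
        Ok(())
    }
}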