robotparser_fork/model/fetched_robots_txt.rs

use crate::model::robots_txt::RobotsTxt;
use std::time::SystemTime;

/// Internal representation of the outcome of fetching a robots.txt file over the network.
#[derive(Debug, Clone)]
pub(crate) enum FetchedRobotsTxtContainer {
    /// The server denied access to the robots.txt file.
    FetchDenied,
    /// The robots.txt file could not be fetched (for example, a network or server error).
    FetchFailed,
    /// The robots.txt file was fetched and parsed successfully.
    Fetched(RobotsTxt),
}

/// A model of a robots.txt file that was downloaded over the network.
/// This model takes HTTP response codes into account when loading the robots.txt file.
/// To query this model, use the `robotparser::service::RobotsTxtService` trait.
/// To create this structure, use `robotparser::parser::parse_fetched_robots_txt`.
#[derive(Debug, Clone)]
pub struct FetchedRobotsTxt {
    fetched_at: SystemTime,
    container: FetchedRobotsTxtContainer,
}

impl FetchedRobotsTxt {
    /// Creates a new `FetchedRobotsTxt`, recording the current system time as the fetch time.
    pub(crate) fn new(container: FetchedRobotsTxtContainer) -> FetchedRobotsTxt {
        FetchedRobotsTxt {
            fetched_at: SystemTime::now(),
            container,
        }
    }

    /// Returns a reference to the internal container describing the fetch outcome.
    pub(crate) fn get_container(&self) -> &FetchedRobotsTxtContainer {
        &self.container
    }

    /// Returns the system time at which the robots.txt file was downloaded over the network.
    pub fn get_fetched_at(&self) -> &SystemTime {
        &self.fetched_at
    }
}
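
// A minimal sketch of how the fetch timestamp exposed by `get_fetched_at` might
// be used to decide whether a cached robots.txt entry has gone stale. The
// one-hour TTL below is an arbitrary value chosen for illustration, not a
// policy defined by this crate.
#[cfg(test)]
mod tests {
    use super::{FetchedRobotsTxt, FetchedRobotsTxtContainer};
    use std::time::Duration;

    #[test]
    fn freshly_created_entry_is_not_stale() {
        // Hypothetical cache lifetime for a fetched robots.txt file.
        let ttl = Duration::from_secs(60 * 60);
        let fetched = FetchedRobotsTxt::new(FetchedRobotsTxtContainer::FetchFailed);
        // `elapsed` errs only if the system clock moved backwards; treat that as stale.
        let is_stale = fetched
            .get_fetched_at()
            .elapsed()
            .map(|age| age > ttl)
            .unwrap_or(true);
        assert!(!is_stale);
    }
}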