robotxt 0.5.0

An implementation of the robots.txt (or URL exclusion) protocol, with support for the crawl-delay, sitemap, and universal match extensions.
#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("./README.md")]

#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Unable to parse the provided URL.
    #[error("url parsing error: {0}")]
    Url(#[from] url::ParseError),
}
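
// A minimal sketch of how the `#[from]` conversion above is meant to be used:
// the derive provides `From<url::ParseError> for Error`, so `?` converts the
// error automatically. `resolve_base` is a hypothetical helper, not part of
// this crate's API:
//
//     fn resolve_base(addr: &str) -> Result<url::Url, Error> {
//         let base = url::Url::parse(addr)?; // url::ParseError -> Error::Url
//         Ok(base)
//     }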

mod paths;
pub use paths::*;

#[cfg(feature = "builder")]
#[cfg_attr(docsrs, doc(cfg(feature = "builder")))]
mod build;
#[cfg(feature = "builder")]
pub use build::*;
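
// A minimal sketch of the builder side. The type and method names below
// (`RobotsBuilder`, `group`, `crawl_delay`, `disallow`, `sitemap`) are
// assumptions based on the feature's purpose; verify them against the
// generated documentation before relying on them:
//
//     let txt = RobotsBuilder::default()
//         .group(["foobot"], |u| u.crawl_delay(5).disallow("/example/nope.txt"))
//         .sitemap("https://example.com/sitemap.xml".try_into()?)
//         .to_string();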

#[cfg(feature = "parser")]
#[cfg_attr(docsrs, doc(cfg(feature = "parser")))]
mod parse;
#[cfg(feature = "parser")]
pub use parse::*;
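
// A minimal sketch of the parser side. The constructor and check below
// (`Robots::from_bytes`, `is_relative_allowed`) are assumed names; check the
// generated documentation for the exact parser API:
//
//     let txt = "User-Agent: foobot\nDisallow: /private/";
//     let r = Robots::from_bytes(txt.as_bytes(), "foobot");
//     assert!(r.is_relative_allowed("/public/page.html"));
//     assert!(!r.is_relative_allowed("/private/page.html"));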

// Re-exports
pub use url;
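// The re-export lets downstream crates name the same `url` version as this
// crate, e.g. `robotxt::url::Url::parse("https://example.com/robots.txt")`,
// without declaring a separate dependency.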

#[doc(hidden)]
pub mod prelude {
    pub use super::paths::*;
    pub use super::Error;

    #[cfg(feature = "builder")]
    pub use super::build::*;
    #[cfg(feature = "parser")]
    pub use super::parse::*;
}
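
// A minimal usage sketch of the hidden prelude, which pulls in the error type,
// the path helpers, and (when the "builder" and "parser" features from the cfg
// gates above are enabled) the builder and parser items in a single import:
//
//     use robotxt::prelude::*;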