robotxt 0.4.6

An implementation of the robots.txt (or URL exclusion) protocol, with support for the crawl-delay, sitemap, and universal match extensions.
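
A typical flow is to parse a robots.txt body and query it for a specific user-agent. The sketch below is illustrative only and assumes a `Robots::from_bytes(bytes, user_agent)` constructor and an `is_relative_allowed(path)` check, both behind the (default) `parser` feature:

```rust
use robotxt::Robots;

fn main() {
    // An illustrative robots.txt body with a group for "foobot".
    let txt = r#"
        User-Agent: foobot
        Disallow: *
        Allow: /example/
        Disallow: /example/nope.txt
    "#;

    // Parse only the rules that apply to the "foobot" user-agent.
    let r = Robots::from_bytes(txt.as_bytes(), "foobot");

    // Longest-match precedence: the more specific rule wins.
    assert!(r.is_relative_allowed("/example/yeah.txt"));
    assert!(!r.is_relative_allowed("/example/nope.txt"));
    assert!(!r.is_relative_allowed("/invalid/path.txt"));
}
```
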
#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("./README.md")]

mod paths;
pub use paths::*;

#[cfg(feature = "builder")]
#[cfg_attr(docsrs, doc(cfg(feature = "builder")))]
mod build;
#[cfg(feature = "builder")]
pub use build::*;
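
// With the `builder` feature enabled, a robots.txt file can be assembled
// programmatically instead of parsed. A minimal sketch; the `RobotsBuilder`
// type and its `group`, `crawl_delay`, `disallow`, and `sitemap` methods are
// assumptions for illustration, not a guaranteed API:
//
//     let txt = RobotsBuilder::default()
//         .group(["foobot"], |group| group.crawl_delay(5).disallow("/private/"))
//         .sitemap("https://example.com/sitemap.xml".try_into()?);
//     println!("{txt}");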

#[cfg(feature = "parser")]
#[cfg_attr(docsrs, doc(cfg(feature = "parser")))]
mod parse;
#[cfg(feature = "parser")]
pub use parse::*;

// Re-exports
pub use url;
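// The re-export above lets downstream crates name `url` types through this
// crate without declaring `url` as a separate dependency, e.g. (illustrative):
//
//     let addr = robotxt::url::Url::parse("https://example.com/robots.txt");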