robotxt 0.6.1

An implementation of the robots.txt (or URL exclusion) protocol, with support for the crawl-delay, sitemap, and universal match extensions.
Documentation: https://docs.rs/robotxt
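
As a quick orientation before the manifest, here is a minimal sketch of how the parser side of the crate might be used. The `Robots::from_bytes` constructor and the `is_relative_allowed` check are assumptions about the crate's API, not verified signatures; consult https://docs.rs/robotxt for the authoritative interface.

```rust
use robotxt::Robots;

fn main() {
    // A tiny robots.txt using the universal match extension (`*`).
    let txt = "User-Agent: foobot\n\
               Disallow: *\n\
               Allow: /example/\n\
               Disallow: /example/nope.txt";

    // Compile the rules that apply to the `foobot` user-agent.
    // NOTE: `Robots::from_bytes` and `is_relative_allowed` are assumed names.
    let r = Robots::from_bytes(txt.as_bytes(), "foobot");
    assert!(r.is_relative_allowed("/example/yeah.txt"));
    assert!(!r.is_relative_allowed("/example/nope.txt"));
    assert!(!r.is_relative_allowed("/invalid/path.txt"));
}
```

The crate's published Cargo.toml manifest follows.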
[dependencies.bstr]
optional = true
version = "1.9.1"

[dependencies.nom]
optional = true
version = "7.1.3"

[dependencies.percent-encoding]
version = "2.3.1"

[dependencies.regex]
optional = true
version = "1.10.3"

[dependencies.serde]
optional = true
version = "1.0.197"

[dependencies.thiserror]
version = "1.0.57"

[dependencies.url]
version = "2.5.0"
[dev-dependencies.serde_json]
version = "1.0.114"

[features]
builder = []
default = ["builder", "parser"]
full = ["builder", "parser", "optimal", "serde"]
optimal = []
parser = ["dep:nom", "dep:bstr", "dep:regex"]
serde = ["dep:serde", "url/serde", "serde/derive", "serde/rc"]

[lib]
path = "./lib.rs"

[package]
authors = ["Oleh Martsokha <o.martsokha@gmail.com>"]
categories = ["asynchronous", "web-programming"]
description = "The implementation of the Robots.txt (or URL exclusion) protocol with\nthe support of crawl-delay, sitemap and universal match extensions.\n"
documentation = "https://docs.rs/robotxt"
edition = "2021"
homepage = "https://github.com/spire-rs/kit/exclusion"
keywords = ["crawler", "scraper", "web", "framework"]
license = "MIT"
name = "robotxt"
readme = "./README.md"
repository = "https://github.com/spire-rs/kit/exclusion"
version = "0.6.1"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
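
The `serde` feature wires serde support through the crate (including `url/serde` and `serde/rc`, which suggests the compiled ruleset holds reference-counted data), and `serde_json` appears only as a dev-dependency, i.e. for tests. A plausible use is caching a parsed ruleset between crawl runs; the sketch below assumes `Robots` implements `Serialize` and `Deserialize` when the feature is enabled, which is not verified here.

```rust
use robotxt::Robots;

// Build with: features = ["parser", "serde"], plus serde_json in your own deps.
fn main() -> serde_json::Result<()> {
    let r = Robots::from_bytes(b"User-Agent: *\nDisallow: /private/", "foobot");

    // Round-trip the compiled ruleset through JSON, e.g. for a crawl cache.
    // NOTE: assumes `Robots: Serialize + Deserialize` under the `serde` feature.
    let json = serde_json::to_string(&r)?;
    let back: Robots = serde_json::from_str(&json)?;

    assert_eq!(
        r.is_relative_allowed("/private/data"),
        back.is_relative_allowed("/private/data")
    );
    Ok(())
}
```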