# webpuppet 0.1.5-alpha
#
# Web browser programmatic automation and control library for research,
# testing, and workflow automation.
[package]
name = "webpuppet"
version = "0.1.5-alpha"
# Remaining keys alphabetical per the Rust style guide, description last.
authors = ["Tyler Zervas <tz-dev@vectorweight.com>"]
categories = ["web-programming", "development-tools"]
documentation = "https://docs.rs/webpuppet"
edition = "2021"
keywords = ["browser-automation", "ai", "puppeteer", "web-scraping"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/tzervas/webpuppet-rs"
rust-version = "1.75.0"
description = "Web browser programmatic automation and control library for research, testing, and workflow automation"

[features]
# Default build: Chromium backend plus every provider.
default = ["chromium", "all-providers"]
# Browser backends — each gates its optional driver crate.
chromium = ["dep:chromiumoxide"]
firefox = ["dep:fantoccini"]
# Enable specific providers
grok = []
claude = []
gemini = []
chatgpt = []
perplexity = []
notebooklm = []
kaggle = []
# Provider groups
all-providers = ["grok", "claude", "gemini", "chatgpt", "perplexity", "notebooklm", "kaggle"]
core-providers = ["grok", "claude", "gemini", "chatgpt"]
search-providers = ["grok", "perplexity", "chatgpt"]
# NOTE(review): this feature enables nothing — the `secrecy` crate in
# [dependencies] is non-optional, and the optional `keyring` dep is not
# gated by any feature. Presumably this should be `["dep:keyring"]` or the
# `secrecy` dep should be made optional; confirm intent before changing.
secrecy = []
# TLS backend selection
# NOTE(review): reqwest is already declared with "rustls-tls" in its
# [dependencies] feature list, so this feature is redundant as written —
# verify whether a non-rustls default build was intended.
rustls-tls = ["reqwest/rustls-tls"]

[dependencies]
# Caret (`^`) prefixes removed throughout: Cargo's default interpretation of a
# bare version requirement is already caret-compatible, so "1.0" == "^1.0".

# Browser automation - chromiumoxide is pure Rust, async-first
chromiumoxide = { version = "0.7", default-features = false, features = ["tokio-runtime"], optional = true }
fantoccini = { version = "0.21", optional = true }

# Async runtime
tokio = { version = "1.49", features = ["full"] }
futures = "0.3"

# HTTP and networking - use rustls to avoid OpenSSL dep
reqwest = { version = "0.12", default-features = false, features = ["json", "cookies", "rustls-tls"] }

# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.8"
humantime-serde = "1.1"

# Error handling
thiserror = "2.0"
anyhow = "1.0"

# Logging
tracing = "0.1"

# Security - credential handling
keyring = { version = "3.6", optional = true }
secrecy = { version = "0.8" }

# Rate limiting
parking_lot = "0.12"

# HTML parsing for response extraction
scraper = "0.25"

# Regex for security screening
regex = "1.12"

# Async utilities
async-trait = "0.1"

# Time handling
chrono = { version = "0.4", features = ["serde"] }

# Utilities
uuid = { version = "1.19", features = ["v4"] }
dirs = "5.0"
fastrand = "2.3"
urlencoding = "2.1"
url = "2.5"
aes-gcm = "0.10"
pbkdf2 = "0.12"
chacha20poly1305 = "0.10"
sha2 = "0.10"
rand = "0.9"
base64 = "0.22"

[dev-dependencies]
# Sorted alphabetically; bare versions are caret-compatible by default.
anyhow = "1.0"
tempfile = "3.24"
tokio-test = "0.4"
tracing-subscriber = "0.3"

# Run with: cargo run --example research_runner --features all-providers
# (all-providers is in the default feature set, so a plain
#  `cargo run --example research_runner` also works.)
[[example]]
name = "research_runner"
required-features = ["all-providers"]