# ferryllm 0.1.4
#
# Universal LLM protocol middleware for OpenAI, Anthropic, Claude Code, and OpenAI-compatible backends.
# Documentation: https://docs.rs/ferryllm
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
# Crate identity first, then remaining keys alphabetically with
# `description` last, per Rust Style Guide manifest conventions.
name = "ferryllm"
version = "0.1.4"
# Target auto-discovery is disabled; every target is listed explicitly below.
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
# No build script.
build = false
categories = [
    "web-programming::http-server",
    "api-bindings",
    "command-line-utilities",
]
documentation = "https://docs.rs/ferryllm"
edition = "2021"
# Paths omitted from the published package.
exclude = [
    ".claude/",
    ".codex/",
    ".agents/",
    "target/",
    "session-export.json",
]
homepage = "https://github.com/caomengxuan666/ferryllm"
keywords = [
    "llm",
    "openai",
    "anthropic",
    "proxy",
    "gateway",
]
license = "MIT"
readme = "README.md"
repository = "https://github.com/caomengxuan666/ferryllm"
description = "Universal LLM protocol middleware for OpenAI, Anthropic, Claude Code, and OpenAI-compatible backends."

[features]
# Features enabled when consumers do not set `default-features = false`.
default = [
    "http",
    "prompt-observability",
    "openai-responses",
]
# Empty feature lists gate conditional compilation only — no extra dependencies.
http = []
openai-responses = []
# Enables the optional tiktoken-rs dependency (see [dependencies.tiktoken-rs]).
prompt-observability = ["dep:tiktoken-rs"]

# Library target, written out explicitly because `autolib = false`.
[lib]
name = "ferryllm"
path = "src/lib.rs"

# CLI binary sharing the crate name, explicit because `autobins = false`.
[[bin]]
name = "ferryllm"
path = "src/bin/ferryllm.rs"

# Example targets, listed explicitly because `autoexamples = false`.
[[example]]
name = "basic_server"
path = "examples/basic_server.rs"

[[example]]
name = "codexapis_server"
path = "examples/codexapis_server.rs"

[[example]]
name = "codexapis_test"
path = "examples/codexapis_test.rs"

[[example]]
name = "full_test"
path = "examples/full_test.rs"

[[example]]
name = "load_test"
path = "examples/load_test.rs"

[[example]]
name = "mock_openai_upstream"
path = "examples/mock_openai_upstream.rs"

# Dependencies, sorted alphabetically. Multi-key specs use the conventional
# inline-table form instead of cargo-normalized `[dependencies.*]` headers;
# the parsed data is identical.
[dependencies]
async-trait = "0.1"
axum = { version = "0.8", features = ["macros"] }
futures = "0.3"
# Default features disabled; the rustls-tls feature selects the TLS backend.
reqwest = { version = "0.12", default-features = false, features = ["json", "stream", "rustls-tls"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2"
# Optional: pulled in only via the `prompt-observability` feature.
tiktoken-rs = { version = "0.11", optional = true }
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1"
toml = "0.9"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] }