chat2response 0.1.1

Translate and proxy OpenAI Chat Completions requests to the Responses API.
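
The crate's core job is mapping a Chat Completions request body onto an equivalent Responses API request body before forwarding it upstream. The sketch below shows, using only serde_json (already a dependency), the general shape of that mapping: `messages` becomes `input`, the system prompt is lifted into `instructions`, and `max_tokens`/`max_completion_tokens` becomes `max_output_tokens`. Field names follow the public OpenAI API documentation; the function name and the exact set of fields handled are illustrative assumptions, not the crate's actual API.

```rust
// Illustrative sketch only: the crate's real types and functions may differ.
use serde_json::{json, Value};

/// Map a Chat Completions request body onto a Responses API request body.
fn chat_to_responses(chat: &Value) -> Value {
    let mut out = json!({});

    if let Some(model) = chat.get("model") {
        out["model"] = model.clone();
    }

    // Chat Completions carries the conversation in `messages`; the Responses API
    // takes input items under `input`, with system prompts lifted into `instructions`.
    let mut input = Vec::new();
    let mut instructions = Vec::new();
    if let Some(messages) = chat.get("messages").and_then(Value::as_array) {
        for msg in messages {
            let role = msg.get("role").and_then(Value::as_str).unwrap_or("user");
            let content = msg.get("content").cloned().unwrap_or(Value::Null);
            if role == "system" {
                if let Some(text) = content.as_str() {
                    instructions.push(text.to_string());
                }
            } else {
                input.push(json!({ "role": role, "content": content }));
            }
        }
    }
    if !instructions.is_empty() {
        out["instructions"] = json!(instructions.join("\n"));
    }
    out["input"] = json!(input);

    // `max_tokens` / `max_completion_tokens` maps to `max_output_tokens`.
    if let Some(max) = chat
        .get("max_completion_tokens")
        .or_else(|| chat.get("max_tokens"))
    {
        out["max_output_tokens"] = max.clone();
    }

    // Sampling and streaming options carry over largely unchanged.
    for key in ["temperature", "top_p", "stream"] {
        if let Some(v) = chat.get(key) {
            out[key] = v.clone();
        }
    }

    out
}

fn main() {
    let chat = json!({
        "model": "gpt-4o-mini",
        "messages": [
            { "role": "system", "content": "You are terse." },
            { "role": "user", "content": "Ping?" }
        ],
        "max_tokens": 32
    });
    println!(
        "{}",
        serde_json::to_string_pretty(&chat_to_responses(&chat)).unwrap()
    );
}
```

Running this prints a Responses-style body with `input`, `instructions`, and `max_output_tokens` in place of `messages` and `max_tokens`.
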
[package]
name = "chat2response"
version = "0.1.1"
edition = "2021"
description = "Translate and proxy OpenAI Chat Completions requests to the Responses API."
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/labiium/chat2response"
repository = "https://github.com/labiium/chat2response"
authors = ["Chat2Response Contributors"]
categories = ["api-bindings", "web-programming::http-server"]
keywords = ["openai", "responses", "chat-completions", "proxy", "converter"]

[lib]
name = "chat2response"
path = "src/lib.rs"

[[bin]]
name = "chat2response"
path = "src/main.rs"

[dependencies]
axum = { version = "0.7", features = ["macros", "json"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "process", "io-util"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"
thiserror = "1"
tower = "0.5"
tower-http = { version = "0.5", features = ["cors", "trace"] }
uuid = { version = "1", features = ["v4", "serde"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
once_cell = "1"
dotenvy = "0.15"
anyhow = "1"

rand = "0.8"

# HTTP client
reqwest = { version = "0.12", features = [
    "stream",
    "json",
    "gzip",
    "deflate",
    "brotli",
] }

# MCP client (simplified in-crate implementation built on async-trait)
async-trait = "0.1"

# Networking and HTTP primitives
hyper = { version = "1", features = ["server", "http1", "http2"] }
http = "1"
bytes = "1"
futures-util = "0.3"

# Storage backends and connection pooling
sled = "0.34"
r2d2 = "0.8"
redis = "0.24"

[dev-dependencies]
# Nothing additional yet; tests use the main crate and serde_json, which is already a dependency.

[features]
default = ["sled"]
sled = []
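
The dependency and feature set above sketches the overall architecture: an axum HTTP server fronting a reqwest client that forwards translated requests upstream, with the default `sled` feature presumably gating an embedded sled-backed store. The snippet below is a minimal, hypothetical entrypoint consistent with those dependencies; the real binary's routes, listen address, environment variables (e.g. `OPENAI_API_KEY`), and translation/storage logic are assumptions and may differ from the actual implementation.

```rust
// Hypothetical entrypoint consistent with the dependency set above (axum 0.7,
// tokio, reqwest); not the crate's actual main.rs.
use axum::{extract::Json, response::IntoResponse, routing::post, Router};
use serde_json::Value;

async fn proxy_chat(Json(chat_request): Json<Value>) -> impl IntoResponse {
    // A real proxy would translate the Chat Completions body into a Responses
    // API body here before forwarding it upstream.
    let api_key = std::env::var("OPENAI_API_KEY").unwrap_or_default(); // assumed env var
    let client = reqwest::Client::new();
    let upstream = client
        .post("https://api.openai.com/v1/responses")
        .bearer_auth(api_key)
        .json(&chat_request) // a real implementation would send the translated body
        .send()
        .await;

    match upstream {
        Ok(resp) => {
            let code = axum::http::StatusCode::from_u16(resp.status().as_u16())
                .unwrap_or(axum::http::StatusCode::BAD_GATEWAY);
            let body = resp.text().await.unwrap_or_default();
            (code, body)
        }
        Err(err) => (axum::http::StatusCode::BAD_GATEWAY, err.to_string()),
    }
}

#[tokio::main]
async fn main() {
    let app = Router::new().route("/v1/chat/completions", post(proxy_chat));
    let listener = tokio::net::TcpListener::bind("0.0.0.0:8080").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
```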