[package]
edition = "2024"
rust-version = "1.85.0"
name = "llm-stack-openai"
version = "0.5.0"
authors = ["nazq"]
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "OpenAI GPT provider for the llm-stack SDK"
homepage = "https://github.com/nazq-org/llm-stack"
documentation = "https://docs.rs/llm-stack-openai"
readme = "README.md"
# NOTE(review): "anthropic" and "ollama" appear alongside "openai" in a crate
# described as the OpenAI provider — confirm these keywords are intentionally
# shared across the workspace's provider crates.
keywords = [
    "llm",
    "ai",
    "anthropic",
    "openai",
    "ollama",
]
categories = [
    "api-bindings",
    "asynchronous",
]
license = "Apache-2.0"
repository = "https://github.com/nazq-org/llm-stack"
# NOTE(review): edition 2024 defaults to resolver "3"; pinning "2" here
# overrides that default — confirm this is intentional.
resolver = "2"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = [
    "--cfg",
    "docsrs",
]

[lib]
name = "llm_stack_openai"
path = "src/lib.rs"

[[test]]
name = "integration"
path = "tests/integration.rs"

[dependencies.futures]
version = "0.3"

[dependencies.http]
version = "1"

[dependencies.llm-stack]
version = "0.5"

[dependencies.reqwest]
version = "0.12"
features = [
    "json",
    "stream",
    "rustls-tls",
]
default-features = false

[dependencies.serde]
version = "1"
features = ["derive"]

[dependencies.serde_json]
version = "1"

[dependencies.tracing]
version = "0.1"

[dev-dependencies.tokio]
version = "1"
features = [
    "macros",
    "rt-multi-thread",
]

[lints.clippy]
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"

[lints.clippy.all]
level = "warn"
priority = -1

[lints.clippy.pedantic]
level = "warn"
priority = -1