[package]
edition = "2021"
name = "lmkit"
version = "0.1.0"
authors = ["Zoranner"]
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Multi-provider AI API client (OpenAI, Anthropic, Google Gemini, Aliyun, Ollama, Zhipu) with chat, embedding (incl. Gemini), rerank, image, and audio stub modalities"
readme = "README.md"
license = "MIT"

[features]
aliyun = []
all-modalities = [
"chat",
"embed",
"rerank",
"image",
"audio",
]
all-providers = [
"aliyun",
"anthropic",
"google",
"ollama",
"openai",
"zhipu",
]
anthropic = []
audio = []
chat = []
default = [
"openai",
"chat",
"embed",
]
embed = []
full = [
"all-providers",
"all-modalities",
]
google = []
image = []
ollama = []
openai = []
rerank = []
zhipu = []

[lib]
name = "lmkit"
path = "src/lib.rs"

[[example]]
name = "stream_chat"
path = "examples/stream_chat.rs"

[dependencies.async-trait]
version = "0.1"
[dependencies.base64]
version = "0.22"
[dependencies.bytes]
version = "1"
[dependencies.futures]
version = "0.3"
[dependencies.regex]
version = "1"
[dependencies.reqwest]
version = "0.12"
features = [
"json",
"rustls-tls",
"stream",
]
default-features = false
[dependencies.serde]
version = "1"
features = ["derive"]
[dependencies.serde_json]
version = "1"
[dependencies.thiserror]
version = "2"
[dependencies.tracing]
version = "0.1"

[dev-dependencies.tokio]
version = "1"
features = [
"rt-multi-thread",
"macros",
]
[dev-dependencies.wiremock]
version = "0.6"