[package]
name = "llm-link"
version = "0.2.2"
edition = "2021"
authors = ["LLM Link Contributors"]
description = "A universal LLM proxy supporting 7 providers (OpenAI, Anthropic, Zhipu, Aliyun, Volcengine, Tencent, Ollama) with an Ollama-compatible API"
license = "MIT"
readme = "README.md"
homepage = "https://github.com/lipish/llm-link"
repository = "https://github.com/lipish/llm-link"
documentation = "https://github.com/lipish/llm-link/blob/master/README.md"
keywords = ["llm", "proxy", "ollama", "openai", "anthropic"]
categories = ["command-line-utilities", "web-programming", "api-bindings"]
exclude = ["tests/*", "keys.yaml", ".gitignore", ".git/*"]
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false

[lib]
name = "llm_link"
path = "src/lib.rs"

[[bin]]
name = "llm-link"
path = "src/main.rs"

[dependencies]
anyhow = "1.0"
axum = "0.7"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.0", features = ["derive"] }
futures = "0.3"
futures-util = "0.3"
llm-connector = { version = "0.5.1", features = ["streaming"] }
regex = "1.0"
reqwest = { version = "0.11", features = ["json", "stream"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9"
thiserror = "1.0"
tokio = { version = "1.0", features = ["full"] }
tokio-stream = "0.1"
tower = "0.4"
tower-http = { version = "0.5", features = ["cors", "trace"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
uuid = { version = "1.0", features = ["v4"] }
[dev-dependencies]
tempfile = "3.0"