# multi-llm 1.0.0
# Unified multi-provider LLM client with support for OpenAI, Anthropic,
# Ollama, and LMStudio.
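# Add it to a project with `cargo add multi-llm`
# (or `cargo add multi-llm --features events`).
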
[dependencies.anyhow]
version = "1.0"

[dependencies.async-trait]
version = "0.1"

[dependencies.backoff]
features = ["tokio"]
version = "0.4"

[dependencies.chrono]
features = ["serde"]
version = "0.4"

[dependencies.fastrand]
version = "2.0"

[dependencies.futures-util]
version = "0.3"

[dependencies.governor]
version = "0.10"

[dependencies.once_cell]
version = "1.20"

[dependencies.regex]
version = "1.11"

[dependencies.reqwest]
features = ["json", "stream", "rustls-tls"]
version = "~0.12.22"

[dependencies.serde]
features = ["derive"]
version = "1.0"

[dependencies.serde_json]
version = "1.0"

[dependencies.thiserror]
version = "2.0"

[dependencies.tiktoken-rs]
version = "0.9.1"

[dependencies.tokio]
features = ["full"]
version = "1"

[dependencies.tokio-stream]
version = "0.1"

[dependencies.tower]
features = ["timeout", "limit"]
version = "0.5"

[dependencies.tracing]
version = "0.1"

[dependencies.uuid]
features = ["v4", "serde"]
version = "1.11"

[dev-dependencies.mockall]
version = "0.13"

[dev-dependencies.serial_test]
version = "3.2"

[dev-dependencies.tokio]
features = ["full", "test-util"]
version = "1"

[dev-dependencies.wiremock]
version = "0.6"

[[example]]
name = "basic_anthropic"
path = "examples/basic_anthropic.rs"

[[example]]
name = "basic_lmstudio"
path = "examples/basic_lmstudio.rs"

[[example]]
name = "basic_ollama"
path = "examples/basic_ollama.rs"

[[example]]
name = "basic_openai"
path = "examples/basic_openai.rs"

[[example]]
name = "error_handling"
path = "examples/error_handling.rs"

[[example]]
name = "multi_instance"
path = "examples/multi_instance.rs"

[[example]]
name = "prompt_caching"
path = "examples/prompt_caching.rs"

[[example]]
name = "provider_switching"
path = "examples/provider_switching.rs"

[[example]]
name = "tool_calling"
path = "examples/tool_calling.rs"

[features]
default = []
events = []
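# `events` is off by default and pulls in no extra dependencies (empty list);
# enable it with `multi-llm = { version = "1.0.0", features = ["events"] }`.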

[lib]
name = "multi_llm"
path = "src/lib.rs"

[package]
authors = ["Rick Duff <rgduff@gmail.com>"]
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
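# The `auto*` flags and `build = false` suggest this is the normalized
# manifest produced by `cargo publish`: target auto-discovery is disabled,
# there is no build script, and every library, example, and test target is
# declared explicitly elsewhere in this file.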
categories = ["api-bindings", "asynchronous"]
description = "Unified multi-provider LLM client with support for OpenAI, Anthropic, Ollama, and LMStudio"
documentation = "https://docs.rs/multi-llm"
edition = "2021"
keywords = ["llm", "openai", "anthropic", "ai", "unified"]
license = "Apache-2.0"
name = "multi-llm"
readme = "README.md"
repository = "https://github.com/darval/multi-llm"
rust-version = "1.75"
version = "1.0.0"

[[test]]
name = "anthropic_provider_integration_tests"
path = "tests/anthropic_provider_integration_tests.rs"

[[test]]
name = "common"
path = "tests/common.rs"

[[test]]
name = "http_client_integration_tests"
path = "tests/http_client_integration_tests.rs"

[[test]]
name = "lmstudio_provider_integration_tests"
path = "tests/lmstudio_provider_integration_tests.rs"

[[test]]
name = "ollama_provider_integration_tests"
path = "tests/ollama_provider_integration_tests.rs"

[[test]]
name = "openai_provider_integration_tests"
path = "tests/openai_provider_integration_tests.rs"

[[test]]
name = "provider_trait_compliance_tests"
path = "tests/provider_trait_compliance_tests.rs"

[[test]]
name = "token_integration_tests"
path = "tests/token_integration_tests.rs"