Expand description
§LLMG Providers
LLM provider implementations for the LLMG ecosystem. Each module contains a
client that implements the llmg_core::provider::Provider trait.
§Usage
use llmg_providers::openai::OpenAiClient;
use llmg_core::provider::Provider;
let client = OpenAiClient::from_env().unwrap();
Re-exports§
pub use ai21::Ai21Client;
pub use aiml::AimlClient;
pub use aleph_alpha::AlephAlphaClient;
pub use anthropic::AnthropicClient;
pub use antigravity::AntigravityClient;
pub use anyscale::AnyscaleClient;
pub use apertis_ai::ApertisAiClient;
pub use aws_sagemaker::AwsSagemakerClient;
pub use azure::AzureOpenAiClient;
pub use azure_ai::AzureAiClient;
pub use bedrock::BedrockClient;
pub use cerebras::CerebrasClient;
pub use chatjimmy::ChatJimmyClient;
pub use chutes::ChutesClient;
pub use cohere::CohereClient;
pub use comet::CometClient;
pub use compactifai::CompactifAiClient;
pub use custom_llm_server::CustomLlmServerClient;
pub use deepgram::DeepgramClient;
pub use deepinfra::DeepInfraClient;
pub use deepseek::DeepseekClient;
pub use docker_runner::DockerRunnerClient;
pub use elevenlabs::ElevenLabsClient;
pub use fal_ai::FalAiClient;
pub use featherless_ai::FeatherlessAiClient;
pub use firecrawl::FirecrawlClient;
pub use fireworks_ai::FireworksAiClient;
pub use friendliai::FriendliaiClient;
pub use github_copilot::GitHubCopilotClient;
pub use groq::GroqClient;
pub use helicone::HeliconeClient;
pub use heroku::HerokuClient;
pub use huggingface::HuggingFaceClient;
pub use hyperbolic::HyperbolicClient;
pub use infinity::InfinityClient;
pub use jina::JinaClient;
pub use langgraph::LangGraphClient;
pub use litellm_proxy::LitellmProxyClient;
pub use llamafile::LlamafileClient;
pub use lm_studio::LmStudioClient;
pub use meta_llama::MetaLlamaClient;
pub use milvus::MilvusClient;
pub use minimax::MiniMaxClient;
pub use mistral::MistralClient;
pub use nano_gpt::NanoGptClient;
pub use nscale::NscaleClient;
pub use octoai::OctoAiClient;
pub use ollama::OllamaClient;
pub use oobabooga::OobaboogaClient;
pub use openai::OpenAiClient;
pub use openrouter::OpenRouterClient;
pub use perplexity::PerplexityClient;
pub use petals::PetalsClient;
pub use poe::PoeClient;
pub use polly::PollyClient;
pub use publicai::PublicAiClient;
pub use pydantic_ai_agent::PydanticAiAgentClient;
pub use runway::RunwayClient;
pub use sambanova::SambaNovaClient;
pub use stability::StabilityAiClient;
pub use synthetic::SyntheticClient;
pub use together_ai::TogetherAiClient;
pub use triton::TritonClient;
pub use v0::V0Client;
pub use vertex_ai::VertexAiClient;
pub use vllm::VllmClient;
pub use volcano::VolcanoClient;
pub use voyageai::VoyageaiClient;
pub use watsonx::WatsonxClient;
pub use xai::XaiClient;
pub use xinference::XinferenceClient;
pub use z_ai::ZaiClient;
Modules§
- ai21
- aiml
- aleph_alpha
- anthropic
- antigravity
- Google Antigravity API client for LLMG
- anyscale
- apertis_ai
- aws_sagemaker
- azure
- azure_ai
- bedrock
- cerebras
- chatjimmy
- ChatJimmy API client for LLMG
- chutes
- cohere
- Cohere API client for LLMG
- comet
- compactifai
- custom_llm_server
- Custom LLM Server provider for LLMG
- deepgram
- deepinfra
- deepseek
- docker_runner
- Docker Model Runner provider for LLMG
- elevenlabs
- fal_ai
- featherless_ai
- firecrawl
- fireworks_ai
- friendliai
- github_copilot
- GitHub Copilot API client for LLMG
- groq
- helicone
- heroku
- huggingface
- hyperbolic
- infinity
- jina
- langgraph
- litellm_proxy
- LiteLLM Proxy client for LLMG
- llamafile
- Llamafile local LLM provider for LLMG
- lm_studio
- LM Studio local LLM provider for LLMG
- meta_llama
- milvus
- minimax
- mistral
- nano_gpt
- nscale
- octoai
- ollama
- oobabooga
- oobabooga (Text Generation WebUI) local LLM provider for LLMG
- openai
- openrouter
- OpenRouter API client for LLMG
- perplexity
- petals
- Petals decentralized LLM provider for LLMG
- poe
- polly
- publicai
- pydantic_ai_agent
- runway
- sambanova
- stability
- synthetic
- together_ai
- triton
- Triton Inference Server provider for LLMG
- utils
- v0
- vertex_ai
- vllm
- vLLM local LLM provider for LLMG
- volcano
- voyageai
- watsonx
- xai
- xinference
- z_ai