multi_llm/providers/
mod.rs

//! LLM Provider implementations
//!
//! This module contains implementations for different LLM providers:
//!
//! - **anthropic**: Anthropic Claude provider with native API format
//! - **openai**: OpenAI provider using OpenAI-compatible API
//! - **lmstudio**: LM Studio provider using OpenAI-compatible API
//! - **ollama**: Ollama provider using OpenAI-compatible API
//! - **openai_shared**: Shared structures and utilities for OpenAI-compatible providers
//!
//! ## Architecture
//!
//! The providers are organized to highlight code reuse:
//!
//! ```text
//! openai_shared.rs    <- Shared OpenAI-compatible structures and utilities
//!      |        |        |
//!      |        |        |
//! openai.rs  lmstudio.rs  ollama.rs  <- All use OpenAI-compatible API
//!
//! anthropic.rs        <- Uses Anthropic's native API format
//! ```
22
// Provider implementation modules, kept in alphabetical order.
pub mod anthropic;
pub mod lmstudio;
pub mod ollama;
pub mod openai;
// Shared OpenAI-compatible request/response structures and utilities,
// used by the openai, lmstudio, and ollama providers.
pub mod openai_shared;

// Tests will be rewritten following the research checklist and unit test template
// to test the current LlmProvider trait API (execute_llm, not execute_chat_with_model)
#[cfg(test)]
mod tests;

// Re-export the provider structs so callers can use
// `providers::AnthropicProvider` etc. without naming the submodule.
// NOTE(review): openai_shared has no re-export here — presumably its items
// are internal plumbing for the providers above; confirm before exposing.
pub use anthropic::AnthropicProvider;
pub use lmstudio::LMStudioProvider;
pub use ollama::OllamaProvider;
pub use openai::OpenAIProvider;