//! Built-in LLM provider implementations.
//!
//! Each provider implements the `LlmProvider` + `LlmProviderOps` traits from
//! `super::provider`. The four families are:
//!
//! - **Anthropic** — Claude models via the Anthropic Messages API
//! - **OpenAI-compatible** — OpenAI, OpenRouter, Together, Groq, DeepSeek,
//! Fireworks, HuggingFace, local vLLM/SGLang servers, etc.
//! - **Ollama** — local Ollama server with NDJSON streaming
//! - **Mock** — deterministic test responses without any network I/O
pub
pub
pub use AnthropicProvider;
pub use MockProvider;
pub use OllamaProvider;
pub use OpenAiCompatibleProvider;