// walrus_model/lib.rs
//! Model crate — LLM provider implementations, enum dispatch, configuration,
//! construction, and runtime management.
//!
//! Merges all provider backends (OpenAI, Claude, Local) with the `Provider`
//! enum, `ProviderManager`, and `ProviderConfig` into a single crate. Config
//! uses `ApiStandard` (OpenAI or Anthropic) to select the API protocol.
//! All OpenAI-compatible providers route through the OpenAI backend.

// Core crate modules: provider configuration, runtime management, and the
// `Provider` enum with its constructor (module kept private; selected items
// are re-exported at the crate root).
pub mod config;
pub mod manager;
mod provider;

// Remote (HTTP) provider backends. The `#[path]` attribute maps the module
// to a sibling directory outside the default module tree.
#[path = "../remote/mod.rs"]
pub mod remote;

// Local inference backend — compiled only when the `local` feature is
// enabled, and likewise sourced from a sibling directory via `#[path]`.
#[cfg(feature = "local")]
#[path = "../local/mod.rs"]
pub mod local;
19
/// Default model name when none is configured.
///
/// With the `local` feature enabled, this resolves to the platform-optimal
/// model id from the built-in registry; otherwise it falls back to the
/// remote default, `"deepseek-chat"`.
pub fn default_model() -> &'static str {
    // Exactly one of these bindings survives conditional compilation, so
    // `name` is always defined exactly once.
    #[cfg(feature = "local")]
    let name = local::registry::default_model().model_id;
    #[cfg(not(feature = "local"))]
    let name = "deepseek-chat";
    name
}
34
// Public API surface: flatten the commonly-used types to the crate root so
// downstream code can write `use walrus_model::{ProviderManager, ...}`.
pub use config::{ApiStandard, HfModelConfig, ModelConfig, ProviderConfig};
pub use manager::ProviderManager;
pub use provider::{Provider, build_provider};
// Convenience re-export of reqwest's HTTP client — presumably so callers can
// name the client type without depending on reqwest directly; confirm intent.
pub use reqwest::Client;