pub struct LlmConfig {
pub provider: LlmProviderType,
pub endpoint: String,
pub model: String,
pub api_key: Option<String>,
pub timeout_secs: u64,
}

Expand description
Configuration for connecting to an LLM endpoint.
Fields

provider: LlmProviderType
endpoint: String
model: String
api_key: Option<String>
timeout_secs: u64

Implementations
impl LlmConfig

pub fn from_args(
provider: &str,
endpoint: Option<&str>,
model: Option<&str>,
api_key: Option<&str>,
timeout: u64,
) -> Self
pub fn from_args( provider: &str, endpoint: Option<&str>, model: Option<&str>, api_key: Option<&str>, timeout: u64, ) -> Self
Build configuration from CLI arguments with sensible defaults.
- Ollama default endpoint: http://localhost:11434, model: llama3.2
- OpenAI-compatible default endpoint: http://localhost:1234, model: default
Trait Implementations

Auto Trait Implementations
impl Freeze for LlmConfig
impl RefUnwindSafe for LlmConfig
impl Send for LlmConfig
impl Sync for LlmConfig
impl Unpin for LlmConfig
impl UnsafeUnpin for LlmConfig
impl UnwindSafe for LlmConfig
Blanket Implementations

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more