pub struct LlmConfig {
    pub default_provider: Option<String>,
    pub openai: Option<OpenAIConfig>,
    pub llama: Option<LlamaConfig>,
    pub default_model: Option<String>,
    pub default_temperature: f32,
    pub default_max_tokens: Option<u32>,
}
LLM provider configuration
Fields

default_provider: Option<String>
    Default provider to use

openai: Option<OpenAIConfig>
    OpenAI configuration

llama: Option<LlamaConfig>
    Llama.cpp configuration

default_model: Option<String>
    Default model name

default_temperature: f32
    Default temperature

default_max_tokens: Option<u32>
    Default max tokens
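As a sketch of how these fields fit together, the following constructs an LlmConfig by hand. The module path and the concrete values are assumptions for illustration; openai and llama are left as None because the fields of OpenAIConfig and LlamaConfig are not shown on this page.

// Hypothetical module path; adjust to the crate's actual layout.
use your_crate::config::LlmConfig;

fn example_config() -> LlmConfig {
    LlmConfig {
        default_provider: Some("openai".to_string()), // assumed provider name
        openai: None,  // OpenAIConfig fields are not documented here
        llama: None,   // LlamaConfig fields are not documented here
        default_model: Some("gpt-4o".to_string()),    // assumed model name
        default_temperature: 0.7,
        default_max_tokens: Some(1024),
    }
}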
Trait Implementations
impl<'de> Deserialize<'de> for LlmConfig

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer.
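Because Deserialize is implemented, the struct can be loaded from any serde-compatible format. A minimal sketch using the toml crate is shown below; the toml dependency and the module path are assumptions, not part of this crate's documented API.

// Load LlmConfig from a TOML string via the derived Deserialize impl.
use your_crate::config::LlmConfig; // hypothetical module path

fn load_from_toml(text: &str) -> Result<LlmConfig, toml::de::Error> {
    toml::from_str(text)
}

fn main() {
    let text = r#"
        default_provider = "openai"
        default_model = "gpt-4o"
        default_temperature = 0.7
        default_max_tokens = 1024
    "#;
    // The `openai` and `llama` tables are omitted; Option fields deserialize
    // to None when their keys are absent.
    let cfg = load_from_toml(text).expect("config should parse");
    assert_eq!(cfg.default_provider.as_deref(), Some("openai"));
}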
Auto Trait Implementations
impl Freeze for LlmConfig
impl RefUnwindSafe for LlmConfig
impl Send for LlmConfig
impl Sync for LlmConfig
impl Unpin for LlmConfig
impl UnwindSafe for LlmConfig
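Since LlmConfig is Send and Sync, a loaded configuration can be shared across threads, for example behind an Arc. The module path below is again an assumption.

use std::sync::Arc;
use std::thread;

use your_crate::config::LlmConfig; // hypothetical module path

fn spawn_worker(config: Arc<LlmConfig>) -> thread::JoinHandle<f32> {
    thread::spawn(move || {
        // Read-only access from another thread is fine because LlmConfig: Sync.
        config.default_temperature
    })
}

fn share(config: LlmConfig) -> f32 {
    let shared = Arc::new(config);
    let handle = spawn_worker(Arc::clone(&shared));
    handle.join().expect("worker thread panicked")
}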
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.
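The BorrowMut<T> for T blanket impl (together with the standard impl for &mut T) lets generic code accept either an owned LlmConfig or a mutable reference to one. The helper below is hypothetical and only illustrates that pattern; the module path is an assumption.

use std::borrow::BorrowMut;

use your_crate::config::LlmConfig; // hypothetical module path

// Accepts LlmConfig by value or as &mut LlmConfig, thanks to the blanket impls.
fn cap_max_tokens<C: BorrowMut<LlmConfig>>(mut cfg: C, cap: u32) -> Option<u32> {
    let cfg: &mut LlmConfig = cfg.borrow_mut();
    cfg.default_max_tokens = cfg.default_max_tokens.map(|t| t.min(cap));
    cfg.default_max_tokens
}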