pub struct LlmConfig {
    pub provider: String,
    pub model: String,
    pub system_prompt: Option<String>,
    pub prompt_template: String,
    pub temperature: Option<f64>,
    pub max_tokens: Option<u32>,
    pub tools: Vec<Value>,
    pub images: Vec<Value>,
    pub extra_params: Value,
}
Configuration for LLM nodes
Fields

provider: String
Provider (e.g., “openai”, “anthropic”, “local”)

model: String
Model identifier (e.g., “gpt-4”, “claude-3-opus”)

system_prompt: Option<String>
System prompt template

prompt_template: String
User prompt template (can reference previous node outputs)

temperature: Option<f64>
Temperature for sampling (0.0 - 2.0)

max_tokens: Option<u32>
Maximum tokens to generate

tools: Vec<Value>
Tools/functions available for the LLM to call

images: Vec<Value>
Images for vision models (multimodal input)

extra_params: Value
Additional provider-specific parameters
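A minimal construction sketch, assuming the struct is not #[non_exhaustive] and that the Value fields are serde_json::Value; the import path and the {{previous_output}} placeholder syntax are illustrative, not taken from this crate:

use serde_json::json;

// Import LlmConfig from wherever this crate exposes it; the path is illustrative.
fn example_config() -> LlmConfig {
    LlmConfig {
        provider: "anthropic".to_string(),   // which backend to call
        model: "claude-3-opus".to_string(),  // provider-specific model id
        system_prompt: Some("You are a helpful assistant.".to_string()),
        // Placeholder syntax for referencing previous node outputs is assumed here.
        prompt_template: "Summarize: {{previous_output}}".to_string(),
        temperature: Some(0.7),              // sampling temperature (0.0 - 2.0)
        max_tokens: Some(1024),              // cap on generated tokens
        tools: Vec::new(),                   // no tool/function calling
        images: Vec::new(),                  // text-only request
        extra_params: json!({}),             // no provider-specific extras
    }
}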
Trait Implementations

impl<'de> Deserialize<'de> for LlmConfig

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer.
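Because LlmConfig implements Deserialize, it can be parsed with any Serde deserializer. A sketch using serde_json, assuming the field names map one-to-one to JSON keys (no #[serde(rename)] or #[serde(default)] attributes are visible here, so every field is supplied; the JSON values are illustrative):

// serde_json is assumed to be available; LlmConfig's import path is illustrative.
fn parse_config(raw: &str) -> Result<LlmConfig, serde_json::Error> {
    serde_json::from_str::<LlmConfig>(raw)
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "provider": "openai",
        "model": "gpt-4",
        "system_prompt": null,
        "prompt_template": "Answer the question: {{input}}",
        "temperature": 0.2,
        "max_tokens": 512,
        "tools": [],
        "images": [],
        "extra_params": {}
    }"#;
    let config = parse_config(raw)?;
    assert_eq!(config.provider, "openai");
    Ok(())
}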
Auto Trait Implementations
impl Freeze for LlmConfig
impl RefUnwindSafe for LlmConfig
impl Send for LlmConfig
impl Sync for LlmConfig
impl Unpin for LlmConfig
impl UnwindSafe for LlmConfig
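Since LlmConfig is Send + Sync, a single configuration can be shared across worker threads behind an Arc without extra locking for read-only access. A minimal sketch:

use std::sync::Arc;
use std::thread;

fn share_across_threads(config: LlmConfig) {
    let shared = Arc::new(config);
    let handles: Vec<_> = (0..2)
        .map(|_| {
            // Each worker gets its own Arc handle and reads the config.
            let cfg = Arc::clone(&shared);
            thread::spawn(move || println!("{} / {}", cfg.provider, cfg.model))
        })
        .collect();
    for handle in handles {
        handle.join().expect("worker thread panicked");
    }
}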
Blanket Implementations

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.
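The blanket BorrowMut impl lets generic code take a mutable borrow of an LlmConfig whether it holds the value directly or behind a smart pointer such as Box. A sketch (the helper name is hypothetical):

use std::borrow::BorrowMut;

// Works for LlmConfig itself (via the blanket impl) and for Box<LlmConfig>.
fn set_temperature<C: BorrowMut<LlmConfig>>(mut holder: C, value: f64) -> C {
    holder.borrow_mut().temperature = Some(value);
    holder
}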