// autoagents_llm/builder.rs

//! Builder module for configuring and instantiating LLM providers.
//!
//! This module provides a flexible builder pattern for creating and configuring
//! LLM (Large Language Model) provider instances with various settings and options.
6use crate::{
7    chat::{FunctionTool, ParameterProperty, ParametersSchema, ReasoningEffort, Tool, ToolChoice},
8    error::LLMError,
9    LLMProvider,
10};
11use std::{collections::HashMap, marker::PhantomData};
12
/// A function type for validating LLM provider outputs.
/// Takes a response string and returns Ok(()) if valid, or Err with an error message if invalid.
/// Installed on a builder via `LLMBuilder::validator`; retry count is controlled by
/// `LLMBuilder::validator_attempts`.
pub type ValidatorFn = dyn Fn(&str) -> Result<(), String> + Send + Sync + 'static;
16
/// Supported LLM backend providers.
///
/// Derives `PartialEq`/`Eq` so parsed backends can be compared directly with
/// `==` instead of `matches!`. Parse from a string (case-insensitively) via
/// [`std::str::FromStr`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LLMBackend {
    /// OpenAI API provider (GPT-3, GPT-4, etc.)
    OpenAI,
    /// Anthropic API provider (Claude models)
    Anthropic,
    /// Ollama local LLM provider for self-hosted models
    Ollama,
    /// DeepSeek API provider for their LLM models
    DeepSeek,
    /// X.AI (formerly Twitter) API provider
    XAI,
    /// Phind API provider for code-specialized models
    Phind,
    /// Google Gemini API provider
    Google,
    /// Groq API provider
    Groq,
    /// Azure OpenAI API provider
    AzureOpenAI,
}
39
40/// Implements string parsing for LLMBackend enum.
41///
42/// Converts a string representation of a backend provider name into the corresponding
43/// LLMBackend variant. The parsing is case-insensitive.
44///
45/// # Arguments
46///
47/// * `s` - The string to parse
48///
49/// # Returns
50///
51/// * `Ok(LLMBackend)` - The corresponding backend variant if valid
52/// * `Err(LLMError)` - An error if the string doesn't match any known backend
53///
54/// # Examples
55///
56/// ```
57/// use std::str::FromStr;
58/// use autoagents_llm::builder::LLMBackend;
59///
60/// let backend = LLMBackend::from_str("openai").unwrap();
61/// assert!(matches!(backend, LLMBackend::OpenAI));
62///
63/// let err = LLMBackend::from_str("invalid").unwrap_err();
64/// assert!(err.to_string().contains("Unknown LLM backend"));
65/// ```
66impl std::str::FromStr for LLMBackend {
67    type Err = LLMError;
68
69    fn from_str(s: &str) -> Result<Self, Self::Err> {
70        match s.to_lowercase().as_str() {
71            "openai" => Ok(LLMBackend::OpenAI),
72            "anthropic" => Ok(LLMBackend::Anthropic),
73            "ollama" => Ok(LLMBackend::Ollama),
74            "deepseek" => Ok(LLMBackend::DeepSeek),
75            "xai" => Ok(LLMBackend::XAI),
76            "phind" => Ok(LLMBackend::Phind),
77            "google" => Ok(LLMBackend::Google),
78            "groq" => Ok(LLMBackend::Groq),
79            "azure-openai" => Ok(LLMBackend::AzureOpenAI),
80            _ => Err(LLMError::InvalidRequest(format!(
81                "Unknown LLM backend: {s}"
82            ))),
83        }
84    }
85}
86
/// Builder for configuring and instantiating LLM providers.
///
/// Provides a fluent interface for setting various configuration options
/// like model selection, API keys, generation parameters, etc. The generic
/// parameter `L` ties the builder to a concrete provider type without
/// storing a value of it (see the `PhantomData` field).
pub struct LLMBuilder<L: LLMProvider> {
    /// Marker tying this builder to the target provider type `L`
    pub(crate) backend: PhantomData<L>,
    /// API key for authentication with the provider
    pub(crate) api_key: Option<String>,
    /// Base URL for API requests (primarily for self-hosted instances)
    pub(crate) base_url: Option<String>,
    /// Model identifier/name to use
    pub(crate) model: Option<String>,
    /// Model abstraction for edge inference backends
    #[cfg(feature = "liquid_edge")]
    pub(crate) edge_model: Option<Box<dyn liquid_edge::Model>>,
    /// Device for edge inference backends
    #[cfg(feature = "liquid_edge")]
    pub(crate) edge_device: Option<liquid_edge::Device>,
    /// Maximum tokens to generate in responses
    pub(crate) max_tokens: Option<u32>,
    /// Temperature parameter for controlling response randomness (0.0-1.0)
    pub(crate) temperature: Option<f32>,
    /// System prompt/context to guide model behavior
    pub(crate) system: Option<String>,
    /// Request timeout duration in seconds
    pub(crate) timeout_seconds: Option<u64>,
    /// Top-p (nucleus) sampling parameter
    pub(crate) top_p: Option<f32>,
    /// Top-k sampling parameter
    pub(crate) top_k: Option<u32>,
    /// Format specification for embedding outputs
    pub(crate) embedding_encoding_format: Option<String>,
    /// Vector dimensions for embedding outputs
    pub(crate) embedding_dimensions: Option<u32>,
    /// Optional validation function for response content
    pub(crate) validator: Option<Box<ValidatorFn>>,
    /// Number of retry attempts when validation fails
    pub(crate) validator_attempts: usize,
    /// Tool choice strategy passed to the provider
    pub(crate) tool_choice: Option<ToolChoice>,
    /// Enable parallel tool use
    pub(crate) enable_parallel_tool_use: Option<bool>,
    /// Enable reasoning
    pub(crate) reasoning: Option<bool>,
    /// Reasoning effort level, stored as the string form of [`ReasoningEffort`]
    pub(crate) reasoning_effort: Option<String>,
    /// Token budget allotted to reasoning — provider-specific semantics; TODO confirm per backend
    pub(crate) reasoning_budget_tokens: Option<u32>,
    /// API Version (used by versioned APIs such as Azure OpenAI)
    pub(crate) api_version: Option<String>,
    /// Deployment Id (used by Azure OpenAI)
    pub(crate) deployment_id: Option<String>,
    /// Voice selection — not read by code in this module (hence `allow(dead_code)`);
    /// presumably consumed by specific provider constructors — verify against callers
    #[allow(dead_code)]
    pub(crate) voice: Option<String>,
}
144
impl<L: LLMProvider> Default for LLMBuilder<L> {
    /// Creates a builder with every option unset (`None`) and zero
    /// validator retry attempts.
    fn default() -> Self {
        Self {
            backend: PhantomData,
            api_key: None,
            base_url: None,
            model: None,
            #[cfg(feature = "liquid_edge")]
            edge_model: None,
            #[cfg(feature = "liquid_edge")]
            edge_device: None,
            max_tokens: None,
            temperature: None,
            system: None,
            timeout_seconds: None,
            top_p: None,
            top_k: None,
            embedding_encoding_format: None,
            embedding_dimensions: None,
            validator: None,
            // No retries unless explicitly requested via validator_attempts().
            validator_attempts: 0,
            tool_choice: None,
            enable_parallel_tool_use: None,
            reasoning: None,
            reasoning_effort: None,
            reasoning_budget_tokens: None,
            api_version: None,
            deployment_id: None,
            voice: None,
        }
    }
}
177
178impl<L: LLMProvider> LLMBuilder<L> {
179    /// Creates a new empty builder instance with default values.
180    pub fn new() -> Self {
181        Self::default()
182    }
183
184    /// Sets the API key for authentication.
185    pub fn api_key(mut self, key: impl Into<String>) -> Self {
186        self.api_key = Some(key.into());
187        self
188    }
189
190    /// Sets the base URL for API requests.
191    pub fn base_url(mut self, url: impl Into<String>) -> Self {
192        self.base_url = Some(url.into());
193        self
194    }
195
196    /// Sets the model identifier to use.
197    pub fn model(mut self, model: impl Into<String>) -> Self {
198        self.model = Some(model.into());
199        self
200    }
201
202    /// Sets the edge model for local inference backends.
203    /// This method accepts any type that implements the Model trait.
204    #[cfg(feature = "liquid_edge")]
205    pub fn with_model<M: liquid_edge::Model + 'static>(mut self, model: M) -> Self {
206        self.edge_model = Some(Box::new(model));
207        self
208    }
209
210    /// Sets the device for edge inference backends.
211    #[cfg(feature = "liquid_edge")]
212    pub fn with_device(mut self, device: liquid_edge::Device) -> Self {
213        self.edge_device = Some(device);
214        self
215    }
216
217    /// Sets the maximum number of tokens to generate.
218    pub fn max_tokens(mut self, max_tokens: u32) -> Self {
219        self.max_tokens = Some(max_tokens);
220        self
221    }
222
223    /// Sets the temperature for controlling response randomness (0.0-1.0).
224    pub fn temperature(mut self, temperature: f32) -> Self {
225        self.temperature = Some(temperature);
226        self
227    }
228
229    /// Sets the system prompt/context.
230    pub fn system(mut self, system: impl Into<String>) -> Self {
231        self.system = Some(system.into());
232        self
233    }
234
235    /// Sets the reasoning flag.
236    pub fn reasoning_effort(mut self, reasoning_effort: ReasoningEffort) -> Self {
237        self.reasoning_effort = Some(reasoning_effort.to_string());
238        self
239    }
240
241    /// Sets the reasoning flag.
242    pub fn reasoning(mut self, reasoning: bool) -> Self {
243        self.reasoning = Some(reasoning);
244        self
245    }
246
247    /// Sets the reasoning budget tokens.
248    pub fn reasoning_budget_tokens(mut self, reasoning_budget_tokens: u32) -> Self {
249        self.reasoning_budget_tokens = Some(reasoning_budget_tokens);
250        self
251    }
252
253    /// Sets the request timeout in seconds.
254    pub fn timeout_seconds(mut self, timeout_seconds: u64) -> Self {
255        self.timeout_seconds = Some(timeout_seconds);
256        self
257    }
258
259    /// Sets the top-p (nucleus) sampling parameter.
260    pub fn top_p(mut self, top_p: f32) -> Self {
261        self.top_p = Some(top_p);
262        self
263    }
264
265    /// Sets the top-k sampling parameter.
266    pub fn top_k(mut self, top_k: u32) -> Self {
267        self.top_k = Some(top_k);
268        self
269    }
270
271    /// Sets the encoding format for embeddings.
272    pub fn embedding_encoding_format(
273        mut self,
274        embedding_encoding_format: impl Into<String>,
275    ) -> Self {
276        self.embedding_encoding_format = Some(embedding_encoding_format.into());
277        self
278    }
279
280    /// Sets the dimensions for embeddings.
281    pub fn embedding_dimensions(mut self, embedding_dimensions: u32) -> Self {
282        self.embedding_dimensions = Some(embedding_dimensions);
283        self
284    }
285
286    /// Sets a validation function to verify LLM responses.
287    ///
288    /// # Arguments
289    ///
290    /// * `f` - Function that takes a response string and returns Ok(()) if valid, or Err with error message if invalid
291    pub fn validator<F>(mut self, f: F) -> Self
292    where
293        F: Fn(&str) -> Result<(), String> + Send + Sync + 'static,
294    {
295        self.validator = Some(Box::new(f));
296        self
297    }
298
299    /// Sets the number of retry attempts for validation failures.
300    ///
301    /// # Arguments
302    ///
303    /// * `attempts` - Maximum number of times to retry generating a valid response
304    pub fn validator_attempts(mut self, attempts: usize) -> Self {
305        self.validator_attempts = attempts;
306        self
307    }
308
309    /// Enable parallel tool use
310    pub fn enable_parallel_tool_use(mut self, enable: bool) -> Self {
311        self.enable_parallel_tool_use = Some(enable);
312        self
313    }
314
315    /// Set tool choice.  Note that if the choice is given as Tool(name), and that
316    /// tool isn't available, the builder will fail.
317    pub fn tool_choice(mut self, choice: ToolChoice) -> Self {
318        self.tool_choice = Some(choice);
319        self
320    }
321
322    /// Explicitly disable the use of tools, even if they are provided.
323    pub fn disable_tools(mut self) -> Self {
324        self.tool_choice = Some(ToolChoice::None);
325        self
326    }
327
328    /// Set the API version.
329    pub fn api_version(mut self, api_version: impl Into<String>) -> Self {
330        self.api_version = Some(api_version.into());
331        self
332    }
333
334    /// Set the deployment id. Used in Azure OpenAI.
335    pub fn deployment_id(mut self, deployment_id: impl Into<String>) -> Self {
336        self.deployment_id = Some(deployment_id.into());
337        self
338    }
339}
340
/// Builder for function parameters
///
/// Accumulates the pieces of a single JSON-Schema property; `build` (private)
/// converts it into a `(name, ParameterProperty)` pair.
#[allow(dead_code)]
pub struct ParamBuilder {
    /// Property name (the key in the schema's `properties` map)
    name: String,
    /// JSON Schema type; defaults to "string" in `new`
    property_type: String,
    /// Human-readable description; empty by default
    description: String,
    /// Element schema for array-typed parameters
    items: Option<Box<ParameterProperty>>,
    /// Allowed values for enum-constrained parameters
    enum_list: Option<Vec<String>>,
}
350
351impl ParamBuilder {
352    /// Creates a new parameter builder
353    pub fn new(name: impl Into<String>) -> Self {
354        Self {
355            name: name.into(),
356            property_type: "string".to_string(),
357            description: String::new(),
358            items: None,
359            enum_list: None,
360        }
361    }
362
363    /// Sets the parameter type
364    pub fn type_of(mut self, type_str: impl Into<String>) -> Self {
365        self.property_type = type_str.into();
366        self
367    }
368
369    /// Sets the parameter description
370    pub fn description(mut self, desc: impl Into<String>) -> Self {
371        self.description = desc.into();
372        self
373    }
374
375    /// Sets the array item type for array parameters
376    pub fn items(mut self, item_property: ParameterProperty) -> Self {
377        self.items = Some(Box::new(item_property));
378        self
379    }
380
381    /// Sets the enum values for enum parameters
382    pub fn enum_values(mut self, values: Vec<String>) -> Self {
383        self.enum_list = Some(values);
384        self
385    }
386
387    /// Builds the parameter property
388    #[allow(dead_code)]
389    fn build(self) -> (String, ParameterProperty) {
390        (
391            self.name,
392            ParameterProperty {
393                property_type: self.property_type,
394                description: self.description,
395                items: self.items,
396                enum_list: self.enum_list,
397            },
398        )
399    }
400}
401
/// Builder for function tools
///
/// Parameters may be declared either through the `ParamBuilder` DSL or by
/// supplying a raw JSON Schema; the raw schema takes precedence in `build`.
#[allow(dead_code)]
pub struct FunctionBuilder {
    /// Function name exposed to the model
    name: String,
    /// Human-readable description; empty by default
    description: String,
    /// Parameters declared via the DSL (ignored when `raw_schema` is set)
    parameters: Vec<ParamBuilder>,
    /// Names of required parameters
    required: Vec<String>,
    /// Full JSON Schema override for the parameters
    raw_schema: Option<serde_json::Value>,
}
411
412impl FunctionBuilder {
413    /// Creates a new function builder
414    pub fn new(name: impl Into<String>) -> Self {
415        Self {
416            name: name.into(),
417            description: String::new(),
418            parameters: Vec::new(),
419            required: Vec::new(),
420            raw_schema: None,
421        }
422    }
423
424    /// Sets the function description
425    pub fn description(mut self, desc: impl Into<String>) -> Self {
426        self.description = desc.into();
427        self
428    }
429
430    /// Adds a parameter to the function
431    pub fn param(mut self, param: ParamBuilder) -> Self {
432        self.parameters.push(param);
433        self
434    }
435
436    /// Marks parameters as required
437    pub fn required(mut self, param_names: Vec<String>) -> Self {
438        self.required = param_names;
439        self
440    }
441
442    /// Provides a full JSON Schema for the parameters.  Using this method
443    /// bypasses the DSL and allows arbitrary complex schemas (nested arrays,
444    /// objects, oneOf, etc.).
445    pub fn json_schema(mut self, schema: serde_json::Value) -> Self {
446        self.raw_schema = Some(schema);
447        self
448    }
449
450    /// Builds the function tool
451    #[allow(dead_code)]
452    fn build(self) -> Tool {
453        let parameters_value = if let Some(schema) = self.raw_schema {
454            schema
455        } else {
456            let mut properties = HashMap::new();
457            for param in self.parameters {
458                let (name, prop) = param.build();
459                properties.insert(name, prop);
460            }
461
462            serde_json::to_value(ParametersSchema {
463                schema_type: "object".to_string(),
464                properties,
465                required: self.required,
466            })
467            .unwrap_or_else(|_| serde_json::Value::Object(serde_json::Map::new()))
468        };
469
470        Tool {
471            tool_type: "function".to_string(),
472            function: FunctionTool {
473                name: self.name,
474                description: self.description,
475                parameters: parameters_value,
476            },
477        }
478    }
479}
480
#[cfg(test)]
mod tests {
    use super::*;
    use crate::chat::{ChatMessage, ChatResponse, StructuredOutputFormat};
    use crate::error::LLMError;
    use serde_json::json;
    use std::str::FromStr;

    // --- LLMBackend parsing ---

    // Covers case-insensitive parsing of every supported backend plus the
    // error path for an unknown name.
    #[test]
    fn test_llm_backend_from_str() {
        assert!(matches!(
            LLMBackend::from_str("openai").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("OpenAI").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("OPENAI").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("anthropic").unwrap(),
            LLMBackend::Anthropic
        ));
        assert!(matches!(
            LLMBackend::from_str("ollama").unwrap(),
            LLMBackend::Ollama
        ));
        assert!(matches!(
            LLMBackend::from_str("deepseek").unwrap(),
            LLMBackend::DeepSeek
        ));
        assert!(matches!(
            LLMBackend::from_str("xai").unwrap(),
            LLMBackend::XAI
        ));
        assert!(matches!(
            LLMBackend::from_str("phind").unwrap(),
            LLMBackend::Phind
        ));
        assert!(matches!(
            LLMBackend::from_str("google").unwrap(),
            LLMBackend::Google
        ));
        assert!(matches!(
            LLMBackend::from_str("groq").unwrap(),
            LLMBackend::Groq
        ));
        assert!(matches!(
            LLMBackend::from_str("azure-openai").unwrap(),
            LLMBackend::AzureOpenAI
        ));

        let result = LLMBackend::from_str("invalid");
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("Unknown LLM backend"));
    }

    // --- ParamBuilder ---

    #[test]
    fn test_param_builder_new() {
        let builder = ParamBuilder::new("test_param");
        assert_eq!(builder.name, "test_param");
        // "string" is the documented default type.
        assert_eq!(builder.property_type, "string");
        assert_eq!(builder.description, "");
        assert!(builder.items.is_none());
        assert!(builder.enum_list.is_none());
    }

    #[test]
    fn test_param_builder_fluent_interface() {
        let builder = ParamBuilder::new("test_param")
            .type_of("integer")
            .description("A test parameter")
            .enum_values(vec!["option1".to_string(), "option2".to_string()]);

        assert_eq!(builder.name, "test_param");
        assert_eq!(builder.property_type, "integer");
        assert_eq!(builder.description, "A test parameter");
        assert_eq!(
            builder.enum_list,
            Some(vec!["option1".to_string(), "option2".to_string()])
        );
    }

    #[test]
    fn test_param_builder_with_items() {
        let item_property = ParameterProperty {
            property_type: "string".to_string(),
            description: "Array item".to_string(),
            items: None,
            enum_list: None,
        };

        let builder = ParamBuilder::new("array_param")
            .type_of("array")
            .description("An array parameter")
            .items(item_property);

        assert_eq!(builder.name, "array_param");
        assert_eq!(builder.property_type, "array");
        assert_eq!(builder.description, "An array parameter");
        assert!(builder.items.is_some());
    }

    #[test]
    fn test_param_builder_build() {
        let builder = ParamBuilder::new("test_param")
            .type_of("string")
            .description("A test parameter");

        let (name, property) = builder.build();
        assert_eq!(name, "test_param");
        assert_eq!(property.property_type, "string");
        assert_eq!(property.description, "A test parameter");
    }

    // --- FunctionBuilder ---

    #[test]
    fn test_function_builder_new() {
        let builder = FunctionBuilder::new("test_function");
        assert_eq!(builder.name, "test_function");
        assert_eq!(builder.description, "");
        assert!(builder.parameters.is_empty());
        assert!(builder.required.is_empty());
        assert!(builder.raw_schema.is_none());
    }

    #[test]
    fn test_function_builder_fluent_interface() {
        let param = ParamBuilder::new("name")
            .type_of("string")
            .description("Name");
        let builder = FunctionBuilder::new("test_function")
            .description("A test function")
            .param(param)
            .required(vec!["name".to_string()]);

        assert_eq!(builder.name, "test_function");
        assert_eq!(builder.description, "A test function");
        assert_eq!(builder.parameters.len(), 1);
        assert_eq!(builder.required, vec!["name".to_string()]);
    }

    #[test]
    fn test_function_builder_with_json_schema() {
        let schema = json!({
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"}
            },
            "required": ["name", "age"]
        });

        let builder = FunctionBuilder::new("test_function").json_schema(schema.clone());
        assert_eq!(builder.raw_schema, Some(schema));
    }

    #[test]
    fn test_function_builder_build_with_parameters() {
        let param = ParamBuilder::new("name").type_of("string");
        let tool = FunctionBuilder::new("test_function")
            .description("A test function")
            .param(param)
            .required(vec!["name".to_string()])
            .build();

        assert_eq!(tool.tool_type, "function");
        assert_eq!(tool.function.name, "test_function");
        assert_eq!(tool.function.description, "A test function");
        assert!(tool.function.parameters.is_object());
    }

    #[test]
    fn test_function_builder_build_with_raw_schema() {
        let schema = json!({
            "type": "object",
            "properties": {
                "name": {"type": "string"}
            }
        });

        let tool = FunctionBuilder::new("test_function")
            .json_schema(schema.clone())
            .build();

        // The raw schema must pass through untouched.
        assert_eq!(tool.function.parameters, schema);
    }

    // --- LLMBuilder ---

    // Mock LLM provider for testing. Every provider trait is implemented
    // with `unimplemented!()` bodies: the builder tests below never invoke
    // provider behavior, they only inspect builder state.
    struct MockLLMProvider;

    #[async_trait::async_trait]
    impl crate::chat::ChatProvider for MockLLMProvider {
        async fn chat(
            &self,
            _messages: &[ChatMessage],
            _tools: Option<&[Tool]>,
            _json_schema: Option<StructuredOutputFormat>,
        ) -> Result<Box<dyn ChatResponse>, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::completion::CompletionProvider for MockLLMProvider {
        async fn complete(
            &self,
            _req: &crate::completion::CompletionRequest,
            _json_schema: Option<crate::chat::StructuredOutputFormat>,
        ) -> Result<crate::completion::CompletionResponse, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::embedding::EmbeddingProvider for MockLLMProvider {
        async fn embed(&self, _text: Vec<String>) -> Result<Vec<Vec<f32>>, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::models::ModelsProvider for MockLLMProvider {}

    impl crate::LLMProvider for MockLLMProvider {}

    #[test]
    fn test_llm_builder_new() {
        let builder = LLMBuilder::<MockLLMProvider>::new();
        assert!(builder.api_key.is_none());
        assert!(builder.base_url.is_none());
        assert!(builder.model.is_none());
        assert!(builder.max_tokens.is_none());
        assert!(builder.temperature.is_none());
        assert!(builder.system.is_none());
        assert!(builder.timeout_seconds.is_none());
        assert!(builder.tool_choice.is_none());
    }

    #[test]
    fn test_llm_builder_default() {
        let builder = LLMBuilder::<MockLLMProvider>::default();
        assert!(builder.api_key.is_none());
        assert!(builder.base_url.is_none());
        assert!(builder.model.is_none());
        assert_eq!(builder.validator_attempts, 0);
    }

    #[test]
    fn test_llm_builder_api_key() {
        let builder = LLMBuilder::<MockLLMProvider>::new().api_key("test_key");
        assert_eq!(builder.api_key, Some("test_key".to_string()));
    }

    #[test]
    fn test_llm_builder_base_url() {
        let builder = LLMBuilder::<MockLLMProvider>::new().base_url("https://api.example.com");
        assert_eq!(
            builder.base_url,
            Some("https://api.example.com".to_string())
        );
    }

    #[test]
    fn test_llm_builder_model() {
        let builder = LLMBuilder::<MockLLMProvider>::new().model("gpt-4");
        assert_eq!(builder.model, Some("gpt-4".to_string()));
    }

    #[test]
    fn test_llm_builder_max_tokens() {
        let builder = LLMBuilder::<MockLLMProvider>::new().max_tokens(1000);
        assert_eq!(builder.max_tokens, Some(1000));
    }

    #[test]
    fn test_llm_builder_temperature() {
        let builder = LLMBuilder::<MockLLMProvider>::new().temperature(0.7);
        assert_eq!(builder.temperature, Some(0.7));
    }

    #[test]
    fn test_llm_builder_system() {
        let builder = LLMBuilder::<MockLLMProvider>::new().system("You are a helpful assistant");
        assert_eq!(
            builder.system,
            Some("You are a helpful assistant".to_string())
        );
    }

    #[test]
    fn test_llm_builder_reasoning_effort() {
        // ReasoningEffort is stored via its Display string ("high").
        let builder = LLMBuilder::<MockLLMProvider>::new()
            .reasoning_effort(crate::chat::ReasoningEffort::High);
        assert_eq!(builder.reasoning_effort, Some("high".to_string()));
    }

    #[test]
    fn test_llm_builder_reasoning() {
        let builder = LLMBuilder::<MockLLMProvider>::new().reasoning(true);
        assert_eq!(builder.reasoning, Some(true));
    }

    #[test]
    fn test_llm_builder_reasoning_budget_tokens() {
        let builder = LLMBuilder::<MockLLMProvider>::new().reasoning_budget_tokens(5000);
        assert_eq!(builder.reasoning_budget_tokens, Some(5000));
    }

    #[test]
    fn test_llm_builder_timeout_seconds() {
        let builder = LLMBuilder::<MockLLMProvider>::new().timeout_seconds(30);
        assert_eq!(builder.timeout_seconds, Some(30));
    }

    #[test]
    fn test_llm_builder_top_p() {
        let builder = LLMBuilder::<MockLLMProvider>::new().top_p(0.9);
        assert_eq!(builder.top_p, Some(0.9));
    }

    #[test]
    fn test_llm_builder_top_k() {
        let builder = LLMBuilder::<MockLLMProvider>::new().top_k(50);
        assert_eq!(builder.top_k, Some(50));
    }

    #[test]
    fn test_llm_builder_embedding_encoding_format() {
        let builder = LLMBuilder::<MockLLMProvider>::new().embedding_encoding_format("float");
        assert_eq!(builder.embedding_encoding_format, Some("float".to_string()));
    }

    #[test]
    fn test_llm_builder_embedding_dimensions() {
        let builder = LLMBuilder::<MockLLMProvider>::new().embedding_dimensions(1536);
        assert_eq!(builder.embedding_dimensions, Some(1536));
    }

    #[test]
    fn test_llm_builder_validator() {
        let builder = LLMBuilder::<MockLLMProvider>::new().validator(|response| {
            if response.contains("error") {
                Err("Response contains error".to_string())
            } else {
                Ok(())
            }
        });
        assert!(builder.validator.is_some());
    }

    #[test]
    fn test_llm_builder_validator_attempts() {
        let builder = LLMBuilder::<MockLLMProvider>::new().validator_attempts(3);
        assert_eq!(builder.validator_attempts, 3);
    }

    #[test]
    fn test_llm_builder_enable_parallel_tool_use() {
        let builder = LLMBuilder::<MockLLMProvider>::new().enable_parallel_tool_use(true);
        assert_eq!(builder.enable_parallel_tool_use, Some(true));
    }

    #[test]
    fn test_llm_builder_tool_choice() {
        let builder = LLMBuilder::<MockLLMProvider>::new().tool_choice(ToolChoice::Auto);
        assert!(matches!(builder.tool_choice, Some(ToolChoice::Auto)));
    }

    #[test]
    fn test_llm_builder_disable_tools() {
        // disable_tools is sugar for tool_choice(ToolChoice::None).
        let builder = LLMBuilder::<MockLLMProvider>::new().disable_tools();
        assert!(matches!(builder.tool_choice, Some(ToolChoice::None)));
    }

    #[test]
    fn test_llm_builder_api_version() {
        let builder = LLMBuilder::<MockLLMProvider>::new().api_version("2023-05-15");
        assert_eq!(builder.api_version, Some("2023-05-15".to_string()));
    }

    #[test]
    fn test_llm_builder_deployment_id() {
        let builder = LLMBuilder::<MockLLMProvider>::new().deployment_id("my-deployment");
        assert_eq!(builder.deployment_id, Some("my-deployment".to_string()));
    }

    // Exercises most setters in one fluent chain to verify they compose.
    #[test]
    fn test_llm_builder_chaining() {
        let builder = LLMBuilder::<MockLLMProvider>::new()
            .api_key("test_key")
            .model("gpt-4")
            .max_tokens(2000)
            .temperature(0.8)
            .system("You are helpful")
            .timeout_seconds(60)
            .top_p(0.95)
            .top_k(40)
            .embedding_encoding_format("float")
            .embedding_dimensions(1536)
            .validator_attempts(5)
            .reasoning(true)
            .reasoning_budget_tokens(10000)
            .api_version("2023-05-15")
            .deployment_id("test-deployment");

        assert_eq!(builder.api_key, Some("test_key".to_string()));
        assert_eq!(builder.model, Some("gpt-4".to_string()));
        assert_eq!(builder.max_tokens, Some(2000));
        assert_eq!(builder.temperature, Some(0.8));
        assert_eq!(builder.system, Some("You are helpful".to_string()));
        assert_eq!(builder.timeout_seconds, Some(60));
        assert_eq!(builder.top_p, Some(0.95));
        assert_eq!(builder.top_k, Some(40));
        assert_eq!(builder.embedding_encoding_format, Some("float".to_string()));
        assert_eq!(builder.embedding_dimensions, Some(1536));
        assert_eq!(builder.validator_attempts, 5);
        assert_eq!(builder.reasoning, Some(true));
        assert_eq!(builder.reasoning_budget_tokens, Some(10000));
        assert_eq!(builder.api_version, Some("2023-05-15".to_string()));
        assert_eq!(builder.deployment_id, Some("test-deployment".to_string()));
    }
}