// autoagents_llm/builder.rs

//! Builder module for configuring and instantiating LLM providers.
//!
//! This module provides a flexible builder pattern for creating and configuring
//! LLM (Large Language Model) provider instances with various settings and options.

use crate::{
    HasConfig, LLMProvider,
    chat::{FunctionTool, ParameterProperty, ParametersSchema, ReasoningEffort, Tool, ToolChoice},
    error::LLMError,
};
use std::{collections::HashMap, marker::PhantomData};

/// A function type for validating LLM provider outputs.
/// Takes a response string and returns Ok(()) if valid, or Err with an error message if invalid.
pub type ValidatorFn = dyn Fn(&str) -> Result<(), String> + Send + Sync + 'static;
16
/// Supported LLM backend providers.
///
/// `PartialEq`/`Eq` are derived so callers (and tests) can compare variants
/// directly instead of resorting to `matches!`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LLMBackend {
    /// OpenAI API provider (GPT-3, GPT-4, etc.)
    OpenAI,
    /// Anthropic API provider (Claude models)
    Anthropic,
    /// Ollama local LLM provider for self-hosted models
    Ollama,
    /// DeepSeek API provider for their LLM models
    DeepSeek,
    /// X.AI (formerly Twitter) API provider
    XAI,
    /// Phind API provider for code-specialized models
    Phind,
    /// Google Gemini API provider
    Google,
    /// Groq API provider
    Groq,
    /// Azure OpenAI API provider
    AzureOpenAI,
    /// OpenRouter API provider for various models
    OpenRouter,
    /// MiniMax API provider
    MiniMax,
}
43
44/// Implements string parsing for LLMBackend enum.
45///
46/// Converts a string representation of a backend provider name into the corresponding
47/// LLMBackend variant. The parsing is case-insensitive.
48///
49/// # Arguments
50///
51/// * `s` - The string to parse
52///
53/// # Returns
54///
55/// * `Ok(LLMBackend)` - The corresponding backend variant if valid
56/// * `Err(LLMError)` - An error if the string doesn't match any known backend
57///
58/// # Examples
59///
60/// ```
61/// use std::str::FromStr;
62/// use autoagents_llm::builder::LLMBackend;
63///
64/// let backend = LLMBackend::from_str("openai").unwrap();
65/// assert!(matches!(backend, LLMBackend::OpenAI));
66///
67/// let err = LLMBackend::from_str("invalid").unwrap_err();
68/// assert!(err.to_string().contains("Unknown LLM backend"));
69/// ```
70impl std::str::FromStr for LLMBackend {
71    type Err = LLMError;
72
73    fn from_str(s: &str) -> Result<Self, Self::Err> {
74        match s.to_lowercase().as_str() {
75            "openai" => Ok(LLMBackend::OpenAI),
76            "anthropic" => Ok(LLMBackend::Anthropic),
77            "ollama" => Ok(LLMBackend::Ollama),
78            "deepseek" => Ok(LLMBackend::DeepSeek),
79            "xai" => Ok(LLMBackend::XAI),
80            "phind" => Ok(LLMBackend::Phind),
81            "google" => Ok(LLMBackend::Google),
82            "groq" => Ok(LLMBackend::Groq),
83            "azure-openai" => Ok(LLMBackend::AzureOpenAI),
84            "openrouter" => Ok(LLMBackend::OpenRouter),
85            "minimax" => Ok(LLMBackend::MiniMax),
86            _ => Err(LLMError::InvalidRequest(format!(
87                "Unknown LLM backend: {s}"
88            ))),
89        }
90    }
91}
92
93impl std::fmt::Display for LLMBackend {
94    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
95        let s = match self {
96            LLMBackend::OpenAI => "openai",
97            LLMBackend::Anthropic => "anthropic",
98            LLMBackend::Ollama => "ollama",
99            LLMBackend::DeepSeek => "deepseek",
100            LLMBackend::XAI => "xai",
101            LLMBackend::Phind => "phind",
102            LLMBackend::Google => "google",
103            LLMBackend::Groq => "groq",
104            LLMBackend::AzureOpenAI => "azure-openai",
105            LLMBackend::OpenRouter => "openrouter",
106            LLMBackend::MiniMax => "minimax",
107        };
108        f.write_str(s)
109    }
110}
111
/// Builder for configuring and instantiating LLM providers.
///
/// Provides a fluent interface for setting various configuration options
/// like model selection, API keys, generation parameters, etc.
pub struct LLMBuilder<L: LLMProvider + HasConfig> {
    /// Zero-sized marker tying the builder to the selected provider type `L`
    pub(crate) backend: PhantomData<L>,
    /// API key for authentication with the provider
    pub(crate) api_key: Option<String>,
    /// Base URL for API requests (primarily for self-hosted instances)
    pub(crate) base_url: Option<String>,
    /// Model identifier/name to use
    pub model: Option<String>,
    /// Maximum tokens to generate in responses
    pub max_tokens: Option<u32>,
    /// Temperature parameter for controlling response randomness (0.0-1.0)
    pub temperature: Option<f32>,
    /// Request timeout duration in seconds
    pub(crate) timeout_seconds: Option<u64>,
    /// Top-p (nucleus) sampling parameter
    pub top_p: Option<f32>,
    /// Top-k sampling parameter
    pub(crate) top_k: Option<u32>,
    /// Format specification for embedding outputs
    pub(crate) embedding_encoding_format: Option<String>,
    /// Vector dimensions for embedding outputs
    pub(crate) embedding_dimensions: Option<u32>,
    /// Optional validation function for response content
    pub(crate) validator: Option<Box<ValidatorFn>>,
    /// Number of retry attempts when validation fails
    pub(crate) validator_attempts: usize,
    /// Tool choice strategy passed to the provider
    pub(crate) tool_choice: Option<ToolChoice>,
    /// Enable parallel tool use
    pub(crate) enable_parallel_tool_use: Option<bool>,
    /// Enable reasoning mode
    pub(crate) reasoning: Option<bool>,
    /// Reasoning effort level, stored as its string representation
    pub(crate) reasoning_effort: Option<String>,
    /// Token budget allotted to reasoning (provider-specific)
    pub(crate) reasoning_budget_tokens: Option<u32>,
    /// API version (used by Azure OpenAI and similar versioned APIs)
    pub(crate) api_version: Option<String>,
    /// Deployment identifier (used by Azure OpenAI)
    pub(crate) deployment_id: Option<String>,
    /// Whether to normalize response format; defaults to `Some(true)` via `Default`
    pub(crate) normalize_response: Option<bool>,
    /// Extra JSON value for the request body — presumably merged into
    /// provider requests by the backend; exact usage depends on the provider
    pub(crate) extra_body: Option<serde_json::Value>,
    /// Provider-specific configuration
    pub config: L::Config,
}
164
165impl<L: LLMProvider + HasConfig> Default for LLMBuilder<L> {
166    fn default() -> Self {
167        Self {
168            backend: PhantomData,
169            api_key: None,
170            base_url: None,
171            model: None,
172            max_tokens: None,
173            temperature: None,
174            timeout_seconds: None,
175            top_p: None,
176            top_k: None,
177            embedding_encoding_format: None,
178            embedding_dimensions: None,
179            validator: None,
180            validator_attempts: 0,
181            tool_choice: None,
182            enable_parallel_tool_use: None,
183            reasoning: None,
184            reasoning_effort: None,
185            reasoning_budget_tokens: None,
186            api_version: None,
187            deployment_id: None,
188            normalize_response: Some(true), //Defaulting so it accumilates tool calls in streams, easy for agent handling
189            extra_body: None,
190            config: L::Config::default(),
191        }
192    }
193}
194
195impl<L: LLMProvider + HasConfig> LLMBuilder<L> {
196    /// Creates a new empty builder instance with default values.
197    pub fn new() -> Self {
198        Self::default()
199    }
200
201    /// Sets the API key for authentication.
202    pub fn api_key(mut self, key: impl Into<String>) -> Self {
203        self.api_key = Some(key.into());
204        self
205    }
206
207    /// Sets the base URL for API requests.
208    pub fn base_url(mut self, url: impl Into<String>) -> Self {
209        self.base_url = Some(url.into());
210        self
211    }
212
213    /// Sets the model identifier to use.
214    pub fn model(mut self, model: impl Into<String>) -> Self {
215        self.model = Some(model.into());
216        self
217    }
218
219    /// Sets the maximum number of tokens to generate.
220    pub fn max_tokens(mut self, max_tokens: u32) -> Self {
221        self.max_tokens = Some(max_tokens);
222        self
223    }
224
225    /// Sets the request timeout in seconds.
226    pub fn normalize_response(mut self, normalize_response: bool) -> Self {
227        self.normalize_response = Some(normalize_response);
228        self
229    }
230
231    /// Sets the temperature for controlling response randomness (0.0-1.0).
232    pub fn temperature(mut self, temperature: f32) -> Self {
233        self.temperature = Some(temperature);
234        self
235    }
236
237    /// Sets the reasoning flag.
238    pub fn reasoning_effort(mut self, reasoning_effort: ReasoningEffort) -> Self {
239        self.reasoning_effort = Some(reasoning_effort.to_string());
240        self
241    }
242
243    /// Sets the reasoning flag.
244    pub fn reasoning(mut self, reasoning: bool) -> Self {
245        self.reasoning = Some(reasoning);
246        self
247    }
248
249    /// Sets the reasoning budget tokens.
250    pub fn reasoning_budget_tokens(mut self, reasoning_budget_tokens: u32) -> Self {
251        self.reasoning_budget_tokens = Some(reasoning_budget_tokens);
252        self
253    }
254
255    /// Sets the request timeout in seconds.
256    pub fn timeout_seconds(mut self, timeout_seconds: u64) -> Self {
257        self.timeout_seconds = Some(timeout_seconds);
258        self
259    }
260
261    /// Sets the top-p (nucleus) sampling parameter.
262    pub fn top_p(mut self, top_p: f32) -> Self {
263        self.top_p = Some(top_p);
264        self
265    }
266
267    /// Sets the top-k sampling parameter.
268    pub fn top_k(mut self, top_k: u32) -> Self {
269        self.top_k = Some(top_k);
270        self
271    }
272
273    /// Sets the encoding format for embeddings.
274    pub fn embedding_encoding_format(
275        mut self,
276        embedding_encoding_format: impl Into<String>,
277    ) -> Self {
278        self.embedding_encoding_format = Some(embedding_encoding_format.into());
279        self
280    }
281
282    /// Sets the dimensions for embeddings.
283    pub fn embedding_dimensions(mut self, embedding_dimensions: u32) -> Self {
284        self.embedding_dimensions = Some(embedding_dimensions);
285        self
286    }
287
288    /// Sets a validation function to verify LLM responses.
289    ///
290    /// # Arguments
291    ///
292    /// * `f` - Function that takes a response string and returns Ok(()) if valid, or Err with error message if invalid
293    pub fn validator<F>(mut self, f: F) -> Self
294    where
295        F: Fn(&str) -> Result<(), String> + Send + Sync + 'static,
296    {
297        self.validator = Some(Box::new(f));
298        self
299    }
300
301    /// Sets the number of retry attempts for validation failures.
302    ///
303    /// # Arguments
304    ///
305    /// * `attempts` - Maximum number of times to retry generating a valid response
306    pub fn validator_attempts(mut self, attempts: usize) -> Self {
307        self.validator_attempts = attempts;
308        self
309    }
310
311    /// Enable parallel tool use
312    pub fn enable_parallel_tool_use(mut self, enable: bool) -> Self {
313        self.enable_parallel_tool_use = Some(enable);
314        self
315    }
316
317    /// Set tool choice.  Note that if the choice is given as Tool(name), and that
318    /// tool isn't available, the builder will fail.
319    pub fn tool_choice(mut self, choice: ToolChoice) -> Self {
320        self.tool_choice = Some(choice);
321        self
322    }
323
324    /// Explicitly disable the use of tools, even if they are provided.
325    pub fn disable_tools(mut self) -> Self {
326        self.tool_choice = Some(ToolChoice::None);
327        self
328    }
329
330    /// Set the API version.
331    pub fn api_version(mut self, api_version: impl Into<String>) -> Self {
332        self.api_version = Some(api_version.into());
333        self
334    }
335
336    /// Set the deployment id. Used in Azure OpenAI.
337    pub fn deployment_id(mut self, deployment_id: impl Into<String>) -> Self {
338        self.deployment_id = Some(deployment_id.into());
339        self
340    }
341
342    pub fn extra_body(mut self, extra_body: impl serde::Serialize) -> Self {
343        let value = serde_json::to_value(extra_body).ok();
344        self.extra_body = value;
345        self
346    }
347}
348
/// Builder for function parameters
#[allow(dead_code)]
pub struct ParamBuilder {
    /// Parameter name (becomes the property key in the JSON schema)
    name: String,
    /// JSON Schema type of the parameter; defaults to "string"
    property_type: String,
    /// Human-readable description of the parameter
    description: String,
    /// Item schema for array-typed parameters
    items: Option<Box<ParameterProperty>>,
    /// Allowed values for enum-typed parameters
    enum_list: Option<Vec<String>>,
}
358
359impl ParamBuilder {
360    /// Creates a new parameter builder
361    pub fn new(name: impl Into<String>) -> Self {
362        Self {
363            name: name.into(),
364            property_type: "string".to_string(),
365            description: String::default(),
366            items: None,
367            enum_list: None,
368        }
369    }
370
371    /// Sets the parameter type
372    pub fn type_of(mut self, type_str: impl Into<String>) -> Self {
373        self.property_type = type_str.into();
374        self
375    }
376
377    /// Sets the parameter description
378    pub fn description(mut self, desc: impl Into<String>) -> Self {
379        self.description = desc.into();
380        self
381    }
382
383    /// Sets the array item type for array parameters
384    pub fn items(mut self, item_property: ParameterProperty) -> Self {
385        self.items = Some(Box::new(item_property));
386        self
387    }
388
389    /// Sets the enum values for enum parameters
390    pub fn enum_values(mut self, values: Vec<String>) -> Self {
391        self.enum_list = Some(values);
392        self
393    }
394
395    /// Builds the parameter property
396    #[allow(dead_code)]
397    fn build(self) -> (String, ParameterProperty) {
398        (
399            self.name,
400            ParameterProperty {
401                property_type: self.property_type,
402                description: self.description,
403                items: self.items,
404                enum_list: self.enum_list,
405            },
406        )
407    }
408}
409
/// Builder for function tools
#[allow(dead_code)]
pub struct FunctionBuilder {
    /// Function name exposed to the model
    name: String,
    /// Human-readable description of what the function does
    description: String,
    /// Parameter builders accumulated via `param`
    parameters: Vec<ParamBuilder>,
    /// Names of parameters that are required
    required: Vec<String>,
    /// Full JSON Schema override; when set, `parameters`/`required` are ignored
    raw_schema: Option<serde_json::Value>,
}
419
420impl FunctionBuilder {
421    /// Creates a new function builder
422    pub fn new(name: impl Into<String>) -> Self {
423        Self {
424            name: name.into(),
425            description: String::default(),
426            parameters: Vec::default(),
427            required: Vec::default(),
428            raw_schema: None,
429        }
430    }
431
432    /// Sets the function description
433    pub fn description(mut self, desc: impl Into<String>) -> Self {
434        self.description = desc.into();
435        self
436    }
437
438    /// Adds a parameter to the function
439    pub fn param(mut self, param: ParamBuilder) -> Self {
440        self.parameters.push(param);
441        self
442    }
443
444    /// Marks parameters as required
445    pub fn required(mut self, param_names: Vec<String>) -> Self {
446        self.required = param_names;
447        self
448    }
449
450    /// Provides a full JSON Schema for the parameters.  Using this method
451    /// bypasses the DSL and allows arbitrary complex schemas (nested arrays,
452    /// objects, oneOf, etc.).
453    pub fn json_schema(mut self, schema: serde_json::Value) -> Self {
454        self.raw_schema = Some(schema);
455        self
456    }
457
458    /// Builds the function tool
459    #[allow(dead_code)]
460    pub fn build(self) -> Tool {
461        let parameters_value = if let Some(schema) = self.raw_schema {
462            schema
463        } else {
464            let mut properties = HashMap::new();
465            for param in self.parameters {
466                let (name, prop) = param.build();
467                properties.insert(name, prop);
468            }
469
470            serde_json::to_value(ParametersSchema {
471                schema_type: "object".to_string(),
472                properties,
473                required: self.required,
474            })
475            .unwrap_or_else(|_| serde_json::Value::Object(serde_json::Map::new()))
476        };
477
478        Tool {
479            tool_type: "function".to_string(),
480            function: FunctionTool {
481                name: self.name,
482                description: self.description,
483                parameters: parameters_value,
484            },
485        }
486    }
487}
488
#[cfg(test)]
mod tests {
    use super::*;
    use crate::chat::{ChatMessage, ChatResponse, StructuredOutputFormat};
    use crate::error::LLMError;
    use serde_json::json;
    use std::str::FromStr;

    #[test]
    fn test_llm_backend_from_str() {
        assert!(matches!(
            LLMBackend::from_str("openai").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("OpenAI").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("OPENAI").unwrap(),
            LLMBackend::OpenAI
        ));
        assert!(matches!(
            LLMBackend::from_str("anthropic").unwrap(),
            LLMBackend::Anthropic
        ));
        assert!(matches!(
            LLMBackend::from_str("ollama").unwrap(),
            LLMBackend::Ollama
        ));
        assert!(matches!(
            LLMBackend::from_str("deepseek").unwrap(),
            LLMBackend::DeepSeek
        ));
        assert!(matches!(
            LLMBackend::from_str("xai").unwrap(),
            LLMBackend::XAI
        ));
        assert!(matches!(
            LLMBackend::from_str("phind").unwrap(),
            LLMBackend::Phind
        ));
        assert!(matches!(
            LLMBackend::from_str("google").unwrap(),
            LLMBackend::Google
        ));
        assert!(matches!(
            LLMBackend::from_str("groq").unwrap(),
            LLMBackend::Groq
        ));
        assert!(matches!(
            LLMBackend::from_str("azure-openai").unwrap(),
            LLMBackend::AzureOpenAI
        ));
        // Previously uncovered variants.
        assert!(matches!(
            LLMBackend::from_str("openrouter").unwrap(),
            LLMBackend::OpenRouter
        ));
        assert!(matches!(
            LLMBackend::from_str("minimax").unwrap(),
            LLMBackend::MiniMax
        ));

        let result = LLMBackend::from_str("invalid");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Unknown LLM backend")
        );
    }

    #[test]
    fn test_llm_backend_display_round_trip() {
        // Every variant's Display output must parse back to the same variant.
        // Compared via Display strings so this test does not require PartialEq.
        let variants = [
            LLMBackend::OpenAI,
            LLMBackend::Anthropic,
            LLMBackend::Ollama,
            LLMBackend::DeepSeek,
            LLMBackend::XAI,
            LLMBackend::Phind,
            LLMBackend::Google,
            LLMBackend::Groq,
            LLMBackend::AzureOpenAI,
            LLMBackend::OpenRouter,
            LLMBackend::MiniMax,
        ];
        for backend in variants {
            let rendered = backend.to_string();
            let parsed = LLMBackend::from_str(&rendered).unwrap();
            assert_eq!(rendered, parsed.to_string());
        }
    }

    #[test]
    fn test_param_builder_new() {
        let builder = ParamBuilder::new("test_param");
        assert_eq!(builder.name, "test_param");
        assert_eq!(builder.property_type, "string");
        assert_eq!(builder.description, "");
        assert!(builder.items.is_none());
        assert!(builder.enum_list.is_none());
    }

    #[test]
    fn test_param_builder_fluent_interface() {
        let builder = ParamBuilder::new("test_param")
            .type_of("integer")
            .description("A test parameter")
            .enum_values(vec!["option1".to_string(), "option2".to_string()]);

        assert_eq!(builder.name, "test_param");
        assert_eq!(builder.property_type, "integer");
        assert_eq!(builder.description, "A test parameter");
        assert_eq!(
            builder.enum_list,
            Some(vec!["option1".to_string(), "option2".to_string()])
        );
    }

    #[test]
    fn test_param_builder_with_items() {
        let item_property = ParameterProperty {
            property_type: "string".to_string(),
            description: "Array item".to_string(),
            items: None,
            enum_list: None,
        };

        let builder = ParamBuilder::new("array_param")
            .type_of("array")
            .description("An array parameter")
            .items(item_property);

        assert_eq!(builder.name, "array_param");
        assert_eq!(builder.property_type, "array");
        assert_eq!(builder.description, "An array parameter");
        assert!(builder.items.is_some());
    }

    #[test]
    fn test_param_builder_build() {
        let builder = ParamBuilder::new("test_param")
            .type_of("string")
            .description("A test parameter");

        let (name, property) = builder.build();
        assert_eq!(name, "test_param");
        assert_eq!(property.property_type, "string");
        assert_eq!(property.description, "A test parameter");
    }

    #[test]
    fn test_function_builder_new() {
        let builder = FunctionBuilder::new("test_function");
        assert_eq!(builder.name, "test_function");
        assert_eq!(builder.description, "");
        assert!(builder.parameters.is_empty());
        assert!(builder.required.is_empty());
        assert!(builder.raw_schema.is_none());
    }

    #[test]
    fn test_function_builder_fluent_interface() {
        let param = ParamBuilder::new("name")
            .type_of("string")
            .description("Name");
        let builder = FunctionBuilder::new("test_function")
            .description("A test function")
            .param(param)
            .required(vec!["name".to_string()]);

        assert_eq!(builder.name, "test_function");
        assert_eq!(builder.description, "A test function");
        assert_eq!(builder.parameters.len(), 1);
        assert_eq!(builder.required, vec!["name".to_string()]);
    }

    #[test]
    fn test_function_builder_with_json_schema() {
        let schema = json!({
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"}
            },
            "required": ["name", "age"]
        });

        let builder = FunctionBuilder::new("test_function").json_schema(schema.clone());
        assert_eq!(builder.raw_schema, Some(schema));
    }

    #[test]
    fn test_function_builder_build_with_parameters() {
        let param = ParamBuilder::new("name").type_of("string");
        let tool = FunctionBuilder::new("test_function")
            .description("A test function")
            .param(param)
            .required(vec!["name".to_string()])
            .build();

        assert_eq!(tool.tool_type, "function");
        assert_eq!(tool.function.name, "test_function");
        assert_eq!(tool.function.description, "A test function");
        assert!(tool.function.parameters.is_object());
    }

    #[test]
    fn test_function_builder_build_with_raw_schema() {
        let schema = json!({
            "type": "object",
            "properties": {
                "name": {"type": "string"}
            }
        });

        let tool = FunctionBuilder::new("test_function")
            .json_schema(schema.clone())
            .build();

        assert_eq!(tool.function.parameters, schema);
    }

    // Mock LLM provider for testing; every provider trait is stubbed with
    // `unimplemented!()` because builder tests never invoke the provider.
    struct MockLLMProvider;

    #[async_trait::async_trait]
    impl crate::chat::ChatProvider for MockLLMProvider {
        async fn chat(
            &self,
            _messages: &[ChatMessage],
            _json_schema: Option<StructuredOutputFormat>,
        ) -> Result<Box<dyn ChatResponse>, LLMError> {
            unimplemented!()
        }

        async fn chat_with_tools(
            &self,
            _messages: &[ChatMessage],
            _tools: Option<&[Tool]>,
            _json_schema: Option<StructuredOutputFormat>,
        ) -> Result<Box<dyn ChatResponse>, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::completion::CompletionProvider for MockLLMProvider {
        async fn complete(
            &self,
            _req: &crate::completion::CompletionRequest,
            _json_schema: Option<crate::chat::StructuredOutputFormat>,
        ) -> Result<crate::completion::CompletionResponse, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::embedding::EmbeddingProvider for MockLLMProvider {
        async fn embed(&self, _text: Vec<String>) -> Result<Vec<Vec<f32>>, LLMError> {
            unimplemented!()
        }
    }

    #[async_trait::async_trait]
    impl crate::models::ModelsProvider for MockLLMProvider {}

    impl crate::LLMProvider for MockLLMProvider {}

    impl crate::HasConfig for MockLLMProvider {
        type Config = crate::NoConfig;
    }

    #[test]
    fn test_llm_builder_new() {
        let builder = LLMBuilder::<MockLLMProvider>::new();
        assert!(builder.api_key.is_none());
        assert!(builder.base_url.is_none());
        assert!(builder.model.is_none());
        assert!(builder.max_tokens.is_none());
        assert!(builder.temperature.is_none());
        assert!(builder.timeout_seconds.is_none());
        assert!(builder.tool_choice.is_none());
    }

    #[test]
    fn test_llm_builder_default() {
        let builder = LLMBuilder::<MockLLMProvider>::default();
        assert!(builder.api_key.is_none());
        assert!(builder.base_url.is_none());
        assert!(builder.model.is_none());
        assert_eq!(builder.validator_attempts, 0);
        // Normalization is the one option that defaults to on.
        assert_eq!(builder.normalize_response, Some(true));
    }

    #[test]
    fn test_llm_builder_api_key() {
        let builder = LLMBuilder::<MockLLMProvider>::new().api_key("test_key");
        assert_eq!(builder.api_key, Some("test_key".to_string()));
    }

    #[test]
    fn test_llm_builder_base_url() {
        let builder = LLMBuilder::<MockLLMProvider>::new().base_url("https://api.example.com");
        assert_eq!(
            builder.base_url,
            Some("https://api.example.com".to_string())
        );
    }

    #[test]
    fn test_llm_builder_model() {
        let builder = LLMBuilder::<MockLLMProvider>::new().model("gpt-4");
        assert_eq!(builder.model, Some("gpt-4".to_string()));
    }

    #[test]
    fn test_llm_builder_max_tokens() {
        let builder = LLMBuilder::<MockLLMProvider>::new().max_tokens(1000);
        assert_eq!(builder.max_tokens, Some(1000));
    }

    #[test]
    fn test_llm_builder_temperature() {
        let builder = LLMBuilder::<MockLLMProvider>::new().temperature(0.7);
        assert_eq!(builder.temperature, Some(0.7));
    }

    #[test]
    fn test_llm_builder_reasoning_effort() {
        let builder = LLMBuilder::<MockLLMProvider>::new()
            .reasoning_effort(crate::chat::ReasoningEffort::High);
        assert_eq!(builder.reasoning_effort, Some("high".to_string()));
    }

    #[test]
    fn test_llm_builder_reasoning() {
        let builder = LLMBuilder::<MockLLMProvider>::new().reasoning(true);
        assert_eq!(builder.reasoning, Some(true));
    }

    #[test]
    fn test_llm_builder_reasoning_budget_tokens() {
        let builder = LLMBuilder::<MockLLMProvider>::new().reasoning_budget_tokens(5000);
        assert_eq!(builder.reasoning_budget_tokens, Some(5000));
    }

    #[test]
    fn test_llm_builder_timeout_seconds() {
        let builder = LLMBuilder::<MockLLMProvider>::new().timeout_seconds(30);
        assert_eq!(builder.timeout_seconds, Some(30));
    }

    #[test]
    fn test_llm_builder_top_p() {
        let builder = LLMBuilder::<MockLLMProvider>::new().top_p(0.9);
        assert_eq!(builder.top_p, Some(0.9));
    }

    #[test]
    fn test_llm_builder_top_k() {
        let builder = LLMBuilder::<MockLLMProvider>::new().top_k(50);
        assert_eq!(builder.top_k, Some(50));
    }

    #[test]
    fn test_llm_builder_embedding_encoding_format() {
        let builder = LLMBuilder::<MockLLMProvider>::new().embedding_encoding_format("float");
        assert_eq!(builder.embedding_encoding_format, Some("float".to_string()));
    }

    #[test]
    fn test_llm_builder_embedding_dimensions() {
        let builder = LLMBuilder::<MockLLMProvider>::new().embedding_dimensions(1536);
        assert_eq!(builder.embedding_dimensions, Some(1536));
    }

    #[test]
    fn test_llm_builder_validator() {
        let builder = LLMBuilder::<MockLLMProvider>::new().validator(|response| {
            if response.contains("error") {
                Err("Response contains error".to_string())
            } else {
                Ok(())
            }
        });
        assert!(builder.validator.is_some());
    }

    #[test]
    fn test_llm_builder_validator_attempts() {
        let builder = LLMBuilder::<MockLLMProvider>::new().validator_attempts(3);
        assert_eq!(builder.validator_attempts, 3);
    }

    #[test]
    fn test_llm_builder_enable_parallel_tool_use() {
        let builder = LLMBuilder::<MockLLMProvider>::new().enable_parallel_tool_use(true);
        assert_eq!(builder.enable_parallel_tool_use, Some(true));
    }

    #[test]
    fn test_llm_builder_tool_choice() {
        let builder = LLMBuilder::<MockLLMProvider>::new().tool_choice(ToolChoice::Auto);
        assert!(matches!(builder.tool_choice, Some(ToolChoice::Auto)));
    }

    #[test]
    fn test_llm_builder_disable_tools() {
        let builder = LLMBuilder::<MockLLMProvider>::new().disable_tools();
        assert!(matches!(builder.tool_choice, Some(ToolChoice::None)));
    }

    #[test]
    fn test_llm_builder_api_version() {
        let builder = LLMBuilder::<MockLLMProvider>::new().api_version("2023-05-15");
        assert_eq!(builder.api_version, Some("2023-05-15".to_string()));
    }

    #[test]
    fn test_llm_builder_deployment_id() {
        let builder = LLMBuilder::<MockLLMProvider>::new().deployment_id("my-deployment");
        assert_eq!(builder.deployment_id, Some("my-deployment".to_string()));
    }

    #[test]
    fn test_llm_builder_chaining() {
        let builder = LLMBuilder::<MockLLMProvider>::new()
            .api_key("test_key")
            .model("gpt-4")
            .max_tokens(2000)
            .temperature(0.8)
            .timeout_seconds(60)
            .top_p(0.95)
            .top_k(40)
            .embedding_encoding_format("float")
            .embedding_dimensions(1536)
            .validator_attempts(5)
            .reasoning(true)
            .reasoning_budget_tokens(10000)
            .api_version("2023-05-15")
            .deployment_id("test-deployment");

        assert_eq!(builder.api_key, Some("test_key".to_string()));
        assert_eq!(builder.model, Some("gpt-4".to_string()));
        assert_eq!(builder.max_tokens, Some(2000));
        assert_eq!(builder.temperature, Some(0.8));
        assert_eq!(builder.timeout_seconds, Some(60));
        assert_eq!(builder.top_p, Some(0.95));
        assert_eq!(builder.top_k, Some(40));
        assert_eq!(builder.embedding_encoding_format, Some("float".to_string()));
        assert_eq!(builder.embedding_dimensions, Some(1536));
        assert_eq!(builder.validator_attempts, 5);
        assert_eq!(builder.reasoning, Some(true));
        assert_eq!(builder.reasoning_budget_tokens, Some(10000));
        assert_eq!(builder.api_version, Some("2023-05-15".to_string()));
        assert_eq!(builder.deployment_id, Some("test-deployment".to_string()));
    }
}