//! Configuration for Cloudflare Workers agent

use cortexai_llm_client::Provider;

/// Configuration for the Cloudflare agent.
///
/// Construct via [`CloudflareConfig::builder`] or [`Default::default`];
/// the defaults target OpenAI's `gpt-4` with a temperature of 0.7.
#[derive(Debug, Clone)]
pub struct CloudflareConfig {
    /// LLM provider (OpenAI, Anthropic, OpenRouter)
    pub provider: Provider,

    /// API key for the provider (empty by default; caller must supply one)
    pub api_key: String,

    /// Model identifier (e.g., "gpt-4", "claude-3-5-sonnet-20241022")
    pub model: String,

    /// System prompt for the agent; `None` leaves it to the provider default
    pub system_prompt: Option<String>,

    /// Temperature for response generation (0.0 - 2.0); not validated here
    pub temperature: f32,

    /// Maximum tokens in response; `None` leaves the limit to the provider
    pub max_tokens: Option<u32>,

    /// Whether to use streaming responses
    pub stream: bool,
}

30impl Default for CloudflareConfig {
31    fn default() -> Self {
32        Self {
33            provider: Provider::OpenAI,
34            api_key: String::new(),
35            model: "gpt-4".to_string(),
36            system_prompt: None,
37            temperature: 0.7,
38            max_tokens: None,
39            stream: false,
40        }
41    }
42}
43
44impl CloudflareConfig {
45    /// Create a new configuration builder
46    pub fn builder() -> CloudflareConfigBuilder {
47        CloudflareConfigBuilder::default()
48    }
49}
50
/// Builder for [`CloudflareConfig`].
///
/// Every field is optional; `build()` fills unset fields with the same
/// defaults as `CloudflareConfig::default()`.
#[derive(Debug, Default)]
pub struct CloudflareConfigBuilder {
    provider: Option<Provider>,    // defaults to Provider::OpenAI
    api_key: Option<String>,       // defaults to an empty string
    model: Option<String>,         // defaults to "gpt-4"
    system_prompt: Option<String>, // stays None if unset
    temperature: Option<f32>,      // defaults to 0.7
    max_tokens: Option<u32>,       // stays None if unset
    stream: Option<bool>,          // defaults to false
}

63impl CloudflareConfigBuilder {
64    /// Set the LLM provider
65    pub fn provider(mut self, provider: Provider) -> Self {
66        self.provider = Some(provider);
67        self
68    }
69
70    /// Set the API key
71    pub fn api_key(mut self, api_key: impl Into<String>) -> Self {
72        self.api_key = Some(api_key.into());
73        self
74    }
75
76    /// Set the model identifier
77    pub fn model(mut self, model: impl Into<String>) -> Self {
78        self.model = Some(model.into());
79        self
80    }
81
82    /// Set the system prompt
83    pub fn system_prompt(mut self, prompt: impl Into<String>) -> Self {
84        self.system_prompt = Some(prompt.into());
85        self
86    }
87
88    /// Set the temperature
89    pub fn temperature(mut self, temp: f32) -> Self {
90        self.temperature = Some(temp);
91        self
92    }
93
94    /// Set maximum tokens
95    pub fn max_tokens(mut self, tokens: u32) -> Self {
96        self.max_tokens = Some(tokens);
97        self
98    }
99
100    /// Enable streaming responses
101    pub fn stream(mut self, enable: bool) -> Self {
102        self.stream = Some(enable);
103        self
104    }
105
106    /// Build the configuration
107    pub fn build(self) -> CloudflareConfig {
108        CloudflareConfig {
109            provider: self.provider.unwrap_or(Provider::OpenAI),
110            api_key: self.api_key.unwrap_or_default(),
111            model: self.model.unwrap_or_else(|| "gpt-4".to_string()),
112            system_prompt: self.system_prompt,
113            temperature: self.temperature.unwrap_or(0.7),
114            max_tokens: self.max_tokens,
115            stream: self.stream.unwrap_or(false),
116        }
117    }
118}
119
#[cfg(test)]
mod tests {
    use super::*;

    /// Every explicitly-set field must survive the round trip through
    /// the builder.
    #[test]
    fn test_config_builder() {
        let config = CloudflareConfig::builder()
            .model("claude-3")
            .api_key("test-key")
            .temperature(0.5)
            .provider(Provider::Anthropic)
            .build();

        assert_eq!(config.model, "claude-3");
        assert_eq!(config.api_key, "test-key");
        assert_eq!(config.temperature, 0.5);
        assert!(matches!(config.provider, Provider::Anthropic));
    }

    /// `Default` must produce the documented fallback values.
    #[test]
    fn test_config_defaults() {
        let config = CloudflareConfig::default();

        assert!(!config.stream);
        assert_eq!(config.temperature, 0.7);
        assert!(matches!(config.provider, Provider::OpenAI));
    }
}