cortexai_cloudflare/
config.rs1use cortexai_llm_client::Provider;
4
/// Configuration for a Cloudflare-hosted LLM chat client.
///
/// Construct via [`CloudflareConfig::default`] or the fluent
/// [`CloudflareConfig::builder`].
#[derive(Debug, Clone)]
pub struct CloudflareConfig {
    /// Upstream LLM provider (defaults to `Provider::OpenAI`).
    pub provider: Provider,
    /// Provider API key; empty string by default — presumably must be
    /// set before making requests (TODO confirm against the client).
    pub api_key: String,
    /// Model identifier, e.g. `"gpt-4"` (the default).
    pub model: String,
    /// Optional system prompt; `None` means no system prompt is supplied.
    pub system_prompt: Option<String>,
    /// Sampling temperature forwarded to the provider (default `0.7`).
    pub temperature: f32,
    /// Optional cap on generated tokens; `None` leaves the limit to the provider.
    pub max_tokens: Option<u32>,
    /// Whether to request streaming responses (default `false`).
    pub stream: bool,
}
29
30impl Default for CloudflareConfig {
31 fn default() -> Self {
32 Self {
33 provider: Provider::OpenAI,
34 api_key: String::new(),
35 model: "gpt-4".to_string(),
36 system_prompt: None,
37 temperature: 0.7,
38 max_tokens: None,
39 stream: false,
40 }
41 }
42}
43
44impl CloudflareConfig {
45 pub fn builder() -> CloudflareConfigBuilder {
47 CloudflareConfigBuilder::default()
48 }
49}
50
/// Fluent builder for [`CloudflareConfig`].
///
/// Every field starts as `None` (via `#[derive(Default)]`); `build()`
/// substitutes defaults for any field left unset.
#[derive(Debug, Default)]
pub struct CloudflareConfigBuilder {
    // Each field mirrors a `CloudflareConfig` field, wrapped in `Option`
    // so `build()` can distinguish "explicitly set" from "use the default".
    provider: Option<Provider>,
    api_key: Option<String>,
    model: Option<String>,
    system_prompt: Option<String>,
    temperature: Option<f32>,
    max_tokens: Option<u32>,
    stream: Option<bool>,
}
62
63impl CloudflareConfigBuilder {
64 pub fn provider(mut self, provider: Provider) -> Self {
66 self.provider = Some(provider);
67 self
68 }
69
70 pub fn api_key(mut self, api_key: impl Into<String>) -> Self {
72 self.api_key = Some(api_key.into());
73 self
74 }
75
76 pub fn model(mut self, model: impl Into<String>) -> Self {
78 self.model = Some(model.into());
79 self
80 }
81
82 pub fn system_prompt(mut self, prompt: impl Into<String>) -> Self {
84 self.system_prompt = Some(prompt.into());
85 self
86 }
87
88 pub fn temperature(mut self, temp: f32) -> Self {
90 self.temperature = Some(temp);
91 self
92 }
93
94 pub fn max_tokens(mut self, tokens: u32) -> Self {
96 self.max_tokens = Some(tokens);
97 self
98 }
99
100 pub fn stream(mut self, enable: bool) -> Self {
102 self.stream = Some(enable);
103 self
104 }
105
106 pub fn build(self) -> CloudflareConfig {
108 CloudflareConfig {
109 provider: self.provider.unwrap_or(Provider::OpenAI),
110 api_key: self.api_key.unwrap_or_default(),
111 model: self.model.unwrap_or_else(|| "gpt-4".to_string()),
112 system_prompt: self.system_prompt,
113 temperature: self.temperature.unwrap_or(0.7),
114 max_tokens: self.max_tokens,
115 stream: self.stream.unwrap_or(false),
116 }
117 }
118}
119
#[cfg(test)]
mod tests {
    use super::*;

    // Builder round-trip: explicitly set fields must come back unchanged.
    #[test]
    fn test_config_builder() {
        let config = CloudflareConfig::builder()
            .provider(Provider::Anthropic)
            .api_key("test-key")
            .model("claude-3")
            .temperature(0.5)
            .build();

        assert!(matches!(config.provider, Provider::Anthropic));
        assert_eq!(config.api_key, "test-key");
        assert_eq!(config.model, "claude-3");
        // Exact f32 equality is fine here: 0.5 is stored verbatim, not computed.
        assert_eq!(config.temperature, 0.5);
    }

    // Defaults pinned by `Default for CloudflareConfig`.
    #[test]
    fn test_config_defaults() {
        let config = CloudflareConfig::default();

        assert!(matches!(config.provider, Provider::OpenAI));
        assert_eq!(config.temperature, 0.7);
        assert!(!config.stream);
    }
}
147}