// cortexai_cloudflare/agent.rs
//! Cloudflare Workers AI Agent

use crate::config::CloudflareConfig;
use crate::error::{CloudflareError, Result};
use crate::http::CloudflareHttpClient;
use crate::kv::{KvStore, SessionMetadata};
use cortexai_llm_client::{Message, RequestBuilder, Role};

/// AI Agent for Cloudflare Workers
///
/// Provides chat functionality with optional KV-based conversation persistence.
pub struct CloudflareAgent {
    // Provider, model, credentials, and generation settings.
    config: CloudflareConfig,
    // In-memory conversation history; index 0 holds the system prompt
    // when one is configured (see `new` and `clear_history`).
    messages: Vec<Message>,
    // Optional KV store backing session persistence; `None` until
    // attached via `with_kv`.
    kv: Option<KvStore>,
}
17
18impl CloudflareAgent {
19    /// Create a new Cloudflare agent with the given configuration
20    pub fn new(config: CloudflareConfig) -> Self {
21        let mut messages = Vec::new();
22
23        // Add system prompt if configured
24        if let Some(ref prompt) = config.system_prompt {
25            messages.push(Message::system(prompt));
26        }
27
28        Self {
29            config,
30            messages,
31            kv: None,
32        }
33    }
34
35    /// Attach a KV store for conversation persistence
36    pub fn with_kv(mut self, kv: KvStore) -> Self {
37        self.kv = Some(kv);
38        self
39    }
40
41    /// Send a message and get a response (non-streaming)
42    pub async fn chat(&mut self, message: &str) -> Result<AgentResponse> {
43        // Add user message
44        self.messages.push(Message::user(message));
45
46        // Build and send request
47        let request = self.build_request(false)?;
48        let response = CloudflareHttpClient::execute(request).await?;
49
50        // Add assistant response to history
51        self.messages.push(Message::assistant(&response.content));
52
53        Ok(AgentResponse {
54            content: response.content,
55            tool_calls: if response.tool_calls.is_empty() {
56                None
57            } else {
58                Some(response.tool_calls)
59            },
60            usage: response.usage.map(|u| AgentUsage {
61                prompt_tokens: u.prompt_tokens,
62                completion_tokens: u.completion_tokens,
63                total_tokens: u.total_tokens,
64            }),
65        })
66    }
67
68    /// Send a message with session persistence
69    pub async fn chat_with_session(
70        &mut self,
71        session_id: &str,
72        message: &str,
73    ) -> Result<AgentResponse> {
74        // Check KV is configured
75        if self.kv.is_none() {
76            return Err(CloudflareError::ConfigError(
77                "KV store not configured".to_string(),
78            ));
79        }
80
81        // Load existing conversation
82        let history = self
83            .kv
84            .as_ref()
85            .unwrap()
86            .get_conversation(session_id)
87            .await?;
88
89        if !history.is_empty() {
90            // Keep system prompt if exists, then add history
91            let system_prompt = self.messages.first().cloned();
92            self.messages.clear();
93            if let Some(prompt) = system_prompt {
94                if matches!(prompt.role, Role::System) {
95                    self.messages.push(prompt);
96                }
97            }
98            self.messages.extend(history);
99        }
100
101        // Chat normally
102        let response = self.chat(message).await?;
103
104        // Save updated conversation
105        let kv = self.kv.as_ref().unwrap();
106        kv.save_conversation(session_id, &self.messages).await?;
107
108        // Update metadata
109        let mut metadata = kv
110            .get_metadata(session_id)
111            .await?
112            .unwrap_or_else(|| SessionMetadata::new(session_id));
113
114        if let Some(usage) = &response.usage {
115            metadata.add_tokens(usage.total_tokens);
116        }
117        metadata.increment_messages();
118        kv.save_metadata(session_id, &metadata).await?;
119
120        Ok(response)
121    }
122
123    /// Send a message and get a streaming response
124    pub async fn chat_stream(&mut self, message: &str) -> Result<worker::Response> {
125        // Add user message
126        self.messages.push(Message::user(message));
127
128        // Build and send streaming request
129        let request = self.build_request(true)?;
130        let response = CloudflareHttpClient::execute_stream(request).await?;
131
132        Ok(response)
133    }
134
135    /// Get current conversation history
136    pub fn history(&self) -> &[Message] {
137        &self.messages
138    }
139
140    /// Clear conversation history (keeps system prompt)
141    pub fn clear_history(&mut self) {
142        let system_prompt = self.messages.first().cloned();
143        self.messages.clear();
144        if let Some(prompt) = system_prompt {
145            if matches!(prompt.role, Role::System) {
146                self.messages.push(prompt);
147            }
148        }
149    }
150
151    /// Build an HTTP request for the LLM API
152    fn build_request(&self, stream: bool) -> Result<cortexai_llm_client::HttpRequest> {
153        let mut builder = RequestBuilder::new(self.config.provider)
154            .api_key(&self.config.api_key)
155            .model(&self.config.model)
156            .temperature(self.config.temperature)
157            .stream(stream)
158            .messages(&self.messages);
159
160        if let Some(max_tokens) = self.config.max_tokens {
161            builder = builder.max_tokens(max_tokens);
162        }
163
164        builder
165            .build()
166            .map_err(|e| CloudflareError::ConfigError(e.to_string()))
167    }
168}
169
/// Response from the agent
#[derive(Debug, Clone)]
pub struct AgentResponse {
    /// Generated content
    pub content: String,
    /// Tool calls requested by the model; `None` when the provider
    /// returned an empty tool-call list
    pub tool_calls: Option<Vec<cortexai_llm_client::ToolCall>>,
    /// Token usage, when reported by the provider
    pub usage: Option<AgentUsage>,
}
180
/// Token usage information
#[derive(Debug, Clone)]
pub struct AgentUsage {
    /// Tokens consumed by the prompt (input)
    pub prompt_tokens: u32,
    /// Tokens generated in the completion (output)
    pub completion_tokens: u32,
    /// Total tokens billed for the exchange
    pub total_tokens: u32,
}
188
#[cfg(test)]
mod tests {
    use super::*;
    use cortexai_llm_client::Provider;

    /// A freshly constructed agent seeds its history with the system prompt.
    #[test]
    fn test_agent_creation() {
        let cfg = CloudflareConfig::builder()
            .provider(Provider::Anthropic)
            .api_key("test-key")
            .model("claude-3")
            .system_prompt("You are helpful")
            .build();

        let agent = CloudflareAgent::new(cfg);

        assert_eq!(agent.messages.len(), 1);
        assert_eq!(agent.messages[0].content, "You are helpful");
        assert!(matches!(agent.messages[0].role, Role::System));
    }

    /// Clearing history drops everything except the leading system prompt.
    #[test]
    fn test_clear_history() {
        let cfg = CloudflareConfig::builder()
            .provider(Provider::OpenAI)
            .api_key("test")
            .system_prompt("System")
            .build();

        let mut agent = CloudflareAgent::new(cfg);

        // Fake one user/assistant exchange without touching the network.
        agent.messages.push(Message::user("Hello"));
        agent.messages.push(Message::assistant("Hi"));
        assert_eq!(agent.messages.len(), 3);

        agent.clear_history();

        // Only the system prompt survives.
        assert_eq!(agent.messages.len(), 1);
        assert!(matches!(agent.messages[0].role, Role::System));
    }

    /// With no system prompt configured, the agent starts empty.
    #[test]
    fn test_agent_without_system_prompt() {
        let cfg = CloudflareConfig::builder()
            .provider(Provider::OpenAI)
            .api_key("test")
            .build();

        assert!(CloudflareAgent::new(cfg).messages.is_empty());
    }
}