// openclaw_agents/runtime/mod.rs

1//! Agent runtime.
2
3use std::collections::HashMap;
4use std::sync::Arc;
5
6use openclaw_core::events::SessionProjection;
7use openclaw_core::types::{AgentId, SessionKey};
8use openclaw_providers::traits::Provider;
9
10use crate::tools::ToolRegistry;
11
/// Agent execution context.
///
/// Per-session state handed to agent logic: the agent's identity, the
/// session it is serving, the tools it may call, and a free-form
/// key/value bag for passing custom data between processing steps.
pub struct AgentContext {
    /// Agent ID.
    pub agent_id: AgentId,
    /// Key identifying the session this context belongs to.
    pub session_key: SessionKey,
    /// Session state; its `messages` history is replayed when building
    /// provider requests.
    pub session: SessionProjection,
    /// Tools available to the agent, shared via `Arc`.
    pub tools: Arc<ToolRegistry>,
    /// Custom context values, set via [`AgentContext::set`] and read via
    /// [`AgentContext::get`].
    pub values: HashMap<String, serde_json::Value>,
}
25
26impl AgentContext {
27    /// Create a new agent context.
28    #[must_use]
29    pub fn new(
30        agent_id: AgentId,
31        session_key: SessionKey,
32        session: SessionProjection,
33        tools: Arc<ToolRegistry>,
34    ) -> Self {
35        Self {
36            agent_id,
37            session_key,
38            session,
39            tools,
40            values: HashMap::new(),
41        }
42    }
43
44    /// Set a context value.
45    pub fn set(&mut self, key: impl Into<String>, value: serde_json::Value) {
46        self.values.insert(key.into(), value);
47    }
48
49    /// Get a context value.
50    #[must_use]
51    pub fn get(&self, key: &str) -> Option<&serde_json::Value> {
52        self.values.get(key)
53    }
54}
55
/// Agent runtime for executing agent logic.
pub struct AgentRuntime {
    /// LLM provider used for completions (trait object, so any backend fits).
    provider: Arc<dyn Provider>,
    /// Tools advertised to the model as tool definitions on each request.
    tools: Arc<ToolRegistry>,
    /// Model identifier sent with every completion request.
    model: String,
    /// Optional system prompt; `None` means no system text is sent.
    system_prompt: Option<String>,
    /// Upper bound on tokens generated per completion.
    max_tokens: u32,
    /// Sampling temperature forwarded to the provider.
    temperature: f32,
}
65
66impl AgentRuntime {
67    /// Create a new agent runtime.
68    #[must_use]
69    pub fn new(provider: Arc<dyn Provider>) -> Self {
70        Self {
71            provider,
72            tools: Arc::new(ToolRegistry::new()),
73            model: "claude-3-5-sonnet-20241022".to_string(),
74            system_prompt: None,
75            max_tokens: 4096,
76            temperature: 0.7,
77        }
78    }
79
80    /// Set the model to use.
81    #[must_use]
82    pub fn with_model(mut self, model: impl Into<String>) -> Self {
83        self.model = model.into();
84        self
85    }
86
87    /// Set the system prompt.
88    #[must_use]
89    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
90        self.system_prompt = Some(prompt.into());
91        self
92    }
93
94    /// Set max tokens.
95    #[must_use]
96    pub const fn with_max_tokens(mut self, max_tokens: u32) -> Self {
97        self.max_tokens = max_tokens;
98        self
99    }
100
101    /// Set temperature.
102    #[must_use]
103    pub const fn with_temperature(mut self, temperature: f32) -> Self {
104        self.temperature = temperature;
105        self
106    }
107
108    /// Set the tool registry.
109    #[must_use]
110    pub fn with_tools(mut self, tools: Arc<ToolRegistry>) -> Self {
111        self.tools = tools;
112        self
113    }
114
115    /// Get the tool registry.
116    #[must_use]
117    pub const fn tools(&self) -> &Arc<ToolRegistry> {
118        &self.tools
119    }
120
121    /// Get the model name.
122    #[must_use]
123    pub fn model(&self) -> &str {
124        &self.model
125    }
126
127    /// Get the system prompt.
128    #[must_use]
129    pub fn system_prompt(&self) -> Option<&str> {
130        self.system_prompt.as_deref()
131    }
132
133    /// Get max tokens.
134    #[must_use]
135    pub const fn max_tokens(&self) -> u32 {
136        self.max_tokens
137    }
138
139    /// Get temperature.
140    #[must_use]
141    pub const fn temperature(&self) -> f32 {
142        self.temperature
143    }
144
145    /// Process a user message and return a response.
146    ///
147    /// # Errors
148    ///
149    /// Returns error if provider call fails.
150    pub async fn process_message(
151        &self,
152        ctx: &mut AgentContext,
153        message: &str,
154    ) -> Result<String, AgentRuntimeError> {
155        use openclaw_providers::traits::{CompletionRequest, Message, MessageContent, Role};
156
157        // Build messages from session history
158        let mut messages: Vec<Message> = ctx
159            .session
160            .messages
161            .iter()
162            .map(|m| match m {
163                openclaw_core::events::SessionMessage::Inbound(text) => Message {
164                    role: Role::User,
165                    content: MessageContent::Text(text.clone()),
166                },
167                openclaw_core::events::SessionMessage::Outbound(text) => Message {
168                    role: Role::Assistant,
169                    content: MessageContent::Text(text.clone()),
170                },
171                openclaw_core::events::SessionMessage::Tool { name, result } => Message {
172                    role: Role::Tool,
173                    content: MessageContent::Text(format!("[{name}]: {result}")),
174                },
175            })
176            .collect();
177
178        // Add current message
179        messages.push(Message {
180            role: Role::User,
181            content: MessageContent::Text(message.to_string()),
182        });
183
184        // Build request
185        let request = CompletionRequest {
186            model: self.model.clone(),
187            messages,
188            system: self.system_prompt.clone(),
189            max_tokens: self.max_tokens,
190            temperature: self.temperature,
191            stop: None,
192            tools: Some(self.tools.as_tool_definitions()),
193        };
194
195        // Call provider
196        let response = self.provider.complete(request).await?;
197
198        // Extract text response
199        let text = response
200            .content
201            .iter()
202            .filter_map(|block| {
203                if let openclaw_providers::traits::ContentBlock::Text { text } = block {
204                    Some(text.as_str())
205                } else {
206                    None
207                }
208            })
209            .collect::<Vec<_>>()
210            .join("\n");
211
212        Ok(text)
213    }
214}
215
216/// Agent runtime errors.
217#[derive(Debug, thiserror::Error)]
218pub enum AgentRuntimeError {
219    /// Provider error.
220    #[error("Provider error: {0}")]
221    Provider(#[from] openclaw_providers::traits::ProviderError),
222
223    /// Tool execution error.
224    #[error("Tool error: {0}")]
225    Tool(String),
226
227    /// Configuration error.
228    #[error("Config error: {0}")]
229    Config(String),
230}
231
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_agent_context() {
        // A minimal projection plus an empty registry is enough to build
        // a context; check the agent id round-trips.
        let session = openclaw_core::events::SessionProjection::new(
            SessionKey::new("test"),
            "default".to_string(),
            openclaw_core::types::ChannelId::telegram(),
            "user".to_string(),
        );
        let ctx = AgentContext::new(
            AgentId::default_agent(),
            SessionKey::new("test"),
            session,
            Arc::new(ToolRegistry::new()),
        );

        assert_eq!(ctx.agent_id.as_ref(), "default");
    }
}