extern crate proc_macro;

use quote::quote;
use syn::{DeriveInput, parse_macro_input};

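/// Derives the `Agent`, `Functions`, and `AsyncFunctions` traits for a struct that
/// wraps an inner `AgentGPT` in a field named `agent` and an LLM backend in a field
/// named `client` of type `ClientType`. The generated code uses `crate::`-relative
/// paths, so the derive is meant for agent types defined inside this crate; the type
/// must also implement `Default` (used by the generated `new`) and the crate's
/// `Executor` trait (which the generated `execute` delegates to).
///
/// A minimal usage sketch; the struct name below is illustrative:
///
/// ```ignore
/// #[derive(Default, Auto)]
/// struct CoderAgent {
///     agent: AgentGPT,
///     client: ClientType,
/// }
///
/// // `new` is provided by the generated `Agent` impl.
/// let coder = CoderAgent::new("coder".into(), "write and refactor code".into());
/// ```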
#[proc_macro_derive(Auto)]
pub fn derive_agent(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    let expanded = quote! {
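        // Delegate every `Agent` accessor to the inner `AgentGPT` stored in the `agent` field.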
        impl Agent for #name {
            fn new(
                persona: std::borrow::Cow<'static, str>,
                behavior: std::borrow::Cow<'static, str>,
            ) -> Self {
                let mut agent = Self::default();
                agent.agent.persona = persona;
                agent.agent.behavior = behavior;
                agent
            }

            fn update(&mut self, status: Status) {
                self.agent.update(status);
            }

            fn behavior(&self) -> &std::borrow::Cow<'static, str> {
                &self.agent.behavior
            }

            fn persona(&self) -> &std::borrow::Cow<'static, str> {
                &self.agent.persona
            }

            fn status(&self) -> &Status {
                &self.agent.status
            }

            fn memory(&self) -> &Vec<crate::common::utils::Message> {
                &self.agent.memory
            }

            fn tools(&self) -> &Vec<Tool> {
                &self.agent.tools
            }

            fn knowledge(&self) -> &Knowledge {
                &self.agent.knowledge
            }

            fn planner(&self) -> Option<&Planner> {
                self.agent.planner.as_ref()
            }

            fn profile(&self) -> &Persona {
                &self.agent.profile
            }

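            // With the `net` feature enabled, merge local and remote collaborators into one list.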
            #[cfg(feature = "net")]
            fn collaborators(&self) -> Vec<Collaborator> {
                let mut all = Vec::new();
                all.extend(self.agent.local_collaborators.values().cloned());
                all.extend(self.agent.remote_collaborators.values().cloned());
                all
            }

            fn reflection(&self) -> Option<&Reflection> {
                self.agent.reflection.as_ref()
            }

            fn scheduler(&self) -> Option<&TaskScheduler> {
                self.agent.scheduler.as_ref()
            }

            fn capabilities(&self) -> &std::collections::HashSet<Capability> {
                &self.agent.capabilities
            }

            fn context(&self) -> &ContextManager {
                &self.agent.context
            }

            fn tasks(&self) -> &Vec<Task> {
                &self.agent.tasks
            }

            fn memory_mut(&mut self) -> &mut Vec<crate::common::utils::Message> {
                &mut self.agent.memory
            }

            fn planner_mut(&mut self) -> Option<&mut Planner> {
                self.agent.planner.as_mut()
            }

            fn context_mut(&mut self) -> &mut ContextManager {
                &mut self.agent.context
            }
        }

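        // `Functions` exposes the wrapped `AgentGPT` directly.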
        impl Functions for #name {
            fn get_agent(&self) -> &AgentGPT {
                &self.agent
            }
        }

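        // Async behaviour: task execution is delegated to the type's own `Executor` impl,
        // while the LLM-facing methods dispatch on whichever backend `client` holds.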
        #[async_trait]
        impl AsyncFunctions for #name {
            async fn execute<'a>(
                &'a mut self,
                task: &'a mut Task,
                execute: bool,
                browse: bool,
                max_tries: u64,
            ) -> Result<()> {
                <#name as Executor>::execute(self, task, execute, browse, max_tries).await
            }

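            // Long-term memory helpers, compiled in only with the `mem` feature.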
            #[cfg(feature = "mem")]
            async fn save_ltm(&mut self, message: crate::common::utils::Message) -> Result<()> {
                save_long_term_memory(&mut self.client, self.agent.id.clone(), message).await
            }

            #[cfg(feature = "mem")]
            async fn get_ltm(&self) -> Result<Vec<crate::common::utils::Message>> {
                load_long_term_memory(self.agent.id.clone()).await
            }

            #[cfg(feature = "mem")]
            async fn ltm_context(&self) -> String {
                long_term_memory_context(self.agent.id.clone()).await
            }

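            // Text generation: forward the request as a single user message to the active
            // backend and return its response as plain text.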
            async fn generate(&mut self, request: &str) -> Result<String> {
                match &mut self.client {
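                    // Gemini: one-shot chat completion via the `gems` client.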
                    #[cfg(feature = "gem")]
                    ClientType::Gemini(gem_client) => {
                        let parameters = ChatBuilder::default()
                            .messages(vec![gems::messages::Message::User {
                                content: Content::Text(request.to_string()),
                                name: None,
                            }])
                            .build()?;

                        let result = gem_client.chat().generate(parameters).await;
                        Ok(result.unwrap_or_default())
                    }

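                    // OpenAI: GPT-4o chat completion with a plain-text response format.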
                    #[cfg(feature = "oai")]
                    ClientType::OpenAI(oai_client) => {
                        let parameters = ChatCompletionParametersBuilder::default()
                            .model(FlagshipModel::Gpt4O.to_string())
                            .messages(vec![ChatMessage::User {
                                content: ChatMessageContent::Text(request.to_string()),
                                name: None,
                            }])
                            .response_format(ChatCompletionResponseFormat::Text)
                            .build()?;

                        let result = oai_client.chat().create(parameters).await?;
                        let message = &result.choices[0].message;

                        Ok(match message {
                            ChatMessage::Assistant {
                                content: Some(chat_content),
                                ..
                            } => chat_content.to_string(),
                            ChatMessage::User { content, .. } => content.to_string(),
                            ChatMessage::System { content, .. } => content.to_string(),
                            ChatMessage::Developer { content, .. } => content.to_string(),
                            ChatMessage::Tool { content, .. } => content.clone(),
                            _ => String::new(),
                        })
                    }

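                    // Anthropic: single Claude message; concatenate the returned text blocks.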
                    #[cfg(feature = "cld")]
                    ClientType::Anthropic(client) => {
                        let body = CreateMessageParams::new(RequiredMessageParams {
                            model: "claude-3-7-sonnet-latest".to_string(),
                            messages: vec![AnthMessage::new_text(Role::User, request.to_string())],
                            max_tokens: 1024,
                        });

                        let chat_response = client.create_message(Some(&body)).await?;
                        Ok(chat_response
                            .content
                            .iter()
                            .filter_map(|block| match block {
                                ContentBlock::Text { text, .. } => Some(text.as_str()),
                                _ => None,
                            })
                            .collect::<Vec<_>>()
                            .join("\n"))
                    }

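                    // xAI: non-streaming Grok chat completion at temperature 0.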
                    #[cfg(feature = "xai")]
                    ClientType::Xai(xai_client) => {
                        let messages = vec![XaiMessage {
                            role: "user".into(),
                            content: request.to_string(),
                        }];

                        let rb = ChatCompletionsRequestBuilder::new(
                            xai_client.clone(),
                            "grok-beta".into(),
                            messages,
                        )
                        .temperature(0.0)
                        .stream(false);

                        let req = rb.clone().build()?;
                        let chat = rb.create_chat_completion(req).await?;
                        Ok(chat.choices[0].message.content.clone())
                    }

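                    // Cohere: streamed chat; accumulate the text chunks into one string.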
                    #[cfg(feature = "co")]
                    ClientType::Cohere(co_client) => {
                        use cohere_rust::api::chat::ChatRequest;

                        let chat_request = ChatRequest {
                            message: request,
                            ..Default::default()
                        };

                        let mut receiver = match co_client.chat(&chat_request).await {
                            Ok(rx) => rx,
                            Err(e) => return Err(anyhow::anyhow!("Cohere chat request failed: {}", e)),
                        };
                        let mut full_text = String::new();
                        while let Some(res) = receiver.recv().await {
                            match res {
                                Ok(cohere_rust::api::chat::ChatStreamResponse::ChatTextGeneration { text, .. }) => {
                                    full_text.push_str(&text);
                                }
                                Ok(_) => {}
                                Err(_) => {}
                            }
                        }
                        Ok(full_text)
                    }

                    #[allow(unreachable_patterns)]
                    _ => {
                        return Err(anyhow!(
                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
                        ));
                    }
                }
            }

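            // Image generation: only the Gemini (Imagen 4) path produces bytes here; the
            // remaining backends currently return an empty default.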
            async fn imagen(&mut self, request: &str) -> Result<Vec<u8>> {
                match &mut self.client {
                    #[cfg(feature = "gem")]
                    ClientType::Gemini(gem_client) => {
                        gem_client.set_model(Model::Imagen4);

                        let input = gems::messages::Message::User {
                            content: Content::Text(request.into()),
                            name: None,
                        };

                        let params = ImageGenBuilder::default()
                            .model(Model::Imagen4)
                            .input(input)
                            .build()?;

                        let image_bytes = gem_client.images().generate(params).await;
                        Ok(image_bytes.unwrap_or_default())
                    }

                    #[cfg(feature = "oai")]
                    ClientType::OpenAI(_oai_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "cld")]
                    ClientType::Anthropic(_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "xai")]
                    ClientType::Xai(_xai_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "co")]
                    ClientType::Cohere(_co_client) => {
                        Ok(Default::default())
                    }

                    #[allow(unreachable_patterns)]
                    _ => {
                        return Err(anyhow!(
                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
                        ));
                    }
                }
            }

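            // Streaming: only the Gemini path returns a live stream here; the remaining
            // backends currently return an empty `ReqResponse`.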
            async fn stream(&mut self, request: &str) -> Result<ReqResponse> {
                match &mut self.client {
                    #[cfg(feature = "gem")]
                    ClientType::Gemini(gem_client) => {
                        let parameters = StreamBuilder::default()
                            .model(Model::Flash3Preview)
                            .input(gems::messages::Message::User {
                                content: Content::Text(request.into()),
                                name: None,
                            })
                            .build()?;

                        Ok(ReqResponse(Some(gem_client.stream().generate(parameters).await?)))
                    }

                    #[cfg(feature = "oai")]
                    ClientType::OpenAI(_oai_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "cld")]
                    ClientType::Anthropic(_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "xai")]
                    ClientType::Xai(_xai_client) => {
                        Ok(Default::default())
                    }

                    #[cfg(feature = "co")]
                    ClientType::Cohere(_co_client) => {
                        Ok(Default::default())
                    }

                    #[allow(unreachable_patterns)]
                    _ => {
                        return Err(anyhow!(
                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
                        ));
                    }
                }
            }
        }
    };

    proc_macro::TokenStream::from(expanded)
}