// auto_derive/lib.rs
1// Copyright 2026 Mahmoud Harmouch.
2//
3// Licensed under the MIT license
4// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
5// option. This file may not be copied, modified, or distributed
6// except according to those terms.
7
8extern crate proc_macro;
9
10use quote::quote;
11use syn::{DeriveInput, parse_macro_input};
12
13#[proc_macro_derive(Auto)]
14pub fn derive_agent(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
15    let input = parse_macro_input!(input as DeriveInput);
16    let name = &input.ident;
17
18    let expanded = quote! {
19        impl Agent for #name {
20            fn new(persona: Cow<'static, str>, behavior: Cow<'static, str>) -> Self {
21                let mut agent = Self::default();
22                agent.agent.persona = persona;
23                agent.agent.behavior = behavior;
24                agent
25            }
26
27            fn update(&mut self, status: Status) {
28                self.agent.update(status);
29            }
30
31            fn behavior(&self) -> &std::borrow::Cow<'static, str> {
32                &self.agent.behavior
33            }
34
35            fn persona(&self) -> &std::borrow::Cow<'static, str> {
36                &self.agent.persona
37            }
38
39            fn status(&self) -> &Status {
40                &self.agent.status
41            }
42
43            fn memory(&self) -> &Vec<Message> {
44                &self.agent.memory
45            }
46
47            fn tools(&self) -> &Vec<Tool> {
48                &self.agent.tools
49            }
50
51            fn knowledge(&self) -> &Knowledge {
52                &self.agent.knowledge
53            }
54
55            fn planner(&self) -> Option<&Planner> {
56                self.agent.planner.as_ref()
57            }
58
59            fn profile(&self) -> &Persona {
60                &self.agent.profile
61            }
62
63            #[cfg(feature = "net")]
64            fn collaborators(&self) -> Vec<Collaborator> {
65                let mut all = Vec::new();
66                all.extend(self.agent.local_collaborators.values().cloned());
67                all.extend(self.agent.remote_collaborators.values().cloned());
68                all
69            }
70
71            fn reflection(&self) -> Option<&Reflection> {
72                self.agent.reflection.as_ref()
73            }
74
75            fn scheduler(&self) -> Option<&TaskScheduler> {
76                self.agent.scheduler.as_ref()
77            }
78
79            fn capabilities(&self) -> &std::collections::HashSet<Capability> {
80                &self.agent.capabilities
81            }
82
83            fn context(&self) -> &ContextManager {
84                &self.agent.context
85            }
86
87            fn tasks(&self) -> &Vec<Task> {
88                &self.agent.tasks
89            }
90
91            fn memory_mut(&mut self) -> &mut Vec<Message> {
92                &mut self.agent.memory
93            }
94
95            fn planner_mut(&mut self) -> Option<&mut Planner> {
96                self.agent.planner.as_mut()
97            }
98
99            fn context_mut(&mut self) -> &mut ContextManager {
100                &mut self.agent.context
101            }
102        }
103
104        impl Functions for #name {
105            fn get_agent(&self) -> &AgentGPT {
106                &self.agent
107            }
108        }
109
110        #[async_trait]
111        impl AsyncFunctions for #name {
112            async fn execute<'a>(
113                &'a mut self,
114                task: &'a mut Task,
115                execute: bool,
116                browse: bool,
117                max_tries: u64,
118            ) -> Result<()> {
119                <#name as Executor>::execute(self, task, execute, browse, max_tries).await
120            }
121
122            /// Saves a communication to long-term memory for the agent.
123            ///
124            /// # Arguments
125            ///
126            /// * `communication` - The communication to save, which contains the role and content.
127            ///
128            /// # Returns
129            ///
130            /// (`Result<()>`): Result indicating the success or failure of saving the communication.
131            ///
132            /// # Business Logic
133            ///
134            /// - This method uses the `save_long_term_memory` util function to save the communication into the agent's long-term memory.
135            /// - The communication is embedded and stored using the agent's unique ID as the namespace.
136            /// - It handles the embedding and metadata for the communication, ensuring it's stored correctly.
137            #[cfg(feature = "mem")]
138            async fn save_ltm(&mut self, message: Message) -> Result<()> {
139                save_long_term_memory(&mut self.client, self.agent.id.clone(), message).await
140            }
141
142            /// Retrieves all communications stored in the agent's long-term memory.
143            ///
144            /// # Returns
145            ///
146            /// (`Result<Vec<Message>>`): A result containing a vector of communications retrieved from the agent's long-term memory.
147            ///
148            /// # Business Logic
149            ///
150            /// - This method fetches the stored communications for the agent by interacting with the `load_long_term_memory` function.
151            /// - The function will return a list of communications that are indexed by the agent's unique ID.
152            /// - It handles the retrieval of the stored metadata and content for each communication.
153            #[cfg(feature = "mem")]
154            async fn get_ltm(&self) -> Result<Vec<Message>> {
155                load_long_term_memory(self.agent.id.clone()).await
156            }
157
158            /// Retrieves the concatenated context of all communications in the agent's long-term memory.
159            ///
160            /// # Returns
161            ///
162            /// (`String`): A string containing the concatenated role and content of all communications stored in the agent's long-term memory.
163            ///
164            /// # Business Logic
165            ///
166            /// - This method calls the `long_term_memory_context` function to generate a string representation of the agent's entire long-term memory.
167            /// - The context string is composed of each communication's role and content, joined by new lines.
168            /// - It provides a quick overview of the agent's memory in a human-readable format.
169            #[cfg(feature = "mem")]
170            async fn ltm_context(&self) -> String {
171                long_term_memory_context(self.agent.id.clone()).await
172            }
173
174            async fn generate(&mut self, request: &str) -> Result<String> {
175                match &mut self.client {
176                    #[cfg(feature = "gem")]
177                    ClientType::Gemini(gem_client) => {
178                        let parameters = ChatBuilder::default()
179                            .messages(vec![gems::messages::Message::User {
180                                content: Content::Text(request.to_string()),
181                                name: None,
182                            }])
183                            .build()?;
184
185                        let result = gem_client.chat().generate(parameters).await;
186                        Ok(result.unwrap_or_default())
187                    }
188
189                    #[cfg(feature = "oai")]
190                    ClientType::OpenAI(oai_client) => {
191                        let parameters = ChatCompletionParametersBuilder::default()
192                            .model(FlagshipModel::Gpt4O.to_string())
193                            .messages(vec![ChatMessage::User {
194                                content: ChatMessageContent::Text(request.to_string()),
195                                name: None,
196                            }])
197                            .response_format(ChatCompletionResponseFormat::Text)
198                            .build()?;
199
200                        let result = oai_client.chat().create(parameters).await?;
201                        let message = &result.choices[0].message;
202
203                        Ok(match message {
204                            ChatMessage::Assistant {
205                                content: Some(chat_content),
206                                ..
207                            } => chat_content.to_string(),
208                            ChatMessage::User { content, .. } => content.to_string(),
209                            ChatMessage::System { content, .. } => content.to_string(),
210                            ChatMessage::Developer { content, .. } => content.to_string(),
211                            ChatMessage::Tool { content, .. } => content.clone(),
212                            _ => String::new(),
213                        })
214                    }
215
216                    #[cfg(feature = "cld")]
217                    ClientType::Anthropic(client) => {
218                        let body = CreateMessageParams::new(RequiredMessageParams {
219                            model: "claude-3-7-sonnet-latest".to_string(),
220                            messages: vec![AnthMessage::new_text(Role::User, request.to_string())],
221                            max_tokens: 1024,
222                        });
223
224                        let chat_response = client.create_message(Some(&body)).await?;
225                        Ok(chat_response
226                            .content
227                            .iter()
228                            .filter_map(|block| match block {
229                                ContentBlock::Text { text, .. } => Some(text.as_str()),
230                                _ => None,
231                            })
232                            .collect::<Vec<_>>()
233                            .join("\n"))
234                    }
235
236                    #[cfg(feature = "xai")]
237                    ClientType::Xai(xai_client) => {
238                        let messages = vec![XaiMessage::text("user", request)];
239
240                        let rb = ChatCompletionsRequestBuilder::new(
241                            xai_client.clone(),
242                            "grok-beta".into(),
243                            messages,
244                        )
245                        .temperature(0.0)
246                        .stream(false);
247
248                        let req = rb.clone().build()?;
249                        let chat = rb.create_chat_completion(req).await?;
250                        Ok(chat.choices[0].message.content.to_string())
251                    }
252
253                    #[cfg(feature = "co")]
254                    ClientType::Cohere(co_client) => {
255                        use cohere_rust::api::chat::ChatRequest;
256                        use cohere_rust::api::GenerateModel;
257
258                        let chat_request = ChatRequest {
259                            message: request,
260                            ..Default::default()
261                        };
262
263                        let mut receiver = match co_client.chat(&chat_request).await {
264                            Ok(rx) => rx,
265                            Err(e) => return Err(anyhow::anyhow!("Cohere API initialization failed: {}", e)),
266                        };
267                        let mut full_text = String::new();
268                        while let Some(res) = receiver.recv().await {
269                            match res {
270                                Ok(cohere_rust::api::chat::ChatStreamResponse::ChatTextGeneration { text, .. }) => {
271                                    full_text.push_str(&text);
272                                }
273                                Ok(_) => {}
274                                // Err(e) => return Err(anyhow!("Cohere chat error: {:?}", e)),
275                                Err(_) => {},
276                            }
277                        }
278                        Ok(full_text)
279                    }
280
281                    #[allow(unreachable_patterns)]
282                    _ => {
283                        return Err(anyhow!(
284                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
285                        ));
286                    }
287                }
288            }
289
290            async fn imagen(&mut self, request: &str) -> Result<Vec<u8>> {
291                match &mut self.client {
292                    #[cfg(feature = "gem")]
293                    ClientType::Gemini(gem_client) => {
294                        gem_client.set_model(Model::Imagen4);
295
296                        let input = gems::messages::Message::User {
297                            content: Content::Text(request.into()),
298                            name: None,
299                        };
300
301                        let params = ImageGenBuilder::default()
302                            .model(Model::Imagen4)
303                            .input(input)
304                            .build()?;
305
306                        let image_bytes = gem_client.images().generate(params).await;
307                        Ok(image_bytes.unwrap_or_default())
308                    }
309
310                    #[cfg(feature = "oai")]
311                    ClientType::OpenAI(oai_client) => {
312                        // TODO: Implement this
313                        Ok(Default::default())
314                    }
315
316                    #[cfg(feature = "cld")]
317                    ClientType::Anthropic(client) => {
318                        // TODO: Implement this
319                        Ok(Default::default())
320                    }
321
322                    #[cfg(feature = "xai")]
323                    ClientType::Xai(xai_client) => {
324                        // TODO: Implement this
325                        Ok(Default::default())
326                    }
327
328                    #[cfg(feature = "co")]
329                    ClientType::Cohere(_co_client) => {
330                        // Cohere does not support image generation
331                        Ok(Default::default())
332                    }
333
334                    #[allow(unreachable_patterns)]
335                    _ => {
336                        return Err(anyhow!(
337                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
338                        ));
339                    }
340                }
341            }
342
343            async fn stream(&mut self, request: &str) -> Result<ReqResponse> {
344                match &mut self.client {
345                    #[cfg(feature = "gem")]
346                    ClientType::Gemini(gem_client) => {
347                        let parameters = StreamBuilder::default()
348                            .model(Model::Flash3Preview)
349                            .input(gems::messages::Message::User {
350                                content: Content::Text(request.into()),
351                                name: None,
352                            })
353                            .build()?;
354
355                        Ok(ReqResponse(Some(gem_client.stream().generate(parameters).await?)))
356                    }
357
358                    #[cfg(feature = "oai")]
359                    ClientType::OpenAI(oai_client) => {
360                        // TODO: Implement this
361                        Ok(Default::default())
362                    }
363
364                    #[cfg(feature = "cld")]
365                    ClientType::Anthropic(client) => {
366                        // TODO: Implement this
367                        Ok(Default::default())
368                    }
369
370                    #[cfg(feature = "xai")]
371                    ClientType::Xai(xai_client) => {
372                        // TODO: Implement this
373                        Ok(Default::default())
374                    }
375
376                    #[cfg(feature = "co")]
377                    ClientType::Cohere(_co_client) => {
378                        // TODO: Implement this
379                        Ok(Default::default())
380                    }
381
382                    #[allow(unreachable_patterns)]
383                    _ => {
384                        return Err(anyhow!(
385                            "No valid AI client configured. Enable `co`, `gem`, `oai`, `cld`, or `xai` feature."
386                        ));
387                    }
388                }
389            }
390        }
391    };
392
393    proc_macro::TokenStream::from(expanded)
394}
395
396// Copyright 2026 Mahmoud Harmouch.
397//
398// Licensed under the MIT license
399// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
400// option. This file may not be copied, modified, or distributed
401// except according to those terms.