// auto_derive/lib.rs

1extern crate proc_macro;
2
3use quote::quote;
4use syn::{DeriveInput, parse_macro_input};
5
6#[proc_macro_derive(Auto)]
7pub fn derive_agent(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
8    let input = parse_macro_input!(input as DeriveInput);
9    let name = &input.ident;
10
11    let expanded = quote! {
12        impl Agent for #name {
13            fn new(objective: Cow<'static, str>, position: Cow<'static, str>) -> Self {
14                let mut agent = Self::default();
15                agent.agent.objective = objective;
16                agent.agent.position = position;
17                agent
18            }
19
20            fn update(&mut self, status: Status) {
21                self.agent.update(status);
22            }
23
24            fn objective(&self) -> &std::borrow::Cow<'static, str> {
25                &self.agent.objective
26            }
27
28            fn position(&self) -> &std::borrow::Cow<'static, str> {
29                &self.agent.position
30            }
31
32            fn status(&self) -> &Status {
33                &self.agent.status
34            }
35
36            fn memory(&self) -> &Vec<Communication> {
37                &self.agent.memory
38            }
39
40            fn tools(&self) -> &Vec<Tool> {
41                &self.agent.tools
42            }
43
44            fn knowledge(&self) -> &Knowledge {
45                &self.agent.knowledge
46            }
47
48            fn planner(&self) -> Option<&Planner> {
49                self.agent.planner.as_ref()
50            }
51
52            fn persona(&self) -> &Persona {
53                &self.agent.persona
54            }
55
56            #[cfg(feature = "net")]
57            fn collaborators(&self) -> Vec<Collaborator> {
58                let mut all = Vec::new();
59                all.extend(self.agent.local_collaborators.values().cloned());
60                all.extend(self.agent.remote_collaborators.values().cloned());
61                all
62            }
63
64            fn reflection(&self) -> Option<&Reflection> {
65                self.agent.reflection.as_ref()
66            }
67
68            fn scheduler(&self) -> Option<&TaskScheduler> {
69                self.agent.scheduler.as_ref()
70            }
71
72            fn capabilities(&self) -> &std::collections::HashSet<Capability> {
73                &self.agent.capabilities
74            }
75
76            fn context(&self) -> &ContextManager {
77                &self.agent.context
78            }
79
80            fn tasks(&self) -> &Vec<Task> {
81                &self.agent.tasks
82            }
83
84            fn memory_mut(&mut self) -> &mut Vec<Communication> {
85                &mut self.agent.memory
86            }
87
88            fn planner_mut(&mut self) -> Option<&mut Planner> {
89                self.agent.planner.as_mut()
90            }
91
92            fn context_mut(&mut self) -> &mut ContextManager {
93                &mut self.agent.context
94            }
95        }
96
97        impl Functions for #name {
98            fn get_agent(&self) -> &AgentGPT {
99                &self.agent
100            }
101        }
102
103        #[async_trait]
104        impl AsyncFunctions for #name {
105            async fn execute<'a>(
106                &'a mut self,
107                tasks: &'a mut Task,
108                execute: bool,
109                browse: bool,
110                max_tries: u64,
111            ) -> Result<()> {
112                <#name as Executor>::execute(self, tasks, execute, browse, max_tries).await
113            }
114
115            /// Saves a communication to long-term memory for the agent.
116            ///
117            /// # Arguments
118            ///
119            /// * `communication` - The communication to save, which contains the role and content.
120            ///
121            /// # Returns
122            ///
123            /// (`Result<()>`): Result indicating the success or failure of saving the communication.
124            ///
125            /// # Business Logic
126            ///
127            /// - This method uses the `save_long_term_memory` util function to save the communication into the agent's long-term memory.
128            /// - The communication is embedded and stored using the agent's unique ID as the namespace.
129            /// - It handles the embedding and metadata for the communication, ensuring it's stored correctly.
130            #[cfg(feature = "mem")]
131            async fn save_ltm(&mut self, communication: Communication) -> Result<()> {
132                save_long_term_memory(&mut self.client, self.agent.id.clone(), communication).await
133            }
134
135            /// Retrieves all communications stored in the agent's long-term memory.
136            ///
137            /// # Returns
138            ///
139            /// (`Result<Vec<Communication>>`): A result containing a vector of communications retrieved from the agent's long-term memory.
140            ///
141            /// # Business Logic
142            ///
143            /// - This method fetches the stored communications for the agent by interacting with the `load_long_term_memory` function.
144            /// - The function will return a list of communications that are indexed by the agent's unique ID.
145            /// - It handles the retrieval of the stored metadata and content for each communication.
146            #[cfg(feature = "mem")]
147            async fn get_ltm(&self) -> Result<Vec<Communication>> {
148                load_long_term_memory(self.agent.id.clone()).await
149            }
150
151            /// Retrieves the concatenated context of all communications in the agent's long-term memory.
152            ///
153            /// # Returns
154            ///
155            /// (`String`): A string containing the concatenated role and content of all communications stored in the agent's long-term memory.
156            ///
157            /// # Business Logic
158            ///
159            /// - This method calls the `long_term_memory_context` function to generate a string representation of the agent's entire long-term memory.
160            /// - The context string is composed of each communication's role and content, joined by new lines.
161            /// - It provides a quick overview of the agent's memory in a human-readable format.
162            #[cfg(feature = "mem")]
163            async fn ltm_context(&self) -> String {
164                long_term_memory_context(self.agent.id.clone()).await
165            }
166
167            async fn generate(&mut self, request: &str) -> Result<String> {
168                match &mut self.client {
169                    #[cfg(feature = "gem")]
170                    ClientType::Gemini(gem_client) => {
171                        let parameters = ChatBuilder::default()
172                            .messages(vec![Message::User {
173                                content: Content::Text(request.to_string()),
174                                name: None,
175                            }])
176                            .build()?;
177
178                        let result = gem_client.chat().generate(parameters).await;
179                        Ok(result.unwrap_or_default())
180                    }
181
182                    #[cfg(feature = "oai")]
183                    ClientType::OpenAI(oai_client) => {
184                        let parameters = ChatCompletionParametersBuilder::default()
185                            .model(FlagshipModel::Gpt4O.to_string())
186                            .messages(vec![ChatMessage::User {
187                                content: ChatMessageContent::Text(request.to_string()),
188                                name: None,
189                            }])
190                            .response_format(ChatCompletionResponseFormat::Text)
191                            .build()?;
192
193                        let result = oai_client.chat().create(parameters).await?;
194                        let message = &result.choices[0].message;
195
196                        Ok(match message {
197                            ChatMessage::Assistant {
198                                content: Some(chat_content),
199                                ..
200                            } => chat_content.to_string(),
201                            ChatMessage::User { content, .. } => content.to_string(),
202                            ChatMessage::System { content, .. } => content.to_string(),
203                            ChatMessage::Developer { content, .. } => content.to_string(),
204                            ChatMessage::Tool { content, .. } => content.clone(),
205                            _ => String::new(),
206                        })
207                    }
208
209                    #[cfg(feature = "cld")]
210                    ClientType::Anthropic(client) => {
211                        let body = CreateMessageParams::new(RequiredMessageParams {
212                            model: "claude-3-7-sonnet-latest".to_string(),
213                            messages: vec![AnthMessage::new_text(Role::User, request.to_string())],
214                            max_tokens: 1024,
215                        });
216
217                        let chat_response = client.create_message(Some(&body)).await?;
218                        Ok(chat_response
219                            .content
220                            .iter()
221                            .filter_map(|block| match block {
222                                ContentBlock::Text { text, .. } => Some(text.as_str()),
223                                _ => None,
224                            })
225                            .collect::<Vec<_>>()
226                            .join("\n"))
227                    }
228
229                    #[cfg(feature = "xai")]
230                    ClientType::Xai(xai_client) => {
231                        let messages = vec![XaiMessage {
232                            role: "user".into(),
233                            content: request.to_string(),
234                        }];
235
236                        let rb = ChatCompletionsRequestBuilder::new(
237                            xai_client.clone(),
238                            "grok-beta".into(),
239                            messages,
240                        )
241                        .temperature(0.0)
242                        .stream(false);
243
244                        let req = rb.clone().build()?;
245                        let chat = rb.create_chat_completion(req).await?;
246                        Ok(chat.choices[0].message.content.clone())
247                    }
248
249
250                    #[allow(unreachable_patterns)]
251                    _ => {
252                        return Err(anyhow!(
253                            "No valid AI client configured. Enable `gem`, `oai`, `cld`, or `xai` feature."
254                        ));
255                    }
256                }
257            }
258
259            async fn imagen(&mut self, request: &str) -> Result<Vec<u8>> {
260                match &mut self.client {
261                    #[cfg(feature = "gem")]
262                    ClientType::Gemini(gem_client) => {
263                        gem_client.set_model(Model::FlashExpImage);
264
265                        let input = Message::User {
266                            content: Content::Text(request.into()),
267                            name: None,
268                        };
269
270                        let params = ImageGenBuilder::default()
271                            .model(Model::FlashExpImage)
272                            .input(input)
273                            .build()?;
274
275                        let image_bytes = gem_client.images().generate(params).await;
276                        Ok(image_bytes.unwrap_or_default())
277                    }
278
279                    #[cfg(feature = "oai")]
280                    ClientType::OpenAI(oai_client) => {
281                        // TODO: Implement this
282                        Ok(Default::default())
283                    }
284
285                    #[cfg(feature = "cld")]
286                    ClientType::Anthropic(client) => {
287                        // TODO: Implement this
288                        Ok(Default::default())
289                    }
290
291                    #[cfg(feature = "xai")]
292                    ClientType::Xai(xai_client) => {
293                        // TODO: Implement this
294                        Ok(Default::default())
295                    }
296
297
298                    #[allow(unreachable_patterns)]
299                    _ => {
300                        return Err(anyhow!(
301                            "No valid AI client configured. Enable `gem`, `oai`, `cld`, or `xai` feature."
302                        ));
303                    }
304                }
305            }
306
307            async fn stream(&mut self, request: &str) -> Result<ReqResponse> {
308                match &mut self.client {
309                    #[cfg(feature = "gem")]
310                    ClientType::Gemini(gem_client) => {
311                        let parameters = StreamBuilder::default()
312                            .model(Model::Flash20)
313                            .input(Message::User {
314                                content: Content::Text(request.into()),
315                                name: None,
316                            })
317                            .build()?;
318
319                        Ok(ReqResponse(Some(gem_client.stream().generate(parameters).await?)))
320                    }
321
322                    #[cfg(feature = "oai")]
323                    ClientType::OpenAI(oai_client) => {
324                        // TODO: Implement this
325                        Ok(Default::default())
326                    }
327
328                    #[cfg(feature = "cld")]
329                    ClientType::Anthropic(client) => {
330                        // TODO: Implement this
331                        Ok(Default::default())
332                    }
333
334                    #[cfg(feature = "xai")]
335                    ClientType::Xai(xai_client) => {
336                        // TODO: Implement this
337                        Ok(Default::default())
338                    }
339
340
341                    #[allow(unreachable_patterns)]
342                    _ => {
343                        return Err(anyhow!(
344                            "No valid AI client configured. Enable `gem`, `oai`, `cld`, or `xai` feature."
345                        ));
346                    }
347                }
348            }
349        }
350    };
351
352    proc_macro::TokenStream::from(expanded)
353}