// auto_derive/lib.rs — derive-macro crate providing `#[derive(Auto)]`.

1extern crate proc_macro;
2
3use quote::quote;
4use syn::{DeriveInput, parse_macro_input};
5
6#[proc_macro_derive(Auto)]
7pub fn derive_agent(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
8    let input = parse_macro_input!(input as DeriveInput);
9    let name = &input.ident;
10
11    let expanded = quote! {
12        impl Agent for #name {
13            fn new(objective: Cow<'static, str>, position: Cow<'static, str>) -> Self {
14                let mut agent = Self::default();
15                agent.agent.objective = objective;
16                agent.agent.position = position;
17                agent
18            }
19
20            fn update(&mut self, status: Status) {
21                self.agent.update(status);
22            }
23
24            fn objective(&self) -> &std::borrow::Cow<'static, str> {
25                &self.agent.objective
26            }
27
28            fn position(&self) -> &std::borrow::Cow<'static, str> {
29                &self.agent.position
30            }
31
32            fn status(&self) -> &Status {
33                &self.agent.status
34            }
35
36            fn memory(&self) -> &Vec<Communication> {
37                &self.agent.memory
38            }
39
40            fn tools(&self) -> &Vec<Tool> {
41                &self.agent.tools
42            }
43
44            fn knowledge(&self) -> &Knowledge {
45                &self.agent.knowledge
46            }
47
48            fn planner(&self) -> Option<&Planner> {
49                self.agent.planner.as_ref()
50            }
51
52            fn persona(&self) -> &Persona {
53                &self.agent.persona
54            }
55
56            fn collaborators(&self) -> &Vec<Collaborator> {
57                &self.agent.collaborators
58            }
59
60            fn reflection(&self) -> Option<&Reflection> {
61                self.agent.reflection.as_ref()
62            }
63
64            fn scheduler(&self) -> Option<&TaskScheduler> {
65                self.agent.scheduler.as_ref()
66            }
67
68            fn capabilities(&self) -> &std::collections::HashSet<Capability> {
69                &self.agent.capabilities
70            }
71
72            fn context(&self) -> &ContextManager {
73                &self.agent.context
74            }
75
76            fn tasks(&self) -> &Vec<Task> {
77                &self.agent.tasks
78            }
79
80            fn memory_mut(&mut self) -> &mut Vec<Communication> {
81                &mut self.agent.memory
82            }
83
84            fn planner_mut(&mut self) -> Option<&mut Planner> {
85                self.agent.planner.as_mut()
86            }
87
88            fn context_mut(&mut self) -> &mut ContextManager {
89                &mut self.agent.context
90            }
91        }
92
93        impl Functions for #name {
94            fn get_agent(&self) -> &AgentGPT {
95                &self.agent
96            }
97        }
98
99        #[async_trait::async_trait]
100        impl AsyncFunctions for #name {
101            async fn execute<'a>(
102                &'a mut self,
103                tasks: &'a mut Task,
104                execute: bool,
105                browse: bool,
106                max_tries: u64,
107            ) -> Result<()> {
108                <#name as Executor>::execute(self, tasks, execute, browse, max_tries).await
109            }
110
111            /// Saves a communication to long-term memory for the agent.
112            ///
113            /// # Arguments
114            ///
115            /// * `communication` - The communication to save, which contains the role and content.
116            ///
117            /// # Returns
118            ///
119            /// (`Result<()>`): Result indicating the success or failure of saving the communication.
120            ///
121            /// # Business Logic
122            ///
123            /// - This method uses the `save_long_term_memory` util function to save the communication into the agent's long-term memory.
124            /// - The communication is embedded and stored using the agent's unique ID as the namespace.
125            /// - It handles the embedding and metadata for the communication, ensuring it's stored correctly.
126            #[cfg(feature = "mem")]
127            async fn save_ltm(&mut self, communication: Communication) -> Result<()> {
128                save_long_term_memory(&mut self.client, self.agent.id.clone(), communication).await
129            }
130
131            /// Retrieves all communications stored in the agent's long-term memory.
132            ///
133            /// # Returns
134            ///
135            /// (`Result<Vec<Communication>>`): A result containing a vector of communications retrieved from the agent's long-term memory.
136            ///
137            /// # Business Logic
138            ///
139            /// - This method fetches the stored communications for the agent by interacting with the `load_long_term_memory` function.
140            /// - The function will return a list of communications that are indexed by the agent's unique ID.
141            /// - It handles the retrieval of the stored metadata and content for each communication.
142            #[cfg(feature = "mem")]
143            async fn get_ltm(&self) -> Result<Vec<Communication>> {
144                load_long_term_memory(self.agent.id.clone()).await
145            }
146
147            /// Retrieves the concatenated context of all communications in the agent's long-term memory.
148            ///
149            /// # Returns
150            ///
151            /// (`String`): A string containing the concatenated role and content of all communications stored in the agent's long-term memory.
152            ///
153            /// # Business Logic
154            ///
155            /// - This method calls the `long_term_memory_context` function to generate a string representation of the agent's entire long-term memory.
156            /// - The context string is composed of each communication's role and content, joined by new lines.
157            /// - It provides a quick overview of the agent's memory in a human-readable format.
158            #[cfg(feature = "mem")]
159            async fn ltm_context(&self) -> String {
160                long_term_memory_context(self.agent.id.clone()).await
161            }
162
163            #[cfg(any(feature = "oai", feature = "gem", feature = "cld", feature = "xai"))]
164            async fn send_request(&mut self, request: &str) -> Result<String> {
165                match &mut self.client {
166                    #[cfg(feature = "gem")]
167                    ClientType::Gemini(gem_client) => {
168                        let parameters = ChatBuilder::default()
169                            .messages(vec![Message::User {
170                                content: Content::Text(request.to_string()),
171                                name: None,
172                            }])
173                            .build()?;
174
175                        let result = gem_client.chat().generate(parameters).await;
176                        Ok(result.unwrap_or_default())
177                    }
178
179                    #[cfg(feature = "oai")]
180                    ClientType::OpenAI(oai_client) => {
181                        let parameters = ChatCompletionParametersBuilder::default()
182                            .model(FlagshipModel::Gpt4O.to_string())
183                            .messages(vec![ChatMessage::User {
184                                content: ChatMessageContent::Text(request.to_string()),
185                                name: None,
186                            }])
187                            .response_format(ChatCompletionResponseFormat::Text)
188                            .build()?;
189
190                        let result = oai_client.chat().create(parameters).await?;
191                        let message = &result.choices[0].message;
192
193                        Ok(match message {
194                            ChatMessage::Assistant {
195                                content: Some(chat_content),
196                                ..
197                            } => chat_content.to_string(),
198                            ChatMessage::User { content, .. } => content.to_string(),
199                            ChatMessage::System { content, .. } => content.to_string(),
200                            ChatMessage::Developer { content, .. } => content.to_string(),
201                            ChatMessage::Tool { content, .. } => content.clone(),
202                            _ => String::new(),
203                        })
204                    }
205
206                    #[cfg(feature = "cld")]
207                    ClientType::Anthropic(client) => {
208                        let body = CreateMessageParams::new(RequiredMessageParams {
209                            model: "claude-3-7-sonnet-latest".to_string(),
210                            messages: vec![AnthMessage::new_text(Role::User, request.to_string())],
211                            max_tokens: 1024,
212                        });
213
214                        let chat_response = client.create_message(Some(&body)).await?;
215                        Ok(chat_response
216                            .content
217                            .iter()
218                            .filter_map(|block| match block {
219                                ContentBlock::Text { text, .. } => Some(text.as_str()),
220                                _ => None,
221                            })
222                            .collect::<Vec<_>>()
223                            .join("\n"))
224                    }
225
226                    #[cfg(feature = "xai")]
227                    ClientType::Xai(xai_client) => {
228                        let messages = vec![XaiMessage {
229                            role: "user".into(),
230                            content: request.to_string(),
231                        }];
232
233                        let rb = ChatCompletionsRequestBuilder::new(
234                            xai_client.clone(),
235                            "grok-beta".into(),
236                            messages,
237                        )
238                        .temperature(0.0)
239                        .stream(false);
240
241                        let req = rb.clone().build()?;
242                        let chat = rb.create_chat_completion(req).await?;
243                        Ok(chat.choices[0].message.content.clone())
244                    }
245
246
247                    #[allow(unreachable_patterns)]
248                    _ => {
249                        return Err(anyhow::anyhow!(
250                            "No valid AI client configured. Enable `gem`, `oai`, `cld`, or `xai` feature."
251                        ));
252                    }
253                }
254            }
255
256        }
257    };
258
259    proc_macro::TokenStream::from(expanded)
260}