1use proc_macro::TokenStream;
9use quote::quote;
10use syn::{parse_macro_input, LitStr};
11use aes_gcm::{
12 aead::{Aead, KeyInit},
13 Aes256Gcm, Nonce,
14};
15use base64::{engine::general_purpose, Engine as _};
16
17#[proc_macro]
30pub fn ai_slot(input: TokenStream) -> TokenStream {
31 let input_str = parse_macro_input!(input as LitStr);
32 let prompt = input_str.value();
33
34 let output = quote! {
35 aether_core::Slot::new("generated", #prompt)
36 };
37
38 output.into()
39}
40
41#[proc_macro]
51pub fn ai_template(input: TokenStream) -> TokenStream {
52 let input_str = parse_macro_input!(input as LitStr);
53 let content = input_str.value();
54
55 let output = quote! {
56 aether_core::Template::new(#content)
57 };
58
59 output.into()
60}
61
62#[proc_macro]
80pub fn ai(input: TokenStream) -> TokenStream {
81 let input_tokens: proc_macro2::TokenStream = input.into();
82
83 let output = quote! {
85 {
86 async {
87 use aether_core::{InjectionEngine, Template};
88
89 let (prompt, provider) = (#input_tokens);
90 let template = Template::new("{{AI:generated}}")
91 .with_slot("generated", prompt);
92
93 let engine = InjectionEngine::new(provider);
94 engine.render(&template).await
95 }
96 }
97 };
98
99 output.into()
100}
101
102#[proc_macro_attribute]
117pub fn ai_generate(attr: TokenStream, item: TokenStream) -> TokenStream {
118 let _prompt = parse_macro_input!(attr as LitStr);
119 let item_tokens: proc_macro2::TokenStream = item.into();
120
121 let output = quote! {
122 #item_tokens
124 };
125
126 output.into()
127}
128
/// Attribute macro: `#[aether_secure(prompt = "...")]`.
///
/// Rewrites the annotated function into an `async fn` (same name, visibility,
/// arguments, and return type) whose generated body, at runtime:
///   1. selects an AI provider from `AETHER_PROVIDER` (default `"openai"`),
///   2. asks the provider to generate a Rhai script implementing the prompt,
///   3. executes the script through `AetherRuntime` with the function's
///      arguments exposed as named inputs, and
///   4. casts the script result into the declared return type.
///
/// Compile-time "Shield": if `AETHER_SHIELD_KEY` is set in the *build*
/// environment, the prompt is AES-256-GCM encrypted and only the base64
/// ciphertext is embedded; the generated code decrypts it at runtime via
/// `aether_core::shield::Shield::decrypt` using the same env var.
///
/// Panics at compile time if Shield encryption fails; the generated runtime
/// code panics (`expect`) on provider misconfiguration, generation failure,
/// decryption failure, or script execution failure.
#[proc_macro_attribute]
pub fn aether_secure(attr: TokenStream, item: TokenStream) -> TokenStream {
    let input = parse_macro_input!(item as syn::ItemFn);
    let fn_name = &input.sig.ident;
    let fn_vis = &input.vis;
    let fn_args = &input.sig.inputs;
    let fn_output = &input.sig.output;

    // NOTE(review): the attribute is scanned by raw string matching rather
    // than syn. `TokenStream::to_string()` happens to render `prompt = "..."`
    // with single spaces, but that formatting is not a stability guarantee —
    // confirm, or parse a `syn::MetaNameValue` instead. Prompts containing
    // an embedded `"` would also truncate here.
    let attr_str = attr.to_string();
    // Extract the text between the first pair of quotes following `prompt =`.
    let (prompt, is_encrypted) = if let Some(p) = attr_str.split("prompt =").nth(1).and_then(|s| s.split('"').nth(1)) {
        let p_str = p.to_string();

        // Shield path: encrypt the prompt at compile time when a key is set.
        if let Ok(key_str) = std::env::var("AETHER_SHIELD_KEY") {
            let key = derive_key(&key_str);
            let cipher = Aes256Gcm::new(&key.into());
            // NOTE(review): fixed 12-byte nonce reused for every prompt.
            // AES-GCM loses its confidentiality/integrity guarantees under
            // nonce reuse with the same key — consider a random nonce stored
            // alongside the ciphertext (must stay compatible with
            // `Shield::decrypt` on the runtime side; verify).
            let nonce = Nonce::from_slice(b"aether_nonce");
            let ciphertext = cipher.encrypt(nonce, p_str.as_bytes()).expect("Shield encryption failed");
            let encrypted = general_purpose::STANDARD.encode(ciphertext);
            (encrypted, true)
        } else {
            // No key in the build env: embed the prompt as plain text.
            (p_str, false)
        }
    } else {
        // No `prompt = "..."` found: fall back to a generic instruction.
        ("Generate logic for this function".to_string(), false)
    };

    // Collect identifiers of typed arguments (skips `self` receivers and
    // non-identifier patterns) so they can be forwarded to the Rhai script.
    let arg_names: Vec<_> = fn_args.iter().filter_map(|arg| {
        if let syn::FnArg::Typed(pat_type) = arg {
            if let syn::Pat::Ident(pat_id) = &*pat_type.pat {
                return Some(&pat_id.ident);
            }
        }
        None
    }).collect();

    let output = quote! {
        #fn_vis async fn #fn_name(#fn_args) #fn_output {
            use aether_core::prelude::*;
            use aether_core::AetherRuntime;
            use std::collections::HashMap;

            let provider_type = std::env::var("AETHER_PROVIDER").unwrap_or_else(|_| "openai".to_string());

            // Recover the plaintext prompt, decrypting if it was shielded.
            // `#is_encrypted` is a compile-time bool literal, so the dead
            // branch is trivially optimized away.
            let prompt_text = if #is_encrypted {
                let key = std::env::var("AETHER_SHIELD_KEY").unwrap_or_else(|_| "default_key".to_string());
                aether_core::shield::Shield::decrypt(#prompt, &key).expect("Aether Shield: Decryption failed. Possible tampered binary or incorrect key.")
            } else {
                #prompt.to_string()
            };

            let script_prompt = format!(
                "Implement this logic in Rhai script: {}. Output ONLY the raw Rhai script code. The inputs available are: {:?}. Return the result directly. Do not wrap in markdown.",
                prompt_text,
                vec![#(stringify!(#arg_names)),*]
            );

            // Temperature 0.0: favor deterministic output for generated code.
            let template = Template::new("{{AI:script}}")
                .configure_slot(Slot::new("script", script_prompt).with_temperature(0.0));

            // Provider dispatch; unrecognized values fall through to OpenAI.
            let script = match provider_type.to_lowercase().as_str() {
                "anthropic" | "claude" => {
                    let p = aether_ai::AnthropicProvider::from_env().expect("Anthropic Provider not configured");
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                "gemini" => {
                    let p = aether_ai::GeminiProvider::from_env().expect("Gemini Provider not configured");
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                "ollama" => {
                    // Local model name is configurable; defaults to llama3.
                    let model = std::env::var("AETHER_MODEL").unwrap_or_else(|_| "llama3".to_string());
                    let p = aether_ai::OllamaProvider::new(&model);
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                _ => {
                    let p = aether_ai::OpenAiProvider::from_env().expect("OpenAI Provider not configured");
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                }
            };

            // Expose each function argument to the script under its own name.
            let runtime = AetherRuntime::new();
            let mut inputs = HashMap::new();
            #(
                inputs.insert(stringify!(#arg_names).to_string(), rhai::Dynamic::from(#arg_names));
            )*

            let result = runtime.execute(&script, inputs).expect("Runtime execution failed");

            // Cast the dynamic script result to the function's return type;
            // panics at runtime on a type mismatch.
            result.cast()
        }
    };

    output.into()
}
246
/// Derives a fixed 32-byte AES-256 key from an arbitrary key string.
///
/// The first `min(len, 32)` bytes are copied verbatim from the string; any
/// remaining positions `i` are filled with the deterministic pad
/// `(i as u8).wrapping_mul(0xAF)`. Input bytes beyond index 31 are ignored.
///
/// NOTE(review): this is a simple copy-and-pad, not a real KDF. The runtime
/// decrypt side (`Shield::decrypt`) presumably performs the identical
/// derivation, so this scheme must not change unilaterally — verify before
/// hardening.
fn derive_key(key_str: &str) -> [u8; 32] {
    let src = key_str.as_bytes();
    let mut key = [0u8; 32];
    for (i, slot) in key.iter_mut().enumerate() {
        *slot = match src.get(i) {
            Some(&b) => b,
            None => (i as u8).wrapping_mul(0xAF),
        };
    }
    key
}