// aether_macros/lib.rs

1//! # Aether Macros
2//!
3//! Procedural macros for one-line AI code injection.
4//!
5//! This crate provides the `ai!` macro for compile-time code generation markers
6//! and runtime injection helpers.
7
8use proc_macro::TokenStream;
9use quote::quote;
10use syn::{parse_macro_input, LitStr};
11use aes_gcm::{
12    aead::{Aead, KeyInit},
13    Aes256Gcm, Nonce,
14};
15use base64::{engine::general_purpose, Engine as _};
16
17/// Generate a template slot marker.
18///
19/// This macro creates a slot definition that will be filled by AI at runtime.
20///
21/// # Example
22///
23/// ```rust,ignore
24/// use aether_macros::ai_slot;
25///
26/// // Creates a slot named "button" with the given prompt
27/// let code = ai_slot!("button", "Create a submit button with hover effects");
28/// ```
29#[proc_macro]
30pub fn ai_slot(input: TokenStream) -> TokenStream {
31    let input_str = parse_macro_input!(input as LitStr);
32    let prompt = input_str.value();
33
34    let output = quote! {
35        aether_core::Slot::new("generated", #prompt)
36    };
37
38    output.into()
39}
40
41/// Create an AI injection template inline.
42///
43/// # Example
44///
45/// ```rust,ignore
46/// use aether_macros::ai_template;
47///
48/// let template = ai_template!("<div>{{AI:content}}</div>");
49/// ```
50#[proc_macro]
51pub fn ai_template(input: TokenStream) -> TokenStream {
52    let input_str = parse_macro_input!(input as LitStr);
53    let content = input_str.value();
54
55    let output = quote! {
56        aether_core::Template::new(#content)
57    };
58
59    output.into()
60}
61
62/// One-line AI code generation (async).
63///
64/// This macro creates a future that generates code using the specified
65/// provider and prompt.
66///
67/// # Example
68///
69/// ```rust,ignore
70/// use aether_macros::ai;
71/// use aether_ai::OpenAiProvider;
72///
73/// async fn example() {
74///     let provider = OpenAiProvider::from_env().unwrap();
75///     let code = ai!("Create a login form", provider).await.unwrap();
76///     println!("{}", code);
77/// }
78/// ```
79#[proc_macro]
80pub fn ai(input: TokenStream) -> TokenStream {
81    let input_tokens: proc_macro2::TokenStream = input.into();
82
83    // Parse as: prompt, provider
84    let output = quote! {
85        {
86            async {
87                use aether_core::{InjectionEngine, Template};
88
89                let (prompt, provider) = (#input_tokens);
90                let template = Template::new("{{AI:generated}}")
91                    .with_slot("generated", prompt);
92
93                let engine = InjectionEngine::with_config(provider, aether_core::AetherConfig::from_env());
94                engine.render(&template).await
95            }
96        }
97    };
98
99    output.into()
100}
101
102/// Mark a code section for AI generation (placeholder).
103///
104/// This attribute marks a function or item for AI-assisted generation.
105/// Use with build tools that preprocess source files.
106///
107/// # Example
108///
109/// ```rust,ignore
110/// #[ai_generate("Implement a function that validates email addresses")]
111/// fn validate_email(email: &str) -> bool {
112///     // AI will generate this implementation
113///     todo!()
114/// }
115/// ```
116#[proc_macro_attribute]
117pub fn ai_generate(attr: TokenStream, item: TokenStream) -> TokenStream {
118    let _prompt = parse_macro_input!(attr as LitStr);
119    let item_tokens: proc_macro2::TokenStream = item.into();
120
121    let output = quote! {
122        // AI Generation prompt: #prompt
123        #item_tokens
124    };
125
126    output.into()
127}
128
/// Transform a function into a secure, polymorphic AI-powered runtime call.
///
/// This macro removes the function body and replaces it with logic that:
/// 1. Fetches a script from AI at runtime.
/// 2. Executes it using the AetherRuntime (Rhai).
///
/// Note: the generated function is `async` even if the annotated function is
/// not, so callers must `.await` it — a breaking change to the signature.
///
/// Environment variables consulted:
/// - `AETHER_SHIELD_KEY` — at *compile time*: if set, the prompt is AES-256-GCM
///   encrypted into the binary; at *runtime*: used to decrypt it again.
/// - `AETHER_PROVIDER` — selects the AI backend ("anthropic"/"claude",
///   "gemini", "ollama", anything else falls back to OpenAI).
/// - `AETHER_MODEL` — model name for the Ollama branch (default "llama3").
///
/// # Example
///
/// ```rust,ignore
/// #[aether_secure(prompt = "Calculate complex score based on inputs", temp = 0.0)]
/// fn calculate_score(a: i64, b: i64) -> i64;
/// ```
#[proc_macro_attribute]
pub fn aether_secure(attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse the annotated item as a full function (signature + body; the body
    // is discarded and replaced below).
    let input = parse_macro_input!(item as syn::ItemFn);
    let fn_name = &input.sig.ident;
    let fn_vis = &input.vis;
    let fn_args = &input.sig.inputs;
    let fn_output = &input.sig.output;

    // Simplified attribute parsing (in production use syn::AttributeArgs)
    // Extracts the first double-quoted string after `prompt =`; any `temp = ...`
    // or other attribute arguments are ignored by this parser.
    let attr_str = attr.to_string();
    let (prompt, is_encrypted) = if let Some(p) = attr_str.split("prompt =").nth(1).and_then(|s| s.split('"').nth(1)) {
        let p_str = p.to_string();
        
        // Compile-time Encryption Logic
        // NOTE(review): the nonce below is a fixed constant, so every prompt
        // encrypted with the same key reuses the same (key, nonce) pair —
        // AES-GCM nonce reuse breaks its confidentiality/authenticity
        // guarantees. Consider a per-invocation random nonce stored alongside
        // the ciphertext.
        if let Ok(key_str) = std::env::var("AETHER_SHIELD_KEY") {
            let key = derive_key(&key_str);
            let cipher = Aes256Gcm::new(&key.into());
            let nonce = Nonce::from_slice(b"aether_nonce");
            let ciphertext = cipher.encrypt(nonce, p_str.as_bytes()).expect("Shield encryption failed");
            let encrypted = general_purpose::STANDARD.encode(ciphertext);
            (encrypted, true)
        } else {
            (p_str, false)
        }
    } else {
        ("Generate logic for this function".to_string(), false)
    };

    // Collect the plain identifier names of typed arguments; `self` receivers
    // and non-identifier patterns (e.g. tuples) are skipped.
    let arg_names: Vec<_> = fn_args.iter().filter_map(|arg| {
        if let syn::FnArg::Typed(pat_type) = arg {
            if let syn::Pat::Ident(pat_id) = &*pat_type.pat {
                return Some(&pat_id.ident);
            }
        }
        None
    }).collect();

    let output = quote! {
        #fn_vis async fn #fn_name(#fn_args) #fn_output {
            use aether_core::prelude::*;
            use aether_core::AetherRuntime;
            use std::collections::HashMap;

            // 1. Setup Engine & Request Script (Dynamic Provider Selection)
            // We must handle rendering inside match arms because InjectionEngine<P> types differ.
            
            let provider_type = std::env::var("AETHER_PROVIDER").unwrap_or_else(|_| "openai".to_string());
            
            // Prepare template (decryption if needed)
            // NOTE(review): encryption above derives a 32-byte key via
            // `derive_key`, but decryption passes the raw env string to
            // `Shield::decrypt` — verify Shield applies the same derivation,
            // otherwise round-tripping will fail.
            let prompt_text = if #is_encrypted {
                let key = std::env::var("AETHER_SHIELD_KEY").unwrap_or_else(|_| "default_key".to_string());
                aether_core::shield::Shield::decrypt(#prompt, &key).expect("Aether Shield: Decryption failed. Possible tampered binary or incorrect key.")
            } else {
                #prompt.to_string()
            };

            let script_prompt = format!(
                "Implement this logic in Rhai script: {}. Output ONLY the raw Rhai script code. The inputs available are: {:?}. Return the result directly. Do not wrap in markdown.",
                prompt_text,
                vec![#(stringify!(#arg_names)),*]
            );
            
            // Temperature 0.0: deterministic generation for reproducible scripts.
            let template = Template::new("{{AI:script}}")
                .configure_slot(Slot::new("script", script_prompt).with_temperature(0.0));

            // 2. Render the script with whichever provider is configured.
            let script = match provider_type.to_lowercase().as_str() {
                "anthropic" | "claude" => {
                    let p = aether_ai::AnthropicProvider::from_env().expect("Anthropic Provider not configured");
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                "gemini" => {
                    let p = aether_ai::GeminiProvider::from_env().expect("Gemini Provider not configured");
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                "ollama" => {
                    let model = std::env::var("AETHER_MODEL").unwrap_or_else(|_| "llama3".to_string());
                    let p = aether_ai::OllamaProvider::new(&model);
                    let engine = InjectionEngine::new(p);
                    engine.render(&template).await.expect("AI script generation failed")
                },
                _ => {
                   let p = aether_ai::OpenAiProvider::from_env().expect("OpenAI Provider not configured");
                   let engine = InjectionEngine::new(p);
                   engine.render(&template).await.expect("AI script generation failed")
                }
            };

            // 3. Execute in Runtime
            let runtime = AetherRuntime::new();
            let mut inputs = HashMap::new();
            #(
                inputs.insert(stringify!(#arg_names).to_string(), rhai::Dynamic::from(#arg_names));
             )*

            let result = runtime.execute(&script, inputs).expect("Runtime execution failed");
            
            // 4. Return result (simplified cast, needs more robust handling for varied types)
            result.cast()
        }
    };

    output.into()
}
246
/// Derive a fixed 32-byte AES key from an arbitrary key string.
///
/// The first `min(32, len)` bytes come straight from the input; remaining
/// positions are padded deterministically with `i * 0xAF` (wrapping).
/// Note: this is a simple stretch/pad, not a cryptographic KDF.
fn derive_key(key_str: &str) -> [u8; 32] {
    let raw = key_str.as_bytes();
    let mut derived = [0u8; 32];
    for (idx, slot) in derived.iter_mut().enumerate() {
        // Fall back to the deterministic pad byte once the input is exhausted.
        *slot = raw
            .get(idx)
            .copied()
            .unwrap_or((idx as u8).wrapping_mul(0xAF));
    }
    derived
}