// ai-transform 0.1.0
//
// Procedural macro for AI-powered data transformations between JSON-serializable types.
// Documentation: https://docs.rs/ai-transform
#![warn(clippy::pedantic)]

//! Procedural macro for AI-powered data transformations between JSON-serializable types.
//!
//! Transforms data from one JSON-serializable type to another using `OpenAI`'s
//! language models. The idea is to enable semantic understanding when that is
//! useful and easier than writing a deterministic parser by hand.
//!
//! **Important**: This crate requires [`ai_transform_runtime`] to run properly.
//!
//! ```toml
//! [dependencies]
//! ai-transform = "0.1.0"
//! ai-transform-runtime = "0.1.0"  # Required!
//! ```
//!
//! # How It Works
//!
//! 1. **Macro Expansion**: Generates `ai_transform_runtime::transform::<S, T>(value)`
//! 2. **Serialization**: Converts source value to JSON
//! 3. **Schema Generation**: Creates example JSON for both types using `Default`
//! 4. **AI Request**: Sends transformation prompt to `OpenAI` with context
//! 5. **Response Processing**: Extracts and validates JSON from AI response
//! 6. **Deserialization**: Converts result to target type
//!
//! # Considerations
//!
//! - Each call makes an HTTP request to `OpenAI`'s API
//! - Consider caching for repeated transformations
//! - Response time: ~1-5 seconds depending on complexity
//!
//! # Configuration
//!
//! Environment variables:
//! - `OPENAI_API_KEY`: Your API key (required)
//! - `OPENAI_MODEL`: Model to use (default: `"gpt-4o"`)
//! - `OPENAI_BASE_URL`: API endpoint (default: `OpenAI`'s URL)
//!
//! See [`transform!`] for usage info.
//!
//! [`ai_transform_runtime`]: https://docs.rs/ai-transform-runtime

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Type};

/// Parsed arguments of the [`transform!`] macro: `SourceType, TargetType, source_value`.
struct TransformInput {
    // First macro argument: the type of the input value.
    source_type: Type,
    // Second macro argument: the type to transform into.
    target_type: Type,
    // Third macro argument: an expression evaluating to a value of `source_type`.
    source_value: syn::Expr,
}

impl syn::parse::Parse for TransformInput {
    /// Parses the comma-separated macro input `SourceType, TargetType, expr`.
    ///
    /// Returns a parse error (surfaced by the compiler at the macro call site)
    /// if any of the three pieces or the separating commas are missing.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        // `SourceType ,`
        let source_type = input.parse::<Type>()?;
        input.parse::<syn::Token![,]>()?;
        // `TargetType ,`
        let target_type = input.parse::<Type>()?;
        input.parse::<syn::Token![,]>()?;
        // trailing expression producing the source value
        let source_value = input.parse::<syn::Expr>()?;

        Ok(Self {
            source_type,
            target_type,
            source_value,
        })
    }
}

/// AI-powered data transformation macro.
///
/// **Important:** Use this macro only when a semantic transformation from the
/// source type to the target type is well-defined and makes sense for your data.
///
/// # Syntax
///
/// ```rust,ignore
/// transform!(SourceType, TargetType, source_value)
/// ```
///
/// # Requirements
///
/// **Dependencies**: Add both this and runtime to your `Cargo.toml`:
/// ```toml
/// [dependencies]
/// ai-transform = "0.1.0"
/// ai-transform-runtime = "0.1.0"
/// serde = { version = "1.0", features = ["derive"] }
/// tokio = { version = "1.0", features = ["macros"] }
/// ```
///
/// **Environment**: Set your `OpenAI` API key:
/// ```bash
/// export OPENAI_API_KEY="your-api-key-here"
/// ```
///
/// **Type Requirements**: Both types must implement:
/// - `serde::Serialize` + `serde::Deserialize` + `Default`
///
/// # Arguments
///
/// * `SourceType` - Input data type
/// * `TargetType` - Output data type
/// * `source_value` - Expression evaluating to a `SourceType` value
///
/// # Returns
///
/// `Future<Result<TargetType, ai_transform_runtime::error::TransformError>>`
///
/// # Examples
///
/// ## Basic Field Mapping
///
/// ```rust,ignore
/// use ai_transform::transform;
/// use serde::{Deserialize, Serialize};
///
/// #[derive(Serialize, Deserialize, Default)]
/// struct User { name: String, age: u32 }
///
/// #[derive(Serialize, Deserialize, Default)]
/// struct Profile { full_name: String, years_old: u32, is_adult: bool }
///
/// # #[tokio::main]
/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let user = User { name: "Alice".into(), age: 28 };
///
/// // AI automatically maps: name → full_name, age → years_old
/// // and computes: is_adult from age
/// let profile: Profile = transform!(User, Profile, user).await?;
/// # Ok(())
/// # }
/// ```
///
/// ## Error Handling
///
/// ```rust,ignore
/// # use ai_transform::transform;
/// # use serde::{Deserialize, Serialize};
/// use ai_transform_runtime::error::TransformError;
///
/// # #[derive(Serialize, Deserialize, Default, Debug)]
/// # struct Source { data: String }
/// # #[derive(Serialize, Deserialize, Default, Debug)]
/// # struct Target { result: String }
///
/// # #[tokio::main]
/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
/// # let source = Source::default();
/// match transform!(Source, Target, source).await {
///     Ok(result) => println!("Success: {:?}", result),
///     Err(TransformError::EnvVarError(var)) => {
///         eprintln!("Missing environment variable: {}", var);
///     }
///     Err(TransformError::ApiError { status, body }) => {
///         eprintln!("OpenAI API error {}: {}", status, body);
///     }
///     Err(e) => eprintln!("Other error: {}", e),
/// }
/// # Ok(())
/// # }
/// ```
///
/// # Errors
///
/// See [`ai_transform_runtime::error::TransformError`] for details and all error variants.
#[proc_macro]
pub fn transform(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as TransformInput);

    let source_type = &input.source_type;
    let target_type = &input.target_type;
    let source_value = &input.source_value;

    TokenStream::from(quote! {
        ai_transform_runtime::transform::<#source_type, #target_type>(#source_value)
    })
}