menta 0.0.5

A minimal Rust library providing non-UI LLM and AI primitives.
Example
use futures_util::StreamExt;
use menta::{GenerateTextRequest, StreamEvent, stream_text};
use std::io::{self, Write};

#[tokio::main]
async fn main() {
    // Build the request up front, then open the token stream.
    let request = GenerateTextRequest::new()
        .model("openai/gpt-4.1-mini")
        .prompt("tell me a short story");

    let mut stream = stream_text(request).await.expect("stream_text failed");

    // Drain the stream: print text chunks as they arrive, report the
    // finish reason at the end, and ignore any other event kinds.
    while let Some(ev) = stream.next().await {
        match ev {
            StreamEvent::TextDelta(text) => {
                // Flush so partial output appears immediately.
                print!("{text}");
                io::stdout().flush().expect("flush failed");
            }
            StreamEvent::Finish { reason, .. } => {
                println!("\nfinish: {:?}", reason);
            }
            _ => {}
        }
    }
}