#![allow(clippy::print_stdout)]
use std::io::{Write, stdout};
use std::sync::Arc;
use futures::StreamExt;
use machi::prelude::*;
/// Example: stream a haiku from an OpenAI-backed "poet" agent to stdout,
/// then print a one-line run summary (step count and token usage).
#[tokio::main]
async fn main() -> Result<()> {
    // Chat provider is configured from the environment (e.g. API key).
    let provider: SharedChatProvider = Arc::new(OpenAI::from_env()?);

    // A single agent with fixed persona, model, and provider.
    let agent = Agent::new("poet")
        .instructions("You are a creative poet. Write vivid, expressive poetry.")
        .model("gpt-4o-mini")
        .provider(provider);

    // Kick off a streamed run and drain events until the stream ends.
    let config = RunConfig::default();
    let mut events = agent.run_streamed("Write a haiku about Rust.", config);

    while let Some(item) = events.next().await {
        match item? {
            // Incremental model output: echo immediately and flush so the
            // user sees text as it arrives, not only on newline boundaries.
            RunEvent::TextDelta(text) => {
                print!("{text}");
                stdout().flush()?;
            }
            // Terminal event: report how the run went.
            RunEvent::RunCompleted { result } => {
                println!(
                    "\n\nCompleted in {} step(s), {}",
                    result.steps, result.usage
                );
            }
            // Other event kinds (tool calls, step boundaries, ...) are
            // intentionally ignored in this minimal example.
            _ => {}
        }
    }

    Ok(())
}