use openrouter_rust::{
OpenRouterClient,
ChatCompletionBuilder,
collect_stream,
};
use futures::StreamExt;
/// Demonstrates streaming chat completions two ways: first printing each
/// delta token as it arrives, then re-running the same request and letting
/// `collect_stream` accumulate the chunks into one full response.
///
/// Requires the `OPENROUTER_API_KEY` environment variable; returns an error
/// if it is unset or if any request/stream step fails.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    use std::io::Write;

    let client = OpenRouterClient::builder()
        .api_key(std::env::var("OPENROUTER_API_KEY")?)
        .build()?;

    // `stream(true)` asks the API to send incremental delta chunks.
    // The request is reused below, hence the `clone()` at the first call site.
    let request = ChatCompletionBuilder::new("openai/gpt-3.5-turbo")
        .user_message("Write a short poem about Rust programming.")
        .stream(true)
        .build();

    println!("Streaming response:\n");

    // Bind as `mut` directly — the original `let stream = …; let mut stream = stream;`
    // rebinding was redundant.
    let mut stream = client.chat_completion_stream(request.clone()).await?;
    while let Some(result) = stream.next().await {
        match result {
            Ok(chunk) => {
                for choice in &chunk.choices {
                    if let Some(ref content) = choice.delta.content {
                        print!("{}", content);
                        // stdout is line-buffered: without an explicit flush the
                        // streamed tokens would not appear until a newline,
                        // defeating the purpose of streaming output.
                        std::io::stdout().flush()?;
                    }
                }
            }
            Err(e) => {
                // Report the stream error and stop consuming; the example then
                // proceeds to the collected (non-incremental) demonstration.
                eprintln!("\nStream error: {}", e);
                break;
            }
        }
    }

    println!("\n\n---\n");

    // Second pass: same request, but fold every chunk into a single
    // complete response via the crate's `collect_stream` helper.
    let stream = client.chat_completion_stream(request).await?;
    let full_response = collect_stream(stream).await?;
    if let Some(choice) = full_response.choices.first() {
        println!("Collected response:");
        if let Some(ref content) = choice.message.content {
            println!("{}", content);
        }
    }

    Ok(())
}