use agentix::{ImageContent, ImageData, LlmEvent, Message, Request, UserContent};
use futures::StreamExt;
use std::env;
use std::io::{self, Write};
/// Encode `data` as standard (RFC 4648) base64, padded with `=` to a
/// multiple of four output characters.
fn encode_base64(data: &[u8]) -> String {
    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let mut encoded = String::with_capacity((data.len() + 2) / 3 * 4);
    for group in data.chunks(3) {
        // Pack up to three input bytes into a 24-bit buffer; absent bytes read as 0
        // and their output positions are padded below.
        let buf = (u32::from(group[0]) << 16)
            | (u32::from(*group.get(1).unwrap_or(&0)) << 8)
            | u32::from(*group.get(2).unwrap_or(&0));
        // Extract one 6-bit value at the given shift and map it to its alphabet char.
        let sextet = |shift: u32| ALPHABET[((buf >> shift) & 0x3F) as usize] as char;
        encoded.push(sextet(18));
        encoded.push(sextet(12));
        // Third and fourth characters exist only when their source bits came
        // from real input bytes; otherwise emit '=' padding.
        encoded.push(if group.len() >= 2 { sextet(6) } else { '=' });
        encoded.push(if group.len() == 3 { sextet(0) } else { '=' });
    }
    encoded
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Propagate a readable error instead of panicking when the key is missing —
    // main already returns Result, so there is no reason to abort via expect().
    let api_key = env::var("ANTHROPIC_API_KEY")
        .map_err(|_| "ANTHROPIC_API_KEY must be set in your environment variables")?;
    let http = reqwest::Client::new();
    // Full conversation history; re-sent with every request so the model keeps context.
    let mut messages: Vec<Message> = Vec::new();
    println!("Starting multimodal conversation with Claude. Type 'quit' or 'exit' to end.");
    println!("Hint: You can type `image:/path/to/pic.jpg` to send a local image.");
    loop {
        print!("\nUser: ");
        io::stdout().flush()?;
        let mut input = String::new();
        // read_line returns Ok(0) on EOF (e.g. closed/piped stdin). Without this
        // check the empty input hits `continue` and the loop spins forever.
        if io::stdin().read_line(&mut input)? == 0 {
            break;
        }
        let input = input.trim();
        if input.eq_ignore_ascii_case("quit") || input.eq_ignore_ascii_case("exit") {
            break;
        }
        if input.is_empty() {
            continue;
        }
        let mut user_parts = Vec::new();
        if let Some(path) = input.strip_prefix("image:") {
            let path = path.trim();
            match std::fs::read(path) {
                Ok(bytes) => {
                    // Derive the MIME subtype from the file extension. `.jpg` must
                    // map to `image/jpeg` — `image/jpg` is not a registered MIME
                    // type and the API may reject it. Path::extension() also avoids
                    // producing garbage for extension-less paths.
                    let ext = std::path::Path::new(path)
                        .extension()
                        .and_then(|e| e.to_str())
                        .unwrap_or("jpeg")
                        .to_ascii_lowercase();
                    let mime_type = match ext.as_str() {
                        "jpg" => "image/jpeg".to_string(),
                        other => format!("image/{other}"),
                    };
                    user_parts.push(UserContent::Image(ImageContent {
                        data: ImageData::Base64(encode_base64(&bytes)),
                        mime_type,
                    }));
                    user_parts.push(UserContent::Text { text: "Please describe this image.".to_string() });
                    println!("(Attached local image: {path})");
                }
                Err(e) => {
                    eprintln!("Failed to read image at '{path}': {e}");
                    continue;
                }
            }
        } else {
            user_parts.push(UserContent::Text { text: input.to_string() });
        }
        messages.push(Message::User(user_parts));
        // A failed request (network hiccup, rate limit) should not kill the whole
        // session: report it, drop the unanswered user turn, and prompt again.
        let stream_result = Request::anthropic(api_key.clone())
            .system_prompt(
                "You're a helpful assistant that can understand both text and images. Respond to the user's messages accordingly.",
            )
            .messages(messages.clone())
            .stream(&http)
            .await;
        let mut stream = match stream_result {
            Ok(s) => s,
            Err(e) => {
                eprintln!("Request failed: {e}");
                messages.pop();
                continue;
            }
        };
        print!("Claude: ");
        io::stdout().flush()?;
        let mut assistant_reply = String::new();
        while let Some(event) = stream.next().await {
            match event {
                LlmEvent::Token(t) => {
                    // Stream tokens to the terminal as they arrive, and accumulate
                    // them so the full reply can be appended to the history.
                    print!("{t}");
                    io::stdout().flush()?;
                    assistant_reply.push_str(&t);
                }
                LlmEvent::Error(e) => {
                    eprintln!("\nError: {e}");
                }
                LlmEvent::Done => break,
                // Ignore any other event kinds this example does not render.
                _ => {}
            }
        }
        println!();
        if !assistant_reply.is_empty() {
            messages.push(Message::Assistant {
                content: Some(assistant_reply),
                reasoning: None,
                tool_calls: vec![],
            });
        } else {
            // No assistant text arrived: remove the dangling user message so the
            // history keeps strict user/assistant alternation on the next request.
            messages.pop();
        }
    }
    Ok(())
}