chat-rs 0.0.6

Build LLM clients with ease and attach them to your tools.
Documentation
use chat_rs::{chat::ChatBuilder, gemini, messages, messages::content};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Needed for the explicit stdout flush below.
    use std::io::Write;

    // Build the Gemini-backed model client with code execution enabled.
    let client = gemini::GeminiBuilder::new()
        .with_model("gemini-2.5-flash".to_string())
        .with_code_execution()
        .build();

    // Chat driver: limits agentic tool-use loops to 5 steps per completion.
    let mut chat = ChatBuilder::new()
        .with_model(client)
        .with_max_steps(5)
        .build();

    // Conversation history, seeded with a system prompt.
    let mut messages = messages::Messages::default();
    messages.push(content::from_system(vec![
        "You are a helpful assistant. Your job is to be as useful as possible.",
    ]));

    loop {
        // Prompt without a trailing newline so input appears on the same line;
        // print! does not flush, so flush stdout explicitly.
        print!("User:\t");
        std::io::stdout().flush()?;

        let mut user_input = String::new();
        // read_line returns Ok(0) on EOF (e.g. Ctrl-D): exit cleanly instead
        // of spinning forever on empty reads.
        if std::io::stdin().read_line(&mut user_input)? == 0 {
            break;
        }

        // Strip the trailing newline left by read_line; ignore blank lines.
        let user_input = user_input.trim();
        if user_input.is_empty() {
            continue;
        }

        messages.push(content::from_user(vec![user_input]));

        // Run one completion step and append the model reply to the history.
        let response = chat.complete(&mut messages).await.map_err(|err| err.err)?;
        messages.push(response.content.clone());
        println!("Model:\t{:?}", response.content.parts);
    }

    Ok(())
}