use rig::pipeline::{self, Op, TryOp};
use rig::prelude::*;
use rig::providers::openai;
use rig::providers::openai::client::Client;
#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // Build an OpenAI client from the OPENAI_API_KEY environment variable.
    let openai_client = Client::from_env();

    // First-stage agent: classifies the user's statement into one of the
    // three animal categories named in the preamble.
    let animal_agent = openai_client.agent(openai::GPT_4)
        .preamble("
Your role is to categorise the user's statement using the following values: [sheep, cow, dog]
Return only the value.
")
        .build();

    // Second-stage agent with no preamble: answers whatever follow-up
    // prompt the mapping step produces.
    let default_agent = openai_client.agent(openai::GPT_4).build();

    // Pipeline: classify the input, translate the category into a
    // follow-up prompt, then send that prompt to the default agent.
    let chain = pipeline::new()
        .prompt(animal_agent)
        .map_ok(|x: String| {
            // Normalise the model output before matching: despite the
            // preamble, models often return extra whitespace or
            // capitalisation (e.g. "Sheep"), which would otherwise fall
            // through to the error arm.
            match x.trim().to_lowercase().as_str() {
                "cow" => Ok("Tell me a fact about the United States of America.".to_string()),
                "sheep" => Ok("Calculate 5+5 for me. Return only the number.".to_string()),
                "dog" => Ok("Write me a poem about cashews".to_string()),
                message => Err(format!("Could not process - received category: {message}")),
            }
        })
        // Flatten the nested Result<Result<String, String>, _>. This is an
        // example program, so a panic on failure is acceptable, but each
        // layer gets its own expect() message so the failure mode is clear
        // (prompt error vs. unrecognised category) instead of a bare
        // double-unwrap.
        .map(|x| {
            x.expect("classification prompt failed")
                .expect("model returned a category outside [sheep, cow, dog]")
        })
        .prompt(default_agent);

    let response = chain.try_call("Sheep can self-medicate").await?;
    println!("Pipeline result: {response:?}");
    Ok(())
}