use openmodex::{ChatCompletionRequest, ChatMessage, Error, OpenModex};
#[tokio::main]
async fn main() -> Result<(), Error> {
    // Build a client with a fallback chain: if the primary model ("gpt-4o"
    // below) fails, each listed model is tried in order, with up to 2
    // retries per attempt.
    let client = OpenModex::builder()
        .fallback_models(["claude-3.5-sonnet", "gpt-4o-mini", "gemini-1.5-pro"])
        .max_retries(2)
        .build()?;

    let response = client
        .chat()
        .completions()
        .create(
            ChatCompletionRequest::new("gpt-4o")
                .message(ChatMessage::user("What are the benefits of model fallbacks?"))
                .max_tokens(256),
        )
        .await?;

    // Use `.first()` instead of indexing `choices[0]`: an empty choice list
    // from the provider would make the index panic, whereas here it simply
    // falls through to the empty-string default like the other Option steps.
    let content = response
        .choices
        .first()
        .and_then(|choice| choice.message.as_ref())
        .and_then(|m| m.content.as_deref())
        .unwrap_or("");
    println!("Response: {content}");

    // Fallback metadata is optional; print it only when the client set it.
    if let Some(meta) = &response.openmodex {
        println!("\nActual model used: {}", meta.model_used);
        println!("Provider: {}", meta.provider);
        println!("Fallback used: {}", meta.fallback_used);
    }
    Ok(())
}