use chatdelta::{create_client, ClientConfig};
use std::time::Duration;
#[tokio::main]
/// Demonstrates configuring `chatdelta` clients against three kinds of
/// OpenAI-compatible endpoints: Azure OpenAI, a local model server
/// (e.g. Ollama), and a corporate API gateway.
///
/// The clients are only constructed, never invoked — the example exists to
/// show endpoint/builder configuration. API keys fall back to placeholder
/// values so the example builds without any environment setup.
///
/// # Errors
///
/// Returns an error if `create_client` rejects a configuration.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // --- Azure OpenAI: custom base URL pointing at a deployment ---
    println!("--- Azure OpenAI Example ---");
    let azure_config = ClientConfig::builder()
        .timeout(Duration::from_secs(30))
        .base_url("https://your-resource.openai.azure.com/openai/deployments/your-deployment")
        .temperature(0.7)
        .build();
    // Prefixed with `_`: the client is built for demonstration only.
    let _azure_client = create_client(
        "openai",
        // Placeholder key keeps the example runnable without env setup.
        std::env::var("AZURE_OPENAI_API_KEY").unwrap_or_else(|_| "demo-key".to_string()),
        "gpt-4",
        azure_config,
    )?;

    // --- Local model server speaking the OpenAI wire protocol ---
    println!("\n--- Local Model Example ---");
    let local_config = ClientConfig::builder()
        // Local models can be slower to respond; allow a longer timeout.
        .timeout(Duration::from_secs(60))
        .base_url("http://localhost:11434/v1")
        .temperature(0.8)
        .max_tokens(500)
        .build();
    let _local_client = create_client(
        // Local servers typically don't authenticate, but the API
        // requires a key argument, so pass a placeholder.
        "openai",
        "no-key-needed-for-local",
        "llama2",
        local_config,
    )?;

    // --- Corporate API gateway fronting the real provider ---
    println!("\n--- API Gateway Example ---");
    let gateway_config = ClientConfig::builder()
        .timeout(Duration::from_secs(45))
        .base_url("https://api-gateway.company.com/ai/v1")
        .temperature(0.7)
        // Gateways can be flaky under load; retry transient failures.
        .retries(3)
        .build();
    let _gateway_client = create_client(
        "openai",
        std::env::var("COMPANY_API_KEY").unwrap_or_else(|_| "demo-key".to_string()),
        "gpt-4",
        gateway_config,
    )?;

    println!("\nNote: These examples show how to configure custom endpoints.");
    println!("To run them, you'll need to set up the corresponding services and API keys.");
    Ok(())
}