#![allow(clippy::doc_markdown, clippy::unnecessary_wraps)]
use do_memory_core::embeddings::{AzureOpenAIConfig, CustomConfig, MistralConfig, OpenAIConfig};
/// Demonstrates how to construct embedding-provider configurations for
/// OpenAI, Mistral, Azure OpenAI, and custom OpenAI-compatible servers.
///
/// The example only builds the config values and prints their derived
/// properties (model name, dimension, endpoint URL) — no network calls
/// are made, so it runs identically with or without the `openai`
/// feature enabled.
fn main() {
    // Build the section separators once instead of calling `repeat`
    // before every section.
    let banner = "=".repeat(60);
    let rule = "-".repeat(60);

    println!("🌐 Multi-Provider Embedding Configuration Examples\n");
    println!("{}", banner);

    // Example 1: hosted OpenAI embeddings API with a preset model.
    println!("\n📘 Example 1: OpenAI Standard API");
    println!("{}", rule);
    let openai_config = OpenAIConfig::text_embedding_3_small();
    println!("Model: {}", openai_config.model.model_name());
    println!("Dimension: {}", openai_config.effective_dimension());
    println!("Full Endpoint: {}", openai_config.embeddings_url());

    // Example 2: Mistral's dedicated embedding model preset.
    println!("\n🟣 Example 2: Mistral AI");
    println!("{}", rule);
    let mistral_config = MistralConfig::mistral_embed();
    println!("Model: {}", mistral_config.model.model_name());
    println!("Dimension: {}", mistral_config.effective_dimension());

    // Example 3: Azure OpenAI, which addresses models by deployment name,
    // resource name, and API version rather than by model id.
    println!("\n☁️ Example 3: Azure OpenAI Service");
    println!("{}", rule);
    let azure_config = AzureOpenAIConfig::new(
        "my-embedding-deployment",
        "my-openai-resource",
        "2023-05-15",
        1536,
    );
    println!("Deployment: {}", azure_config.deployment_name);
    println!("Dimension: {}", azure_config.embedding_dimension);
    println!("Full Endpoint: {}", azure_config.endpoint_url());

    // Example 4: any OpenAI-compatible server (e.g. a local inference
    // server) reachable at an arbitrary base URL.
    println!("\n🔧 Example 4: Custom OpenAI-Compatible API");
    println!("{}", rule);
    let custom_config = CustomConfig::new(
        "text-embedding-model",
        768,
        "http://localhost:1234/v1",
    );
    println!("Model: {}", custom_config.model_name);
    println!("Dimension: {}", custom_config.embedding_dimension);
    println!("Full Endpoint: {}", custom_config.embeddings_url());

    // Example 5: same as above, but overriding the default endpoint path.
    println!("\n🛠️ Example 5: Custom API with Custom Endpoint Path");
    println!("{}", rule);
    let custom_endpoint_config =
        CustomConfig::new("custom-embed-model", 1024, "https://api.mycompany.com/ml")
            .with_endpoint("/api/v2/embeddings");
    println!("Model: {}", custom_endpoint_config.model_name);
    println!("Dimension: {}", custom_endpoint_config.embedding_dimension);
    println!("Full Endpoint: {}", custom_endpoint_config.embeddings_url());

    // Example 6: survey of the available OpenAI model presets and the
    // trade-off each one targets.
    println!("\n📚 Example 6: All OpenAI Model Options");
    println!("{}", rule);
    let ada_002 = OpenAIConfig::ada_002();
    println!(
        "Legacy: {} ({} dims) - Most cost-effective",
        ada_002.model.model_name(),
        ada_002.effective_dimension()
    );
    let small = OpenAIConfig::text_embedding_3_small();
    println!(
        "Balanced: {} ({} dims) - Best price/performance",
        small.model.model_name(),
        small.effective_dimension()
    );
    let large = OpenAIConfig::text_embedding_3_large();
    println!(
        "Quality: {} ({} dims) - Highest quality",
        large.model.model_name(),
        large.effective_dimension()
    );

    println!("\n✅ Configuration examples complete!");
    println!("\nTo use these configurations:");
    println!("1. Set the appropriate API key environment variable");
    println!("2. Enable the 'openai' feature: cargo build --features openai");
    println!("3. Create a provider with your chosen config");
    println!("4. Call embed_text() or embed_batch() on the provider");

    // Only shown when the example was built without `--features openai`.
    #[cfg(not(feature = "openai"))]
    {
        println!("\n⚠️ Note: This example was compiled without the 'openai' feature.");
        println!("To see full functionality, run:");
        println!("  cargo run --example multi_provider_embeddings --features openai");
    }
}