// examples/langfuse_simple/langfuse_simple.rs

//! Simple example of using the Langfuse interceptor for LLM observability.
//!
//! ## Setup
//!
//! Before running this example, set the following environment variables:
//! - `OPENAI_API_KEY`: Your `OpenAI` API key
//! - `LANGFUSE_PUBLIC_KEY`: Your Langfuse public key (starts with "pk-lf-")
//! - `LANGFUSE_SECRET_KEY`: Your Langfuse secret key (starts with "sk-lf-")
//! - `LANGFUSE_HOST` (optional): Langfuse API host (defaults to <https://cloud.langfuse.com>)
//!
//! ## Running the example
//!
//! ```bash
//! cargo run --example langfuse_simple
//! ```

use openai_ergonomic::{Builder, Client, LangfuseConfig, LangfuseInterceptor};
use opentelemetry::{global, trace::TracerProvider};
use opentelemetry_langfuse::ExporterBuilder;
use opentelemetry_sdk::{
    runtime::Tokio,
    trace::{span_processor_with_async_runtime::BatchSpanProcessor, SdkTracerProvider},
};
24
25#[tokio::main]
26async fn main() -> Result<(), Box<dyn std::error::Error>> {
27    // Initialize tracing for logging
28    tracing_subscriber::fmt()
29        .with_env_filter(
30            tracing_subscriber::EnvFilter::from_default_env()
31                .add_directive("openai_ergonomic=debug".parse()?),
32        )
33        .init();
34
35    println!(" Initializing OpenAI client with Langfuse observability...\n");
36
37    // 1. Build Langfuse exporter from environment variables
38    let exporter = ExporterBuilder::from_env()?.build()?;
39
40    // 2. Create tracer provider with batch processor
41    let provider = SdkTracerProvider::builder()
42        .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
43        .build();
44
45    // Set as global provider
46    global::set_tracer_provider(provider.clone());
47
48    // 3. Get tracer and create interceptor
49    let tracer = provider.tracer("openai-ergonomic");
50    let langfuse_interceptor = LangfuseInterceptor::new(tracer, LangfuseConfig::new());
51
52    // 4. Create the OpenAI client and add the Langfuse interceptor
53    let client = Client::from_env()?
54        .with_interceptor(Box::new(langfuse_interceptor))
55        .build();
56
57    println!(" Client initialized successfully!");
58    println!(" Traces will be sent to Langfuse for monitoring\n");
59
60    // Make a simple chat completion - tracing is automatic!
61    println!(" Making a simple chat completion request...");
62    let request = client
63        .chat_simple("What is 2 + 2? Answer with just the number.")
64        .build()?;
65    let response = client.execute_chat(request).await?;
66
67    println!(" Response: {:?}", response.content());
68
69    println!("\n Done! Check your Langfuse dashboard to see the traces.");
70    println!("   - Look for traces with the operation name 'chat'");
71    println!("   - Each trace includes request/response details and token usage");
72
73    // Shutdown the tracer provider to flush all spans
74    println!("\n⏳ Flushing spans to Langfuse...");
75    provider.shutdown()?;
76
77    Ok(())
78}