//! Example `langfuse_simple.rs`: an OpenAI client with Langfuse observability.
use openai_ergonomic::{Builder, Client, LangfuseConfig, LangfuseInterceptor};
18use opentelemetry::{global, trace::TracerProvider};
19use opentelemetry_langfuse::ExporterBuilder;
20use opentelemetry_sdk::{
21 runtime::Tokio,
22 trace::{span_processor_with_async_runtime::BatchSpanProcessor, SdkTracerProvider},
23};
24
25#[tokio::main]
26async fn main() -> Result<(), Box<dyn std::error::Error>> {
27 tracing_subscriber::fmt()
29 .with_env_filter(
30 tracing_subscriber::EnvFilter::from_default_env()
31 .add_directive("openai_ergonomic=debug".parse()?),
32 )
33 .init();
34
35 println!(" Initializing OpenAI client with Langfuse observability...\n");
36
37 let exporter = ExporterBuilder::from_env()?.build()?;
39
40 let provider = SdkTracerProvider::builder()
42 .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
43 .build();
44
45 global::set_tracer_provider(provider.clone());
47
48 let tracer = provider.tracer("openai-ergonomic");
50 let langfuse_interceptor = LangfuseInterceptor::new(tracer, LangfuseConfig::new());
51
52 let client = Client::from_env()?
54 .with_interceptor(Box::new(langfuse_interceptor))
55 .build();
56
57 println!(" Client initialized successfully!");
58 println!(" Traces will be sent to Langfuse for monitoring\n");
59
60 println!(" Making a simple chat completion request...");
62 let request = client
63 .chat_simple("What is 2 + 2? Answer with just the number.")
64 .build()?;
65 let response = client.execute_chat(request).await?;
66
67 println!(" Response: {:?}", response.content());
68
69 println!("\n Done! Check your Langfuse dashboard to see the traces.");
70 println!(" - Look for traces with the operation name 'chat'");
71 println!(" - Each trace includes request/response details and token usage");
72
73 println!("\n⏳ Flushing spans to Langfuse...");
75 provider.shutdown()?;
76
77 Ok(())
78}