Struct LangfuseInterceptor

pub struct LangfuseInterceptor<T: Tracer + Send + Sync> { /* private fields */ }

Langfuse interceptor for OpenTelemetry-based observability.

This interceptor automatically creates a span for each API call. Spans are carried from before_request to after_response via a global registry and request metadata, so no changes to user code are required.

The tracer must be configured externally; this interceptor only instruments API calls.
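
Once constructed, the interceptor is attached when building the client. A minimal sketch, assuming an already-configured `interceptor` (this is the same pattern used in the repository examples shown below):

let client = Client::from_env()?
    .with_interceptor(Box::new(interceptor))
    .build();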

Implementations§

impl<T: Tracer + Send + Sync> LangfuseInterceptor<T>
where T::Span: Send + Sync + 'static,

pub fn new(tracer: T, config: LangfuseConfig) -> Self

Create a new Langfuse interceptor with the given tracer.

The tracer should be configured to export to Langfuse using opentelemetry_langfuse::ExporterBuilder.

§Example
use openai_ergonomic::langfuse_interceptor::{LangfuseConfig, LangfuseInterceptor};
use opentelemetry::global;
use opentelemetry::trace::TracerProvider;
use opentelemetry_langfuse::ExporterBuilder;
use opentelemetry_sdk::runtime::Tokio;
use opentelemetry_sdk::trace::span_processor_with_async_runtime::BatchSpanProcessor;
use opentelemetry_sdk::trace::SdkTracerProvider;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Build exporter from environment variables
    let exporter = ExporterBuilder::from_env()?.build()?;

    // Create tracer provider with batch processor
    let provider = SdkTracerProvider::builder()
        .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
        .build();

    // Set as global provider
    global::set_tracer_provider(provider.clone());

    // Get tracer for interceptor
    let tracer = provider.tracer("openai-ergonomic");

    // Create interceptor with tracer; attach it to a Client (see examples below)
    let interceptor = LangfuseInterceptor::new(tracer, LangfuseConfig::new());
    Ok(())
}
Examples found in repository
examples/langfuse_simple.rs (line 50)
26 async fn main() -> Result<(), Box<dyn std::error::Error>> {
27    // Initialize tracing for logging
28    tracing_subscriber::fmt()
29        .with_env_filter(
30            tracing_subscriber::EnvFilter::from_default_env()
31                .add_directive("openai_ergonomic=debug".parse()?),
32        )
33        .init();
34
35    println!(" Initializing OpenAI client with Langfuse observability...\n");
36
37    // 1. Build Langfuse exporter from environment variables
38    let exporter = ExporterBuilder::from_env()?.build()?;
39
40    // 2. Create tracer provider with batch processor
41    let provider = SdkTracerProvider::builder()
42        .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
43        .build();
44
45    // Set as global provider
46    global::set_tracer_provider(provider.clone());
47
48    // 3. Get tracer and create interceptor
49    let tracer = provider.tracer("openai-ergonomic");
50    let langfuse_interceptor = LangfuseInterceptor::new(tracer, LangfuseConfig::new());
51
52    // 4. Create the OpenAI client and add the Langfuse interceptor
53    let client = Client::from_env()?
54        .with_interceptor(Box::new(langfuse_interceptor))
55        .build();
56
57    println!(" Client initialized successfully!");
58    println!(" Traces will be sent to Langfuse for monitoring\n");
59
60    // Make a simple chat completion - tracing is automatic!
61    println!(" Making a simple chat completion request...");
62    let request = client
63        .chat_simple("What is 2 + 2? Answer with just the number.")
64        .build()?;
65    let response = client.execute_chat(request).await?;
66
67    println!(" Response: {:?}", response.content());
68
69    println!("\n Done! Check your Langfuse dashboard to see the traces.");
70    println!("   - Look for traces with the operation name 'chat'");
71    println!("   - Each trace includes request/response details and token usage");
72
73    // Shutdown the tracer provider to flush all spans
74    println!("\n⏳ Flushing spans to Langfuse...");
75    provider.shutdown()?;
76
77    Ok(())
78}
More examples
examples/langfuse_streaming.rs (line 55)
31 async fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
32    // Initialize tracing for logging
33    tracing_subscriber::fmt()
34        .with_env_filter(
35            tracing_subscriber::EnvFilter::from_default_env()
36                .add_directive("openai_ergonomic=debug".parse()?),
37        )
38        .init();
39
40    println!("🚀 Initializing OpenAI client with Langfuse streaming observability...\n");
41
42    // 1. Build Langfuse exporter from environment variables
43    let exporter = ExporterBuilder::from_env()?.build()?;
44
45    // 2. Create tracer provider with batch processor
46    let provider = SdkTracerProvider::builder()
47        .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
48        .build();
49
50    // Set as global provider
51    global::set_tracer_provider(provider.clone());
52
53    // 3. Get tracer and create interceptor
54    let tracer = provider.tracer("openai-ergonomic");
55    let langfuse_interceptor = LangfuseInterceptor::new(tracer, LangfuseConfig::new());
56
57    // 4. Create the OpenAI client and add the Langfuse interceptor
58    let client = Client::from_env()?
59        .with_interceptor(Box::new(langfuse_interceptor))
60        .build();
61
62    println!("✅ Client initialized successfully!");
63    println!("📊 Streaming traces will be sent to Langfuse for monitoring\n");
64
65    // Example 1: Basic streaming with tracing
66    println!("=== Example 1: Basic Streaming ===");
67    basic_streaming(&client).await?;
68
69    // Example 2: Streaming with parameters
70    println!("\n=== Example 2: Streaming with Parameters ===");
71    streaming_with_parameters(&client).await?;
72
73    // Example 3: Collect full content
74    println!("\n=== Example 3: Collect Full Content ===");
75    collect_content(&client).await?;
76
77    println!("\n✅ Done! Check your Langfuse dashboard to see the streaming traces.");
78    println!("   - Look for traces with operation names 'chat' or 'responses'");
79    println!("   - Each trace includes:");
80    println!("     • before_request: Initial request details");
81    println!("     • on_stream_chunk: Each chunk as it arrives (real-time)");
82    println!("     • on_stream_end: Final token usage and duration");
83
84    // Give spawned interceptor tasks time to complete
85    tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
86
87    // Shutdown the tracer provider to flush all spans
88    println!("\n⏳ Flushing spans to Langfuse...");
89    provider.shutdown()?;
90
91    Ok(())
92}
examples/langfuse.rs (line 54)
31 async fn main() -> Result<(), Box<dyn std::error::Error>> {
32    // Initialize tracing for logging
33    tracing_subscriber::fmt()
34        .with_env_filter(
35            tracing_subscriber::EnvFilter::from_default_env()
36                .add_directive("openai_ergonomic=debug".parse()?),
37        )
38        .init();
39
40    // 1. Build Langfuse exporter from environment variables
41    let exporter = ExporterBuilder::from_env()?.build()?;
42
43    // 2. Create tracer provider with batch processor
44    let provider = SdkTracerProvider::builder()
45        .with_span_processor(BatchSpanProcessor::builder(exporter, Tokio).build())
46        .build();
47
48    // Set as global provider
49    global::set_tracer_provider(provider.clone());
50
51    // 3. Get tracer and create interceptor
52    let tracer = provider.tracer("openai-ergonomic");
53    let langfuse_interceptor =
54        std::sync::Arc::new(LangfuseInterceptor::new(tracer, LangfuseConfig::new()));
55
56    // 4. Create the OpenAI client and add the Langfuse interceptor
57    // Keep a reference to the interceptor so we can update context later
58    let client = Client::from_env()?
59        .with_interceptor(Box::new(langfuse_interceptor.clone()))
60        .build();
61
62    println!(" OpenAI client initialized with Langfuse observability");
63    println!(" Traces will be sent to Langfuse for monitoring\n");
64
65    // Example 1: Simple chat completion
66    println!("Example 1: Simple chat completion");
67    println!("---------------------------------");
68    let chat_builder = client
69        .chat_simple("What is the capital of France? Answer in one word.")
70        .build()?;
71    let response = client.execute_chat(chat_builder).await?;
72    println!("Response: {:?}\n", response.content());
73
74    // Example 2: Chat completion with builder pattern
75    println!("Example 2: Chat with builder pattern");
76    println!("-------------------------------------");
77    let chat_builder = client
78        .chat()
79        .system("You are a helpful assistant that speaks like a pirate.")
80        .user("Tell me about the ocean in 2 sentences.")
81        .temperature(0.7)
82        .max_tokens(100)
83        .build()?;
84    let response = client.execute_chat(chat_builder).await?;
85    println!("Response: {:?}\n", response.content());
86
87    // Example 3: Multiple messages in a conversation
88    println!("Example 3: Conversation");
89    println!("-----------------------");
90    let chat_builder = client
91        .chat()
92        .system("You are a math tutor.")
93        .user("What is 2 + 2?")
94        .assistant("2 + 2 equals 4.")
95        .user("And what about 3 + 3?")
96        .build()?;
97    let response = client.execute_chat(chat_builder).await?;
98    println!("Response: {:?}\n", response.content());
99
100    // Example 4: Error handling (intentionally trigger an error)
101    println!("Example 4: Error handling");
102    println!("-------------------------");
103    // Create a builder with a non-existent model
104    let chat_builder = ChatCompletionBuilder::new("non-existent-model")
105        .user("This should fail")
106        .build()?;
107    let result = client.execute_chat(chat_builder).await;
108
109    match result {
110        Ok(_) => println!("Unexpected success"),
111        Err(e) => println!("Expected error captured: {e}\n"),
112    }
113
114    // Example 5: Embeddings
115    println!("Example 5: Embeddings");
116    println!("--------------------");
117    let embeddings_builder = client.embeddings().text(
118        "text-embedding-ada-002",
119        "The quick brown fox jumps over the lazy dog",
120    );
121    let embeddings = client.embeddings().create(embeddings_builder).await?;
122    println!("Generated {} embedding(s)\n", embeddings.data.len());
123
124    // Example 6: Using custom metadata via interceptor context
125    println!("Example 6: Custom metadata via interceptor context");
126    println!("---------------------------------------------------");
127
128    // Set session and user IDs on the interceptor's context
129    langfuse_interceptor.set_session_id("demo-session-123");
130    langfuse_interceptor.set_user_id("demo-user-456");
131    langfuse_interceptor.add_tags(vec!["example".to_string(), "demo".to_string()]);
132
133    let chat_builder = client
134        .chat_simple("Say 'Hello from custom session!'")
135        .build()?;
136    let response = client.execute_chat(chat_builder).await?;
137    println!("Response with custom metadata: {:?}\n", response.content());
138
139    // Clear context for subsequent calls
140    langfuse_interceptor.clear_context();
141
142    println!(" All examples completed!");
143    println!(" Check your Langfuse dashboard to see the traces");
144    println!("   - Look for traces with operation name 'chat'");
145    println!("   - Each trace includes request/response details, token usage, and timing");
146    println!("   - Example 6 will have custom session_id, user_id, and tags");
147
148    // Shutdown the tracer provider to flush all spans
149    println!("\n⏳ Flushing spans to Langfuse...");
150    provider.shutdown()?;
151
152    Ok(())
153}

pub fn set_session_id(&self, session_id: impl Into<String>)

Set the session ID for traces created by this interceptor.

Examples found in repository
examples/langfuse.rs (line 129); full listing shown above under new().

pub fn set_user_id(&self, user_id: impl Into<String>)

Set the user ID for traces created by this interceptor.

Examples found in repository
examples/langfuse.rs (line 130); full listing shown above under new().

pub fn add_tags(&self, tags: Vec<String>)

Add tags to traces created by this interceptor.

Examples found in repository
examples/langfuse.rs (line 131); full listing shown above under new().

pub fn add_tag(&self, tag: impl Into<String>)

Add a single tag to traces created by this interceptor.

pub fn set_metadata(&self, metadata: Value)

Set metadata for traces created by this interceptor.

pub fn clear_context(&self)

Clear all context attributes.
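
A minimal sketch of context management using the methods above, assuming an interceptor shared via Arc as in examples/langfuse.rs, and assuming the Value parameter of set_metadata is serde_json::Value:

use serde_json::json;

// Attach Langfuse context to traces created by subsequent calls
langfuse_interceptor.set_session_id("session-abc");
langfuse_interceptor.set_user_id("user-42");
langfuse_interceptor.add_tag("checkout");
langfuse_interceptor.add_tags(vec!["beta".to_string(), "eu".to_string()]);
langfuse_interceptor.set_metadata(json!({ "experiment": "A" }));

// ... make API calls here; their traces carry the context set above ...

// Reset so later calls start with a clean context
langfuse_interceptor.clear_context();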

Examples found in repository
examples/langfuse.rs (line 140); full listing shown above under new().

pub fn context(&self) -> &Arc<LangfuseContext>

Get a reference to the Langfuse context.
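
For instance, the returned Arc can be cloned to hold the context handle independently of the interceptor (a sketch; the methods available on LangfuseContext are documented on that type):

let ctx = std::sync::Arc::clone(interceptor.context());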

Trait Implementations§

impl<T: Tracer + Send + Sync> Interceptor<LangfuseState<<T as Tracer>::Span>> for LangfuseInterceptor<T>
where T::Span: Send + Sync + 'static,

async fn before_request(&self, ctx: &mut BeforeRequestContext<'_, LangfuseState<T::Span>>) -> Result<()>

Called before a request is sent. Read more

async fn after_response(&self, ctx: &AfterResponseContext<'_, LangfuseState<T::Span>>) -> Result<()>

Called after a successful non-streaming response is received.

async fn on_stream_chunk(&self, _ctx: &StreamChunkContext<'_, LangfuseState<T::Span>>) -> Result<()>

Called for each chunk in a streaming response.

async fn on_stream_end(&self, ctx: &StreamEndContext<'_, LangfuseState<T::Span>>) -> Result<()>

Called when a streaming response completes successfully.

async fn on_error(&self, ctx: &ErrorContext<'_, LangfuseState<T::Span>>)

Called when an error occurs at any stage. Read more

Auto Trait Implementations§

Blanket Implementations§

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> FutureExt for T

fn with_context(self, otel_cx: Context) -> WithContext<Self>

Attaches the provided Context to this type, returning a WithContext wrapper. Read more

fn with_current_context(self) -> WithContext<Self>

Attaches the current Context to this type, returning a WithContext wrapper. Read more

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> PolicyExt for T
where T: ?Sized,

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more

impl<T> ErasedDestructor for T
where T: 'static,