//! This module primarily concerns being able to orchestrate telemetry across a given pipeline or workflow.
//! This includes tracing, being able to send traces to an OpenTelemetry collector, setting up your
//! agents with the correct tracing style so you can emit the right traces for platforms like Langfuse,
//! and more.

use crate::completion::GetTokenUsage;
use serde::Serialize;

/// Provider request metadata used to populate GenAI telemetry spans.
///
/// Implemented by provider-native request types so telemetry code can read
/// model, prompt, and message data without knowing the concrete provider.
pub trait ProviderRequestExt {
    /// Provider-native message type used for serialized input messages.
    type InputMessage: Serialize;

    /// Returns serialized input messages sent to the provider.
    fn get_input_messages(&self) -> Vec<Self::InputMessage>;
    /// Returns the system prompt, if represented separately by the provider.
    fn get_system_prompt(&self) -> Option<String>;
    /// Returns the model name requested from the provider.
    fn get_model_name(&self) -> String;
    /// Returns the primary prompt text, when available.
    fn get_prompt(&self) -> Option<String>;
}

/// Provider response metadata used to populate GenAI telemetry spans.
///
/// Implemented by provider-native response types; all accessors are optional
/// where providers may omit the corresponding field.
pub trait ProviderResponseExt {
    /// Provider-native output message type.
    type OutputMessage: Serialize;
    /// Provider-native usage type.
    type Usage: Serialize;

    /// Returns the provider response ID, if supplied.
    fn get_response_id(&self) -> Option<String>;

    /// Returns the provider response model name, if supplied.
    fn get_response_model_name(&self) -> Option<String>;

    /// Returns serialized output messages produced by the provider.
    fn get_output_messages(&self) -> Vec<Self::OutputMessage>;

    /// Returns the primary text response, when available.
    fn get_text_response(&self) -> Option<String>;

    /// Returns provider-native usage metrics, if supplied.
    fn get_usage(&self) -> Option<Self::Usage>;
}

/// A trait designed specifically to be used with Spans for the purpose of recording telemetry.
/// Implemented for [`tracing::Span`] to record GenAI semantic convention fields.
pub trait SpanCombinator {
    /// Record Rig-normalized token usage fields on the span.
    fn record_token_usage<U>(&self, usage: &U)
    where
        U: GetTokenUsage;

    /// Record provider response metadata such as response ID and model name.
    fn record_response_metadata<R>(&self, response: &R)
    where
        R: ProviderResponseExt;

    /// Record serialized model input messages.
    fn record_model_input<T>(&self, messages: &T)
    where
        T: Serialize;

    /// Record serialized model output messages.
    fn record_model_output<T>(&self, messages: &T)
    where
        T: Serialize;
}

71impl SpanCombinator for tracing::Span {
72    fn record_token_usage<U>(&self, usage: &U)
73    where
74        U: GetTokenUsage,
75    {
76        if self.is_disabled() {
77            return;
78        }
79
80        if let Some(usage) = usage.token_usage() {
81            self.record("gen_ai.usage.input_tokens", usage.input_tokens);
82            self.record("gen_ai.usage.output_tokens", usage.output_tokens);
83            self.record(
84                "gen_ai.usage.cache_read.input_tokens",
85                usage.cached_input_tokens,
86            );
87            self.record(
88                "gen_ai.usage.cache_creation.input_tokens",
89                usage.cache_creation_input_tokens,
90            );
91            self.record("gen_ai.usage.reasoning_tokens", usage.reasoning_tokens);
92        }
93    }
94
95    fn record_response_metadata<R>(&self, response: &R)
96    where
97        R: ProviderResponseExt,
98    {
99        if self.is_disabled() {
100            return;
101        }
102
103        if let Some(id) = response.get_response_id() {
104            self.record("gen_ai.response.id", id);
105        }
106
107        if let Some(model_name) = response.get_response_model_name() {
108            self.record("gen_ai.response.model", model_name);
109        }
110    }
111
112    fn record_model_input<T>(&self, input: &T)
113    where
114        T: Serialize,
115    {
116        if self.is_disabled() {
117            return;
118        }
119
120        if let Ok(input_as_json_string) = serde_json::to_string(input) {
121            self.record("gen_ai.input.messages", input_as_json_string);
122        }
123    }
124
125    fn record_model_output<T>(&self, output: &T)
126    where
127        T: Serialize,
128    {
129        if self.is_disabled() {
130            return;
131        }
132
133        if let Ok(output_as_json_string) = serde_json::to_string(output) {
134            self.record("gen_ai.output.messages", output_as_json_string);
135        }
136    }
137}