// adk_telemetry/spans.rs

//! Span helpers for common ADK operations
//!
//! Provides pre-configured spans for instrumenting agent, model, and tool operations.

use tracing::Span;

7/// Create a span for agent execution
8///
9/// # Arguments
10/// * `agent_name` - Name of the agent being executed
11/// * `invocation_id` - Unique ID for this invocation
12///
13/// # Example
14/// ```
15/// use adk_telemetry::agent_run_span;
16/// let span = agent_run_span("my-agent", "inv-123");
17/// let _enter = span.enter();
18/// // Agent execution code here
19/// ```
20pub fn agent_run_span(agent_name: &str, invocation_id: &str) -> Span {
21    tracing::info_span!(
22        "agent.run",
23        agent.name = agent_name,
24        invocation.id = invocation_id,
25        otel.kind = "internal"
26    )
27}
28
29/// Create a span for model API calls
30///
31/// # Arguments
32/// * `model_name` - Name of the LLM model being called
33///
34/// # Example
35/// ```
36/// use adk_telemetry::model_call_span;
37/// let span = model_call_span("gemini-2.5-flash");
38/// let _enter = span.enter();
39/// // Model call code here
40/// ```
41pub fn model_call_span(model_name: &str) -> Span {
42    tracing::info_span!("model.call", model.name = model_name, otel.kind = "client")
43}
44
45/// Create a span for LLM generate_content calls with pre-declared token usage fields.
46///
47/// This span follows [OpenTelemetry GenAI semantic conventions](https://opentelemetry.io/docs/specs/semconv/gen-ai/)
48/// and pre-declares all `gen_ai.*` fields so they can be recorded after the response arrives.
49///
50/// # Arguments
51/// * `provider` - Provider name (e.g., "gemini", "openai", "anthropic")
52/// * `model_name` - Model identifier (e.g., "gemini-2.5-flash", "gpt-5-mini")
53/// * `stream` - Whether this is a streaming request
54///
55/// # Example
56/// ```
57/// use adk_telemetry::llm_generate_span;
58/// let span = llm_generate_span("openai", "gpt-5-mini", true);
59/// let _enter = span.enter();
60/// // After response: adk_telemetry::record_llm_usage(&usage_metadata);
61/// ```
62pub fn llm_generate_span(provider: &str, model_name: &str, stream: bool) -> Span {
63    tracing::info_span!(
64        "gen_ai.generate",
65        gen_ai.system = %provider,
66        gen_ai.request.model = %model_name,
67        gen_ai.request.stream = stream,
68        gen_ai.usage.input_tokens = tracing::field::Empty,
69        gen_ai.usage.output_tokens = tracing::field::Empty,
70        gen_ai.usage.total_tokens = tracing::field::Empty,
71        gen_ai.usage.cache_read_tokens = tracing::field::Empty,
72        gen_ai.usage.cache_creation_tokens = tracing::field::Empty,
73        gen_ai.usage.thinking_tokens = tracing::field::Empty,
74        gen_ai.usage.audio_input_tokens = tracing::field::Empty,
75        gen_ai.usage.audio_output_tokens = tracing::field::Empty,
76        otel.kind = "client",
77    )
78}
79
/// Token usage counts to be recorded on tracing spans.
///
/// This is a dependency-free mirror of the token count fields on
/// `adk_core::UsageMetadata`; callers in `adk-model` convert from
/// `UsageMetadata` into this struct before calling [`record_llm_usage`].
///
/// # Example
/// ```
/// use adk_telemetry::LlmUsage;
/// let usage = LlmUsage {
///     input_tokens: 100,
///     output_tokens: 50,
///     total_tokens: 150,
///     ..Default::default()
/// };
/// ```
#[derive(Debug, Clone, Default)]
pub struct LlmUsage {
    /// Prompt / input token count.
    pub input_tokens: i32,
    /// Completion / output token count.
    pub output_tokens: i32,
    /// Total token count.
    pub total_tokens: i32,
    /// Tokens read from cache, when the provider reports caching.
    pub cache_read_tokens: Option<i32>,
    /// Tokens spent creating a cache entry, when reported.
    pub cache_creation_tokens: Option<i32>,
    /// Tokens used for chain-of-thought reasoning, when reported.
    pub thinking_tokens: Option<i32>,
    /// Audio input token count, when reported.
    pub audio_input_tokens: Option<i32>,
    /// Audio output token count, when reported.
    pub audio_output_tokens: Option<i32>,
}
116/// Record LLM token usage on the current span.
117///
118/// Call this after receiving the final `LlmResponse` to populate the
119/// `gen_ai.usage.*` fields declared by [`llm_generate_span`].
120///
121/// Fields are only recorded when present (non-zero / Some). This is safe to call
122/// even if the current span was not created by `llm_generate_span` — unknown
123/// fields are silently ignored by `tracing`.
124///
125/// # Example
126/// ```
127/// use adk_telemetry::{LlmUsage, record_llm_usage};
128/// let usage = LlmUsage {
129///     input_tokens: 100,
130///     output_tokens: 50,
131///     total_tokens: 150,
132///     cache_read_tokens: Some(80),
133///     ..Default::default()
134/// };
135/// record_llm_usage(&usage);
136/// ```
137pub fn record_llm_usage(usage: &LlmUsage) {
138    let span = Span::current();
139    span.record("gen_ai.usage.input_tokens", i64::from(usage.input_tokens));
140    span.record("gen_ai.usage.output_tokens", i64::from(usage.output_tokens));
141    span.record("gen_ai.usage.total_tokens", i64::from(usage.total_tokens));
142    if let Some(v) = usage.cache_read_tokens {
143        span.record("gen_ai.usage.cache_read_tokens", i64::from(v));
144    }
145    if let Some(v) = usage.cache_creation_tokens {
146        span.record("gen_ai.usage.cache_creation_tokens", i64::from(v));
147    }
148    if let Some(v) = usage.thinking_tokens {
149        span.record("gen_ai.usage.thinking_tokens", i64::from(v));
150    }
151    if let Some(v) = usage.audio_input_tokens {
152        span.record("gen_ai.usage.audio_input_tokens", i64::from(v));
153    }
154    if let Some(v) = usage.audio_output_tokens {
155        span.record("gen_ai.usage.audio_output_tokens", i64::from(v));
156    }
157}
158
159/// Create a span for tool execution
160///
161/// # Arguments
162/// * `tool_name` - Name of the tool being executed
163///
164/// # Example
165/// ```
166/// use adk_telemetry::tool_execute_span;
167/// let span = tool_execute_span("weather_tool");
168/// let _enter = span.enter();
169/// // Tool execution code here
170/// ```
171pub fn tool_execute_span(tool_name: &str) -> Span {
172    tracing::info_span!("tool.execute", tool.name = tool_name, otel.kind = "internal")
173}
174
175/// Create a span for callback execution
176///
177/// # Arguments
178/// * `callback_type` - Type of callback (e.g., "before_model", "after_agent")
179///
180/// # Example
181/// ```
182/// use adk_telemetry::callback_span;
183/// let span = callback_span("before_model");
184/// let _enter = span.enter();
185/// // Callback code here
186/// ```
187pub fn callback_span(callback_type: &str) -> Span {
188    tracing::debug_span!(
189        "callback",
190        callback.type = callback_type,
191    )
192}
193
194/// Add common attributes to the current span
195///
196/// # Arguments
197/// * `user_id` - User ID from context
198/// * `session_id` - Session ID from context
199pub fn add_context_attributes(user_id: &str, session_id: &str) {
200    let span = Span::current();
201    span.record("user.id", user_id);
202    span.record("session.id", session_id);
203}