rig/telemetry/
mod.rs

//! This module is primarily concerned with orchestrating telemetry across a given pipeline or workflow.
//! This includes tracing, sending traces to an OpenTelemetry collector, setting up your
//! agents with the correct tracing style so that the right traces are emitted for platforms
//! like Langfuse, and more.

use crate::completion::GetTokenUsage;
use serde::Serialize;

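/// An extension trait for provider request types, exposing the parts of a request
/// that telemetry cares about: input messages, system prompt, model name and prompt.
///
/// Below is a minimal sketch of an implementation for a hypothetical request type.
/// `MyRequest` and `MyMessage` are illustrative only and are not part of this crate:
///
/// ```ignore
/// use serde::Serialize;
///
/// #[derive(Serialize, Clone)]
/// struct MyMessage {
///     role: String,
///     content: String,
/// }
///
/// struct MyRequest {
///     model: String,
///     system: Option<String>,
///     messages: Vec<MyMessage>,
/// }
///
/// impl ProviderRequestExt for MyRequest {
///     type InputMessage = MyMessage;
///
///     fn get_input_messages(&self) -> Vec<Self::InputMessage> {
///         // Telemetry only needs a serializable snapshot, so cloning is fine here.
///         self.messages.clone()
///     }
///
///     fn get_system_prompt(&self) -> Option<String> {
///         self.system.clone()
///     }
///
///     fn get_model_name(&self) -> String {
///         self.model.clone()
///     }
///
///     fn get_prompt(&self) -> Option<String> {
///         // Treat the content of the last message as the prompt, if there is one.
///         self.messages.last().map(|msg| msg.content.clone())
///     }
/// }
/// ```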
pub trait ProviderRequestExt {
    type InputMessage: Serialize;

    fn get_input_messages(&self) -> Vec<Self::InputMessage>;
    fn get_system_prompt(&self) -> Option<String>;
    fn get_model_name(&self) -> String;
    fn get_prompt(&self) -> Option<String>;
}

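/// An extension trait for provider response types, exposing the parts of a response
/// that telemetry cares about: response ID, model name, output messages, text and usage.
///
/// A minimal sketch of an implementation for a hypothetical response type
/// (`MyResponse`, `MyChoice` and `MyUsage` are illustrative only):
///
/// ```ignore
/// use serde::Serialize;
///
/// #[derive(Serialize, Clone)]
/// struct MyChoice {
///     content: String,
/// }
///
/// #[derive(Serialize, Clone)]
/// struct MyUsage {
///     input_tokens: u64,
///     output_tokens: u64,
/// }
///
/// struct MyResponse {
///     id: Option<String>,
///     model: Option<String>,
///     choices: Vec<MyChoice>,
///     usage: Option<MyUsage>,
/// }
///
/// impl ProviderResponseExt for MyResponse {
///     type OutputMessage = MyChoice;
///     type Usage = MyUsage;
///
///     fn get_response_id(&self) -> Option<String> {
///         self.id.clone()
///     }
///
///     fn get_response_model_name(&self) -> Option<String> {
///         self.model.clone()
///     }
///
///     fn get_output_messages(&self) -> Vec<Self::OutputMessage> {
///         self.choices.clone()
///     }
///
///     fn get_text_response(&self) -> Option<String> {
///         // Use the first choice as the primary text response.
///         self.choices.first().map(|choice| choice.content.clone())
///     }
///
///     fn get_usage(&self) -> Option<Self::Usage> {
///         self.usage.clone()
///     }
/// }
/// ```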
pub trait ProviderResponseExt {
    type OutputMessage: Serialize;
    type Usage: Serialize;

    fn get_response_id(&self) -> Option<String>;

    fn get_response_model_name(&self) -> Option<String>;

    fn get_output_messages(&self) -> Vec<Self::OutputMessage>;

    fn get_text_response(&self) -> Option<String>;

    fn get_usage(&self) -> Option<Self::Usage>;
}

/// A trait designed to be used with [`tracing::Span`] for recording telemetry.
/// Nearly all methods short-circuit and record nothing when the span is disabled.
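///
/// # Example
///
/// A minimal sketch of how these methods might be used. `tracing` only records
/// values for fields that were declared when the span was created, so the
/// `gen_ai.*` fields are declared up front as [`tracing::field::Empty`]
/// (the span name and the `usage` / `response` / message values here are illustrative):
///
/// ```ignore
/// use tracing::field::Empty;
///
/// let span = tracing::info_span!(
///     "chat",
///     "gen_ai.usage.input_tokens" = Empty,
///     "gen_ai.usage.output_tokens" = Empty,
///     "gen_ai.response.id" = Empty,
///     "gen_ai.response.model_name" = Empty,
///     "gen_ai.input.messages" = Empty,
///     "gen_ai.output.messages" = Empty,
/// );
///
/// // `usage` implements `GetTokenUsage` and `response` implements `ProviderResponseExt`.
/// span.record_token_usage(&usage);
/// span.record_response_metadata(&response);
/// span.record_model_input(&request_messages);
/// span.record_model_output(&response_messages);
/// ```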
pub trait SpanCombinator {
    fn record_token_usage<U>(&self, usage: &U)
    where
        U: GetTokenUsage;

    fn record_response_metadata<R>(&self, response: &R)
    where
        R: ProviderResponseExt;

    fn record_model_input<T>(&self, messages: &T)
    where
        T: Serialize;

    fn record_model_output<T>(&self, messages: &T)
    where
        T: Serialize;
}

impl SpanCombinator for tracing::Span {
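    // Note: `tracing::Span::record` only updates fields that were declared when the
    // span was created; values recorded against undeclared fields are silently
    // dropped. Callers should therefore declare the `gen_ai.*` fields (typically as
    // `tracing::field::Empty`) when constructing the span.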
    fn record_token_usage<U>(&self, usage: &U)
    where
        U: GetTokenUsage,
    {
        if self.is_disabled() {
            return;
        }

        if let Some(usage) = usage.token_usage() {
            self.record("gen_ai.usage.input_tokens", usage.input_tokens);
            self.record("gen_ai.usage.output_tokens", usage.output_tokens);
        }
    }

    fn record_response_metadata<R>(&self, response: &R)
    where
        R: ProviderResponseExt,
    {
        if self.is_disabled() {
            return;
        }

        if let Some(id) = response.get_response_id() {
            self.record("gen_ai.response.id", id);
        }

        if let Some(model_name) = response.get_response_model_name() {
            self.record("gen_ai.response.model_name", model_name);
        }
    }

    fn record_model_input<T>(&self, input: &T)
    where
        T: Serialize,
    {
        if self.is_disabled() {
            return;
        }

        let input_as_json_string =
            serde_json::to_string(input).expect("Serializing a Rust type to JSON should not break");

        self.record("gen_ai.input.messages", input_as_json_string);
    }

    fn record_model_output<T>(&self, output: &T)
    where
        T: Serialize,
    {
        if self.is_disabled() {
            return;
        }

        let output_as_json_string = serde_json::to_string(output)
            .expect("Serializing a Rust type to JSON should not break");

        self.record("gen_ai.output.messages", output_as_json_string);
    }
}