use opentelemetry::Key;
/// Span attribute: the GenAI operation being performed (e.g. a chat or embeddings call).
pub const OPERATION_NAME: Key = Key::from_static_str("gen_ai.operation.name");
/// Span attribute: the GenAI provider serving the request.
pub const PROVIDER_NAME: Key = Key::from_static_str("gen_ai.provider.name");
/// Span attribute: the GenAI system/product family.
/// NOTE(review): recent OTel semconv deprecates `gen_ai.system` in favor of
/// `gen_ai.provider.name` — confirm which semconv version this crate targets.
pub const SYSTEM: Key = Key::from_static_str("gen_ai.system");
/// Attributes describing the request sent to the model (`gen_ai.request.*`,
/// plus request-side system-instruction and input-message keys).
pub mod request {
use opentelemetry::Key;
/// Name of the model requested by the client.
pub const MODEL: Key = Key::from_static_str("gen_ai.request.model");
/// Sampling temperature setting for the request.
pub const TEMPERATURE: Key = Key::from_static_str("gen_ai.request.temperature");
/// Nucleus-sampling (top-p) setting for the request.
pub const TOP_P: Key = Key::from_static_str("gen_ai.request.top_p");
/// Top-k sampling setting for the request.
pub const TOP_K: Key = Key::from_static_str("gen_ai.request.top_k");
/// Maximum number of tokens the model may generate.
pub const MAX_TOKENS: Key = Key::from_static_str("gen_ai.request.max_tokens");
/// Sequences at which generation should stop.
pub const STOP_SEQUENCES: Key = Key::from_static_str("gen_ai.request.stop_sequences");
/// Frequency-penalty setting for the request.
pub const FREQUENCY_PENALTY: Key = Key::from_static_str("gen_ai.request.frequency_penalty");
/// Presence-penalty setting for the request.
pub const PRESENCE_PENALTY: Key = Key::from_static_str("gen_ai.request.presence_penalty");
/// NOTE(review): `gen_ai.request.finish_reasons` is not a standard semconv
/// attribute — finish reasons are defined on the response side as
/// `gen_ai.response.finish_reasons` (see the sibling `response` module).
/// Confirm whether this key is intentional or a misplaced duplicate.
pub const FINISH_REASONS: Key = Key::from_static_str("gen_ai.request.finish_reasons");
/// System instructions supplied with the request (note: key has no
/// `request.` segment — `gen_ai.system_instructions`).
pub const SYSTEM_INSTRUCTIONS: Key = Key::from_static_str("gen_ai.system_instructions");
/// Input messages supplied with the request (key `gen_ai.input.messages`).
pub const INPUT_MESSAGES: Key = Key::from_static_str("gen_ai.input.messages");
}
/// Attributes describing the model's response (`gen_ai.response.*`,
/// plus the output-message key).
pub mod response {
use opentelemetry::Key;
/// Name of the model that actually produced the response.
pub const MODEL: Key = Key::from_static_str("gen_ai.response.model");
/// Provider-assigned identifier of the response.
pub const ID: Key = Key::from_static_str("gen_ai.response.id");
/// Reasons the model stopped generating (one per choice).
pub const FINISH_REASONS: Key = Key::from_static_str("gen_ai.response.finish_reasons");
/// Output messages returned by the model (key `gen_ai.output.messages`).
pub const OUTPUT_MESSAGES: Key = Key::from_static_str("gen_ai.output.messages");
}
/// Token-usage attributes (`gen_ai.usage.*`).
pub mod usage {
use opentelemetry::Key;
/// Number of tokens consumed by the input (prompt).
pub const INPUT_TOKENS: Key = Key::from_static_str("gen_ai.usage.input_tokens");
/// Number of tokens produced in the output (completion).
pub const OUTPUT_TOKENS: Key = Key::from_static_str("gen_ai.usage.output_tokens");
}
/// Token-related attributes (`gen_ai.token.*`).
pub mod token {
use opentelemetry::Key;
/// Distinguishes the kind of token being counted — presumably "input" vs
/// "output" on the token-usage metric; confirm against the emitting code.
pub const TYPE: Key = Key::from_static_str("gen_ai.token.type");
}
/// Per-choice attributes (`gen_ai.choice.*`).
/// NOTE(review): semconv defines `gen_ai.choice` as an event name; these
/// dotted per-choice attribute keys look crate-specific — confirm intent.
pub mod choice {
use opentelemetry::Key;
/// Reason a single choice stopped generating.
pub const FINISH_REASON: Key = Key::from_static_str("gen_ai.choice.finish_reason");
/// Zero-based index of the choice within the response.
pub const INDEX: Key = Key::from_static_str("gen_ai.choice.index");
}
/// Prompt-template attributes (`gen_ai.prompt.*`).
/// NOTE(review): these keys are not part of the stable OTel GenAI semconv —
/// presumably crate- or vendor-specific extensions; confirm.
pub mod prompt {
use opentelemetry::Key;
/// Template text (or identifier) of the prompt used.
pub const TEMPLATE: Key = Key::from_static_str("gen_ai.prompt.template");
/// Version of the prompt template used.
pub const VERSION: Key = Key::from_static_str("gen_ai.prompt.version");
}
/// Tool-invocation attributes (`gen_ai.tool.*`).
pub mod tool {
use opentelemetry::Key;
/// Name of the tool being invoked.
pub const NAME: Key = Key::from_static_str("gen_ai.tool.name");
/// Identifier correlating a tool call with its result.
pub const CALL_ID: Key = Key::from_static_str("gen_ai.tool.call.id");
/// Arguments passed to the tool.
/// NOTE(review): `gen_ai.tool.arguments` / `gen_ai.tool.result` are not in
/// the stable semconv attribute registry — confirm these are intentional
/// extensions.
pub const ARGUMENTS: Key = Key::from_static_str("gen_ai.tool.arguments");
/// Result returned by the tool.
pub const RESULT: Key = Key::from_static_str("gen_ai.tool.result");
}
/// Agent attributes (`gen_ai.agent.*`).
pub mod agent {
use opentelemetry::Key;
/// Human-readable name of the agent.
pub const NAME: Key = Key::from_static_str("gen_ai.agent.name");
/// Free-form description of the agent.
pub const DESCRIPTION: Key = Key::from_static_str("gen_ai.agent.description");
/// Unique identifier of the agent.
pub const ID: Key = Key::from_static_str("gen_ai.agent.id");
}
/// Span/log event names. Plain `&str` (not [`opentelemetry::Key`]) because
/// event names are passed as strings, not attribute keys.
/// NOTE(review): semconv's GenAI events are `gen_ai.system.message`,
/// `gen_ai.user.message`, `gen_ai.assistant.message`, `gen_ai.tool.message`
/// and `gen_ai.choice`; several names here (`gen_ai.content`,
/// `gen_ai.system.prompt`, `gen_ai.user.prompt`, `gen_ai.assistant.response`)
/// deviate from that list — confirm they are intentional.
pub mod events {
/// Event carrying captured content.
pub const CONTENT: &str = "gen_ai.content";
/// Event recording a tool call.
pub const TOOL_CALL: &str = "gen_ai.tool.call";
/// Event recording a response choice.
pub const CHOICE: &str = "gen_ai.choice";
/// Event recording the system prompt.
pub const SYSTEM_PROMPT: &str = "gen_ai.system.prompt";
/// Event recording the user prompt.
pub const USER_PROMPT: &str = "gen_ai.user.prompt";
/// Event recording the assistant's response.
pub const ASSISTANT_RESPONSE: &str = "gen_ai.assistant.response";
}
/// Metric instrument names. Plain `&str` because metric names are passed as
/// strings when creating instruments.
pub mod metrics {
/// Client-side request duration histogram.
/// NOTE(review): the semconv client-duration metric is named
/// `gen_ai.client.operation.duration`, not `gen_ai.client.request.duration`.
/// Renaming would break existing dashboards/alerts — confirm before changing.
pub const CLIENT_REQUEST_DURATION: &str = "gen_ai.client.request.duration";
/// Client-side token usage histogram (split by `gen_ai.token.type`
/// — TODO confirm against the recording code).
pub const CLIENT_TOKEN_USAGE: &str = "gen_ai.client.token.usage";
/// Server-side request duration histogram.
pub const SERVER_REQUEST_DURATION: &str = "gen_ai.server.request.duration";
/// Server-side time to first streamed token.
pub const SERVER_TIME_TO_FIRST_TOKEN: &str = "gen_ai.server.time_to_first_token";
/// Server-side time per output token after the first.
pub const SERVER_TIME_PER_OUTPUT_TOKEN: &str = "gen_ai.server.time_per_output_token";
}
/// Translates an OpenInference span-attribute key into the equivalent
/// OpenTelemetry GenAI semantic-convention [`Key`].
///
/// Returns `None` when the key has no known GenAI counterpart.
pub fn map_openinference_to_gen_ai(openinference_key: &str) -> Option<Key> {
    // Early-return on unknown keys; known keys fall through to a single `Some`.
    let gen_ai_key = match openinference_key {
        "llm.model_name" => request::MODEL,
        "llm.provider" => PROVIDER_NAME,
        "llm.system" => SYSTEM,
        "llm.token_count.prompt" => usage::INPUT_TOKENS,
        "llm.token_count.completion" => usage::OUTPUT_TOKENS,
        _ => return None,
    };
    Some(gen_ai_key)
}
pub fn map_gen_ai_to_openinference(gen_ai_key: &str) -> Option<Key> {
match gen_ai_key {
"gen_ai.request.model" => Some(crate::attributes::llm::MODEL_NAME),
"gen_ai.provider.name" => Some(crate::attributes::llm::PROVIDER),
"gen_ai.system" => Some(crate::attributes::llm::SYSTEM),
"gen_ai.usage.input_tokens" => Some(crate::attributes::llm::token_count::PROMPT),
"gen_ai.usage.output_tokens" => Some(crate::attributes::llm::token_count::COMPLETION),
_ => None,
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Known keys translate in both directions to their expected counterparts.
    #[test]
    fn test_attribute_mapping_roundtrip() {
        // OpenInference -> GenAI, table-driven.
        let forward_cases = [
            ("llm.model_name", request::MODEL),
            ("llm.token_count.prompt", usage::INPUT_TOKENS),
        ];
        for (openinference_key, expected) in forward_cases {
            assert_eq!(map_openinference_to_gen_ai(openinference_key), Some(expected));
        }
        // GenAI -> OpenInference.
        assert_eq!(
            map_gen_ai_to_openinference("gen_ai.request.model"),
            Some(crate::attributes::llm::MODEL_NAME)
        );
    }

    /// Unrecognized keys map to `None` in both directions.
    #[test]
    fn test_unknown_attributes_return_none() {
        assert!(map_openinference_to_gen_ai("unknown.attribute").is_none());
        assert!(map_gen_ai_to_openinference("unknown.attribute").is_none());
    }
}