use crate::traits::{ChatMessage, ChatRole};
use serde::{Deserialize, Serialize};
use std::env;
/// Returns `true` when content capture is enabled via the
/// `EDGECODE_CAPTURE_CONTENT` environment variable.
///
/// Accepted truthy values are `"1"` and any ASCII-case variant of `"true"`
/// (`true`, `TRUE`, `True`, ...). Unset or any other value disables capture,
/// so capture is always an explicit opt-in.
pub fn should_capture_content() -> bool {
    env::var("EDGECODE_CAPTURE_CONTENT")
        // `eq_ignore_ascii_case` avoids the per-call allocation `to_lowercase` makes.
        .map(|v| v.eq_ignore_ascii_case("true") || v == "1")
        .unwrap_or(false)
}
/// One part of a captured GenAI message.
///
/// Serialized with an internal `"type"` tag, so the JSON reads as
/// `{"type": "text" | "tool_call" | "tool_result", ...}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum GenAIMessagePart {
    /// Plain text content.
    Text { text: String },
    /// A tool invocation requested by the model.
    ToolCall { tool_call: GenAIToolCall },
    /// The output produced for a previously requested tool call.
    ToolResult { tool_result: GenAIToolResult },
}

/// A tool invocation as requested by the model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenAIToolCall {
    /// Provider-assigned call id; pairs this call with its eventual result.
    pub id: String,
    /// Name of the tool/function being invoked.
    pub name: String,
    /// Raw JSON-encoded arguments, passed through verbatim.
    pub arguments: String,
}

/// The result produced for an earlier tool call.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenAIToolResult {
    /// Id of the [`GenAIToolCall`] this result answers.
    pub tool_call_id: String,
    /// Tool output as text.
    pub content: String,
}

/// One message in the GenAI capture format: a role plus content parts.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenAIMessage {
    /// One of "system", "user", "assistant", "tool", "function".
    pub role: String,
    pub content: Vec<GenAIMessagePart>,
}

/// Converts internal [`ChatMessage`]s to [`GenAIMessage`]s for capture.
///
/// For each message:
/// - the text content becomes a `Text` part, except for tool-role messages
///   that carry a `tool_call_id`, which become a `ToolResult` part so the
///   call/result pairing survives in the captured telemetry (previously such
///   messages were flattened to plain text and the id was dropped);
/// - any attached tool calls are appended as `ToolCall` parts.
pub fn convert_to_genai_messages(messages: &[ChatMessage]) -> Vec<GenAIMessage> {
    messages
        .iter()
        .map(|msg| {
            let role = match msg.role {
                ChatRole::System => "system",
                ChatRole::User => "user",
                ChatRole::Assistant => "assistant",
                ChatRole::Tool => "tool",
                ChatRole::Function => "function",
            };
            // Tool-role messages referencing a call id are tool results;
            // emitting them as `ToolResult` preserves the linkage that a
            // plain `Text` part would lose.
            let first_part = match (&msg.role, &msg.tool_call_id) {
                (ChatRole::Tool, Some(id)) => GenAIMessagePart::ToolResult {
                    tool_result: GenAIToolResult {
                        tool_call_id: id.clone(),
                        content: msg.content.clone(),
                    },
                },
                _ => GenAIMessagePart::Text {
                    text: msg.content.clone(),
                },
            };
            let mut all_parts = vec![first_part];
            if let Some(tool_calls) = &msg.tool_calls {
                for tc in tool_calls {
                    all_parts.push(GenAIMessagePart::ToolCall {
                        tool_call: GenAIToolCall {
                            id: tc.id.clone(),
                            name: tc.function.name.clone(),
                            arguments: tc.function.arguments.clone(),
                        },
                    });
                }
            }
            GenAIMessage {
                role: role.to_string(),
                content: all_parts,
            }
        })
        .collect()
}
/// Emits a `gen_ai.client.inference.operation.details` tracing event
/// capturing the full input/output content of a single LLM call.
///
/// Does nothing unless content capture is explicitly opted into via the
/// `EDGECODE_CAPTURE_CONTENT` env var (see [`should_capture_content`]).
/// Serialization failures are logged at WARN and the event is dropped
/// rather than panicking.
pub fn emit_inference_event(
    input_messages: &[ChatMessage],
    output_messages: &[ChatMessage],
    response: &crate::traits::LLMResponse,
    options: Option<&crate::traits::CompletionOptions>,
) {
    // Opt-in gate: captured content may include user data, so never emit by default.
    if !should_capture_content() {
        tracing::debug!("Content capture disabled (EDGECODE_CAPTURE_CONTENT not set to true)");
        return;
    }
    // Convert both sides to the GenAI capture shape, then serialize each to a
    // single JSON string so it rides in one tracing field.
    let input = convert_to_genai_messages(input_messages);
    let output = convert_to_genai_messages(output_messages);
    let input_json = match serde_json::to_string(&input) {
        Ok(json) => json,
        Err(e) => {
            // Best-effort telemetry: log and skip rather than fail the call path.
            tracing::warn!("Failed to serialize input messages: {}", e);
            return;
        }
    };
    let output_json = match serde_json::to_string(&output) {
        Ok(json) => json,
        Err(e) => {
            tracing::warn!("Failed to serialize output messages: {}", e);
            return;
        }
    };
    // Provider response id, if present in the metadata map; empty string otherwise.
    let response_id = response
        .metadata
        .get("id")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let finish_reason = response.finish_reason.as_deref().unwrap_or("");
    // Sentinel defaults stand in for "option not provided", since tracing
    // fields cannot carry Option directly: -1.0 for temperature/top_p, 0 for
    // max_tokens, and -999.0 for the penalties (which can legitimately be
    // negative, so -1.0 would be ambiguous there).
    // NOTE(review): downstream consumers must treat these sentinels as
    // "absent" — confirm against the event pipeline.
    let temperature_val = options.and_then(|o| o.temperature).unwrap_or(-1.0) as f64;
    let max_tokens_val = options.and_then(|o| o.max_tokens).unwrap_or(0) as i64;
    let top_p_val = options.and_then(|o| o.top_p).unwrap_or(-1.0) as f64;
    let frequency_penalty_val = options.and_then(|o| o.frequency_penalty).unwrap_or(-999.0) as f64;
    let presence_penalty_val = options.and_then(|o| o.presence_penalty).unwrap_or(-999.0) as f64;
    let cache_hit_tokens_val = response.cache_hit_tokens.unwrap_or(0) as i64;
    // Structured event targeted at "gen_ai.events" so a subscriber can route
    // capture events separately from ordinary application logs.
    tracing::event!(
        target: "gen_ai.events",
        tracing::Level::INFO,
        event.name = "gen_ai.client.inference.operation.details",
        gen_ai.input.messages = %input_json,
        gen_ai.output.messages = %output_json,
        gen_ai.response.id = %response_id,
        gen_ai.response.finish_reasons = %finish_reason,
        gen_ai.usage.input_tokens = response.prompt_tokens as i64,
        gen_ai.usage.output_tokens = response.completion_tokens as i64,
        gen_ai.usage.cache_hit_tokens = cache_hit_tokens_val,
        gen_ai.request.temperature = temperature_val,
        gen_ai.request.max_tokens = max_tokens_val,
        gen_ai.request.top_p = top_p_val,
        gen_ai.request.frequency_penalty = frequency_penalty_val,
        gen_ai.request.presence_penalty = presence_penalty_val,
        "GenAI inference completed"
    );
    tracing::debug!(
        "Emitted gen_ai.client.inference.operation.details event with response_id={} finish_reason={}",
        response_id,
        finish_reason
    );
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ToolCall;
    use std::sync::Mutex;

    /// Serializes tests that mutate the `EDGECODE_CAPTURE_CONTENT` env var.
    ///
    /// Cargo runs tests in parallel by default and the process environment is
    /// shared state, so unsynchronized `env::set_var` / `env::remove_var`
    /// calls race across tests and make the env-dependent assertions flaky.
    /// Every test that touches the variable must hold this lock.
    static ENV_LOCK: Mutex<()> = Mutex::new(());

    /// Acquires the env lock, recovering from poisoning so one failing env
    /// test does not cascade spurious failures into the others.
    fn env_guard() -> std::sync::MutexGuard<'static, ()> {
        ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner())
    }

    /// Builds a plain text `ChatMessage` with no tool data attached.
    fn text_message(role: ChatRole, content: &str) -> ChatMessage {
        ChatMessage {
            role,
            content: content.to_string(),
            name: None,
            tool_calls: None,
            tool_call_id: None,
            cache_control: None,
            images: None,
        }
    }

    #[test]
    fn test_convert_simple_text_message() {
        let messages = vec![text_message(ChatRole::User, "Hello, world!")];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai.len(), 1);
        assert_eq!(genai[0].role, "user");
        assert_eq!(genai[0].content.len(), 1);
        match &genai[0].content[0] {
            GenAIMessagePart::Text { text } => assert_eq!(text, "Hello, world!"),
            _ => panic!("Expected text part"),
        }
    }

    #[test]
    fn test_convert_with_tool_calls() {
        let messages = vec![ChatMessage {
            tool_calls: Some(vec![ToolCall {
                id: "call_123".to_string(),
                call_type: "function".to_string(),
                function: crate::traits::FunctionCall {
                    name: "web_search".to_string(),
                    arguments: r#"{"query":"test"}"#.to_string(),
                },
                thought_signature: None,
            }]),
            ..text_message(ChatRole::Assistant, "Let me search for that.")
        }];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai.len(), 1);
        assert_eq!(genai[0].role, "assistant");
        // One part for the content plus one ToolCall part.
        assert_eq!(genai[0].content.len(), 2);
    }

    #[test]
    fn test_should_capture_content_enabled() {
        let _guard = env_guard();
        env::set_var("EDGECODE_CAPTURE_CONTENT", "true");
        assert!(should_capture_content());
        env::remove_var("EDGECODE_CAPTURE_CONTENT");
    }

    #[test]
    fn test_should_capture_content_disabled() {
        let _guard = env_guard();
        env::remove_var("EDGECODE_CAPTURE_CONTENT");
        assert!(!should_capture_content());
    }

    #[test]
    fn test_json_serialization() {
        let genai = GenAIMessage {
            role: "user".to_string(),
            content: vec![GenAIMessagePart::Text {
                text: "Test message".to_string(),
            }],
        };
        let json = serde_json::to_string(&genai).unwrap();
        assert!(json.contains("user"));
        assert!(json.contains("Test message"));
        // The serde tag attribute must surface as a "type" discriminator.
        assert!(json.contains("\"type\":\"text\""));
    }

    #[test]
    fn test_convert_system_role() {
        let messages = vec![text_message(ChatRole::System, "You are a helper.")];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai[0].role, "system");
    }

    #[test]
    fn test_convert_tool_role() {
        let messages = vec![ChatMessage {
            tool_call_id: Some("call_123".to_string()),
            ..text_message(ChatRole::Tool, "result data")
        }];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai[0].role, "tool");
    }

    #[test]
    fn test_convert_function_role() {
        let messages = vec![ChatMessage {
            name: Some("my_function".to_string()),
            ..text_message(ChatRole::Function, "function output")
        }];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai[0].role, "function");
    }

    #[test]
    fn test_convert_assistant_role() {
        let messages = vec![text_message(ChatRole::Assistant, "I can help.")];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai[0].role, "assistant");
    }

    #[test]
    fn test_should_capture_content_with_1() {
        let _guard = env_guard();
        env::set_var("EDGECODE_CAPTURE_CONTENT", "1");
        assert!(should_capture_content());
        env::remove_var("EDGECODE_CAPTURE_CONTENT");
    }

    #[test]
    fn test_should_capture_content_false_string() {
        let _guard = env_guard();
        env::set_var("EDGECODE_CAPTURE_CONTENT", "false");
        assert!(!should_capture_content());
        env::remove_var("EDGECODE_CAPTURE_CONTENT");
    }

    #[test]
    fn test_tool_call_serialization() {
        let part = GenAIMessagePart::ToolCall {
            tool_call: GenAIToolCall {
                id: "call_1".to_string(),
                name: "search".to_string(),
                arguments: r#"{"q":"test"}"#.to_string(),
            },
        };
        let json = serde_json::to_string(&part).unwrap();
        assert!(json.contains("tool_call"));
        assert!(json.contains("search"));
    }

    #[test]
    fn test_tool_result_serialization() {
        let part = GenAIMessagePart::ToolResult {
            tool_result: GenAIToolResult {
                tool_call_id: "call_1".to_string(),
                content: "search results".to_string(),
            },
        };
        let json = serde_json::to_string(&part).unwrap();
        assert!(json.contains("tool_result"));
        assert!(json.contains("search results"));
    }

    #[test]
    fn test_genai_message_deserialization() {
        let json = r#"{"role":"user","content":[{"type":"text","text":"hello"}]}"#;
        let msg: GenAIMessage = serde_json::from_str(json).unwrap();
        assert_eq!(msg.role, "user");
        assert_eq!(msg.content.len(), 1);
    }

    #[test]
    fn test_convert_multiple_messages() {
        let messages = vec![
            text_message(ChatRole::System, "System prompt"),
            text_message(ChatRole::User, "User message"),
        ];
        let genai = convert_to_genai_messages(&messages);
        assert_eq!(genai.len(), 2);
        assert_eq!(genai[0].role, "system");
        assert_eq!(genai[1].role, "user");
    }

    #[test]
    fn test_emit_inference_event_disabled() {
        // Holds the env lock because this test requires capture to be OFF.
        let _guard = env_guard();
        env::remove_var("EDGECODE_CAPTURE_CONTENT");
        let input = vec![text_message(ChatRole::User, "Hello")];
        let output = vec![text_message(ChatRole::Assistant, "Hi there")];
        let response = crate::traits::LLMResponse {
            content: "Hi there".to_string(),
            prompt_tokens: 10,
            completion_tokens: 5,
            total_tokens: 15,
            model: "gpt-4".to_string(),
            finish_reason: Some("stop".to_string()),
            metadata: Default::default(),
            cache_hit_tokens: None,
            tool_calls: vec![],
            thinking_tokens: None,
            thinking_content: None,
        };
        // Should early-return without panicking or emitting the event.
        emit_inference_event(&input, &output, &response, None);
    }
}