// langfuse_client/models/open_ai_response_usage_schema.rs
use crate::models;
use serde::{Deserialize, Serialize};

14#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
16#[cfg_attr(feature="bon", derive(bon::Builder))]
17pub struct OpenAiResponseUsageSchema {
18 #[serde(rename = "input_tokens")]
19 pub input_tokens: i32,
20 #[serde(rename = "output_tokens")]
21 pub output_tokens: i32,
22 #[serde(rename = "total_tokens")]
23 pub total_tokens: i32,
24 #[serde(rename = "input_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
25 pub input_tokens_details: Option<Option<std::collections::HashMap<String, i32>>>,
26 #[serde(rename = "output_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
27 pub output_tokens_details: Option<Option<std::collections::HashMap<String, i32>>>,
28}
29
30impl OpenAiResponseUsageSchema {
31 pub fn new(input_tokens: i32, output_tokens: i32, total_tokens: i32) -> OpenAiResponseUsageSchema {
33 OpenAiResponseUsageSchema {
34 input_tokens,
35 output_tokens,
36 total_tokens,
37 input_tokens_details: None,
38 output_tokens_details: None,
39 }
40 }
41}
42