// langfuse_client/models/open_ai_response_usage_schema.rs
use crate::models;
use serde::{Deserialize, Serialize};
#[cfg(not(feature = "ahash"))]
use std::collections::HashMap;
#[cfg(feature = "ahash")]
use ahash::HashMap;

/// Token-usage accounting for an OpenAI Responses-API call, as recorded by Langfuse.
///
/// Field names are serialized in snake_case exactly as the upstream API emits them
/// (`input_tokens`, `output_tokens`, …). The two `*_details` maps use
/// `serde_with::rust::double_option` so that a missing key, an explicit JSON
/// `null`, and a present map are all distinguishable:
/// `None` = field absent, `Some(None)` = explicit `null`, `Some(Some(map))` = value.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature="bon", derive(bon::Builder))]
pub struct OpenAiResponseUsageSchema {
    /// Tokens consumed by the request input (prompt).
    #[serde(rename = "input_tokens")]
    pub input_tokens: i32,
    /// Tokens produced in the response output.
    #[serde(rename = "output_tokens")]
    pub output_tokens: i32,
    /// Total billed tokens for the call.
    // NOTE(review): presumably input + output, but this struct does not enforce
    // that invariant — values come straight from the wire.
    #[serde(rename = "total_tokens")]
    pub total_tokens: i32,
    /// Optional per-category breakdown of the input tokens.
    // Keys are presumably upstream category names (e.g. cached tokens) — confirm
    // against the OpenAI Responses API schema.
    #[serde(rename = "input_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub input_tokens_details: Option<Option<HashMap<String, i32>>>,
    /// Optional per-category breakdown of the output tokens.
    #[serde(rename = "output_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub output_tokens_details: Option<Option<HashMap<String, i32>>>,
}
33
34impl OpenAiResponseUsageSchema {
35 pub fn new(input_tokens: i32, output_tokens: i32, total_tokens: i32) -> OpenAiResponseUsageSchema {
37 OpenAiResponseUsageSchema {
38 input_tokens,
39 output_tokens,
40 total_tokens,
41 input_tokens_details: None,
42 output_tokens_details: None,
43 }
44 }
45}
46