// langfuse_client/models/open_ai_response_usage_schema.rs

/*
 * langfuse
 *
 * ## Authentication  Authenticate with the API using [Basic Auth](https://en.wikipedia.org/wiki/Basic_access_authentication), get API keys in the project settings:  - username: Langfuse Public Key - password: Langfuse Secret Key  ## Exports  - OpenAPI spec: https://cloud.langfuse.com/generated/api/openapi.yml - Postman collection: https://cloud.langfuse.com/generated/postman/collection.json
 *
 * The version of the OpenAPI document: 
 * 
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

14/// OpenAiResponseUsageSchema : OpenAI Usage schema from Response API
15#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
16#[cfg_attr(feature="bon", derive(bon::Builder))]
17pub struct OpenAiResponseUsageSchema {
18    #[serde(rename = "input_tokens")]
19    pub input_tokens: i32,
20    #[serde(rename = "output_tokens")]
21    pub output_tokens: i32,
22    #[serde(rename = "total_tokens")]
23    pub total_tokens: i32,
24    #[serde(rename = "input_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
25    pub input_tokens_details: Option<Option<std::collections::HashMap<String, i32>>>,
26    #[serde(rename = "output_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
27    pub output_tokens_details: Option<Option<std::collections::HashMap<String, i32>>>,
28}
29
30impl OpenAiResponseUsageSchema {
31    /// OpenAI Usage schema from Response API
32    pub fn new(input_tokens: i32, output_tokens: i32, total_tokens: i32) -> OpenAiResponseUsageSchema {
33        OpenAiResponseUsageSchema {
34            input_tokens,
35            output_tokens,
36            total_tokens,
37            input_tokens_details: None,
38            output_tokens_details: None,
39        }
40    }
41}
42