# langfuse-client 0.1.22

## Authentication

Authenticate with the API using [Basic Auth](https://en.wikipedia.org/wiki/Basic_access_authentication); get API keys in the project settings:

- username: Langfuse Public Key
- password: Langfuse Secret Key

## Exports

- OpenAPI spec: https://cloud.langfuse.com/generated/api/openapi.yml
- Postman collection: https://cloud.langfuse.com/generated/postman/collection.json

## Documentation
/*
 * langfuse
 *
 * ## Authentication  Authenticate with the API using [Basic Auth](https://en.wikipedia.org/wiki/Basic_access_authentication), get API keys in the project settings:  - username: Langfuse Public Key - password: Langfuse Secret Key  ## Exports  - OpenAPI spec: https://cloud.langfuse.com/generated/api/openapi.yml - Postman collection: https://cloud.langfuse.com/generated/postman/collection.json
 *
 * The version of the OpenAPI document: 
 * 
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};
#[cfg(not(feature = "ahash"))]
use std::collections::HashMap;
#[cfg(feature = "ahash")]
use ahash::HashMap;

/// OpenAiCompletionUsageSchema : OpenAI Usage schema from (Chat-)Completion APIs
///
/// Mirrors the `usage` object reported by OpenAI-style (Chat-)Completion
/// responses: three mandatory token counters plus two optional per-category
/// detail maps (e.g. cached/reasoning token breakdowns — exact keys depend on
/// the upstream API; not enumerated here).
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature="bon", derive(bon::Builder))]
pub struct OpenAiCompletionUsageSchema {
    /// Token count for the prompt/input side of the request.
    #[serde(rename = "prompt_tokens")]
    pub prompt_tokens: i32,
    /// Token count for the generated completion/output.
    #[serde(rename = "completion_tokens")]
    pub completion_tokens: i32,
    /// Total tokens billed for the request (prompt + completion per the
    /// upstream schema; this struct does not enforce that invariant).
    #[serde(rename = "total_tokens")]
    pub total_tokens: i32,
    /// Optional breakdown of `prompt_tokens` by category.
    /// `double_option` makes the field tri-state: `None` = key absent,
    /// `Some(None)` = key present with JSON `null`, `Some(Some(map))` = value.
    #[serde(rename = "prompt_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub prompt_tokens_details: Option<Option<HashMap<String, i32>>>,
    /// Optional breakdown of `completion_tokens` by category; same
    /// absent / null / value tri-state encoding as `prompt_tokens_details`.
    #[serde(rename = "completion_tokens_details", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub completion_tokens_details: Option<Option<HashMap<String, i32>>>,
}

impl OpenAiCompletionUsageSchema {
    /// OpenAI Usage schema from (Chat-)Completion APIs
    pub fn new(prompt_tokens: i32, completion_tokens: i32, total_tokens: i32) -> OpenAiCompletionUsageSchema {
        OpenAiCompletionUsageSchema {
            prompt_tokens,
            completion_tokens,
            total_tokens,
            prompt_tokens_details: None,
            completion_tokens_details: None,
        }
    }
}