langchain_rust/language_models/mod.rs

use std::collections::HashMap;

use serde::{Deserialize, Serialize};

pub mod llm;
pub mod options;

mod error;
pub use error::*;

//TODO: check whether this should also carry a `data: serde_json::Value` field to hold
//other provider output, such as OpenAI function responses.
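/// The result of a single generation: the generated text plus, when the
/// provider reports it, the token usage for the call.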
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct GenerateResult {
    pub tokens: Option<TokenUsage>,
    pub generation: String,
}

impl GenerateResult {
    pub fn to_hashmap(&self) -> HashMap<String, String> {
        let mut map = HashMap::new();

        // Insert the 'generation' field into the hashmap
        map.insert("generation".to_string(), self.generation.clone());

        // Check if 'tokens' is Some and insert its fields into the hashmap
        if let Some(ref tokens) = self.tokens {
            map.insert(
                "prompt_tokens".to_string(),
                tokens.prompt_tokens.to_string(),
            );
            map.insert(
                "completion_tokens".to_string(),
                tokens.completion_tokens.to_string(),
            );
            map.insert("total_tokens".to_string(), tokens.total_tokens.to_string());
        }

        map
    }
}

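/// Token counts reported by the model provider for a single request.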
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct TokenUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

impl TokenUsage {
    /// Builds a `TokenUsage` from prompt and completion counts, deriving the total.
    pub fn new(prompt_tokens: u32, completion_tokens: u32) -> Self {
        Self {
            prompt_tokens,
            completion_tokens,
            total_tokens: prompt_tokens + completion_tokens,
        }
    }

    /// Returns a new `TokenUsage` with the counts of `self` and `other` added together.
    pub fn sum(&self, other: &TokenUsage) -> TokenUsage {
        TokenUsage {
            prompt_tokens: self.prompt_tokens + other.prompt_tokens,
            completion_tokens: self.completion_tokens + other.completion_tokens,
            total_tokens: self.total_tokens + other.total_tokens,
        }
    }

    /// Adds the counts of `other` to `self` in place.
    pub fn add(&mut self, other: &TokenUsage) {
        self.prompt_tokens += other.prompt_tokens;
        self.completion_tokens += other.completion_tokens;
        self.total_tokens += other.total_tokens;
    }
}
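
// A minimal usage sketch of `GenerateResult` and `TokenUsage`, written as tests so it
// compiles alongside the module; the expected values follow directly from the
// definitions above and the tests are purely illustrative.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn generate_result_to_hashmap_includes_token_fields() {
        let result = GenerateResult {
            tokens: Some(TokenUsage::new(10, 5)),
            generation: "Hello".to_string(),
        };

        let map = result.to_hashmap();
        assert_eq!(map.get("generation"), Some(&"Hello".to_string()));
        assert_eq!(map.get("prompt_tokens"), Some(&"10".to_string()));
        assert_eq!(map.get("completion_tokens"), Some(&"5".to_string()));
        assert_eq!(map.get("total_tokens"), Some(&"15".to_string()));
    }

    #[test]
    fn token_usage_sum_and_add_accumulate_counts() {
        let first = TokenUsage::new(10, 5);
        let second = TokenUsage::new(3, 2);

        // `sum` returns a new value; `add` mutates in place.
        let combined = first.sum(&second);
        assert_eq!(combined.total_tokens, 20);

        let mut running = TokenUsage::default();
        running.add(&first);
        running.add(&second);
        assert_eq!(running.prompt_tokens, 13);
        assert_eq!(running.completion_tokens, 7);
        assert_eq!(running.total_tokens, 20);
    }
}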