// language_barrier_core/model.rs

use std::fmt;
/// Capability metadata shared by every model family supported by this crate.
///
/// Implementors are expected to be small value types (`Copy`) so a model
/// selection can be passed around freely and shared across threads
/// (`Send + Sync`).
pub trait ModelInfo: Send + Sync + fmt::Debug + Clone + Copy {
    /// Total number of tokens (prompt plus completion) the model can attend to.
    fn context_window(&self) -> usize;

    /// Maximum number of tokens the model may emit in a single response.
    fn max_output_tokens(&self) -> usize;
}
/// Distinguishes the two released revisions of Claude 3.5 Sonnet.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Sonnet35Version {
    /// The original Claude 3.5 Sonnet release.
    V1,
    /// The updated Claude 3.5 Sonnet release.
    V2,
}
22#[derive(Debug, Clone, Copy, PartialEq, Eq)]
24pub enum Claude {
25 Sonnet35 { version: Sonnet35Version },
26 Sonnet37 { use_extended_thinking: bool },
27 Haiku35,
28 Haiku3,
29 Opus3,
30}
31
32impl Default for Claude {
33 fn default() -> Self {
34 Self::Opus3
35 }
36}
37
38impl ModelInfo for Claude {
39 fn context_window(&self) -> usize {
41 200_000
42 }
43
44 fn max_output_tokens(&self) -> usize {
45 match self {
46 Self::Sonnet37 {
47 use_extended_thinking: _,
48 } => 64_000,
49 Self::Sonnet35 { version: _ } | Self::Haiku35 => 8192,
50 Self::Haiku3 | Self::Opus3 => 4096,
51 }
52 }
53}
54
/// Google Gemini model family.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Gemini {
    /// Gemini 1.5 Flash.
    Flash15,
    /// Gemini 2.0 Flash.
    Flash20,
    /// Gemini 2.0 Flash-Lite.
    Flash20Lite,
    /// Gemini 2.5 Flash (preview build).
    Flash25Preview,
}
68impl ModelInfo for Gemini {
69 fn context_window(&self) -> usize {
70 1_048_576
72 }
73
74 fn max_output_tokens(&self) -> usize {
75 match self {
76 Self::Flash15 | Self::Flash20 | Self::Flash20Lite => 8_192,
77 Self::Flash25Preview => 65_536,
78 }
79 }
80}
81
82impl crate::provider::gemini::GeminiModelInfo for Gemini {
84 fn gemini_model_id(&self) -> String {
85 match self {
86 Self::Flash15 => "gemini-1.5-flash",
87 Self::Flash20 => "gemini-2.0-flash",
88 Self::Flash20Lite => "gemini-2.0-flash-lite",
89 Self::Flash25Preview => "gemini-2.5-flash-preview-04-17",
90 }
91 .to_string()
92 }
93}
94
/// OpenAI model family: GPT chat models and the o-series reasoning models.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OpenAi {
    GPT4o,
    GPT4oMini,
    GPT4Turbo,
    GPT35Turbo,
    O1,
    O1Mini,
    O1Pro,
    O3,
    O3Mini,
    O4Mini,
}
116impl ModelInfo for OpenAi {
117 fn context_window(&self) -> usize {
118 match self {
119 Self::O1Mini | Self::GPT4o | Self::GPT4oMini | Self::GPT4Turbo => 128_000,
120 Self::GPT35Turbo => 16_000,
121 _ => 200_000,
122 }
123 }
124
125 fn max_output_tokens(&self) -> usize {
126 match self {
127 Self::GPT4o | Self::GPT4oMini | Self::GPT4Turbo | Self::GPT35Turbo => 4_096,
128 Self::O1Mini => 65_536,
129 _ => 100_000,
130 }
131 }
132}
133
134impl crate::provider::openai::OpenAIModelInfo for OpenAi {
136 fn openai_model_id(&self) -> String {
137 match self {
138 Self::GPT4o => "gpt-4o",
139 Self::GPT4oMini => "gpt-4o-mini",
140 Self::GPT4Turbo => "gpt-4-turbo",
141 Self::GPT35Turbo => "gpt-3.5-turbo",
142 Self::O4Mini => "o4-mini-2025-04-16",
143 Self::O3 => "o3-2025-04-16",
144 Self::O3Mini => "o3-mini-2025-01-31",
145 Self::O1 => "o1-2024-12-17",
146 Self::O1Mini => "o1-mini-2024-09-12",
147 Self::O1Pro => "o1-pro-2025-03-19",
148 }
149 .to_string()
150 }
151}
152
/// Mistral AI model family.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Mistral {
    /// Flagship `mistral-large` model.
    Large,
    /// Cost-efficient `mistral-small` model.
    Small,
    /// Open-weight Mistral Nemo model.
    Nemo,
    /// Code-specialized Codestral model.
    Codestral,
    /// Embedding model (not a chat model).
    Embed,
}
168impl ModelInfo for Mistral {
169 fn context_window(&self) -> usize {
170 match self {
171 Self::Large | Self::Small | Self::Nemo => 131_072, Self::Codestral => 262_144, Self::Embed => 8_192, }
175 }
176
177 fn max_output_tokens(&self) -> usize {
178 4_096
180 }
181}
182
183impl crate::provider::mistral::MistralModelInfo for Mistral {
185 fn mistral_model_id(&self) -> String {
186 match self {
187 Self::Large => "mistral-large-latest",
188 Self::Small => "mistral-small-latest",
189 Self::Nemo => "open-mistral-nemo",
190 Self::Codestral => "codestral-latest",
191 Self::Embed => "mistral-embed",
192 }
193 .to_string()
194 }
195}