//! alith_models/api_model/anthropic.rs
//!
//! Anthropic (Claude) model presets and the builder trait for selecting them.

1use super::ApiLLMModel;
2use crate::{LLMModelBase, tokenizer::Tokenizer};
3use std::sync::Arc;
4
5impl ApiLLMModel {
6    pub fn anthropic_model_from_model_id(model_id: &str) -> ApiLLMModel {
7        if model_id.starts_with("claude-3-opus") {
8            Self::claude_3_opus()
9        } else if model_id.starts_with("claude-3-sonnet") {
10            Self::claude_3_sonnet()
11        } else if model_id.starts_with("claude-3-haiku") {
12            Self::claude_3_haiku()
13        } else if model_id.starts_with("claude-3-5-sonnet") {
14            Self::claude_3_5_sonnet()
15        } else if model_id.starts_with("claude-3-7-sonnet") {
16            Self::claude_3_7_sonnet()
17        } else {
18            Self::claude(model_id)
19        }
20    }
21
22    pub fn claude_3_opus() -> ApiLLMModel {
23        let model_id = "claude-3-opus-20240229".to_string();
24        let tokenizer = model_tokenizer(&model_id);
25        ApiLLMModel {
26            model_base: LLMModelBase {
27                model_id,
28                model_ctx_size: 200000,
29                inference_ctx_size: 4096,
30                tokenizer,
31            },
32            cost_per_m_in_tokens: 15.00,
33            cost_per_m_out_tokens: 75.00,
34            tokens_per_message: 3,
35            tokens_per_name: None,
36        }
37    }
38
39    pub fn claude_3_sonnet() -> ApiLLMModel {
40        let model_id = "claude-3-sonnet-20240229".to_string();
41        let tokenizer = model_tokenizer(&model_id);
42        ApiLLMModel {
43            model_base: LLMModelBase {
44                model_id,
45                model_ctx_size: 200000,
46                inference_ctx_size: 4096,
47                tokenizer,
48            },
49            cost_per_m_in_tokens: 3.00,
50            cost_per_m_out_tokens: 15.00,
51            tokens_per_message: 3,
52            tokens_per_name: None,
53        }
54    }
55
56    pub fn claude_3_haiku() -> ApiLLMModel {
57        let model_id = "claude-3-haiku-20240307".to_string();
58        let tokenizer = model_tokenizer(&model_id);
59        ApiLLMModel {
60            model_base: LLMModelBase {
61                model_id,
62                model_ctx_size: 200000,
63                inference_ctx_size: 4096,
64                tokenizer,
65            },
66            cost_per_m_in_tokens: 0.75,
67            cost_per_m_out_tokens: 1.25,
68            tokens_per_message: 3,
69            tokens_per_name: None,
70        }
71    }
72
73    pub fn claude_3_5_sonnet() -> ApiLLMModel {
74        let model_id = "claude-3-5-sonnet-20240620".to_string();
75        let tokenizer = model_tokenizer(&model_id);
76        ApiLLMModel {
77            model_base: LLMModelBase {
78                model_id,
79                model_ctx_size: 200000,
80                inference_ctx_size: 8192,
81                tokenizer,
82            },
83            cost_per_m_in_tokens: 3.00,
84            cost_per_m_out_tokens: 15.00,
85            tokens_per_message: 3,
86            tokens_per_name: None,
87        }
88    }
89
90    pub fn claude_3_7_sonnet() -> ApiLLMModel {
91        let model_id = "claude-3-7-sonnet-20250219".to_string();
92        let tokenizer = model_tokenizer(&model_id);
93        ApiLLMModel {
94            model_base: LLMModelBase {
95                model_id,
96                model_ctx_size: 200000,
97                inference_ctx_size: 8192,
98                tokenizer,
99            },
100            cost_per_m_in_tokens: 3.00,
101            cost_per_m_out_tokens: 15.00,
102            tokens_per_message: 3,
103            tokens_per_name: None,
104        }
105    }
106
107    pub fn claude<S: ToString>(model_id: S) -> ApiLLMModel {
108        let model_id = model_id.to_string();
109        let tokenizer = model_tokenizer(&model_id);
110        ApiLLMModel {
111            model_base: LLMModelBase {
112                model_id,
113                model_ctx_size: 200000,
114                inference_ctx_size: 8192,
115                tokenizer,
116            },
117            cost_per_m_in_tokens: 3.00,
118            cost_per_m_out_tokens: 15.00,
119            tokens_per_message: 3,
120            tokens_per_name: None,
121        }
122    }
123}
124
125#[inline]
126pub fn model_tokenizer(_model_id: &str) -> Arc<Tokenizer> {
127    Arc::new(
128        Tokenizer::new_tiktoken("gpt-4")
129            .unwrap_or_else(|_| panic!("Failed to load tokenizer for gpt-4")),
130    )
131}
132
133pub trait AnthropicModelTrait: Sized {
134    fn model(&mut self) -> &mut ApiLLMModel;
135
136    /// Set the model using the model_id string.
137    fn model_id_str(mut self, model_id: &str) -> Self
138    where
139        Self: Sized,
140    {
141        *self.model() = ApiLLMModel::anthropic_model_from_model_id(model_id);
142        self
143    }
144
145    /// Use the Claude 3 Opus model for the Anthropic client.
146    fn claude_3_opus(mut self) -> Self
147    where
148        Self: Sized,
149    {
150        *self.model() = ApiLLMModel::claude_3_opus();
151        self
152    }
153
154    /// Use the Claude 3 Sonnet model for the Anthropic client.
155    fn claude_3_sonnet(mut self) -> Self
156    where
157        Self: Sized,
158    {
159        *self.model() = ApiLLMModel::claude_3_sonnet();
160        self
161    }
162
163    /// Use the Claude 3 Haiku model for the Anthropic client.
164    fn claude_3_haiku(mut self) -> Self
165    where
166        Self: Sized,
167    {
168        *self.model() = ApiLLMModel::claude_3_haiku();
169        self
170    }
171
172    /// Use the Claude 3.5 Sonnet model for the Anthropic client.
173    fn claude_3_5_sonnet(mut self) -> Self
174    where
175        Self: Sized,
176    {
177        *self.model() = ApiLLMModel::claude_3_5_sonnet();
178        self
179    }
180
181    /// Use the Claude 3.7 Sonnet model for the Anthropic client.
182    fn claude_3_7_sonnet(mut self) -> Self
183    where
184        Self: Sized,
185    {
186        *self.model() = ApiLLMModel::claude_3_7_sonnet();
187        self
188    }
189}