1use crate::{
13 agent::AgentBuilder,
14 completion::{self, CompletionError, CompletionRequest},
15 extractor::ExtractorBuilder,
16 json_utils,
17 providers::openai::Message,
18 OneOrMany,
19};
20use schemars::JsonSchema;
21use serde::{Deserialize, Serialize};
22use serde_json::json;
23
24use super::openai::AssistantContent;
25
// Default base URL for the Hyperbolic OpenAI-compatible API (v1).
const HYPERBOLIC_API_BASE_URL: &str = "https://api.hyperbolic.xyz/v1";
30
/// Client for the Hyperbolic inference API.
///
/// Holds the API base URL and an HTTP client whose default headers already
/// carry the `Authorization: Bearer <key>` header (set in [`Client::from_url`]).
#[derive(Clone)]
pub struct Client {
    // Base URL all request paths are appended to (see `post`).
    base_url: String,
    // Pre-authenticated HTTP client shared by all requests.
    http_client: reqwest::Client,
}
36
37impl Client {
38 pub fn new(api_key: &str) -> Self {
40 Self::from_url(api_key, HYPERBOLIC_API_BASE_URL)
41 }
42
43 pub fn from_url(api_key: &str, base_url: &str) -> Self {
45 Self {
46 base_url: base_url.to_string(),
47 http_client: reqwest::Client::builder()
48 .default_headers({
49 let mut headers = reqwest::header::HeaderMap::new();
50 headers.insert(
51 "Authorization",
52 format!("Bearer {}", api_key)
53 .parse()
54 .expect("Bearer token should parse"),
55 );
56 headers
57 })
58 .build()
59 .expect("OpenAI reqwest client should build"),
60 }
61 }
62
63 pub fn from_env() -> Self {
66 let api_key = std::env::var("HYPERBOLIC_API_KEY").expect("HYPERBOLIC_API_KEY not set");
67 Self::new(&api_key)
68 }
69
70 fn post(&self, path: &str) -> reqwest::RequestBuilder {
71 let url = format!("{}/{}", self.base_url, path).replace("//", "/");
72 self.http_client.post(url)
73 }
74
75 pub fn completion_model(&self, model: &str) -> CompletionModel {
87 CompletionModel::new(self.clone(), model)
88 }
89
90 pub fn agent(&self, model: &str) -> AgentBuilder<CompletionModel> {
105 AgentBuilder::new(self.completion_model(model))
106 }
107
108 pub fn extractor<T: JsonSchema + for<'a> Deserialize<'a> + Serialize + Send + Sync>(
110 &self,
111 model: &str,
112 ) -> ExtractorBuilder<T, CompletionModel> {
113 ExtractorBuilder::new(self.completion_model(model))
114 }
115}
116
/// Error body returned by the Hyperbolic API; only the message is surfaced.
#[derive(Debug, Deserialize)]
struct ApiErrorResponse {
    message: String,
}
121
/// A Hyperbolic response body: either the expected payload `T` or an error
/// envelope. `untagged` makes serde try each variant in order until one
/// deserializes, so an error body that is not shaped like `T` lands in `Err`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ApiResponse<T> {
    Ok(T),
    Err(ApiErrorResponse),
}
128
/// One embedding entry in an embeddings response (OpenAI-compatible shape).
// NOTE(review): no embedding endpoint is wired up in this file — presumably
// reserved for a future embeddings model; confirm against the rest of the module.
#[derive(Debug, Deserialize)]
pub struct EmbeddingData {
    pub object: String,
    pub embedding: Vec<f64>,
    // Position of this embedding within the request's input batch.
    pub index: usize,
}
135
/// Token accounting reported by the API.
// NOTE(review): only prompt and total counts are modeled; completion tokens,
// if present in the payload, are ignored during deserialization.
#[derive(Clone, Debug, Deserialize)]
pub struct Usage {
    pub prompt_tokens: usize,
    pub total_tokens: usize,
}
141
142impl std::fmt::Display for Usage {
143 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
144 write!(
145 f,
146 "Prompt tokens: {} Total tokens: {}",
147 self.prompt_tokens, self.total_tokens
148 )
149 }
150}
151
/// `meta-llama/Meta-Llama-3.1-8B-Instruct` completion model
pub const LLAMA_3_1_8B: &str = "meta-llama/Meta-Llama-3.1-8B-Instruct";
/// `meta-llama/Llama-3.3-70B-Instruct` completion model
pub const LLAMA_3_3_70B: &str = "meta-llama/Llama-3.3-70B-Instruct";
/// `meta-llama/Meta-Llama-3.1-70B-Instruct` completion model
pub const LLAMA_3_1_70B: &str = "meta-llama/Meta-Llama-3.1-70B-Instruct";
/// `meta-llama/Meta-Llama-3-70B-Instruct` completion model
pub const LLAMA_3_70B: &str = "meta-llama/Meta-Llama-3-70B-Instruct";
/// `NousResearch/Hermes-3-Llama-3.1-70b` completion model
pub const HERMES_3_70B: &str = "NousResearch/Hermes-3-Llama-3.1-70b";
/// `deepseek-ai/DeepSeek-V2.5` completion model
pub const DEEPSEEK_2_5: &str = "deepseek-ai/DeepSeek-V2.5";
/// `Qwen/Qwen2.5-72B-Instruct` completion model
pub const QWEN_2_5_72B: &str = "Qwen/Qwen2.5-72B-Instruct";
/// `meta-llama/Llama-3.2-3B-Instruct` completion model
pub const LLAMA_3_2_3B: &str = "meta-llama/Llama-3.2-3B-Instruct";
/// `Qwen/Qwen2.5-Coder-32B-Instruct` completion model
pub const QWEN_2_5_CODER_32B: &str = "Qwen/Qwen2.5-Coder-32B-Instruct";
/// `Qwen/QwQ-32B-Preview` completion model
pub const QWEN_QWQ_PREVIEW_32B: &str = "Qwen/QwQ-32B-Preview";
/// `deepseek-ai/DeepSeek-R1-Zero` completion model
pub const DEEPSEEK_R1_ZERO: &str = "deepseek-ai/DeepSeek-R1-Zero";
/// `deepseek-ai/DeepSeek-R1` completion model
pub const DEEPSEEK_R1: &str = "deepseek-ai/DeepSeek-R1";
179
/// Raw chat-completion response returned by the Hyperbolic API
/// (OpenAI-compatible shape). Kept verbatim as `raw_response` after conversion.
#[derive(Debug, Deserialize)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    // Optional because some responses omit usage accounting.
    pub usage: Option<Usage>,
}
192
193impl From<ApiErrorResponse> for CompletionError {
194 fn from(err: ApiErrorResponse) -> Self {
195 CompletionError::ProviderError(err.message)
196 }
197}
198
impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
    type Error = CompletionError;

    /// Convert a raw Hyperbolic response into rig's provider-agnostic
    /// completion response, keeping the raw payload alongside.
    ///
    /// # Errors
    /// Returns `CompletionError::ResponseError` when the response has no
    /// choices, when the first choice's message is not an assistant message,
    /// or when the assistant message carries neither text nor tool calls.
    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
        // Only the first choice is consumed; any additional choices are ignored.
        let choice = response.choices.first().ok_or_else(|| {
            CompletionError::ResponseError("Response contained no choices".to_owned())
        })?;

        let content = match &choice.message {
            Message::Assistant {
                content,
                tool_calls,
                ..
            } => {
                // Map each content part to plain text; refusals are surfaced
                // as ordinary text rather than a distinct variant.
                let mut content = content
                    .iter()
                    .map(|c| match c {
                        AssistantContent::Text { text } => completion::AssistantContent::text(text),
                        AssistantContent::Refusal { refusal } => {
                            completion::AssistantContent::text(refusal)
                        }
                    })
                    .collect::<Vec<_>>();

                // Append tool calls after the textual content, preserving order.
                // NOTE(review): `call.function.name` is passed for BOTH the id
                // and name arguments of `tool_call`, so the provider's tool-call
                // id is discarded — confirm this is intentional vs. `call.id`.
                content.extend(
                    tool_calls
                        .iter()
                        .map(|call| {
                            completion::AssistantContent::tool_call(
                                &call.function.name,
                                &call.function.name,
                                call.function.arguments.clone(),
                            )
                        })
                        .collect::<Vec<_>>(),
                );
                Ok(content)
            }
            // Any non-assistant message in the first choice is a protocol error.
            _ => Err(CompletionError::ResponseError(
                "Response did not contain a valid message or tool call".into(),
            )),
        }?;

        // `OneOrMany::many` fails on an empty vec, i.e. the assistant message
        // had no text parts and no tool calls.
        let choice = OneOrMany::many(content).map_err(|_| {
            CompletionError::ResponseError(
                "Response contained no message or tool call (empty)".to_owned(),
            )
        })?;

        Ok(completion::CompletionResponse {
            choice,
            raw_response: response,
        })
    }
}
254
/// One candidate completion within a [`CompletionResponse`].
#[derive(Debug, Deserialize)]
pub struct Choice {
    // Position of this choice in the response's `choices` array.
    pub index: usize,
    pub message: Message,
    // e.g. why generation stopped; passed through verbatim from the API.
    pub finish_reason: String,
}
261
/// A completion model bound to a [`Client`] and a model identifier.
#[derive(Clone)]
pub struct CompletionModel {
    client: Client,
    /// Name of the model (e.g. `meta-llama/Meta-Llama-3.1-8B-Instruct`).
    pub model: String,
}
268
269impl CompletionModel {
270 pub fn new(client: Client, model: &str) -> Self {
271 Self {
272 client,
273 model: model.to_string(),
274 }
275 }
276}
277
impl completion::CompletionModel for CompletionModel {
    type Response = CompletionResponse;

    /// Send a chat-completion request to Hyperbolic and convert the result
    /// into rig's provider-agnostic response type.
    ///
    /// # Errors
    /// Propagates message-conversion failures, HTTP/transport errors, and
    /// provider error payloads as `CompletionError`.
    #[cfg_attr(feature = "worker", worker::send)]
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        // Seed the message list with the system preamble, if any.
        let mut full_history: Vec<Message> = match &completion_request.preamble {
            Some(preamble) => vec![Message::system(preamble)],
            None => vec![],
        };

        // The prompt (with any RAG context attached) may convert into
        // multiple provider messages.
        let prompt: Vec<Message> = completion_request.prompt_with_context().try_into()?;

        // Each history entry can also expand to several provider messages;
        // flatten them in order, short-circuiting on the first conversion error.
        let chat_history: Vec<Message> = completion_request
            .chat_history
            .into_iter()
            .map(|message| message.try_into())
            .collect::<Result<Vec<Vec<Message>>, _>>()?
            .into_iter()
            .flatten()
            .collect();

        // Final message order: preamble, chat history, then the current prompt.
        full_history.extend(chat_history);
        full_history.extend(prompt);

        let request = json!({
            "model": self.model,
            "messages": full_history,
            "temperature": completion_request.temperature,
        });

        // Caller-supplied additional params are merged over the base request,
        // so they can add new keys or override model/temperature.
        let response = self
            .client
            .post("/chat/completions")
            .json(
                &if let Some(params) = completion_request.additional_params {
                    json_utils::merge(request, params)
                } else {
                    request
                },
            )
            .send()
            .await?;

        if response.status().is_success() {
            // A 2xx body may still be an error payload; the untagged
            // ApiResponse enum distinguishes the two shapes on deserialization.
            match response.json::<ApiResponse<CompletionResponse>>().await? {
                ApiResponse::Ok(response) => {
                    tracing::info!(target: "rig",
                        "Hyperbolic completion token usage: {:?}",
                        response.usage.clone().map(|usage| format!("{usage}")).unwrap_or("N/A".to_string())
                    );

                    response.try_into()
                }
                ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
            }
        } else {
            // Non-2xx: surface the raw body text as a provider error.
            Err(CompletionError::ProviderError(response.text().await?))
        }
    }
}