// rig/providers/hyperbolic.rs

//! Hyperbolic Inference API client and Rig integration
//!
//! # Example
//! ```
//! use rig::providers::hyperbolic;
//!
//! let client = hyperbolic::Client::new("YOUR_API_KEY");
//!
//! let llama_3_1_8b = client.completion_model(hyperbolic::LLAMA_3_1_8B);
//! ```

use crate::{
    agent::AgentBuilder,
    completion::{self, CompletionError, CompletionRequest},
    extractor::ExtractorBuilder,
    json_utils,
    providers::openai::Message,
    OneOrMany,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::json;

use super::openai::AssistantContent;

// ================================================================
// Main Hyperbolic Client
// ================================================================
/// Base URL of the Hyperbolic Inference API (OpenAI-compatible, version 1).
const HYPERBOLIC_API_BASE_URL: &str = "https://api.hyperbolic.xyz/v1";

31#[derive(Clone)]
32pub struct Client {
33    base_url: String,
34    http_client: reqwest::Client,
35}
36
37impl Client {
38    /// Create a new Hyperbolic client with the given API key.
39    pub fn new(api_key: &str) -> Self {
40        Self::from_url(api_key, HYPERBOLIC_API_BASE_URL)
41    }
42
43    /// Create a new OpenAI client with the given API key and base API URL.
44    pub fn from_url(api_key: &str, base_url: &str) -> Self {
45        Self {
46            base_url: base_url.to_string(),
47            http_client: reqwest::Client::builder()
48                .default_headers({
49                    let mut headers = reqwest::header::HeaderMap::new();
50                    headers.insert(
51                        "Authorization",
52                        format!("Bearer {}", api_key)
53                            .parse()
54                            .expect("Bearer token should parse"),
55                    );
56                    headers
57                })
58                .build()
59                .expect("OpenAI reqwest client should build"),
60        }
61    }
62
63    /// Create a new Hyperbolic client from the `HYPERBOLIC_API_KEY` environment variable.
64    /// Panics if the environment variable is not set.
65    pub fn from_env() -> Self {
66        let api_key = std::env::var("HYPERBOLIC_API_KEY").expect("HYPERBOLIC_API_KEY not set");
67        Self::new(&api_key)
68    }
69
70    fn post(&self, path: &str) -> reqwest::RequestBuilder {
71        let url = format!("{}/{}", self.base_url, path).replace("//", "/");
72        self.http_client.post(url)
73    }
74
75    /// Create a completion model with the given name.
76    ///
77    /// # Example
78    /// ```
79    /// use rig::providers::hyperbolic::{Client, self};
80    ///
81    /// // Initialize the Hyperbolic client
82    /// let hyperbolic = Client::new("your-hyperbolic-api-key");
83    ///
84    /// let llama_3_1_8b = hyperbolic.completion_model(hyperbolic::LLAMA_3_1_8B);
85    /// ```
86    pub fn completion_model(&self, model: &str) -> CompletionModel {
87        CompletionModel::new(self.clone(), model)
88    }
89
90    /// Create an agent builder with the given completion model.
91    ///
92    /// # Example
93    /// ```
94    /// use rig::providers::hyperbolic::{Client, self};
95    ///
96    /// // Initialize the Eternal client
97    /// let hyperbolic = Client::new("your-hyperbolic-api-key");
98    ///
99    /// let agent = hyperbolic.agent(hyperbolic::LLAMA_3_1_8B)
100    ///    .preamble("You are comedian AI with a mission to make people laugh.")
101    ///    .temperature(0.0)
102    ///    .build();
103    /// ```
104    pub fn agent(&self, model: &str) -> AgentBuilder<CompletionModel> {
105        AgentBuilder::new(self.completion_model(model))
106    }
107
108    /// Create an extractor builder with the given completion model.
109    pub fn extractor<T: JsonSchema + for<'a> Deserialize<'a> + Serialize + Send + Sync>(
110        &self,
111        model: &str,
112    ) -> ExtractorBuilder<T, CompletionModel> {
113        ExtractorBuilder::new(self.completion_model(model))
114    }
115}
116
117#[derive(Debug, Deserialize)]
118struct ApiErrorResponse {
119    message: String,
120}
121
122#[derive(Debug, Deserialize)]
123#[serde(untagged)]
124enum ApiResponse<T> {
125    Ok(T),
126    Err(ApiErrorResponse),
127}
128
129#[derive(Debug, Deserialize)]
130pub struct EmbeddingData {
131    pub object: String,
132    pub embedding: Vec<f64>,
133    pub index: usize,
134}
135
136#[derive(Clone, Debug, Deserialize)]
137pub struct Usage {
138    pub prompt_tokens: usize,
139    pub total_tokens: usize,
140}
141
142impl std::fmt::Display for Usage {
143    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
144        write!(
145            f,
146            "Prompt tokens: {} Total tokens: {}",
147            self.prompt_tokens, self.total_tokens
148        )
149    }
150}
151
// ================================================================
// Hyperbolic Completion API
// ================================================================
/// Meta Llama 3.1 Instruct model with 8B parameters.
pub const LLAMA_3_1_8B: &str = "meta-llama/Meta-Llama-3.1-8B-Instruct";
/// Meta Llama 3.3 Instruct model with 70B parameters.
pub const LLAMA_3_3_70B: &str = "meta-llama/Llama-3.3-70B-Instruct";
/// Meta Llama 3.1 Instruct model with 70B parameters.
pub const LLAMA_3_1_70B: &str = "meta-llama/Meta-Llama-3.1-70B-Instruct";
/// Meta Llama 3 Instruct model with 70B parameters.
pub const LLAMA_3_70B: &str = "meta-llama/Meta-Llama-3-70B-Instruct";
/// Hermes 3 Instruct model with 70B parameters.
pub const HERMES_3_70B: &str = "NousResearch/Hermes-3-Llama-3.1-70b";
/// Deepseek v2.5 model.
pub const DEEPSEEK_2_5: &str = "deepseek-ai/DeepSeek-V2.5";
/// Qwen 2.5 model with 72B parameters.
pub const QWEN_2_5_72B: &str = "Qwen/Qwen2.5-72B-Instruct";
/// Meta Llama 3.2 Instruct model with 3B parameters.
pub const LLAMA_3_2_3B: &str = "meta-llama/Llama-3.2-3B-Instruct";
/// Qwen 2.5 Coder Instruct model with 32B parameters.
pub const QWEN_2_5_CODER_32B: &str = "Qwen/Qwen2.5-Coder-32B-Instruct";
/// Preview (latest) version of Qwen model with 32B parameters.
pub const QWEN_QWQ_PREVIEW_32B: &str = "Qwen/QwQ-32B-Preview";
/// Deepseek R1 Zero model.
pub const DEEPSEEK_R1_ZERO: &str = "deepseek-ai/DeepSeek-R1-Zero";
/// Deepseek R1 model.
pub const DEEPSEEK_R1: &str = "deepseek-ai/DeepSeek-R1";

180/// A Hyperbolic completion object.
181///
182/// For more information, see this link: <https://docs.hyperbolic.xyz/reference/create_chat_completion_v1_chat_completions_post>
183#[derive(Debug, Deserialize)]
184pub struct CompletionResponse {
185    pub id: String,
186    pub object: String,
187    pub created: u64,
188    pub model: String,
189    pub choices: Vec<Choice>,
190    pub usage: Option<Usage>,
191}
192
193impl From<ApiErrorResponse> for CompletionError {
194    fn from(err: ApiErrorResponse) -> Self {
195        CompletionError::ProviderError(err.message)
196    }
197}
198
199impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
200    type Error = CompletionError;
201
202    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
203        let choice = response.choices.first().ok_or_else(|| {
204            CompletionError::ResponseError("Response contained no choices".to_owned())
205        })?;
206
207        let content = match &choice.message {
208            Message::Assistant {
209                content,
210                tool_calls,
211                ..
212            } => {
213                let mut content = content
214                    .iter()
215                    .map(|c| match c {
216                        AssistantContent::Text { text } => completion::AssistantContent::text(text),
217                        AssistantContent::Refusal { refusal } => {
218                            completion::AssistantContent::text(refusal)
219                        }
220                    })
221                    .collect::<Vec<_>>();
222
223                content.extend(
224                    tool_calls
225                        .iter()
226                        .map(|call| {
227                            completion::AssistantContent::tool_call(
228                                &call.function.name,
229                                &call.function.name,
230                                call.function.arguments.clone(),
231                            )
232                        })
233                        .collect::<Vec<_>>(),
234                );
235                Ok(content)
236            }
237            _ => Err(CompletionError::ResponseError(
238                "Response did not contain a valid message or tool call".into(),
239            )),
240        }?;
241
242        let choice = OneOrMany::many(content).map_err(|_| {
243            CompletionError::ResponseError(
244                "Response contained no message or tool call (empty)".to_owned(),
245            )
246        })?;
247
248        Ok(completion::CompletionResponse {
249            choice,
250            raw_response: response,
251        })
252    }
253}
254
255#[derive(Debug, Deserialize)]
256pub struct Choice {
257    pub index: usize,
258    pub message: Message,
259    pub finish_reason: String,
260}
261
262#[derive(Clone)]
263pub struct CompletionModel {
264    client: Client,
265    /// Name of the model (e.g.: deepseek-ai/DeepSeek-R1)
266    pub model: String,
267}
268
269impl CompletionModel {
270    pub fn new(client: Client, model: &str) -> Self {
271        Self {
272            client,
273            model: model.to_string(),
274        }
275    }
276}
277
278impl completion::CompletionModel for CompletionModel {
279    type Response = CompletionResponse;
280
281    #[cfg_attr(feature = "worker", worker::send)]
282    async fn completion(
283        &self,
284        completion_request: CompletionRequest,
285    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
286        // Add preamble to chat history (if available)
287        let mut full_history: Vec<Message> = match &completion_request.preamble {
288            Some(preamble) => vec![Message::system(preamble)],
289            None => vec![],
290        };
291
292        // Convert prompt to user message
293        let prompt: Vec<Message> = completion_request.prompt_with_context().try_into()?;
294
295        // Convert existing chat history
296        let chat_history: Vec<Message> = completion_request
297            .chat_history
298            .into_iter()
299            .map(|message| message.try_into())
300            .collect::<Result<Vec<Vec<Message>>, _>>()?
301            .into_iter()
302            .flatten()
303            .collect();
304
305        // Combine all messages into a single history
306        full_history.extend(chat_history);
307        full_history.extend(prompt);
308
309        let request = json!({
310            "model": self.model,
311            "messages": full_history,
312            "temperature": completion_request.temperature,
313        });
314
315        let response = self
316            .client
317            .post("/chat/completions")
318            .json(
319                &if let Some(params) = completion_request.additional_params {
320                    json_utils::merge(request, params)
321                } else {
322                    request
323                },
324            )
325            .send()
326            .await?;
327
328        if response.status().is_success() {
329            match response.json::<ApiResponse<CompletionResponse>>().await? {
330                ApiResponse::Ok(response) => {
331                    tracing::info!(target: "rig",
332                        "Hyperbolic completion token usage: {:?}",
333                        response.usage.clone().map(|usage| format!("{usage}")).unwrap_or("N/A".to_string())
334                    );
335
336                    response.try_into()
337                }
338                ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
339            }
340        } else {
341            Err(CompletionError::ProviderError(response.text().await?))
342        }
343    }
344}