rig/providers/galadriel.rs

//! Galadriel API client and Rig integration
//!
//! # Example
//! ```
//! use rig::providers::galadriel;
//!
//! let client = galadriel::Client::new("YOUR_API_KEY", None);
//! // to use a fine-tuned model
//! // let client = galadriel::Client::new("YOUR_API_KEY", Some("FINE_TUNE_API_KEY"));
//!
//! let gpt4o = client.completion_model(galadriel::GPT_4O);
//! ```
use super::openai;
use crate::client::{
    self, BearerAuth, Capabilities, Capable, DebugExt, Nothing, Provider, ProviderBuilder,
    ProviderClient,
};
use crate::http_client::{self, HttpClientExt};
use crate::message::MessageError;
use crate::providers::openai::send_compatible_streaming_request;
use crate::streaming::StreamingCompletionResponse;
use crate::{
    OneOrMany,
    completion::{self, CompletionError, CompletionRequest},
    json_utils, message,
};
use serde::{Deserialize, Serialize};
use tracing::{Instrument, info_span};

// ================================================================
// Main Galadriel Client
// ================================================================
const GALADRIEL_API_BASE_URL: &str = "https://api.galadriel.com/v1/verified";

#[derive(Debug, Default, Clone)]
pub struct GaladrielExt {
    fine_tune_api_key: Option<String>,
}

#[derive(Debug, Default, Clone)]
pub struct GaladrielBuilder {
    fine_tune_api_key: Option<String>,
}

type GaladrielApiKey = BearerAuth;

impl Provider for GaladrielExt {
    type Builder = GaladrielBuilder;

    /// There is currently no way to verify a Galadriel api key without consuming tokens
    const VERIFY_PATH: &'static str = "";

    fn build<H>(
        builder: &crate::client::ClientBuilder<
            Self::Builder,
            <Self::Builder as crate::client::ProviderBuilder>::ApiKey,
            H,
        >,
    ) -> http_client::Result<Self> {
        let GaladrielBuilder { fine_tune_api_key } = builder.ext().clone();

        Ok(Self { fine_tune_api_key })
    }
}

impl<H> Capabilities<H> for GaladrielExt {
    type Completion = Capable<CompletionModel<H>>;
    type Embeddings = Nothing;
    type Transcription = Nothing;
    #[cfg(feature = "image")]
    type ImageGeneration = Nothing;
    #[cfg(feature = "audio")]
    type AudioGeneration = Nothing;
}

impl DebugExt for GaladrielExt {
    fn fields(&self) -> impl Iterator<Item = (&'static str, &dyn std::fmt::Debug)> {
        std::iter::once((
            "fine_tune_api_key",
            (&self.fine_tune_api_key as &dyn std::fmt::Debug),
        ))
    }
}

impl ProviderBuilder for GaladrielBuilder {
    type Output = GaladrielExt;
    type ApiKey = GaladrielApiKey;

    const BASE_URL: &'static str = GALADRIEL_API_BASE_URL;
}

pub type Client<H = reqwest::Client> = client::Client<GaladrielExt, H>;
pub type ClientBuilder<H = reqwest::Client> =
    client::ClientBuilder<GaladrielBuilder, GaladrielApiKey, H>;

impl<T> ClientBuilder<T> {
    pub fn fine_tune_api_key<S>(mut self, fine_tune_api_key: S) -> Self
    where
        S: AsRef<str>,
    {
        *self.ext_mut() = GaladrielBuilder {
            fine_tune_api_key: Some(fine_tune_api_key.as_ref().into()),
        };

        self
    }
}

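// Illustrative builder usage (a sketch, not part of the public docs): the API
// key is passed via `api_key` and the optional fine-tune key via
// `fine_tune_api_key`, mirroring what `from_env` below does:
//
//     let client = Client::builder()
//         .api_key("YOUR_API_KEY".to_string())
//         .fine_tune_api_key("FINE_TUNE_API_KEY")
//         .build()
//         .unwrap();
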
impl ProviderClient for Client {
    type Input = (String, Option<String>);

    /// Create a new Galadriel client from the `GALADRIEL_API_KEY` environment variable,
    /// and optionally from the `GALADRIEL_FINE_TUNE_API_KEY` environment variable.
    /// Panics if the `GALADRIEL_API_KEY` environment variable is not set.
    fn from_env() -> Self {
        let api_key = std::env::var("GALADRIEL_API_KEY").expect("GALADRIEL_API_KEY not set");
        let fine_tune_api_key = std::env::var("GALADRIEL_FINE_TUNE_API_KEY").ok();

        let mut builder = Self::builder().api_key(api_key);

        if let Some(fine_tune_api_key) = fine_tune_api_key.as_deref() {
            builder = builder.fine_tune_api_key(fine_tune_api_key);
        }

        builder.build().unwrap()
    }

    fn from_val((api_key, fine_tune_api_key): Self::Input) -> Self {
        let mut builder = Self::builder().api_key(api_key);

        if let Some(fine_tune_key) = fine_tune_api_key {
            builder = builder.fine_tune_api_key(fine_tune_key)
        }

        builder.build().unwrap()
    }
}

#[derive(Debug, Deserialize)]
struct ApiErrorResponse {
    message: String,
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ApiResponse<T> {
    Ok(T),
    Err(ApiErrorResponse),
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Usage {
    pub prompt_tokens: usize,
    pub total_tokens: usize,
}

impl std::fmt::Display for Usage {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Prompt tokens: {} Total tokens: {}",
            self.prompt_tokens, self.total_tokens
        )
    }
}

// ================================================================
// Galadriel Completion API
// ================================================================

/// `o1-preview` completion model
pub const O1_PREVIEW: &str = "o1-preview";
/// `o1-preview-2024-09-12` completion model
pub const O1_PREVIEW_2024_09_12: &str = "o1-preview-2024-09-12";
/// `o1-mini` completion model
pub const O1_MINI: &str = "o1-mini";
/// `o1-mini-2024-09-12` completion model
pub const O1_MINI_2024_09_12: &str = "o1-mini-2024-09-12";
/// `gpt-4o` completion model
pub const GPT_4O: &str = "gpt-4o";
/// `gpt-4o-2024-05-13` completion model
pub const GPT_4O_2024_05_13: &str = "gpt-4o-2024-05-13";
/// `gpt-4-turbo` completion model
pub const GPT_4_TURBO: &str = "gpt-4-turbo";
/// `gpt-4-turbo-2024-04-09` completion model
pub const GPT_4_TURBO_2024_04_09: &str = "gpt-4-turbo-2024-04-09";
/// `gpt-4-turbo-preview` completion model
pub const GPT_4_TURBO_PREVIEW: &str = "gpt-4-turbo-preview";
/// `gpt-4-0125-preview` completion model
pub const GPT_4_0125_PREVIEW: &str = "gpt-4-0125-preview";
/// `gpt-4-1106-preview` completion model
pub const GPT_4_1106_PREVIEW: &str = "gpt-4-1106-preview";
/// `gpt-4-vision-preview` completion model
pub const GPT_4_VISION_PREVIEW: &str = "gpt-4-vision-preview";
/// `gpt-4-1106-vision-preview` completion model
pub const GPT_4_1106_VISION_PREVIEW: &str = "gpt-4-1106-vision-preview";
/// `gpt-4` completion model
pub const GPT_4: &str = "gpt-4";
/// `gpt-4-0613` completion model
pub const GPT_4_0613: &str = "gpt-4-0613";
/// `gpt-4-32k` completion model
pub const GPT_4_32K: &str = "gpt-4-32k";
/// `gpt-4-32k-0613` completion model
pub const GPT_4_32K_0613: &str = "gpt-4-32k-0613";
/// `gpt-3.5-turbo` completion model
pub const GPT_35_TURBO: &str = "gpt-3.5-turbo";
/// `gpt-3.5-turbo-0125` completion model
pub const GPT_35_TURBO_0125: &str = "gpt-3.5-turbo-0125";
/// `gpt-3.5-turbo-1106` completion model
pub const GPT_35_TURBO_1106: &str = "gpt-3.5-turbo-1106";
/// `gpt-3.5-turbo-instruct` completion model
pub const GPT_35_TURBO_INSTRUCT: &str = "gpt-3.5-turbo-instruct";

#[derive(Debug, Deserialize, Serialize)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub system_fingerprint: Option<String>,
    pub choices: Vec<Choice>,
    pub usage: Option<Usage>,
}

impl From<ApiErrorResponse> for CompletionError {
    fn from(err: ApiErrorResponse) -> Self {
        CompletionError::ProviderError(err.message)
    }
}

impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
    type Error = CompletionError;

    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
        let Choice { message, .. } = response.choices.first().ok_or_else(|| {
            CompletionError::ResponseError("Response contained no choices".to_owned())
        })?;

        let mut content = message
            .content
            .as_ref()
            .map(|c| vec![completion::AssistantContent::text(c)])
            .unwrap_or_default();

        content.extend(message.tool_calls.iter().map(|call| {
            completion::AssistantContent::tool_call(
                &call.function.name,
                &call.function.name,
                call.function.arguments.clone(),
            )
        }));

        let choice = OneOrMany::many(content).map_err(|_| {
            CompletionError::ResponseError(
                "Response contained no message or tool call (empty)".to_owned(),
            )
        })?;
        // Galadriel usage reports only prompt and total token counts, so
        // output tokens are derived as the difference.
        let usage = response
            .usage
            .as_ref()
            .map(|usage| completion::Usage {
                input_tokens: usage.prompt_tokens as u64,
                output_tokens: (usage.total_tokens - usage.prompt_tokens) as u64,
                total_tokens: usage.total_tokens as u64,
            })
            .unwrap_or_default();

        Ok(completion::CompletionResponse {
            choice,
            usage,
            raw_response: response,
        })
    }
}

#[derive(Debug, Deserialize, Serialize)]
pub struct Choice {
    pub index: usize,
    pub message: Message,
    pub logprobs: Option<serde_json::Value>,
    pub finish_reason: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: String,
    pub content: Option<String>,
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub tool_calls: Vec<openai::ToolCall>,
}

impl Message {
    fn system(preamble: &str) -> Self {
        Self {
            role: "system".to_string(),
            content: Some(preamble.to_string()),
            tool_calls: Vec::new(),
        }
    }
}

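// For reference, a sketch of the wire format implied by the `Serialize` derive
// above: `Message::system("You are a helpful assistant")` serializes as
//     {"role":"system","content":"You are a helpful assistant","tool_calls":[]}
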
impl TryFrom<Message> for message::Message {
    type Error = message::MessageError;

    fn try_from(message: Message) -> Result<Self, Self::Error> {
        let tool_calls: Vec<message::ToolCall> = message
            .tool_calls
            .into_iter()
            .map(|tool_call| tool_call.into())
            .collect();

        match message.role.as_str() {
            "user" => Ok(Self::User {
                content: OneOrMany::one(
                    message
                        .content
                        .map(|content| message::UserContent::text(&content))
                        .ok_or_else(|| {
                            message::MessageError::ConversionError("Empty user message".to_string())
                        })?,
                ),
            }),
            "assistant" => Ok(Self::Assistant {
                id: None,
                content: OneOrMany::many(
                    tool_calls
                        .into_iter()
                        .map(message::AssistantContent::ToolCall)
                        .chain(
                            message
                                .content
                                .map(|content| message::AssistantContent::text(&content))
                                .into_iter(),
                        ),
                )
                .map_err(|_| {
                    message::MessageError::ConversionError("Empty assistant message".to_string())
                })?,
            }),
            _ => Err(message::MessageError::ConversionError(format!(
                "Unknown role: {}",
                message.role
            ))),
        }
    }
}

impl TryFrom<message::Message> for Message {
    type Error = message::MessageError;

    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::User { content } => Ok(Self {
                role: "user".to_string(),
                content: content.iter().find_map(|c| match c {
                    message::UserContent::Text(text) => Some(text.text.clone()),
                    _ => None,
                }),
                tool_calls: vec![],
            }),
            message::Message::Assistant { content, .. } => {
                let mut text_content: Option<String> = None;
                let mut tool_calls = vec![];

                for c in content.iter() {
                    match c {
                        message::AssistantContent::Text(text) => {
                            text_content = Some(
                                text_content
                                    .map(|mut existing| {
                                        existing.push('\n');
                                        existing.push_str(&text.text);
                                        existing
                                    })
                                    .unwrap_or_else(|| text.text.clone()),
                            );
                        }
                        message::AssistantContent::ToolCall(tool_call) => {
                            tool_calls.push(tool_call.clone().into());
                        }
                        message::AssistantContent::Reasoning(_) => {
                            return Err(MessageError::ConversionError(
                                "Galadriel currently doesn't support reasoning.".into(),
                            ));
                        }
                        message::AssistantContent::Image(_) => {
                            return Err(MessageError::ConversionError(
                                "Galadriel currently doesn't support images.".into(),
                            ));
                        }
                    }
                }

                Ok(Self {
                    role: "assistant".to_string(),
                    content: text_content,
                    tool_calls,
                })
            }
        }
    }
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolDefinition {
    pub r#type: String,
    pub function: completion::ToolDefinition,
}

impl From<completion::ToolDefinition> for ToolDefinition {
    fn from(tool: completion::ToolDefinition) -> Self {
        Self {
            r#type: "function".into(),
            function: tool,
        }
    }
}

#[derive(Debug, Deserialize)]
pub struct Function {
    pub name: String,
    pub arguments: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub(super) struct GaladrielCompletionRequest {
    model: String,
    pub messages: Vec<Message>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f64>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<ToolDefinition>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<crate::providers::openai::completion::ToolChoice>,
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub additional_params: Option<serde_json::Value>,
}

impl TryFrom<(&str, CompletionRequest)> for GaladrielCompletionRequest {
    type Error = CompletionError;

    fn try_from((model, req): (&str, CompletionRequest)) -> Result<Self, Self::Error> {
        // Build up the order of messages (context, chat_history, prompt)
        let mut partial_history = vec![];
        if let Some(docs) = req.normalized_documents() {
            partial_history.push(docs);
        }
        partial_history.extend(req.chat_history);

        // Add preamble to chat history (if available)
        let mut full_history: Vec<Message> = match &req.preamble {
            Some(preamble) => vec![Message::system(preamble)],
            None => vec![],
        };

        // Convert and extend the rest of the history
        full_history.extend(
            partial_history
                .into_iter()
                .map(message::Message::try_into)
                .collect::<Result<Vec<Message>, _>>()?,
        );

        let tool_choice = req
            .tool_choice
            .clone()
            .map(crate::providers::openai::completion::ToolChoice::try_from)
            .transpose()?;

        Ok(Self {
            model: model.to_string(),
            messages: full_history,
            temperature: req.temperature,
            tools: req
                .tools
                .clone()
                .into_iter()
                .map(ToolDefinition::from)
                .collect::<Vec<_>>(),
            tool_choice,
            additional_params: req.additional_params,
        })
    }
}

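// A sketch of the message order the conversion above produces (assuming a
// preamble, context documents, and prior chat history are all present):
//     [system preamble] -> [normalized documents] -> [chat history ...]
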
#[derive(Clone)]
pub struct CompletionModel<T = reqwest::Client> {
    client: Client<T>,
    /// Name of the model (e.g.: gpt-3.5-turbo-1106)
    pub model: String,
}

impl<T> CompletionModel<T>
where
    T: HttpClientExt,
{
    pub fn new(client: Client<T>, model: impl Into<String>) -> Self {
        Self {
            client,
            model: model.into(),
        }
    }

    pub fn with_model(client: Client<T>, model: &str) -> Self {
        Self {
            client,
            model: model.into(),
        }
    }
}

impl<T> completion::CompletionModel for CompletionModel<T>
where
    T: HttpClientExt + Clone + Default + std::fmt::Debug + Send + 'static,
{
    type Response = CompletionResponse;
    type StreamingResponse = openai::StreamingCompletionResponse;

    type Client = Client<T>;

    fn make(client: &Self::Client, model: impl Into<String>) -> Self {
        Self::new(client.clone(), model.into())
    }

    #[cfg_attr(feature = "worker", worker::send)]
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        let preamble = completion_request.preamble.clone();
        let request =
            GaladrielCompletionRequest::try_from((self.model.as_ref(), completion_request))?;
        let body = serde_json::to_vec(&request)?;

        let req = self
            .client
            .post("/chat/completions")?
            .body(body)
            .map_err(http_client::Error::from)?;

        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat",
                gen_ai.operation.name = "chat",
                gen_ai.provider.name = "galadriel",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.input.messages = serde_json::to_string(&request.messages)?,
                gen_ai.output.messages = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        async move {
            let response = self.client.send(req).await?;

            if response.status().is_success() {
                let t = http_client::text(response).await?;
                tracing::debug!(target: "rig::completions", "Galadriel completion response: {t}");

                match serde_json::from_str::<ApiResponse<CompletionResponse>>(&t)? {
                    ApiResponse::Ok(response) => {
                        let span = tracing::Span::current();
                        span.record("gen_ai.response.id", response.id.clone());
                        span.record("gen_ai.response.model", response.model.clone());
                        span.record(
                            "gen_ai.output.messages",
                            serde_json::to_string(&response.choices).unwrap(),
                        );
                        if let Some(ref usage) = response.usage {
                            span.record("gen_ai.usage.input_tokens", usage.prompt_tokens);
                            span.record(
                                "gen_ai.usage.output_tokens",
                                usage.total_tokens - usage.prompt_tokens,
                            );
                        }
                        response.try_into()
                    }
                    ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
                }
            } else {
                let text = http_client::text(response).await?;

                Err(CompletionError::ProviderError(text))
            }
        }
        .instrument(span)
        .await
    }

    #[cfg_attr(feature = "worker", worker::send)]
    async fn stream(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<StreamingCompletionResponse<Self::StreamingResponse>, CompletionError> {
        let preamble = completion_request.preamble.clone();
        let mut request =
            GaladrielCompletionRequest::try_from((self.model.as_ref(), completion_request))?;

        let params = json_utils::merge(
            request.additional_params.unwrap_or(serde_json::json!({})),
            serde_json::json!({"stream": true, "stream_options": {"include_usage": true} }),
        );

        request.additional_params = Some(params);

        let body = serde_json::to_vec(&request)?;

        let req = self
            .client
            .post("/chat/completions")?
            .body(body)
            .map_err(http_client::Error::from)?;

        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat_streaming",
                gen_ai.operation.name = "chat_streaming",
                gen_ai.provider.name = "galadriel",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.input.messages = serde_json::to_string(&request.messages)?,
                gen_ai.output.messages = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        send_compatible_streaming_request(self.client.http_client().clone(), req)
            .instrument(span)
            .await
    }
}