1use reqwest_eventsource::{Event, RequestBuilderExt};
12use std::collections::HashMap;
13
14use super::openai::{CompletionResponse, StreamingToolCall, TranscriptionResponse, Usage};
15use crate::client::{
16 ClientBuilderError, CompletionClient, TranscriptionClient, VerifyClient, VerifyError,
17};
18use crate::completion::GetTokenUsage;
19use crate::json_utils::merge;
20use async_stream::stream;
21use futures::StreamExt;
22
23use crate::{
24 OneOrMany,
25 completion::{self, CompletionError, CompletionRequest},
26 json_utils,
27 message::{self, MessageError},
28 providers::openai::ToolDefinition,
29 transcription::{self, TranscriptionError},
30};
31use reqwest::RequestBuilder;
32use reqwest::multipart::Part;
33use rig::client::ProviderClient;
34use rig::impl_conversion_traits;
35use serde::{Deserialize, Serialize};
36use serde_json::{Value, json};
37
/// Base URL of Groq's OpenAI-compatible REST API.
const GROQ_API_BASE_URL: &str = "https://api.groq.com/openai/v1";
42
/// Builder for [`Client`]: configures API key, base URL and an optional
/// custom `reqwest` HTTP client before construction.
pub struct ClientBuilder<'a> {
    api_key: &'a str,
    base_url: &'a str,
    // When `None`, `build` constructs a default reqwest client.
    http_client: Option<reqwest::Client>,
}
48
49impl<'a> ClientBuilder<'a> {
50 pub fn new(api_key: &'a str) -> Self {
51 Self {
52 api_key,
53 base_url: GROQ_API_BASE_URL,
54 http_client: None,
55 }
56 }
57
58 pub fn base_url(mut self, base_url: &'a str) -> Self {
59 self.base_url = base_url;
60 self
61 }
62
63 pub fn custom_client(mut self, client: reqwest::Client) -> Self {
64 self.http_client = Some(client);
65 self
66 }
67
68 pub fn build(self) -> Result<Client, ClientBuilderError> {
69 let http_client = if let Some(http_client) = self.http_client {
70 http_client
71 } else {
72 reqwest::Client::builder().build()?
73 };
74
75 Ok(Client {
76 base_url: self.base_url.to_string(),
77 api_key: self.api_key.to_string(),
78 http_client,
79 })
80 }
81}
82
/// Groq API client: base URL, API key and a shared HTTP client.
#[derive(Clone)]
pub struct Client {
    base_url: String,
    api_key: String,
    http_client: reqwest::Client,
}
89
// Manual Debug impl so the API key is never leaked into logs or error output.
impl std::fmt::Debug for Client {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Client")
            .field("base_url", &self.base_url)
            .field("http_client", &self.http_client)
            // Deliberately redacted; see note above.
            .field("api_key", &"<REDACTED>")
            .finish()
    }
}
99
100impl Client {
101 pub fn builder(api_key: &str) -> ClientBuilder<'_> {
112 ClientBuilder::new(api_key)
113 }
114
115 pub fn new(api_key: &str) -> Self {
120 Self::builder(api_key)
121 .build()
122 .expect("Groq client should build")
123 }
124
125 pub(crate) fn post(&self, path: &str) -> reqwest::RequestBuilder {
126 let url = format!("{}/{}", self.base_url, path).replace("//", "/");
127 self.http_client.post(url).bearer_auth(&self.api_key)
128 }
129
130 pub(crate) fn get(&self, path: &str) -> reqwest::RequestBuilder {
131 let url = format!("{}/{}", self.base_url, path).replace("//", "/");
132 self.http_client.get(url).bearer_auth(&self.api_key)
133 }
134}
135
impl ProviderClient for Client {
    /// Build a client from the `GROQ_API_KEY` environment variable.
    ///
    /// # Panics
    /// Panics if `GROQ_API_KEY` is not set.
    fn from_env() -> Self {
        let api_key = std::env::var("GROQ_API_KEY").expect("GROQ_API_KEY not set");
        Self::new(&api_key)
    }

    /// Build a client from a provider value; only the simple API-key form
    /// is supported.
    ///
    /// # Panics
    /// Panics if `input` is not `ProviderValue::Simple`.
    fn from_val(input: crate::client::ProviderValue) -> Self {
        let crate::client::ProviderValue::Simple(api_key) = input else {
            panic!("Incorrect provider value type")
        };
        Self::new(&api_key)
    }
}
151
impl CompletionClient for Client {
    type CompletionModel = CompletionModel;

    /// Create a completion model handle for `model`
    /// (e.g. one of the model-name constants in this module).
    fn completion_model(&self, model: &str) -> CompletionModel {
        CompletionModel::new(self.clone(), model)
    }
}
170
impl TranscriptionClient for Client {
    type TranscriptionModel = TranscriptionModel;

    /// Create a transcription model handle for `model`
    /// (e.g. one of the whisper constants in this module).
    fn transcription_model(&self, model: &str) -> TranscriptionModel {
        TranscriptionModel::new(self.clone(), model)
    }
}
189
190impl VerifyClient for Client {
191 #[cfg_attr(feature = "worker", worker::send)]
192 async fn verify(&self) -> Result<(), VerifyError> {
193 let response = self.get("/models").send().await?;
194 match response.status() {
195 reqwest::StatusCode::OK => Ok(()),
196 reqwest::StatusCode::UNAUTHORIZED => Err(VerifyError::InvalidAuthentication),
197 reqwest::StatusCode::INTERNAL_SERVER_ERROR
198 | reqwest::StatusCode::SERVICE_UNAVAILABLE
199 | reqwest::StatusCode::BAD_GATEWAY => {
200 Err(VerifyError::ProviderError(response.text().await?))
201 }
202 _ => {
203 response.error_for_status()?;
204 Ok(())
205 }
206 }
207 }
208}
209
// Groq exposes no embeddings, image-generation or audio-generation endpoints;
// generate the stub conversion impls for those client capabilities.
impl_conversion_traits!(
    AsEmbeddings,
    AsImageGeneration,
    AsAudioGeneration for Client
);
215
/// Error payload returned by the Groq API.
#[derive(Debug, Deserialize)]
struct ApiErrorResponse {
    message: String,
}
220
/// Either a successful payload of type `T` or an API error.
/// `untagged`: serde tries `Ok(T)` first, then falls back to the error shape.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ApiResponse<T> {
    Ok(T),
    Err(ApiErrorResponse),
}
227
/// Chat message in Groq's wire format.
#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    // "system", "user" or "assistant".
    pub role: String,
    pub content: Option<String>,
    // Parsed reasoning text returned by reasoning-capable models;
    // omitted from the request body when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<String>,
}
235
236impl TryFrom<Message> for message::Message {
237 type Error = message::MessageError;
238
239 fn try_from(message: Message) -> Result<Self, Self::Error> {
240 match message.role.as_str() {
241 "user" => Ok(Self::User {
242 content: OneOrMany::one(
243 message
244 .content
245 .map(|content| message::UserContent::text(&content))
246 .ok_or_else(|| {
247 message::MessageError::ConversionError("Empty user message".to_string())
248 })?,
249 ),
250 }),
251 "assistant" => Ok(Self::Assistant {
252 id: None,
253 content: OneOrMany::one(
254 message
255 .content
256 .map(|content| message::AssistantContent::text(&content))
257 .ok_or_else(|| {
258 message::MessageError::ConversionError(
259 "Empty assistant message".to_string(),
260 )
261 })?,
262 ),
263 }),
264 _ => Err(message::MessageError::ConversionError(format!(
265 "Unknown role: {}",
266 message.role
267 ))),
268 }
269 }
270}
271
impl TryFrom<message::Message> for Message {
    type Error = message::MessageError;

    /// Convert rig's internal message into Groq's wire format.
    ///
    /// User messages keep only their first text part. Assistant messages
    /// concatenate all text parts (newline-separated), map reasoning content
    /// onto the `reasoning` field, and reject tool calls with a conversion
    /// error.
    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::User { content } => Ok(Self {
                role: "user".to_string(),
                // First text part only; non-text user content is dropped.
                content: content.iter().find_map(|c| match c {
                    message::UserContent::Text(text) => Some(text.text.clone()),
                    _ => None,
                }),
                reasoning: None,
            }),
            message::Message::Assistant { content, .. } => {
                let mut text_content: Option<String> = None;
                let mut groq_reasoning: Option<String> = None;

                for c in content.iter() {
                    match c {
                        message::AssistantContent::Text(text) => {
                            // Append with a newline when text was already accumulated.
                            text_content = Some(
                                text_content
                                    .map(|mut existing| {
                                        existing.push('\n');
                                        existing.push_str(&text.text);
                                        existing
                                    })
                                    .unwrap_or_else(|| text.text.clone()),
                            );
                        }
                        message::AssistantContent::ToolCall(_tool_call) => {
                            return Err(MessageError::ConversionError(
                                "Tool calls do not exist on this message".into(),
                            ));
                        }
                        message::AssistantContent::Reasoning(message::Reasoning {
                            reasoning,
                            ..
                        }) => {
                            // Only the first chunk of each Reasoning part is
                            // kept; a later Reasoning part overwrites an
                            // earlier one.
                            groq_reasoning =
                                Some(reasoning.first().cloned().unwrap_or(String::new()));
                        }
                    }
                }

                Ok(Self {
                    role: "assistant".to_string(),
                    content: text_content,
                    reasoning: groq_reasoning,
                })
            }
        }
    }
}
326
// Completion model identifiers accepted by the Groq API.
pub const DEEPSEEK_R1_DISTILL_LLAMA_70B: &str = "deepseek-r1-distill-llama-70b";
pub const GEMMA2_9B_IT: &str = "gemma2-9b-it";
pub const LLAMA_3_1_8B_INSTANT: &str = "llama-3.1-8b-instant";
pub const LLAMA_3_2_11B_VISION_PREVIEW: &str = "llama-3.2-11b-vision-preview";
pub const LLAMA_3_2_1B_PREVIEW: &str = "llama-3.2-1b-preview";
pub const LLAMA_3_2_3B_PREVIEW: &str = "llama-3.2-3b-preview";
pub const LLAMA_3_2_90B_VISION_PREVIEW: &str = "llama-3.2-90b-vision-preview";
pub const LLAMA_3_2_70B_SPECDEC: &str = "llama-3.2-70b-specdec";
pub const LLAMA_3_2_70B_VERSATILE: &str = "llama-3.2-70b-versatile";
pub const LLAMA_GUARD_3_8B: &str = "llama-guard-3-8b";
pub const LLAMA_3_70B_8192: &str = "llama3-70b-8192";
pub const LLAMA_3_8B_8192: &str = "llama3-8b-8192";
pub const MIXTRAL_8X7B_32768: &str = "mixtral-8x7b-32768";
356
/// Handle for running completions against a specific Groq model.
#[derive(Clone, Debug)]
pub struct CompletionModel {
    client: Client,
    /// Model identifier (e.g. "llama-3.1-8b-instant").
    pub model: String,
}
363
364impl CompletionModel {
365 pub fn new(client: Client, model: &str) -> Self {
366 Self {
367 client,
368 model: model.to_string(),
369 }
370 }
371
372 fn create_completion_request(
373 &self,
374 completion_request: CompletionRequest,
375 ) -> Result<Value, CompletionError> {
376 let mut partial_history = vec![];
378 if let Some(docs) = completion_request.normalized_documents() {
379 partial_history.push(docs);
380 }
381 partial_history.extend(completion_request.chat_history);
382
383 let mut full_history: Vec<Message> =
385 completion_request
386 .preamble
387 .map_or_else(Vec::new, |preamble| {
388 vec![Message {
389 role: "system".to_string(),
390 content: Some(preamble),
391 reasoning: None,
392 }]
393 });
394
395 full_history.extend(
397 partial_history
398 .into_iter()
399 .map(message::Message::try_into)
400 .collect::<Result<Vec<Message>, _>>()?,
401 );
402
403 let request = if completion_request.tools.is_empty() {
404 json!({
405 "model": self.model,
406 "messages": full_history,
407 "temperature": completion_request.temperature,
408 })
409 } else {
410 json!({
411 "model": self.model,
412 "messages": full_history,
413 "temperature": completion_request.temperature,
414 "tools": completion_request.tools.into_iter().map(ToolDefinition::from).collect::<Vec<_>>(),
415 "tool_choice": "auto",
416 "reasoning_format": "parsed"
417 })
418 };
419
420 let request = if let Some(params) = completion_request.additional_params {
421 json_utils::merge(request, params)
422 } else {
423 request
424 };
425
426 Ok(request)
427 }
428}
429
impl completion::CompletionModel for CompletionModel {
    type Response = CompletionResponse;
    type StreamingResponse = StreamingCompletionResponse;

    /// Run a non-streaming completion against `/chat/completions`.
    ///
    /// # Errors
    /// Returns `CompletionError::ProviderError` for API-level errors and
    /// non-success HTTP statuses; transport failures surface via `?`.
    #[cfg_attr(feature = "worker", worker::send)]
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        let request = self.create_completion_request(completion_request)?;

        let response = self
            .client
            .post("/chat/completions")
            .json(&request)
            .send()
            .await?;

        if response.status().is_success() {
            match response.json::<ApiResponse<CompletionResponse>>().await? {
                ApiResponse::Ok(response) => {
                    // Log token usage (or "N/A" when the API omits it).
                    tracing::info!(target: "rig",
                        "groq completion token usage: {:?}",
                        response.usage.clone().map(|usage| format!("{usage}")).unwrap_or("N/A".to_string())
                    );
                    response.try_into()
                }
                ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
            }
        } else {
            Err(CompletionError::ProviderError(response.text().await?))
        }
    }

    /// Run a streaming completion: enables SSE streaming (with a final usage
    /// chunk via `stream_options.include_usage`) and delegates to
    /// `send_compatible_streaming_request`.
    #[cfg_attr(feature = "worker", worker::send)]
    async fn stream(
        &self,
        request: CompletionRequest,
    ) -> Result<
        crate::streaming::StreamingCompletionResponse<Self::StreamingResponse>,
        CompletionError,
    > {
        let mut request = self.create_completion_request(request)?;

        request = merge(
            request,
            json!({"stream": true, "stream_options": {"include_usage": true}}),
        );

        let builder = self.client.post("/chat/completions").json(&request);

        send_compatible_streaming_request(builder).await
    }
}
484
// Transcription (speech-to-text) model identifiers accepted by the Groq API.
pub const WHISPER_LARGE_V3: &str = "whisper-large-v3";
pub const WHISPER_LARGE_V3_TURBO: &str = "whisper-large-v3-turbo";
pub const DISTIL_WHISPER_LARGE_V3: &str = "distil-whisper-large-v3-en";
491
492#[derive(Clone)]
493pub struct TranscriptionModel {
494 client: Client,
495 pub model: String,
497}
498
499impl TranscriptionModel {
500 pub fn new(client: Client, model: &str) -> Self {
501 Self {
502 client,
503 model: model.to_string(),
504 }
505 }
506}
507impl transcription::TranscriptionModel for TranscriptionModel {
508 type Response = TranscriptionResponse;
509
510 #[cfg_attr(feature = "worker", worker::send)]
511 async fn transcription(
512 &self,
513 request: transcription::TranscriptionRequest,
514 ) -> Result<
515 transcription::TranscriptionResponse<Self::Response>,
516 transcription::TranscriptionError,
517 > {
518 let data = request.data;
519
520 let mut body = reqwest::multipart::Form::new()
521 .text("model", self.model.clone())
522 .text("language", request.language)
523 .part(
524 "file",
525 Part::bytes(data).file_name(request.filename.clone()),
526 );
527
528 if let Some(prompt) = request.prompt {
529 body = body.text("prompt", prompt.clone());
530 }
531
532 if let Some(ref temperature) = request.temperature {
533 body = body.text("temperature", temperature.to_string());
534 }
535
536 if let Some(ref additional_params) = request.additional_params {
537 for (key, value) in additional_params
538 .as_object()
539 .expect("Additional Parameters to OpenAI Transcription should be a map")
540 {
541 body = body.text(key.to_owned(), value.to_string());
542 }
543 }
544
545 let response = self
546 .client
547 .post("audio/transcriptions")
548 .multipart(body)
549 .send()
550 .await?;
551
552 if response.status().is_success() {
553 match response
554 .json::<ApiResponse<TranscriptionResponse>>()
555 .await?
556 {
557 ApiResponse::Ok(response) => response.try_into(),
558 ApiResponse::Err(api_error_response) => Err(TranscriptionError::ProviderError(
559 api_error_response.message,
560 )),
561 }
562 } else {
563 Err(TranscriptionError::ProviderError(response.text().await?))
564 }
565 }
566}
567
/// One streaming delta from Groq's SSE chunks.
///
/// `untagged`: a payload carrying a `reasoning` field deserializes as
/// `Reasoning`; anything else falls through to `MessageContent`. Variant
/// order matters for this to work.
#[derive(Deserialize, Debug)]
#[serde(untagged)]
pub enum StreamingDelta {
    Reasoning {
        reasoning: String,
    },
    MessageContent {
        #[serde(default)]
        content: Option<String>,
        // JSON `null` tool_calls are normalized to an empty vec.
        #[serde(default, deserialize_with = "json_utils::null_or_vec")]
        tool_calls: Vec<StreamingToolCall>,
    },
}
581
/// Single choice within a streaming chunk.
#[derive(Deserialize, Debug)]
struct StreamingChoice {
    delta: StreamingDelta,
}
586
/// One SSE chunk of a streaming completion.
/// `usage` is only populated on the final chunk when
/// `stream_options.include_usage` was requested.
#[derive(Deserialize, Debug)]
struct StreamingCompletionChunk {
    choices: Vec<StreamingChoice>,
    usage: Option<Usage>,
}
592
/// Final response metadata yielded at the end of a completion stream.
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct StreamingCompletionResponse {
    pub usage: Usage,
}
597
598impl GetTokenUsage for StreamingCompletionResponse {
599 fn token_usage(&self) -> Option<crate::completion::Usage> {
600 let mut usage = crate::completion::Usage::new();
601
602 usage.input_tokens = self.usage.prompt_tokens as u64;
603 usage.total_tokens = self.usage.total_tokens as u64;
604 usage.output_tokens = self.usage.total_tokens as u64 - self.usage.prompt_tokens as u64;
605
606 Some(usage)
607 }
608}
609
/// Drive an OpenAI-compatible SSE completion stream, yielding message text,
/// reasoning, tool calls, and a final usage summary.
///
/// Tool calls may arrive either complete in a single chunk or split across
/// chunks (name first, then argument fragments); split calls are accumulated
/// in a map keyed by tool-call index and flushed once the stream ends.
pub async fn send_compatible_streaming_request(
    request_builder: RequestBuilder,
) -> Result<
    crate::streaming::StreamingCompletionResponse<StreamingCompletionResponse>,
    CompletionError,
> {
    let mut event_source = request_builder
        .eventsource()
        .expect("Cloning request must succeed");

    let stream = Box::pin(stream! {
        let mut final_usage = Usage {
            prompt_tokens: 0,
            total_tokens: 0
        };

        // index -> (id, name, accumulated argument JSON) for split tool calls.
        let mut calls: HashMap<usize, (String, String, String)> = HashMap::new();

        while let Some(event_result) = event_source.next().await {
            match event_result {
                Ok(Event::Open) => {
                    tracing::trace!("SSE connection opened");
                    continue;
                }

                Ok(Event::Message(message)) => {
                    let data_str = message.data.trim();

                    // Unparseable payloads (e.g. "[DONE]" sentinels) are
                    // skipped, not treated as errors.
                    let parsed = serde_json::from_str::<StreamingCompletionChunk>(data_str);
                    let Ok(data) = parsed else {
                        let err = parsed.unwrap_err();
                        tracing::debug!("Couldn't parse SSE payload as StreamingCompletionChunk: {:?}", err);
                        continue;
                    };

                    // Only the first choice of each chunk is consumed.
                    if let Some(choice) = data.choices.first() {
                        match &choice.delta {
                            StreamingDelta::Reasoning { reasoning } => {
                                yield Ok(crate::streaming::RawStreamingChoice::Reasoning {
                                    id: None,
                                    reasoning: reasoning.to_string()
                                });
                            }

                            StreamingDelta::MessageContent { content, tool_calls } => {
                                for tool_call in tool_calls {
                                    let function = &tool_call.function;

                                    // Start of a split tool call: name present, no args yet.
                                    if function.name.as_ref().map(|s| !s.is_empty()).unwrap_or(false)
                                        && function.arguments.is_empty()
                                    {
                                        let id = tool_call.id.clone().unwrap_or_default();
                                        let name = function.name.clone().unwrap();
                                        calls.insert(tool_call.index, (id, name, String::new()));
                                    }
                                    // Continuation: argument fragment without a name.
                                    else if function.name.as_ref().map(|s| s.is_empty()).unwrap_or(true)
                                        && !function.arguments.is_empty()
                                    {
                                        if let Some((id, name, existing_args)) = calls.get(&tool_call.index) {
                                            let combined = format!("{}{}", existing_args, function.arguments);
                                            calls.insert(tool_call.index, (id.clone(), name.clone(), combined));
                                        } else {
                                            tracing::debug!("Partial tool call received but tool call was never started.");
                                        }
                                    }
                                    // Complete tool call delivered in one chunk:
                                    // yield it immediately.
                                    else {
                                        let id = tool_call.id.clone().unwrap_or_default();
                                        let name = function.name.clone().unwrap_or_default();
                                        let arguments_str = function.arguments.clone();

                                        let Ok(arguments_json) = serde_json::from_str::<serde_json::Value>(&arguments_str) else {
                                            tracing::debug!("Couldn't parse tool call args '{}'", arguments_str);
                                            continue;
                                        };

                                        yield Ok(crate::streaming::RawStreamingChoice::ToolCall {
                                            id,
                                            name,
                                            arguments: arguments_json,
                                            call_id: None
                                        });
                                    }
                                }

                                if let Some(content) = content {
                                    yield Ok(crate::streaming::RawStreamingChoice::Message(content.clone()));
                                }
                            }
                        }
                    }

                    // Usage arrives on the final chunk when include_usage is set;
                    // keep the latest one seen.
                    if let Some(usage) = data.usage {
                        final_usage = usage.clone();
                    }
                }

                Err(reqwest_eventsource::Error::StreamEnded) => break,

                Err(err) => {
                    tracing::error!(?err, "SSE error");
                    yield Err(CompletionError::ResponseError(err.to_string()));
                    break;
                }
            }
        }

        // Flush accumulated split tool calls now that the stream is done;
        // entries whose arguments never formed valid JSON are dropped.
        for (_, (id, name, arguments)) in calls {
            let Ok(arguments_json) = serde_json::from_str::<serde_json::Value>(&arguments) else {
                continue;
            };
            yield Ok(crate::streaming::RawStreamingChoice::ToolCall {
                id,
                name,
                arguments: arguments_json,
                call_id: None,
            });
        }

        yield Ok(crate::streaming::RawStreamingChoice::FinalResponse(
            StreamingCompletionResponse { usage: final_usage.clone() }
        ));
    });

    Ok(crate::streaming::StreamingCompletionResponse::stream(
        stream,
    ))
}