1use serde::{Deserialize, Serialize};
2use std::{convert::Infallible, str::FromStr};
3use tracing::{Instrument, Level, enabled, info_span};
4
5use super::client::{Client, Usage};
6use crate::completion::GetTokenUsage;
7use crate::http_client::{self, HttpClientExt};
8use crate::providers::internal::buffered;
9use crate::streaming::{RawStreamingChoice, RawStreamingToolCall, StreamingCompletionResponse};
10use crate::{
11 OneOrMany,
12 completion::{self, CompletionError, CompletionRequest},
13 json_utils, message,
14 providers::mistral::client::ApiResponse,
15 telemetry::SpanCombinator,
16};
17
// Model identifiers accepted by the Mistral chat completions API.
// The `-latest` suffixed names are aliases that track the newest revision
// of each model family.
pub const CODESTRAL: &str = "codestral-latest";
pub const MISTRAL_LARGE: &str = "mistral-large-latest";
pub const PIXTRAL_LARGE: &str = "pixtral-large-latest";
pub const MISTRAL_SABA: &str = "mistral-saba-latest";
pub const MINISTRAL_3B: &str = "ministral-3b-latest";
pub const MINISTRAL_8B: &str = "ministral-8b-latest";

pub const MISTRAL_SMALL: &str = "mistral-small-latest";
pub const PIXTRAL_SMALL: &str = "pixtral-12b-2409";
pub const MISTRAL_NEMO: &str = "open-mistral-nemo";
pub const CODESTRAL_MAMBA: &str = "open-codestral-mamba";
39
/// Assistant-side text content as serialized on the Mistral wire
/// (internally tagged with a `"type"` field).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub struct AssistantContent {
    text: String,
}
49
/// User-side content; only plain text is currently supported.
/// Tagged with a `"type"` field on the wire.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum UserContent {
    Text { text: String },
}
55
/// One candidate completion returned by the Mistral API.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Choice {
    pub index: usize,
    pub message: Message,
    // Kept as untyped JSON; Rig does not interpret log-probability payloads.
    pub logprobs: Option<serde_json::Value>,
    pub finish_reason: String,
}
63
/// A provider-level chat message, tagged by `role` on the wire
/// (`"user"` / `"assistant"` / `"system"` / `"tool"`).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum Message {
    User {
        content: String,
    },
    Assistant {
        content: String,
        // Treats a JSON `null` as an empty vec on input; omitted from the
        // serialized payload when there are no tool calls.
        #[serde(
            default,
            deserialize_with = "json_utils::null_or_vec",
            skip_serializing_if = "Vec::is_empty"
        )]
        tool_calls: Vec<ToolCall>,
        // Mistral's assistant-prefix flag; defaults to false when absent.
        #[serde(default)]
        prefix: bool,
    },
    System {
        content: String,
    },
    Tool {
        name: String,
        content: String,
        tool_call_id: String,
    },
}
93
94impl Message {
95 pub fn user(content: String) -> Self {
96 Message::User { content }
97 }
98
99 pub fn assistant(content: String, tool_calls: Vec<ToolCall>, prefix: bool) -> Self {
100 Message::Assistant {
101 content,
102 tool_calls,
103 prefix,
104 }
105 }
106
107 pub fn system(content: String) -> Self {
108 Message::System { content }
109 }
110}
111
impl TryFrom<message::Message> for Vec<Message> {
    type Error = message::MessageError;

    /// Converts one generic Rig message into zero or more Mistral messages.
    ///
    /// - System and user text map 1:1.
    /// - Tool results become `Tool` messages and are emitted *before* any
    ///   user text from the same source message.
    /// - Assistant reasoning content is silently dropped; an assistant
    ///   message containing only reasoning converts to an empty vec.
    /// - Assistant image content is rejected with a `ConversionError`.
    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::System { content } => Ok(vec![Message::System { content }]),
            message::Message::User { content } => {
                // Collected separately so tool results can be ordered ahead
                // of plain user text in the output.
                let mut tool_result_messages = Vec::new();
                let mut other_messages = Vec::new();

                for content_item in content {
                    match content_item {
                        message::UserContent::ToolResult(message::ToolResult {
                            id,
                            call_id,
                            content: tool_content,
                        }) => {
                            // Prefer the explicit call id; fall back to the
                            // tool result id when none was provided.
                            let call_id_key = call_id.unwrap_or_else(|| id.clone());
                            // Only the first text item is used; image tool
                            // results are skipped (empty string if none).
                            let content_text = tool_content
                                .into_iter()
                                .find_map(|content_item| match content_item {
                                    message::ToolResultContent::Text(text) => Some(text.text),
                                    message::ToolResultContent::Image(_) => None,
                                })
                                .unwrap_or_default();
                            tool_result_messages.push(Message::Tool {
                                name: id,
                                content: content_text,
                                tool_call_id: call_id_key,
                            });
                        }
                        message::UserContent::Text(message::Text { text }) => {
                            other_messages.push(Message::User { content: text });
                        }
                        // Other user content kinds (e.g. images/documents)
                        // are not representable here and are dropped.
                        _ => {}
                    }
                }

                // Tool results first, then the remaining user text.
                tool_result_messages.append(&mut other_messages);
                Ok(tool_result_messages)
            }
            message::Message::Assistant { content, .. } => {
                let mut text_content = Vec::new();
                let mut tool_calls = Vec::new();

                for content in content {
                    match content {
                        message::AssistantContent::Text(text) => text_content.push(text),
                        message::AssistantContent::ToolCall(tool_call) => {
                            tool_calls.push(tool_call)
                        }
                        message::AssistantContent::Reasoning(_) => {
                            // Intentionally dropped: Mistral has no channel
                            // for reasoning content.
                        }
                        message::AssistantContent::Image(_) => {
                            return Err(message::MessageError::ConversionError(
                                "Mistral assistant messages do not support image content".into(),
                            ));
                        }
                    }
                }

                // A reasoning-only (or empty) assistant message yields no
                // provider messages at all.
                if text_content.is_empty() && tool_calls.is_empty() {
                    return Ok(vec![]);
                }

                // NOTE(review): only the first text item is preserved here;
                // additional text items are discarded.
                Ok(vec![Message::Assistant {
                    content: text_content
                        .into_iter()
                        .next()
                        .map(|content| content.text)
                        .unwrap_or_default(),
                    tool_calls: tool_calls
                        .into_iter()
                        .map(|tool_call| tool_call.into())
                        .collect::<Vec<_>>(),
                    prefix: false,
                }])
            }
        }
    }
}
195
/// A tool invocation requested by the model.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolCall {
    pub id: String,
    // Defaults to `function` when the field is absent from the payload.
    #[serde(default)]
    pub r#type: ToolType,
    pub function: Function,
}
203
204impl From<message::ToolCall> for ToolCall {
205 fn from(tool_call: message::ToolCall) -> Self {
206 Self {
207 id: tool_call.id,
208 r#type: ToolType::default(),
209 function: Function {
210 name: tool_call.function.name,
211 arguments: tool_call.function.arguments,
212 },
213 }
214 }
215}
216
/// The function name/arguments pair carried by a tool call.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Function {
    pub name: String,
    // On the wire the arguments are a JSON-encoded *string*; this adapter
    // converts to/from a structured `serde_json::Value`.
    #[serde(with = "json_utils::stringified_json")]
    pub arguments: serde_json::Value,
}
223
/// Tool kinds supported by Mistral; only `function` exists today.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolType {
    #[default]
    Function,
}
230
/// Mistral's wrapper around a generic tool definition:
/// `{ "type": "function", "function": { … } }`.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ToolDefinition {
    pub r#type: String,
    pub function: completion::ToolDefinition,
}
236
237impl From<completion::ToolDefinition> for ToolDefinition {
238 fn from(tool: completion::ToolDefinition) -> Self {
239 Self {
240 r#type: "function".into(),
241 function: tool,
242 }
243 }
244}
245
/// A single piece of tool-result content (text only).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolResultContent {
    // Defaults to `text` when the field is absent.
    #[serde(default)]
    r#type: ToolResultContentType,
    text: String,
}
252
/// Content kinds for tool results; only plain text is supported.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolResultContentType {
    #[default]
    Text,
}
259
260impl From<String> for ToolResultContent {
261 fn from(s: String) -> Self {
262 ToolResultContent {
263 r#type: ToolResultContentType::default(),
264 text: s,
265 }
266 }
267}
268
269impl From<String> for UserContent {
270 fn from(s: String) -> Self {
271 UserContent::Text { text: s }
272 }
273}
274
275impl FromStr for UserContent {
276 type Err = Infallible;
277
278 fn from_str(s: &str) -> Result<Self, Self::Err> {
279 Ok(UserContent::Text {
280 text: s.to_string(),
281 })
282 }
283}
284
285impl From<String> for AssistantContent {
286 fn from(s: String) -> Self {
287 AssistantContent { text: s }
288 }
289}
290
291impl FromStr for AssistantContent {
292 type Err = Infallible;
293
294 fn from_str(s: &str) -> Result<Self, Self::Err> {
295 Ok(AssistantContent {
296 text: s.to_string(),
297 })
298 }
299}
300
/// A Mistral chat-completion model bound to a client and a model identifier.
///
/// Generic over the HTTP client implementation `T`
/// (defaults to `reqwest::Client`).
#[derive(Clone)]
pub struct CompletionModel<T = reqwest::Client> {
    pub(crate) client: Client<T>,
    pub model: String,
}
306
307#[derive(Debug, Default, Serialize, Deserialize)]
308pub enum ToolChoice {
309 #[default]
310 Auto,
311 None,
312 Any,
313}
314
315impl TryFrom<message::ToolChoice> for ToolChoice {
316 type Error = CompletionError;
317
318 fn try_from(value: message::ToolChoice) -> Result<Self, Self::Error> {
319 let res = match value {
320 message::ToolChoice::Auto => Self::Auto,
321 message::ToolChoice::None => Self::None,
322 message::ToolChoice::Required => Self::Any,
323 message::ToolChoice::Specific { .. } => {
324 return Err(CompletionError::ProviderError(
325 "Mistral doesn't support requiring specific tools to be called".to_string(),
326 ));
327 }
328 };
329
330 Ok(res)
331 }
332}
333
/// The JSON request body for Mistral's `v1/chat/completions` endpoint.
#[derive(Debug, Serialize, Deserialize)]
pub(super) struct MistralCompletionRequest {
    model: String,
    pub messages: Vec<Message>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f64>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<ToolDefinition>,
    // Reuses the OpenAI-compatible tool-choice representation.
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<crate::providers::openai::completion::ToolChoice>,
    // Extra caller-supplied parameters are flattened into the top-level
    // JSON object.
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub additional_params: Option<serde_json::Value>,
}
347
impl TryFrom<(&str, CompletionRequest)> for MistralCompletionRequest {
    type Error = CompletionError;

    /// Builds the provider request body from a generic completion request.
    ///
    /// History is assembled in order: preamble (as a system message), then
    /// normalized documents, then the converted chat history. Fails if the
    /// conversion leaves no messages at all (e.g. everything was filtered
    /// out as unsupported content).
    fn try_from((model, req): (&str, CompletionRequest)) -> Result<Self, Self::Error> {
        if req.output_schema.is_some() {
            tracing::warn!("Structured outputs currently not supported for Mistral");
        }
        // A model set on the request overrides the model this struct was
        // constructed with.
        let model = req.model.clone().unwrap_or_else(|| model.to_string());
        let mut full_history: Vec<Message> = match &req.preamble {
            Some(preamble) => vec![Message::system(preamble.clone())],
            None => vec![],
        };
        if let Some(docs) = req.normalized_documents() {
            let docs: Vec<Message> = docs.try_into()?;
            full_history.extend(docs);
        }

        // Each generic message may expand into several provider messages;
        // flatten and fail fast on the first conversion error.
        let chat_history: Vec<Message> = req
            .chat_history
            .clone()
            .into_iter()
            .map(|message| message.try_into())
            .collect::<Result<Vec<Vec<Message>>, _>>()?
            .into_iter()
            .flatten()
            .collect();

        full_history.extend(chat_history);

        if full_history.is_empty() {
            return Err(CompletionError::RequestError(
                std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "Mistral request has no provider-compatible messages after conversion",
                )
                .into(),
            ));
        }

        let tool_choice = req
            .tool_choice
            .clone()
            .map(crate::providers::openai::completion::ToolChoice::try_from)
            .transpose()?;

        Ok(Self {
            model: model.to_string(),
            messages: full_history,
            temperature: req.temperature,
            tools: req
                .tools
                .clone()
                .into_iter()
                .map(ToolDefinition::from)
                .collect::<Vec<_>>(),
            tool_choice,
            additional_params: req.additional_params,
        })
    }
}
408
409impl<T> CompletionModel<T> {
410 pub fn new(client: Client<T>, model: impl Into<String>) -> Self {
411 Self {
412 client,
413 model: model.into(),
414 }
415 }
416
417 pub fn with_model(client: Client<T>, model: &str) -> Self {
418 Self {
419 client,
420 model: model.into(),
421 }
422 }
423}
424
/// The raw response body from Mistral's chat completions endpoint.
#[derive(Debug, Deserialize, Clone, Serialize)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub system_fingerprint: Option<String>,
    pub choices: Vec<Choice>,
    // Token accounting; may be absent from the payload.
    pub usage: Option<Usage>,
}
435
// Telemetry accessors used when recording response metadata on spans.
impl crate::telemetry::ProviderResponseExt for CompletionResponse {
    type OutputMessage = Choice;
    type Usage = Usage;

    fn get_response_id(&self) -> Option<String> {
        Some(self.id.clone())
    }

    fn get_response_model_name(&self) -> Option<String> {
        Some(self.model.clone())
    }

    fn get_output_messages(&self) -> Vec<Self::OutputMessage> {
        self.choices.clone()
    }

    /// Joins the non-empty assistant contents of all choices with newlines;
    /// returns `None` when there is no assistant text at all.
    fn get_text_response(&self) -> Option<String> {
        let res = self
            .choices
            .iter()
            .filter_map(|choice| match choice.message {
                Message::Assistant { ref content, .. } => {
                    if content.is_empty() {
                        None
                    } else {
                        Some(content.to_string())
                    }
                }
                // Non-assistant choices carry no user-visible text.
                _ => None,
            })
            .collect::<Vec<String>>()
            .join("\n");

        if res.is_empty() { None } else { Some(res) }
    }

    fn get_usage(&self) -> Option<Self::Usage> {
        self.usage.clone()
    }
}
476
477impl GetTokenUsage for CompletionResponse {
478 fn token_usage(&self) -> Option<crate::completion::Usage> {
479 let api_usage = self.usage.clone()?;
480
481 let mut usage = crate::completion::Usage::new();
482 usage.input_tokens = api_usage.prompt_tokens as u64;
483 usage.output_tokens = api_usage.completion_tokens as u64;
484 usage.total_tokens = api_usage.total_tokens as u64;
485
486 Some(usage)
487 }
488}
489
490impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
491 type Error = CompletionError;
492
493 fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
494 let choice = response.choices.first().ok_or_else(|| {
495 CompletionError::ResponseError("Response contained no choices".to_owned())
496 })?;
497 let content = match &choice.message {
498 Message::Assistant {
499 content,
500 tool_calls,
501 ..
502 } => {
503 let mut content = if content.is_empty() {
504 vec![]
505 } else {
506 vec![completion::AssistantContent::text(content.clone())]
507 };
508
509 content.extend(
510 tool_calls
511 .iter()
512 .map(|call| {
513 completion::AssistantContent::tool_call(
514 &call.id,
515 &call.function.name,
516 call.function.arguments.clone(),
517 )
518 })
519 .collect::<Vec<_>>(),
520 );
521 Ok(content)
522 }
523 _ => Err(CompletionError::ResponseError(
524 "Response did not contain a valid message or tool call".into(),
525 )),
526 }?;
527
528 let choice = OneOrMany::many(content).map_err(|_| {
529 CompletionError::ResponseError(
530 "Response contained no message or tool call (empty)".to_owned(),
531 )
532 })?;
533
534 let usage = response
535 .usage
536 .as_ref()
537 .map(|usage| completion::Usage {
538 input_tokens: usage.prompt_tokens as u64,
539 output_tokens: (usage.total_tokens - usage.prompt_tokens) as u64,
540 total_tokens: usage.total_tokens as u64,
541 cached_input_tokens: 0,
542 cache_creation_input_tokens: 0,
543 })
544 .unwrap_or_default();
545
546 Ok(completion::CompletionResponse {
547 choice,
548 usage,
549 raw_response: response,
550 message_id: None,
551 })
552 }
553}
554
555fn assistant_content_to_streaming_choices(
556 content: message::AssistantContent,
557) -> Result<Vec<RawStreamingChoice<CompletionResponse>>, CompletionError> {
558 match content {
559 message::AssistantContent::Text(t) => Ok(vec![RawStreamingChoice::Message(t.text)]),
560 message::AssistantContent::ToolCall(tc) => Ok(vec![RawStreamingChoice::ToolCall(
561 RawStreamingToolCall::new(tc.id, tc.function.name, tc.function.arguments),
562 )]),
563 message::AssistantContent::Reasoning(_) => Ok(Vec::new()),
564 message::AssistantContent::Image(_) => Err(CompletionError::ResponseError(
565 "Image content is not supported on Mistral via Rig".into(),
566 )),
567 }
568}
569
impl<T> completion::CompletionModel for CompletionModel<T>
where
    T: HttpClientExt + Send + Clone + std::fmt::Debug + 'static,
{
    type Response = CompletionResponse;
    // Streaming reuses the non-streaming response type: `stream` below
    // buffers a full completion rather than consuming SSE.
    type StreamingResponse = CompletionResponse;

    type Client = Client<T>;

    fn make(client: &Self::Client, model: impl Into<String>) -> Self {
        Self::new(client.clone(), model.into())
    }

    /// Sends a chat completion request to `v1/chat/completions` and converts
    /// the result into Rig's generic response, recording token usage and
    /// response metadata on the active tracing span.
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        // Captured before the request is consumed by the conversion below.
        let preamble = completion_request.preamble.clone();
        let request =
            MistralCompletionRequest::try_from((self.model.as_ref(), completion_request))?;

        // Pretty-printing is gated so the serialization cost is only paid
        // when TRACE logging is actually enabled.
        if enabled!(Level::TRACE) {
            tracing::trace!(
                target: "rig::completions",
                "Mistral completion request: {}",
                serde_json::to_string_pretty(&request)?
            );
        }

        // Create a fresh GenAI span only when no span is already active;
        // otherwise record onto the caller's span.
        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat",
                gen_ai.operation.name = "chat",
                gen_ai.provider.name = "mistral",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = &preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.usage.cache_read.input_tokens = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        let body = serde_json::to_vec(&request)?;

        let request = self
            .client
            .post("v1/chat/completions")?
            .body(body)
            .map_err(|e| CompletionError::HttpError(e.into()))?;

        async move {
            let response = self.client.send(request).await?;

            if response.status().is_success() {
                let text = http_client::text(response).await?;
                match serde_json::from_str::<ApiResponse<CompletionResponse>>(&text)? {
                    ApiResponse::Ok(response) => {
                        let span = tracing::Span::current();
                        span.record_token_usage(&response);
                        span.record_response_metadata(&response);
                        response.try_into()
                    }
                    ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
                }
            } else {
                // Non-2xx: surface the raw body as a provider error.
                let text = http_client::text(response).await?;
                Err(CompletionError::ProviderError(text))
            }
        }
        .instrument(span)
        .await
    }

    /// "Streams" by performing a full completion and replaying it as a
    /// buffered stream of choices; reasoning content is dropped during the
    /// replay (see `assistant_content_to_streaming_choices`).
    async fn stream(
        &self,
        request: CompletionRequest,
    ) -> Result<StreamingCompletionResponse<Self::StreamingResponse>, CompletionError> {
        let resp = self.completion(request).await?;
        buffered::stream_from_completion_response(resp, assistant_content_to_streaming_choices)
    }
}
656
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trips a representative API payload through serde and checks the
    // typed fields, including stringified tool-call arguments and usage.
    #[test]
    fn test_response_deserialization() {
        let json_data = r#"
        {
          "id": "cmpl-e5cc70bb28c444948073e77776eb30ef",
          "object": "chat.completion",
          "model": "mistral-small-latest",
          "usage": {
            "prompt_tokens": 16,
            "completion_tokens": 34,
            "total_tokens": 50
          },
          "created": 1702256327,
          "choices": [
            {
              "index": 0,
              "message": {
                "content": "string",
                "tool_calls": [
                  {
                    "id": "null",
                    "type": "function",
                    "function": {
                      "name": "string",
                      "arguments": "{ }"
                    },
                    "index": 0
                  }
                ],
                "prefix": false,
                "role": "assistant"
              },
              "finish_reason": "stop"
            }
          ]
        }
        "#;
        let completion_response = serde_json::from_str::<CompletionResponse>(json_data).unwrap();
        assert_eq!(completion_response.model, MISTRAL_SMALL);

        let CompletionResponse {
            id,
            object,
            created,
            choices,
            usage,
            ..
        } = completion_response;

        assert_eq!(id, "cmpl-e5cc70bb28c444948073e77776eb30ef");

        let Usage {
            completion_tokens,
            prompt_tokens,
            total_tokens,
        } = usage.unwrap();

        assert_eq!(prompt_tokens, 16);
        assert_eq!(completion_tokens, 34);
        assert_eq!(total_tokens, 50);
        assert_eq!(object, "chat.completion".to_string());
        assert_eq!(created, 1702256327);
        assert_eq!(choices.len(), 1);
    }

    // A reasoning-only assistant message must convert to zero provider
    // messages rather than an empty assistant message.
    #[test]
    fn test_assistant_reasoning_is_skipped_in_message_conversion() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert!(converted.is_empty());
    }

    // Reasoning is dropped but sibling text and tool calls survive, merged
    // into a single assistant message.
    #[test]
    fn test_assistant_text_and_tool_call_are_preserved_when_reasoning_present() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::many(vec![
                message::AssistantContent::reasoning("hidden"),
                message::AssistantContent::text("visible"),
                message::AssistantContent::tool_call(
                    "call_1",
                    "subtract",
                    serde_json::json!({"x": 2, "y": 1}),
                ),
            ])
            .expect("non-empty assistant content"),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert_eq!(converted.len(), 1);

        match &converted[0] {
            Message::Assistant {
                content,
                tool_calls,
                ..
            } => {
                assert_eq!(content, "visible");
                assert_eq!(tool_calls.len(), 1);
                assert_eq!(tool_calls[0].id, "call_1");
                assert_eq!(tool_calls[0].function.name, "subtract");
                assert_eq!(
                    tool_calls[0].function.arguments,
                    serde_json::json!({"x": 2, "y": 1})
                );
            }
            _ => panic!("expected assistant message"),
        }
    }

    // The streaming mapper mirrors the non-streaming conversion: reasoning
    // yields nothing, text and tool calls map 1:1.
    #[test]
    fn test_streaming_choice_mapping_skips_reasoning_and_preserves_other_content() {
        let reasoning_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::reasoning("hidden"))
                .expect("reasoning should be ignored");
        assert!(reasoning_choices.is_empty());

        let text_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::text("visible"))
                .expect("text should be preserved");
        match text_choices.as_slice() {
            [RawStreamingChoice::Message(text)] => assert_eq!(text, "visible"),
            _ => panic!("expected text streaming choice"),
        }

        let tool_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::tool_call(
                "call_2",
                "add",
                serde_json::json!({"x": 2, "y": 3}),
            ))
            .expect("tool call should be preserved");
        match tool_choices.as_slice() {
            [RawStreamingChoice::ToolCall(call)] => {
                assert_eq!(call.id, "call_2");
                assert_eq!(call.name, "add");
                assert_eq!(call.arguments, serde_json::json!({"x": 2, "y": 3}));
            }
            _ => panic!("expected tool-call streaming choice"),
        }
    }

    // If conversion filters out every message (here: reasoning-only
    // history, no preamble, no documents), building the request must fail
    // with a RequestError rather than sending an empty message list.
    #[test]
    fn test_request_conversion_errors_when_all_messages_are_filtered() {
        let request = CompletionRequest {
            preamble: None,
            chat_history: OneOrMany::one(message::Message::Assistant {
                id: None,
                content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
            }),
            documents: vec![],
            tools: vec![],
            temperature: None,
            max_tokens: None,
            tool_choice: None,
            additional_params: None,
            model: None,
            output_schema: None,
        };

        let result = MistralCompletionRequest::try_from((MISTRAL_SMALL, request));
        assert!(matches!(result, Err(CompletionError::RequestError(_))));
    }
}