1use serde::{Deserialize, Serialize};
2use std::{convert::Infallible, str::FromStr};
3use tracing::{Instrument, Level, enabled, info_span};
4
5use super::client::{Client, Usage};
6use crate::completion::GetTokenUsage;
7use crate::http_client::{self, HttpClientExt};
8use crate::providers::internal::buffered;
9use crate::streaming::{RawStreamingChoice, RawStreamingToolCall, StreamingCompletionResponse};
10use crate::{
11 OneOrMany,
12 completion::{self, CompletionError, CompletionRequest},
13 json_utils, message,
14 providers::mistral::client::ApiResponse,
15 telemetry::SpanCombinator,
16};
17
// Premier model aliases — `-latest` ids track Mistral's newest revision.
pub const CODESTRAL: &str = "codestral-latest";
pub const MISTRAL_LARGE: &str = "mistral-large-latest";
pub const PIXTRAL_LARGE: &str = "pixtral-large-latest";
pub const MISTRAL_SABA: &str = "mistral-saba-latest";
pub const MINISTRAL_3B: &str = "ministral-3b-latest";
pub const MINISTRAL_8B: &str = "ministral-8b-latest";

// Free/open-weight model identifiers.
pub const MISTRAL_SMALL: &str = "mistral-small-latest";
pub const PIXTRAL_SMALL: &str = "pixtral-12b-2409";
pub const MISTRAL_NEMO: &str = "open-mistral-nemo";
pub const CODESTRAL_MAMBA: &str = "open-codestral-mamba";
39
/// Assistant text content in Mistral's wire shape; serialized as an
/// internally tagged object carrying a `type` field alongside `text`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub struct AssistantContent {
    text: String,
}
49
/// User content item, tagged by a lowercase `type` field.
/// Only plain text is currently supported.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum UserContent {
    Text { text: String },
}
55
/// One generated completion choice from a Mistral chat response.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Choice {
    /// Zero-based index of this choice within the response.
    pub index: usize,
    /// The generated message for this choice.
    pub message: Message,
    /// Raw log-probability payload, passed through untouched when present.
    pub logprobs: Option<serde_json::Value>,
    /// Why generation stopped (e.g. "stop").
    pub finish_reason: String,
}
63
/// A chat message in Mistral's wire format, discriminated by the
/// lowercase `role` field (`user` / `assistant` / `system` / `tool`).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum Message {
    User {
        content: String,
    },
    Assistant {
        content: String,
        // Omitted from the payload when empty; an explicit JSON `null`
        // is tolerated via `json_utils::null_or_vec`.
        #[serde(
            default,
            deserialize_with = "json_utils::null_or_vec",
            skip_serializing_if = "Vec::is_empty"
        )]
        tool_calls: Vec<ToolCall>,
        // Mistral's assistant-prefix flag; defaults to false when absent.
        #[serde(default)]
        prefix: bool,
    },
    System {
        content: String,
    },
    Tool {
        name: String,
        content: String,
        tool_call_id: String,
    },
}
93
94impl Message {
95 pub fn user(content: String) -> Self {
96 Message::User { content }
97 }
98
99 pub fn assistant(content: String, tool_calls: Vec<ToolCall>, prefix: bool) -> Self {
100 Message::Assistant {
101 content,
102 tool_calls,
103 prefix,
104 }
105 }
106
107 pub fn system(content: String) -> Self {
108 Message::System { content }
109 }
110}
111
impl TryFrom<message::Message> for Vec<Message> {
    type Error = message::MessageError;

    /// Converts a generic Rig message into zero or more Mistral wire messages.
    ///
    /// - System messages map 1:1.
    /// - User messages are split: each tool result becomes a `Message::Tool`
    ///   (emitted first), each text item becomes a `Message::User`; any other
    ///   user content kind is silently dropped.
    /// - Assistant messages collapse into at most one `Message::Assistant`;
    ///   reasoning content is skipped, image content is an error, and an
    ///   assistant message with no text and no tool calls yields an empty vec.
    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::System { content } => Ok(vec![Message::System { content }]),
            message::Message::User { content } => {
                let mut tool_result_messages = Vec::new();
                let mut other_messages = Vec::new();

                for content_item in content {
                    match content_item {
                        message::UserContent::ToolResult(message::ToolResult {
                            id,
                            call_id,
                            content: tool_content,
                        }) => {
                            // Prefer the provider call id; fall back to the tool id.
                            let call_id_key = call_id.unwrap_or_else(|| id.clone());
                            // Only the first text item is forwarded; image results are ignored.
                            let content_text = tool_content
                                .into_iter()
                                .find_map(|content_item| match content_item {
                                    message::ToolResultContent::Text(text) => Some(text.text),
                                    message::ToolResultContent::Image(_) => None,
                                })
                                .unwrap_or_default();
                            tool_result_messages.push(Message::Tool {
                                name: id,
                                content: content_text,
                                tool_call_id: call_id_key,
                            });
                        }
                        message::UserContent::Text(message::Text { text }) => {
                            other_messages.push(Message::User { content: text });
                        }
                        // Other user content kinds are not representable for Mistral here.
                        _ => {}
                    }
                }

                // Tool results must precede the remaining user text messages.
                tool_result_messages.append(&mut other_messages);
                Ok(tool_result_messages)
            }
            message::Message::Assistant { content, .. } => {
                let mut text_content = Vec::new();
                let mut tool_calls = Vec::new();

                for content in content {
                    match content {
                        message::AssistantContent::Text(text) => text_content.push(text),
                        message::AssistantContent::ToolCall(tool_call) => {
                            tool_calls.push(tool_call)
                        }
                        message::AssistantContent::Reasoning(_) => {
                            // Reasoning is internal-only and never sent to Mistral.
                        }
                        message::AssistantContent::Image(_) => {
                            return Err(message::MessageError::ConversionError(
                                "Mistral assistant messages do not support image content".into(),
                            ));
                        }
                    }
                }

                if text_content.is_empty() && tool_calls.is_empty() {
                    return Ok(vec![]);
                }

                // NOTE(review): only the first text item survives — any
                // additional assistant text items are dropped here.
                Ok(vec![Message::Assistant {
                    content: text_content
                        .into_iter()
                        .next()
                        .map(|content| content.text)
                        .unwrap_or_default(),
                    tool_calls: tool_calls
                        .into_iter()
                        .map(|tool_call| tool_call.into())
                        .collect::<Vec<_>>(),
                    prefix: false,
                }])
            }
        }
    }
}
195
/// A tool invocation requested by the model.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolCall {
    pub id: String,
    /// Defaults to `function` when the field is absent in the payload.
    #[serde(default)]
    pub r#type: ToolType,
    pub function: Function,
}
203
impl From<message::ToolCall> for ToolCall {
    /// Converts Rig's generic tool call into Mistral's wire shape,
    /// always tagging it as a `function` call.
    fn from(tool_call: message::ToolCall) -> Self {
        Self {
            id: tool_call.id,
            r#type: ToolType::default(),
            function: Function {
                name: tool_call.function.name,
                arguments: tool_call.function.arguments,
            },
        }
    }
}
216
/// Function name plus arguments. Arguments travel as a JSON-encoded string
/// on the wire but are exposed as a `serde_json::Value` in memory.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Function {
    pub name: String,
    #[serde(with = "json_utils::stringified_json")]
    pub arguments: serde_json::Value,
}
223
/// Kind of tool call; Mistral currently only defines `function`.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolType {
    #[default]
    Function,
}
230
/// Wire-format tool definition: `{"type": "function", "function": {...}}`.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ToolDefinition {
    pub r#type: String,
    pub function: completion::ToolDefinition,
}
236
impl From<completion::ToolDefinition> for ToolDefinition {
    /// Wraps a generic tool definition with the `function` type tag.
    fn from(tool: completion::ToolDefinition) -> Self {
        Self {
            r#type: "function".into(),
            function: tool,
        }
    }
}
245
/// A single tool-result content item (text only).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolResultContent {
    // Defaults to `text` when the field is absent.
    #[serde(default)]
    r#type: ToolResultContentType,
    text: String,
}
252
/// Content-type discriminator for tool results; only `text` exists today.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolResultContentType {
    #[default]
    Text,
}
259
260impl From<String> for ToolResultContent {
261 fn from(s: String) -> Self {
262 ToolResultContent {
263 r#type: ToolResultContentType::default(),
264 text: s,
265 }
266 }
267}
268
269impl From<String> for UserContent {
270 fn from(s: String) -> Self {
271 UserContent::Text { text: s }
272 }
273}
274
275impl FromStr for UserContent {
276 type Err = Infallible;
277
278 fn from_str(s: &str) -> Result<Self, Self::Err> {
279 Ok(UserContent::Text {
280 text: s.to_string(),
281 })
282 }
283}
284
285impl From<String> for AssistantContent {
286 fn from(s: String) -> Self {
287 AssistantContent { text: s }
288 }
289}
290
291impl FromStr for AssistantContent {
292 type Err = Infallible;
293
294 fn from_str(s: &str) -> Result<Self, Self::Err> {
295 Ok(AssistantContent {
296 text: s.to_string(),
297 })
298 }
299}
300
/// A Mistral completion model: a client handle plus the model identifier
/// sent with each request (defaults to a `reqwest`-backed client).
#[derive(Clone)]
pub struct CompletionModel<T = reqwest::Client> {
    pub(crate) client: Client<T>,
    pub model: String,
}
306
307#[derive(Debug, Default, Serialize, Deserialize)]
308pub enum ToolChoice {
309 #[default]
310 Auto,
311 None,
312 Any,
313}
314
impl TryFrom<message::ToolChoice> for ToolChoice {
    type Error = CompletionError;

    /// Maps Rig's generic tool-choice onto Mistral's modes.
    ///
    /// `Required` becomes Mistral's `Any`; requesting a specific tool is
    /// not supported by Mistral and yields a `ProviderError`.
    fn try_from(value: message::ToolChoice) -> Result<Self, Self::Error> {
        let res = match value {
            message::ToolChoice::Auto => Self::Auto,
            message::ToolChoice::None => Self::None,
            message::ToolChoice::Required => Self::Any,
            message::ToolChoice::Specific { .. } => {
                return Err(CompletionError::ProviderError(
                    "Mistral doesn't support requiring specific tools to be called".to_string(),
                ));
            }
        };

        Ok(res)
    }
}
333
/// JSON body for Mistral's `v1/chat/completions` endpoint.
#[derive(Debug, Serialize, Deserialize)]
pub(super) struct MistralCompletionRequest {
    model: String,
    pub messages: Vec<Message>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f64>,
    // Omitted entirely when no tools are registered.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<ToolDefinition>,
    // NOTE(review): this reuses the OpenAI `ToolChoice` type rather than
    // the `ToolChoice` enum defined above — confirm which is intended.
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<crate::providers::openai::completion::ToolChoice>,
    // Extra provider parameters, flattened into the top-level JSON object.
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub additional_params: Option<serde_json::Value>,
}
347
impl TryFrom<(&str, CompletionRequest)> for MistralCompletionRequest {
    type Error = CompletionError;

    /// Builds the wire request from a default model name plus a generic
    /// `CompletionRequest`.
    ///
    /// Message order: optional system preamble, then normalized documents,
    /// then the converted chat history. Fails if no Mistral-compatible
    /// messages remain after conversion, or if the tool-choice conversion
    /// rejects the request.
    fn try_from((model, req): (&str, CompletionRequest)) -> Result<Self, Self::Error> {
        // Structured outputs are accepted but currently ignored.
        if req.output_schema.is_some() {
            tracing::warn!("Structured outputs currently not supported for Mistral");
        }
        // A per-request model override wins over the model bound at construction.
        let model = req.model.clone().unwrap_or_else(|| model.to_string());
        let mut full_history: Vec<Message> = match &req.preamble {
            Some(preamble) => vec![Message::system(preamble.clone())],
            None => vec![],
        };
        if let Some(docs) = req.normalized_documents() {
            let docs: Vec<Message> = docs.try_into()?;
            full_history.extend(docs);
        }

        // Each Rig message may expand to several wire messages; flatten them.
        let chat_history: Vec<Message> = req
            .chat_history
            .clone()
            .into_iter()
            .map(|message| message.try_into())
            .collect::<Result<Vec<Vec<Message>>, _>>()?
            .into_iter()
            .flatten()
            .collect();

        full_history.extend(chat_history);

        // Conversion may have filtered everything out (e.g. reasoning-only
        // history); an empty message list is invalid for the API.
        if full_history.is_empty() {
            return Err(CompletionError::RequestError(
                std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "Mistral request has no provider-compatible messages after conversion",
                )
                .into(),
            ));
        }

        let tool_choice = req
            .tool_choice
            .clone()
            .map(crate::providers::openai::completion::ToolChoice::try_from)
            .transpose()?;

        Ok(Self {
            model: model.to_string(),
            messages: full_history,
            temperature: req.temperature,
            tools: req
                .tools
                .clone()
                .into_iter()
                .map(ToolDefinition::from)
                .collect::<Vec<_>>(),
            tool_choice,
            additional_params: req.additional_params,
        })
    }
}
408
409impl<T> CompletionModel<T> {
410 pub fn new(client: Client<T>, model: impl Into<String>) -> Self {
411 Self {
412 client,
413 model: model.into(),
414 }
415 }
416
417 pub fn with_model(client: Client<T>, model: &str) -> Self {
418 Self {
419 client,
420 model: model.into(),
421 }
422 }
423}
424
/// Raw response body from Mistral's chat-completions endpoint.
#[derive(Debug, Deserialize, Clone, Serialize)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (seconds) of response creation.
    pub created: u64,
    pub model: String,
    pub system_fingerprint: Option<String>,
    pub choices: Vec<Choice>,
    /// Token accounting; absent on some responses.
    pub usage: Option<Usage>,
}
435
// Telemetry accessors used when recording response metadata on spans.
impl crate::telemetry::ProviderResponseExt for CompletionResponse {
    type OutputMessage = Choice;
    type Usage = Usage;

    fn get_response_id(&self) -> Option<String> {
        Some(self.id.clone())
    }

    fn get_response_model_name(&self) -> Option<String> {
        Some(self.model.clone())
    }

    fn get_output_messages(&self) -> Vec<Self::OutputMessage> {
        self.choices.clone()
    }

    /// Joins the non-empty assistant texts of all choices with newlines;
    /// returns `None` when no assistant text is present at all.
    fn get_text_response(&self) -> Option<String> {
        let res = self
            .choices
            .iter()
            .filter_map(|choice| match choice.message {
                Message::Assistant { ref content, .. } => {
                    if content.is_empty() {
                        None
                    } else {
                        Some(content.to_string())
                    }
                }
                _ => None,
            })
            .collect::<Vec<String>>()
            .join("\n");

        if res.is_empty() { None } else { Some(res) }
    }

    fn get_usage(&self) -> Option<Self::Usage> {
        self.usage.clone()
    }
}
476
impl GetTokenUsage for CompletionResponse {
    /// Maps the API's usage numbers onto Rig's `completion::Usage`.
    /// Returns `None` when the response carried no usage block.
    fn token_usage(&self) -> Option<crate::completion::Usage> {
        let api_usage = self.usage.as_ref()?;

        let mut usage = crate::completion::Usage::new();
        usage.input_tokens = api_usage.prompt_tokens as u64;
        // Output is the API-reported completion token count.
        usage.output_tokens = api_usage.completion_tokens as u64;
        usage.total_tokens = api_usage.total_tokens as u64;
        usage.cached_input_tokens = api_usage.cached_tokens();

        Some(usage)
    }
}
490
491impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
492 type Error = CompletionError;
493
494 fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
495 let choice = response.choices.first().ok_or_else(|| {
496 CompletionError::ResponseError("Response contained no choices".to_owned())
497 })?;
498 let content = match &choice.message {
499 Message::Assistant {
500 content,
501 tool_calls,
502 ..
503 } => {
504 let mut content = if content.is_empty() {
505 vec![]
506 } else {
507 vec![completion::AssistantContent::text(content.clone())]
508 };
509
510 content.extend(
511 tool_calls
512 .iter()
513 .map(|call| {
514 completion::AssistantContent::tool_call(
515 &call.id,
516 &call.function.name,
517 call.function.arguments.clone(),
518 )
519 })
520 .collect::<Vec<_>>(),
521 );
522 Ok(content)
523 }
524 _ => Err(CompletionError::ResponseError(
525 "Response did not contain a valid message or tool call".into(),
526 )),
527 }?;
528
529 let choice = OneOrMany::many(content).map_err(|_| {
530 CompletionError::ResponseError(
531 "Response contained no message or tool call (empty)".to_owned(),
532 )
533 })?;
534
535 let usage = response
536 .usage
537 .as_ref()
538 .map(|usage| completion::Usage {
539 input_tokens: usage.prompt_tokens as u64,
540 output_tokens: (usage.total_tokens - usage.prompt_tokens) as u64,
541 total_tokens: usage.total_tokens as u64,
542 cached_input_tokens: usage.cached_tokens(),
543 cache_creation_input_tokens: 0,
544 reasoning_tokens: 0,
545 })
546 .unwrap_or_default();
547
548 Ok(completion::CompletionResponse {
549 choice,
550 usage,
551 raw_response: response,
552 message_id: None,
553 })
554 }
555}
556
/// Maps one assistant content item to the streaming choices emitted by the
/// buffered stream shim: reasoning produces nothing, text and tool calls
/// map 1:1, and image content is rejected with a `ResponseError`.
fn assistant_content_to_streaming_choices(
    content: message::AssistantContent,
) -> Result<Vec<RawStreamingChoice<CompletionResponse>>, CompletionError> {
    match content {
        message::AssistantContent::Text(t) => Ok(vec![RawStreamingChoice::Message(t.text)]),
        message::AssistantContent::ToolCall(tc) => Ok(vec![RawStreamingChoice::ToolCall(
            RawStreamingToolCall::new(tc.id, tc.function.name, tc.function.arguments),
        )]),
        message::AssistantContent::Reasoning(_) => Ok(Vec::new()),
        message::AssistantContent::Image(_) => Err(CompletionError::ResponseError(
            "Image content is not supported on Mistral via Rig".into(),
        )),
    }
}
571
impl<T> completion::CompletionModel for CompletionModel<T>
where
    T: HttpClientExt + Send + Clone + std::fmt::Debug + 'static,
{
    type Response = CompletionResponse;
    type StreamingResponse = CompletionResponse;

    type Client = Client<T>;

    fn make(client: &Self::Client, model: impl Into<String>) -> Self {
        Self::new(client.clone(), model.into())
    }

    /// Sends a chat-completion request to `v1/chat/completions`, recording
    /// token usage and response metadata on the active (or newly created)
    /// tracing span, and converts the body into Rig's response type.
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        let preamble = completion_request.preamble.clone();
        let request =
            MistralCompletionRequest::try_from((self.model.as_ref(), completion_request))?;

        // Gated so the pretty-print serialization cost is only paid when
        // TRACE logging is actually enabled.
        if enabled!(Level::TRACE) {
            tracing::trace!(
                target: "rig::completions",
                "Mistral completion request: {}",
                serde_json::to_string_pretty(&request)?
            );
        }

        // Reuse an enclosing span when one exists; otherwise open a fresh
        // GenAI-convention "chat" span with empty fields recorded later.
        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat",
                gen_ai.operation.name = "chat",
                gen_ai.provider.name = "mistral",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = &preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.usage.cache_read.input_tokens = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        let body = serde_json::to_vec(&request)?;

        let request = self
            .client
            .post("v1/chat/completions")?
            .body(body)
            .map_err(|e| CompletionError::HttpError(e.into()))?;

        async move {
            let response = self.client.send(request).await?;

            if response.status().is_success() {
                let text = http_client::text(response).await?;
                match serde_json::from_str::<ApiResponse<CompletionResponse>>(&text)? {
                    ApiResponse::Ok(response) => {
                        // Record telemetry before handing the response back.
                        let span = tracing::Span::current();
                        span.record_token_usage(&response);
                        span.record_response_metadata(&response);
                        response.try_into()
                    }
                    ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
                }
            } else {
                // Non-2xx: surface the raw body as a provider error.
                let text = http_client::text(response).await?;
                Err(CompletionError::ProviderError(text))
            }
        }
        .instrument(span)
        .await
    }

    /// Streaming is emulated: the buffered completion is fetched in full
    /// and replayed as streaming chunks.
    async fn stream(
        &self,
        request: CompletionRequest,
    ) -> Result<StreamingCompletionResponse<Self::StreamingResponse>, CompletionError> {
        let resp = self.completion(request).await?;
        buffered::stream_from_completion_response(resp, assistant_content_to_streaming_choices)
    }
}
658
#[cfg(test)]
mod tests {
    use super::*;

    // End-to-end deserialization of a representative API response body.
    #[test]
    fn test_response_deserialization() {
        let json_data = r#"
        {
            "id": "cmpl-e5cc70bb28c444948073e77776eb30ef",
            "object": "chat.completion",
            "model": "mistral-small-latest",
            "usage": {
                "prompt_tokens": 16,
                "completion_tokens": 34,
                "total_tokens": 50
            },
            "created": 1702256327,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "content": "string",
                        "tool_calls": [
                            {
                                "id": "null",
                                "type": "function",
                                "function": {
                                    "name": "string",
                                    "arguments": "{ }"
                                },
                                "index": 0
                            }
                        ],
                        "prefix": false,
                        "role": "assistant"
                    },
                    "finish_reason": "stop"
                }
            ]
        }
        "#;
        let completion_response = serde_json::from_str::<CompletionResponse>(json_data).unwrap();
        assert_eq!(completion_response.model, MISTRAL_SMALL);

        let CompletionResponse {
            id,
            object,
            created,
            choices,
            usage,
            ..
        } = completion_response;

        assert_eq!(id, "cmpl-e5cc70bb28c444948073e77776eb30ef");

        let usage = usage.unwrap();
        assert_eq!(usage.prompt_tokens, 16);
        assert_eq!(usage.completion_tokens, 34);
        assert_eq!(usage.total_tokens, 50);
        // No cache fields in the payload: cached_tokens() falls back to 0.
        assert_eq!(usage.cached_tokens(), 0);
        assert!(usage.prompt_tokens_details.is_none());
        assert!(usage.num_cached_tokens.is_none());
        assert_eq!(object, "chat.completion".to_string());
        assert_eq!(created, 1702256327);
        assert_eq!(choices.len(), 1);
    }

    #[test]
    fn test_usage_deserializes_prompt_tokens_details_cached_tokens() {
        let json = r#"{
            "prompt_tokens": 100,
            "completion_tokens": 20,
            "total_tokens": 120,
            "prompt_tokens_details": { "cached_tokens": 42 }
        }"#;
        let usage: Usage = serde_json::from_str(json).unwrap();
        assert_eq!(usage.prompt_tokens, 100);
        assert_eq!(
            usage.prompt_tokens_details.as_ref().unwrap().cached_tokens,
            42
        );
        assert_eq!(usage.cached_tokens(), 42);
    }

    // The singular "prompt_token_details" spelling must also be accepted.
    #[test]
    fn test_usage_accepts_singular_prompt_token_details_alias() {
        let json = r#"{
            "prompt_tokens": 100,
            "completion_tokens": 20,
            "total_tokens": 120,
            "prompt_token_details": { "cached_tokens": 7 }
        }"#;
        let usage: Usage = serde_json::from_str(json).unwrap();
        assert_eq!(
            usage.prompt_tokens_details.as_ref().unwrap().cached_tokens,
            7
        );
        assert_eq!(usage.cached_tokens(), 7);
    }

    #[test]
    fn test_usage_falls_back_to_num_cached_tokens() {
        let json = r#"{
            "prompt_tokens": 100,
            "completion_tokens": 20,
            "total_tokens": 120,
            "num_cached_tokens": 13
        }"#;
        let usage: Usage = serde_json::from_str(json).unwrap();
        assert_eq!(usage.num_cached_tokens, Some(13));
        assert!(usage.prompt_tokens_details.is_none());
        assert_eq!(usage.cached_tokens(), 13);
    }

    // When both cache fields are present, the details object wins.
    #[test]
    fn test_usage_prefers_prompt_tokens_details_over_num_cached_tokens() {
        let json = r#"{
            "prompt_tokens": 100,
            "completion_tokens": 20,
            "total_tokens": 120,
            "num_cached_tokens": 1,
            "prompt_tokens_details": { "cached_tokens": 99 }
        }"#;
        let usage: Usage = serde_json::from_str(json).unwrap();
        assert_eq!(usage.cached_tokens(), 99);
    }

    #[test]
    fn test_token_usage_threads_cached_tokens_into_completion_usage() {
        let json = r#"{
            "id": "cmpl-x",
            "object": "chat.completion",
            "model": "mistral-small-latest",
            "created": 1700000000,
            "choices": [{
                "index": 0,
                "message": { "content": "hi", "role": "assistant", "prefix": false },
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": 100,
                "completion_tokens": 20,
                "total_tokens": 120,
                "prompt_tokens_details": { "cached_tokens": 42 }
            }
        }"#;
        let response: CompletionResponse = serde_json::from_str(json).unwrap();
        let usage = response.token_usage().unwrap();
        assert_eq!(usage.input_tokens, 100);
        assert_eq!(usage.output_tokens, 20);
        assert_eq!(usage.total_tokens, 120);
        assert_eq!(usage.cached_input_tokens, 42);
    }

    // Reasoning-only assistant messages convert to zero wire messages.
    #[test]
    fn test_assistant_reasoning_is_skipped_in_message_conversion() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert!(converted.is_empty());
    }

    #[test]
    fn test_assistant_text_and_tool_call_are_preserved_when_reasoning_present() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::many(vec![
                message::AssistantContent::reasoning("hidden"),
                message::AssistantContent::text("visible"),
                message::AssistantContent::tool_call(
                    "call_1",
                    "subtract",
                    serde_json::json!({"x": 2, "y": 1}),
                ),
            ])
            .expect("non-empty assistant content"),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert_eq!(converted.len(), 1);

        match &converted[0] {
            Message::Assistant {
                content,
                tool_calls,
                ..
            } => {
                assert_eq!(content, "visible");
                assert_eq!(tool_calls.len(), 1);
                assert_eq!(tool_calls[0].id, "call_1");
                assert_eq!(tool_calls[0].function.name, "subtract");
                assert_eq!(
                    tool_calls[0].function.arguments,
                    serde_json::json!({"x": 2, "y": 1})
                );
            }
            _ => panic!("expected assistant message"),
        }
    }

    #[test]
    fn test_streaming_choice_mapping_skips_reasoning_and_preserves_other_content() {
        let reasoning_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::reasoning("hidden"))
                .expect("reasoning should be ignored");
        assert!(reasoning_choices.is_empty());

        let text_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::text("visible"))
                .expect("text should be preserved");
        match text_choices.as_slice() {
            [RawStreamingChoice::Message(text)] => assert_eq!(text, "visible"),
            _ => panic!("expected text streaming choice"),
        }

        let tool_choices =
            assistant_content_to_streaming_choices(message::AssistantContent::tool_call(
                "call_2",
                "add",
                serde_json::json!({"x": 2, "y": 3}),
            ))
            .expect("tool call should be preserved");
        match tool_choices.as_slice() {
            [RawStreamingChoice::ToolCall(call)] => {
                assert_eq!(call.id, "call_2");
                assert_eq!(call.name, "add");
                assert_eq!(call.arguments, serde_json::json!({"x": 2, "y": 3}));
            }
            _ => panic!("expected tool-call streaming choice"),
        }
    }

    // A history that filters down to nothing must be rejected, not sent.
    #[test]
    fn test_request_conversion_errors_when_all_messages_are_filtered() {
        let request = CompletionRequest {
            preamble: None,
            chat_history: OneOrMany::one(message::Message::Assistant {
                id: None,
                content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
            }),
            documents: vec![],
            tools: vec![],
            temperature: None,
            max_tokens: None,
            tool_choice: None,
            additional_params: None,
            model: None,
            output_schema: None,
        };

        let result = MistralCompletionRequest::try_from((MISTRAL_SMALL, request));
        assert!(matches!(result, Err(CompletionError::RequestError(_))));
    }
}