//! Mistral chat-completion provider implementation
//! (`rig/providers/mistral/completion.rs`).

1use async_stream::stream;
2use serde::{Deserialize, Serialize};
3use std::{convert::Infallible, str::FromStr};
4use tracing::{Instrument, Level, enabled, info_span};
5
6use super::client::{Client, Usage};
7use crate::completion::GetTokenUsage;
8use crate::http_client::{self, HttpClientExt};
9use crate::streaming::{RawStreamingChoice, RawStreamingToolCall, StreamingCompletionResponse};
10use crate::{
11    OneOrMany,
12    completion::{self, CompletionError, CompletionRequest},
13    json_utils, message,
14    providers::mistral::client::ApiResponse,
15    telemetry::SpanCombinator,
16};
17
/// The latest version of the `codestral` Mistral model
pub const CODESTRAL: &str = "codestral-latest";
/// The latest version of the `mistral-large` Mistral model
pub const MISTRAL_LARGE: &str = "mistral-large-latest";
/// The latest version of the `pixtral-large` Mistral multimodal model
pub const PIXTRAL_LARGE: &str = "pixtral-large-latest";
/// The latest version of the `mistral-saba` Mistral model, trained on datasets from the Middle East & South Asia
pub const MISTRAL_SABA: &str = "mistral-saba-latest";
/// The latest version of the `ministral-3b` Mistral completions model
pub const MINISTRAL_3B: &str = "ministral-3b-latest";
/// The latest version of the `ministral-8b` Mistral completions model
pub const MINISTRAL_8B: &str = "ministral-8b-latest";

/// The latest version of the `mistral-small` Mistral completions model
pub const MISTRAL_SMALL: &str = "mistral-small-latest";
/// The `24-09` version of the `pixtral-12b` Mistral multimodal model
pub const PIXTRAL_SMALL: &str = "pixtral-12b-2409";
/// The `open-mistral-nemo` model
pub const MISTRAL_NEMO: &str = "open-mistral-nemo";
/// The `open-codestral-mamba` model
pub const CODESTRAL_MAMBA: &str = "open-codestral-mamba";
39
40// =================================================================
41// Rig Implementation Types
42// =================================================================
43
/// Assistant-produced text content, serialized with a `"type"` tag field.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub struct AssistantContent {
    // Raw text payload of the assistant content item.
    text: String,
}
49
/// User-provided content, serialized with a `"type"` tag field.
/// Only plain text is currently modeled for Mistral.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum UserContent {
    Text { text: String },
}
55
/// One completion choice from a Mistral chat-completion response.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Choice {
    /// Zero-based position of this choice in the response.
    pub index: usize,
    /// The message generated for this choice.
    pub message: Message,
    /// Log-probability payload, kept as raw JSON when present.
    pub logprobs: Option<serde_json::Value>,
    /// Why generation stopped (e.g. `"stop"`).
    pub finish_reason: String,
}
63
/// A chat message in Mistral's wire format, tagged by `role`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum Message {
    /// End-user message.
    User {
        content: String,
    },
    /// Model-generated message, possibly carrying tool calls.
    Assistant {
        content: String,
        // A JSON `null` deserializes to an empty vec; an empty vec is
        // omitted from serialization.
        #[serde(
            default,
            deserialize_with = "json_utils::null_or_vec",
            skip_serializing_if = "Vec::is_empty"
        )]
        tool_calls: Vec<ToolCall>,
        // NOTE(review): presumably Mistral's assistant-prefix flag — confirm
        // against the Mistral chat API docs.
        #[serde(default)]
        prefix: bool,
    },
    /// System / instruction message.
    System {
        content: String,
    },
    /// Result of a prior tool call, fed back to the model.
    Tool {
        /// The name of the tool that was called
        name: String,
        /// The content of the tool call
        content: String,
        /// The id of the tool call
        tool_call_id: String,
    },
}
93
94impl Message {
95    pub fn user(content: String) -> Self {
96        Message::User { content }
97    }
98
99    pub fn assistant(content: String, tool_calls: Vec<ToolCall>, prefix: bool) -> Self {
100        Message::Assistant {
101            content,
102            tool_calls,
103            prefix,
104        }
105    }
106
107    pub fn system(content: String) -> Self {
108        Message::System { content }
109    }
110}
111
impl TryFrom<message::Message> for Vec<Message> {
    type Error = message::MessageError;

    /// Expand one Rig message into zero or more Mistral wire messages.
    ///
    /// User messages are split into `Tool` messages (tool results) and `User`
    /// text messages, with the tool results emitted first. Assistant messages
    /// collapse into a single `Assistant` message carrying the first text item
    /// plus all tool calls; reasoning items are skipped, and an assistant
    /// message with no text and no tool calls yields an empty vec.
    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::System { content } => Ok(vec![Message::System { content }]),
            message::Message::User { content } => {
                let mut tool_result_messages = Vec::new();
                let mut other_messages = Vec::new();

                for content_item in content {
                    match content_item {
                        message::UserContent::ToolResult(message::ToolResult {
                            id,
                            call_id,
                            content: tool_content,
                        }) => {
                            // Prefer the explicit call_id; fall back to the tool-use id.
                            let call_id_key = call_id.unwrap_or_else(|| id.clone());
                            // Only the first text item of the tool result is
                            // forwarded; image results are dropped.
                            let content_text = tool_content
                                .into_iter()
                                .find_map(|content_item| match content_item {
                                    message::ToolResultContent::Text(text) => Some(text.text),
                                    message::ToolResultContent::Image(_) => None,
                                })
                                .unwrap_or_default();
                            tool_result_messages.push(Message::Tool {
                                name: id,
                                content: content_text,
                                tool_call_id: call_id_key,
                            });
                        }
                        message::UserContent::Text(message::Text { text }) => {
                            other_messages.push(Message::User { content: text });
                        }
                        // Any other user content kinds are silently dropped here.
                        _ => {}
                    }
                }

                // Tool results must precede the remaining user text messages.
                tool_result_messages.append(&mut other_messages);
                Ok(tool_result_messages)
            }
            message::Message::Assistant { content, .. } => {
                let mut text_content = Vec::new();
                let mut tool_calls = Vec::new();

                for content in content {
                    match content {
                        message::AssistantContent::Text(text) => text_content.push(text),
                        message::AssistantContent::ToolCall(tool_call) => {
                            tool_calls.push(tool_call)
                        }
                        message::AssistantContent::Reasoning(_) => {
                            // Mistral conversion path currently does not support assistant-history
                            // reasoning items. Silently skip to avoid crashing the process.
                        }
                        message::AssistantContent::Image(_) => {
                            panic!("Image content is not currently supported on Mistral via Rig");
                        }
                    }
                }

                // Nothing usable remained (e.g. reasoning-only history entry).
                if text_content.is_empty() && tool_calls.is_empty() {
                    return Ok(vec![]);
                }

                // Only the first text item survives; additional text items are
                // dropped by the `.next()` below.
                Ok(vec![Message::Assistant {
                    content: text_content
                        .into_iter()
                        .next()
                        .map(|content| content.text)
                        .unwrap_or_default(),
                    tool_calls: tool_calls
                        .into_iter()
                        .map(|tool_call| tool_call.into())
                        .collect::<Vec<_>>(),
                    prefix: false,
                }])
            }
        }
    }
}
193
/// A tool invocation requested by the model, in Mistral's wire format.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolCall {
    /// Provider-assigned id for this call.
    pub id: String,
    /// Call kind; defaults to `function` when absent.
    #[serde(default)]
    pub r#type: ToolType,
    /// The function name and arguments to invoke.
    pub function: Function,
}
201
202impl From<message::ToolCall> for ToolCall {
203    fn from(tool_call: message::ToolCall) -> Self {
204        Self {
205            id: tool_call.id,
206            r#type: ToolType::default(),
207            function: Function {
208                name: tool_call.function.name,
209                arguments: tool_call.function.arguments,
210            },
211        }
212    }
213}
214
/// The function portion of a tool call.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Function {
    /// Name of the function to call.
    pub name: String,
    // Serialized as a JSON string on the wire, parsed to/from a JSON value.
    #[serde(with = "json_utils::stringified_json")]
    pub arguments: serde_json::Value,
}
221
/// Kind of tool call; Mistral currently only defines `function`.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolType {
    #[default]
    Function,
}
228
/// A tool definition in Mistral's wire format: a core definition wrapped
/// with a `type` discriminator.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ToolDefinition {
    /// Always `"function"` (see the `From` impl below is not assumed; set by caller).
    pub r#type: String,
    /// The underlying Rig tool definition.
    pub function: completion::ToolDefinition,
}
234
235impl From<completion::ToolDefinition> for ToolDefinition {
236    fn from(tool: completion::ToolDefinition) -> Self {
237        Self {
238            r#type: "function".into(),
239            function: tool,
240        }
241    }
242}
243
/// Content of a tool result, paired with its content type.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolResultContent {
    // Content kind; defaults to `text` when absent.
    #[serde(default)]
    r#type: ToolResultContentType,
    // Raw text payload.
    text: String,
}
250
/// Kind of tool result content; only `text` is currently modeled.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolResultContentType {
    #[default]
    Text,
}
257
258impl From<String> for ToolResultContent {
259    fn from(s: String) -> Self {
260        ToolResultContent {
261            r#type: ToolResultContentType::default(),
262            text: s,
263        }
264    }
265}
266
267impl From<String> for UserContent {
268    fn from(s: String) -> Self {
269        UserContent::Text { text: s }
270    }
271}
272
273impl FromStr for UserContent {
274    type Err = Infallible;
275
276    fn from_str(s: &str) -> Result<Self, Self::Err> {
277        Ok(UserContent::Text {
278            text: s.to_string(),
279        })
280    }
281}
282
283impl From<String> for AssistantContent {
284    fn from(s: String) -> Self {
285        AssistantContent { text: s }
286    }
287}
288
289impl FromStr for AssistantContent {
290    type Err = Infallible;
291
292    fn from_str(s: &str) -> Result<Self, Self::Err> {
293        Ok(AssistantContent {
294            text: s.to_string(),
295        })
296    }
297}
298
/// A Mistral completion model: a client handle plus a model identifier.
#[derive(Clone)]
pub struct CompletionModel<T = reqwest::Client> {
    // HTTP client wrapper used to reach the Mistral API.
    pub(crate) client: Client<T>,
    /// Model identifier sent in each request (e.g. `mistral-small-latest`).
    pub model: String,
}
304
305#[derive(Debug, Default, Serialize, Deserialize)]
306pub enum ToolChoice {
307    #[default]
308    Auto,
309    None,
310    Any,
311}
312
313impl TryFrom<message::ToolChoice> for ToolChoice {
314    type Error = CompletionError;
315
316    fn try_from(value: message::ToolChoice) -> Result<Self, Self::Error> {
317        let res = match value {
318            message::ToolChoice::Auto => Self::Auto,
319            message::ToolChoice::None => Self::None,
320            message::ToolChoice::Required => Self::Any,
321            message::ToolChoice::Specific { .. } => {
322                return Err(CompletionError::ProviderError(
323                    "Mistral doesn't support requiring specific tools to be called".to_string(),
324                ));
325            }
326        };
327
328        Ok(res)
329    }
330}
331
/// Request body for Mistral's `v1/chat/completions` endpoint.
#[derive(Debug, Serialize, Deserialize)]
pub(super) struct MistralCompletionRequest {
    // Model identifier, e.g. "mistral-small-latest".
    model: String,
    /// Full message history: system preamble, documents, then chat history.
    pub messages: Vec<Message>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f64>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<ToolDefinition>,
    // NOTE(review): this reuses the OpenAI provider's ToolChoice serialization
    // rather than the local `ToolChoice` enum defined above — confirm which
    // representation is intended for the Mistral wire format.
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<crate::providers::openai::completion::ToolChoice>,
    // Extra provider-specific parameters flattened into the top-level object.
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub additional_params: Option<serde_json::Value>,
}
345
346impl TryFrom<(&str, CompletionRequest)> for MistralCompletionRequest {
347    type Error = CompletionError;
348
349    fn try_from((model, req): (&str, CompletionRequest)) -> Result<Self, Self::Error> {
350        if req.output_schema.is_some() {
351            tracing::warn!("Structured outputs currently not supported for Mistral");
352        }
353        let model = req.model.clone().unwrap_or_else(|| model.to_string());
354        let mut full_history: Vec<Message> = match &req.preamble {
355            Some(preamble) => vec![Message::system(preamble.clone())],
356            None => vec![],
357        };
358        if let Some(docs) = req.normalized_documents() {
359            let docs: Vec<Message> = docs.try_into()?;
360            full_history.extend(docs);
361        }
362
363        let chat_history: Vec<Message> = req
364            .chat_history
365            .clone()
366            .into_iter()
367            .map(|message| message.try_into())
368            .collect::<Result<Vec<Vec<Message>>, _>>()?
369            .into_iter()
370            .flatten()
371            .collect();
372
373        full_history.extend(chat_history);
374
375        if full_history.is_empty() {
376            return Err(CompletionError::RequestError(
377                std::io::Error::new(
378                    std::io::ErrorKind::InvalidInput,
379                    "Mistral request has no provider-compatible messages after conversion",
380                )
381                .into(),
382            ));
383        }
384
385        let tool_choice = req
386            .tool_choice
387            .clone()
388            .map(crate::providers::openai::completion::ToolChoice::try_from)
389            .transpose()?;
390
391        Ok(Self {
392            model: model.to_string(),
393            messages: full_history,
394            temperature: req.temperature,
395            tools: req
396                .tools
397                .clone()
398                .into_iter()
399                .map(ToolDefinition::from)
400                .collect::<Vec<_>>(),
401            tool_choice,
402            additional_params: req.additional_params,
403        })
404    }
405}
406
407impl<T> CompletionModel<T> {
408    pub fn new(client: Client<T>, model: impl Into<String>) -> Self {
409        Self {
410            client,
411            model: model.into(),
412        }
413    }
414
415    pub fn with_model(client: Client<T>, model: &str) -> Self {
416        Self {
417            client,
418            model: model.into(),
419        }
420    }
421}
422
/// A Mistral chat-completion response body.
#[derive(Debug, Deserialize, Clone, Serialize)]
pub struct CompletionResponse {
    /// Provider-assigned response id.
    pub id: String,
    /// Object type (e.g. `"chat.completion"`).
    pub object: String,
    /// Unix timestamp of response creation.
    pub created: u64,
    /// Model that produced the response.
    pub model: String,
    /// Optional fingerprint of the serving configuration.
    pub system_fingerprint: Option<String>,
    /// Generated choices; callers typically read the first.
    pub choices: Vec<Choice>,
    /// Token accounting, when reported by the API.
    pub usage: Option<Usage>,
}
433
434impl crate::telemetry::ProviderResponseExt for CompletionResponse {
435    type OutputMessage = Choice;
436    type Usage = Usage;
437
438    fn get_response_id(&self) -> Option<String> {
439        Some(self.id.clone())
440    }
441
442    fn get_response_model_name(&self) -> Option<String> {
443        Some(self.model.clone())
444    }
445
446    fn get_output_messages(&self) -> Vec<Self::OutputMessage> {
447        self.choices.clone()
448    }
449
450    fn get_text_response(&self) -> Option<String> {
451        let res = self
452            .choices
453            .iter()
454            .filter_map(|choice| match choice.message {
455                Message::Assistant { ref content, .. } => {
456                    if content.is_empty() {
457                        None
458                    } else {
459                        Some(content.to_string())
460                    }
461                }
462                _ => None,
463            })
464            .collect::<Vec<String>>()
465            .join("\n");
466
467        if res.is_empty() { None } else { Some(res) }
468    }
469
470    fn get_usage(&self) -> Option<Self::Usage> {
471        self.usage.clone()
472    }
473}
474
475impl GetTokenUsage for CompletionResponse {
476    fn token_usage(&self) -> Option<crate::completion::Usage> {
477        let api_usage = self.usage.clone()?;
478
479        let mut usage = crate::completion::Usage::new();
480        usage.input_tokens = api_usage.prompt_tokens as u64;
481        usage.output_tokens = api_usage.completion_tokens as u64;
482        usage.total_tokens = api_usage.total_tokens as u64;
483
484        Some(usage)
485    }
486}
487
488impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
489    type Error = CompletionError;
490
491    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
492        let choice = response.choices.first().ok_or_else(|| {
493            CompletionError::ResponseError("Response contained no choices".to_owned())
494        })?;
495        let content = match &choice.message {
496            Message::Assistant {
497                content,
498                tool_calls,
499                ..
500            } => {
501                let mut content = if content.is_empty() {
502                    vec![]
503                } else {
504                    vec![completion::AssistantContent::text(content.clone())]
505                };
506
507                content.extend(
508                    tool_calls
509                        .iter()
510                        .map(|call| {
511                            completion::AssistantContent::tool_call(
512                                &call.id,
513                                &call.function.name,
514                                call.function.arguments.clone(),
515                            )
516                        })
517                        .collect::<Vec<_>>(),
518                );
519                Ok(content)
520            }
521            _ => Err(CompletionError::ResponseError(
522                "Response did not contain a valid message or tool call".into(),
523            )),
524        }?;
525
526        let choice = OneOrMany::many(content).map_err(|_| {
527            CompletionError::ResponseError(
528                "Response contained no message or tool call (empty)".to_owned(),
529            )
530        })?;
531
532        let usage = response
533            .usage
534            .as_ref()
535            .map(|usage| completion::Usage {
536                input_tokens: usage.prompt_tokens as u64,
537                output_tokens: (usage.total_tokens - usage.prompt_tokens) as u64,
538                total_tokens: usage.total_tokens as u64,
539                cached_input_tokens: 0,
540                cache_creation_input_tokens: 0,
541            })
542            .unwrap_or_default();
543
544        Ok(completion::CompletionResponse {
545            choice,
546            usage,
547            raw_response: response,
548            message_id: None,
549        })
550    }
551}
552
553fn assistant_content_to_streaming_choice(
554    content: message::AssistantContent,
555) -> Option<RawStreamingChoice<CompletionResponse>> {
556    match content {
557        message::AssistantContent::Text(t) => Some(RawStreamingChoice::Message(t.text)),
558        message::AssistantContent::ToolCall(tc) => Some(RawStreamingChoice::ToolCall(
559            RawStreamingToolCall::new(tc.id, tc.function.name, tc.function.arguments),
560        )),
561        message::AssistantContent::Reasoning(_) => None,
562        message::AssistantContent::Image(_) => {
563            panic!("Image content is not supported on Mistral via Rig")
564        }
565    }
566}
567
impl<T> completion::CompletionModel for CompletionModel<T>
where
    T: HttpClientExt + Send + Clone + std::fmt::Debug + 'static,
{
    type Response = CompletionResponse;
    type StreamingResponse = CompletionResponse;

    type Client = Client<T>;

    fn make(client: &Self::Client, model: impl Into<String>) -> Self {
        Self::new(client.clone(), model.into())
    }

    /// POST the request to `v1/chat/completions` and convert the provider
    /// response into Rig's generic completion response, recording telemetry
    /// (token usage, response metadata) on the active span.
    async fn completion(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
        let preamble = completion_request.preamble.clone();
        let request =
            MistralCompletionRequest::try_from((self.model.as_ref(), completion_request))?;

        // Serialize the request body for logging only when TRACE is enabled.
        if enabled!(Level::TRACE) {
            tracing::trace!(
                target: "rig::completions",
                "Mistral completion request: {}",
                serde_json::to_string_pretty(&request)?
            );
        }

        // Reuse the caller's span if one is active; otherwise open a fresh
        // gen_ai "chat" span with empty fields to be filled after the response.
        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat",
                gen_ai.operation.name = "chat",
                gen_ai.provider.name = "mistral",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = &preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.usage.cached_tokens = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        let body = serde_json::to_vec(&request)?;

        let request = self
            .client
            .post("v1/chat/completions")?
            .body(body)
            .map_err(|e| CompletionError::HttpError(e.into()))?;

        async move {
            let response = self.client.send(request).await?;

            if response.status().is_success() {
                let text = http_client::text(response).await?;
                match serde_json::from_str::<ApiResponse<CompletionResponse>>(&text)? {
                    ApiResponse::Ok(response) => {
                        // Record usage/metadata on whichever span is current
                        // inside the instrumented future.
                        let span = tracing::Span::current();
                        span.record_token_usage(&response);
                        span.record_response_metadata(&response);
                        response.try_into()
                    }
                    ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
                }
            } else {
                // Non-2xx: surface the raw body as a provider error.
                let text = http_client::text(response).await?;
                Err(CompletionError::ProviderError(text))
            }
        }
        .instrument(span)
        .await
    }

    /// Pseudo-streaming: performs a full (non-streaming) completion, then
    /// replays each content item as a streaming chunk followed by the final
    /// raw response. Reasoning items are dropped by the mapping helper.
    async fn stream(
        &self,
        request: CompletionRequest,
    ) -> Result<StreamingCompletionResponse<Self::StreamingResponse>, CompletionError> {
        let resp = self.completion(request).await?;

        let stream = stream! {
            for c in resp.choice.clone() {
                if let Some(choice) = assistant_content_to_streaming_choice(c) {
                    yield Ok(choice);
                }
            }

            yield Ok(RawStreamingChoice::FinalResponse(resp.raw_response.clone()));
        };

        Ok(StreamingCompletionResponse::stream(Box::pin(stream)))
    }
}
665
#[cfg(test)]
mod tests {
    use super::*;

    // Verifies the documented response shape deserializes into CompletionResponse.
    #[test]
    fn test_response_deserialization() {
        //https://docs.mistral.ai/api/#tag/chat/operation/chat_completion_v1_chat_completions_post
        let json_data = r#"
        {
            "id": "cmpl-e5cc70bb28c444948073e77776eb30ef",
            "object": "chat.completion",
            "model": "mistral-small-latest",
            "usage": {
                "prompt_tokens": 16,
                "completion_tokens": 34,
                "total_tokens": 50
            },
            "created": 1702256327,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "content": "string",
                        "tool_calls": [
                            {
                                "id": "null",
                                "type": "function",
                                "function": {
                                    "name": "string",
                                    "arguments": "{ }"
                                },
                                "index": 0
                            }
                        ],
                        "prefix": false,
                        "role": "assistant"
                    },
                    "finish_reason": "stop"
                }
            ]
        }
        "#;
        let completion_response = serde_json::from_str::<CompletionResponse>(json_data).unwrap();
        assert_eq!(completion_response.model, MISTRAL_SMALL);

        let CompletionResponse {
            id,
            object,
            created,
            choices,
            usage,
            ..
        } = completion_response;

        assert_eq!(id, "cmpl-e5cc70bb28c444948073e77776eb30ef");

        let Usage {
            completion_tokens,
            prompt_tokens,
            total_tokens,
        } = usage.unwrap();

        assert_eq!(prompt_tokens, 16);
        assert_eq!(completion_tokens, 34);
        assert_eq!(total_tokens, 50);
        assert_eq!(object, "chat.completion".to_string());
        assert_eq!(created, 1702256327);
        assert_eq!(choices.len(), 1);
    }

    // A reasoning-only assistant message must convert to zero wire messages
    // rather than erroring or panicking.
    #[test]
    fn test_assistant_reasoning_is_skipped_in_message_conversion() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert!(converted.is_empty());
    }

    // Reasoning is dropped while sibling text and tool calls survive into a
    // single assistant wire message.
    #[test]
    fn test_assistant_text_and_tool_call_are_preserved_when_reasoning_present() {
        let assistant = message::Message::Assistant {
            id: None,
            content: OneOrMany::many(vec![
                message::AssistantContent::reasoning("hidden"),
                message::AssistantContent::text("visible"),
                message::AssistantContent::tool_call(
                    "call_1",
                    "subtract",
                    serde_json::json!({"x": 2, "y": 1}),
                ),
            ])
            .expect("non-empty assistant content"),
        };

        let converted: Vec<Message> = assistant.try_into().expect("conversion should work");
        assert_eq!(converted.len(), 1);

        match &converted[0] {
            Message::Assistant {
                content,
                tool_calls,
                ..
            } => {
                assert_eq!(content, "visible");
                assert_eq!(tool_calls.len(), 1);
                assert_eq!(tool_calls[0].id, "call_1");
                assert_eq!(tool_calls[0].function.name, "subtract");
                assert_eq!(
                    tool_calls[0].function.arguments,
                    serde_json::json!({"x": 2, "y": 1})
                );
            }
            _ => panic!("expected assistant message"),
        }
    }

    // The streaming mapper mirrors the conversion rules: reasoning -> None,
    // text -> Message, tool call -> ToolCall.
    #[test]
    fn test_streaming_choice_mapping_skips_reasoning_and_preserves_other_content() {
        assert!(
            assistant_content_to_streaming_choice(message::AssistantContent::reasoning("hidden"))
                .is_none()
        );

        let text_choice =
            assistant_content_to_streaming_choice(message::AssistantContent::text("visible"))
                .expect("text should be preserved");
        match text_choice {
            RawStreamingChoice::Message(text) => assert_eq!(text, "visible"),
            _ => panic!("expected text streaming choice"),
        }

        let tool_choice =
            assistant_content_to_streaming_choice(message::AssistantContent::tool_call(
                "call_2",
                "add",
                serde_json::json!({"x": 2, "y": 3}),
            ))
            .expect("tool call should be preserved");
        match tool_choice {
            RawStreamingChoice::ToolCall(call) => {
                assert_eq!(call.id, "call_2");
                assert_eq!(call.name, "add");
                assert_eq!(call.arguments, serde_json::json!({"x": 2, "y": 3}));
            }
            _ => panic!("expected tool-call streaming choice"),
        }
    }

    // If every message is filtered out during conversion, request building
    // must fail with a RequestError instead of sending an empty history.
    #[test]
    fn test_request_conversion_errors_when_all_messages_are_filtered() {
        let request = CompletionRequest {
            preamble: None,
            chat_history: OneOrMany::one(message::Message::Assistant {
                id: None,
                content: OneOrMany::one(message::AssistantContent::reasoning("hidden")),
            }),
            documents: vec![],
            tools: vec![],
            temperature: None,
            max_tokens: None,
            tool_choice: None,
            additional_params: None,
            model: None,
            output_schema: None,
        };

        let result = MistralCompletionRequest::try_from((MISTRAL_SMALL, request));
        assert!(matches!(result, Err(CompletionError::RequestError(_))));
    }
}