agentix 0.13.0

Multi-provider LLM client for Rust — streaming, non-streaming, tool calls, MCP, DeepSeek, OpenAI, Anthropic, Gemini
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
use crate::request::Message;

/// Normalizes a conversation history's assistant messages before they are
/// sent back to a provider.
///
/// For each `Message::Assistant`:
/// - when the turn carried tool calls, `reasoning` is forced to `Some(..)`,
///   substituting the default (empty) value if it was absent;
/// - when there are no tool calls, `reasoning` is stripped to `None`.
///
/// Every other message variant passes through unchanged.
pub(crate) fn prepare_history(messages: Vec<Message>) -> Vec<Message> {
    let mut prepared = Vec::with_capacity(messages.len());
    for message in messages {
        let normalized = if let Message::Assistant { content, reasoning, tool_calls } = message {
            // Reasoning is only retained (and defaulted) for tool-call turns;
            // otherwise it is dropped from the replayed history.
            let reasoning = if tool_calls.is_empty() {
                None
            } else {
                Some(reasoning.unwrap_or_default())
            };
            Message::Assistant { content, reasoning, tool_calls }
        } else {
            message
        };
        prepared.push(normalized);
    }
    prepared
}