use crate::common::errors::{AnthropicToolError, Result};
use crate::messages::request::model::Model;
use crate::messages::request::{mcp::McpServer, message::Message, message::SystemPrompt};
use serde::{Deserialize, Serialize};
/// Request body for a Messages API call.
///
/// Every `Option` field carries `skip_serializing_if = "Option::is_none"`,
/// so unset fields are omitted from the serialized JSON entirely rather
/// than being sent as `null`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Body {
    /// Model that should handle the request.
    pub model: Model,
    /// Conversation so far; [`Body::validate`] rejects an empty list.
    pub messages: Vec<Message>,
    /// Hard cap on tokens to generate; [`Body::validate`] requires > 0.
    pub max_tokens: usize,
    /// Optional system prompt.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system: Option<SystemPrompt>,
    /// Sampling temperature; [`Body::validate`] requires 0.0..=1.0 when set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Nucleus-sampling cutoff; [`Body::validate`] requires 0.0..=1.0 when set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    /// Top-k sampling cutoff (not range-checked by [`Body::validate`]).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    /// Custom sequences that end generation when emitted.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop_sequences: Option<Vec<String>>,
    /// Presumably requests a streaming response when `Some(true)` —
    /// NOTE(review): confirm against the caller that sends this body.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    /// Tool definitions, passed through as raw JSON values.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<serde_json::Value>>,
    /// How the model may choose among `tools`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    /// Request metadata (currently just an optional `user_id`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
    /// Container identifier — NOTE(review): semantics not visible in this
    /// file; confirm against the API documentation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub container: Option<String>,
    /// MCP servers made available to the request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mcp_servers: Option<Vec<McpServer>>,
    /// Extended-thinking configuration; its budget is checked by
    /// [`Body::validate`].
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking: Option<ThinkingConfig>,
}
/// Strategy for how the model may select tools.
///
/// Internally tagged: each variant serializes with a `"type"` field
/// carrying its renamed label, e.g. `{"type":"auto"}` or
/// `{"type":"tool","name":"..."}`.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(tag = "type")]
pub enum ToolChoice {
    /// Serializes as `{"type":"auto"}`.
    #[serde(rename = "auto")]
    Auto,
    /// Force a specific tool by name; serializes as
    /// `{"type":"tool","name":...}`.
    #[serde(rename = "tool")]
    Tool { name: String },
    /// Serializes as `{"type":"any"}`.
    #[serde(rename = "any")]
    Any,
    /// Serializes as `{"type":"none"}`.
    #[serde(rename = "none")]
    None,
}
/// Metadata attached to a request.
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct Metadata {
    /// External identifier for the end user; omitted from the JSON
    /// when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user_id: Option<String>,
}
/// Extended-thinking configuration.
///
/// Only an `enabled` form exists today; it serializes as
/// `{"type":"enabled","budget_tokens":N}`.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(tag = "type")]
pub enum ThinkingConfig {
    /// Thinking is turned on with an explicit token budget.
    #[serde(rename = "enabled")]
    Enabled {
        /// Token budget for thinking. [`Body::validate`] requires this to
        /// be at least 1024 and strictly less than `max_tokens`.
        budget_tokens: usize,
    },
}
impl ThinkingConfig {
pub fn enabled(budget_tokens: usize) -> Self {
ThinkingConfig::Enabled { budget_tokens }
}
pub fn budget_tokens(&self) -> usize {
match self {
ThinkingConfig::Enabled { budget_tokens } => *budget_tokens,
}
}
}
impl Default for Body {
    /// An empty request: default model, no messages, `max_tokens` of 1024,
    /// and every optional field unset.
    fn default() -> Self {
        Self {
            model: Model::default(),
            messages: Vec::new(),
            max_tokens: 1024,
            // All optional fields start unset so they are omitted from
            // the serialized JSON.
            system: None,
            temperature: None,
            top_p: None,
            top_k: None,
            stop_sequences: None,
            stream: None,
            tools: None,
            tool_choice: None,
            metadata: None,
            container: None,
            mcp_servers: None,
            thinking: None,
        }
    }
}
impl Body {
    /// Builds a request body for `model` with the given `max_tokens`,
    /// leaving every optional field unset.
    pub fn new<T: Into<Model>>(model: T, max_tokens: usize) -> Self {
        Self {
            model: model.into(),
            max_tokens,
            ..Self::default()
        }
    }

    /// Checks the body for values the API would reject.
    ///
    /// # Errors
    ///
    /// Returns `MissingRequiredField` when a custom model name or the
    /// message list is empty, and `InvalidParameter` when `max_tokens`
    /// is zero, `temperature`/`top_p` fall outside `[0.0, 1.0]`, or the
    /// thinking budget is below 1024 or not strictly below `max_tokens`.
    pub fn validate(&self) -> Result<()> {
        if let Model::Other(ref name) = self.model {
            if name.is_empty() {
                return Err(AnthropicToolError::MissingRequiredField(
                    "model".to_string(),
                ));
            }
        }
        if self.messages.is_empty() {
            return Err(AnthropicToolError::MissingRequiredField(
                "messages".to_string(),
            ));
        }
        if self.max_tokens == 0 {
            return Err(AnthropicToolError::InvalidParameter(
                "max_tokens must be greater than 0".to_string(),
            ));
        }
        Self::check_unit_range(self.temperature, "temperature must be between 0.0 and 1.0")?;
        Self::check_unit_range(self.top_p, "top_p must be between 0.0 and 1.0")?;
        if let Some(ref thinking) = self.thinking {
            let budget = thinking.budget_tokens();
            if budget < 1024 {
                return Err(AnthropicToolError::InvalidParameter(
                    "thinking budget_tokens must be at least 1024".to_string(),
                ));
            }
            if budget >= self.max_tokens {
                return Err(AnthropicToolError::InvalidParameter(format!(
                    "thinking budget_tokens ({}) must be less than max_tokens ({})",
                    budget, self.max_tokens
                )));
            }
        }
        Ok(())
    }

    /// Rejects `Some(v)` values outside `[0.0, 1.0]` with `message`;
    /// `None` passes.
    fn check_unit_range(value: Option<f32>, message: &str) -> Result<()> {
        match value {
            Some(v) if !(0.0..=1.0).contains(&v) => {
                Err(AnthropicToolError::InvalidParameter(message.to_string()))
            }
            _ => Ok(()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::messages::request::message::Message;

    #[test]
    fn test_body_new_with_model_enum() {
        let req = Body::new(Model::Sonnet4, 1024);
        assert_eq!(req.model, Model::Sonnet4);
        assert_eq!(req.max_tokens, 1024);
    }

    #[test]
    fn test_body_new_with_string() {
        // A known model string converts into the matching enum variant.
        let req = Body::new("claude-sonnet-4-20250514", 1024);
        assert_eq!(req.model, Model::Sonnet4);
        assert_eq!(req.max_tokens, 1024);
    }

    #[test]
    fn test_body_validate_empty_custom_model() {
        let mut req = Body {
            model: Model::Other(String::new()),
            ..Default::default()
        };
        req.messages.push(Message::user("Test"));
        assert!(req.validate().is_err());
    }

    #[test]
    fn test_body_validate_default_model_ok() {
        let mut req = Body::default();
        req.messages.push(Message::user("Test"));
        assert!(req.validate().is_ok());
    }

    #[test]
    fn test_body_validate_missing_messages() {
        let req = Body::new("claude-sonnet-4-20250514", 1024);
        assert!(req.validate().is_err());
    }

    #[test]
    fn test_tool_choice_serialize() {
        let serialized = serde_json::to_string(&ToolChoice::Auto).unwrap();
        assert!(serialized.contains("\"type\":\"auto\""));

        let choice = ToolChoice::Tool {
            name: "search".to_string(),
        };
        let serialized = serde_json::to_string(&choice).unwrap();
        assert!(serialized.contains("\"type\":\"tool\""));
        assert!(serialized.contains("\"name\":\"search\""));
    }

    #[test]
    fn test_body_serialize() {
        let serialized = serde_json::to_string(&Body::new(Model::Sonnet4, 1024)).unwrap();
        assert!(serialized.contains("\"model\":\"claude-sonnet-4-20250514\""));
        assert!(serialized.contains("\"max_tokens\":1024"));
        // Unset optional fields must be omitted from the JSON entirely.
        assert!(!serialized.contains("\"temperature\""));
        assert!(!serialized.contains("\"system\""));
    }

    #[test]
    fn test_thinking_config_serialize() {
        let serialized = serde_json::to_string(&ThinkingConfig::enabled(10000)).unwrap();
        assert!(serialized.contains("\"type\":\"enabled\""));
        assert!(serialized.contains("\"budget_tokens\":10000"));
    }

    #[test]
    fn test_thinking_config_deserialize() {
        let parsed: ThinkingConfig =
            serde_json::from_str(r#"{"type":"enabled","budget_tokens":8000}"#).unwrap();
        assert_eq!(parsed.budget_tokens(), 8000);
    }

    #[test]
    fn test_body_with_thinking() {
        let mut req = Body::new(Model::Sonnet4, 16000);
        req.thinking = Some(ThinkingConfig::enabled(10000));
        req.messages.push(Message::user("Test"));
        let serialized = serde_json::to_string(&req).unwrap();
        assert!(serialized.contains("\"thinking\""));
        assert!(serialized.contains("\"type\":\"enabled\""));
        assert!(serialized.contains("\"budget_tokens\":10000"));
    }

    #[test]
    fn test_validate_thinking_budget_too_small() {
        let mut req = Body::new(Model::Sonnet4, 16000);
        req.thinking = Some(ThinkingConfig::enabled(500));
        req.messages.push(Message::user("Test"));
        let err = req.validate().unwrap_err();
        assert!(err.to_string().contains("at least 1024"));
    }

    #[test]
    fn test_validate_thinking_budget_exceeds_max_tokens() {
        let mut req = Body::new(Model::Sonnet4, 8000);
        req.thinking = Some(ThinkingConfig::enabled(10000));
        req.messages.push(Message::user("Test"));
        let err = req.validate().unwrap_err();
        assert!(err.to_string().contains("must be less than max_tokens"));
    }

    #[test]
    fn test_validate_thinking_budget_valid() {
        let mut req = Body::new(Model::Sonnet4, 16000);
        req.thinking = Some(ThinkingConfig::enabled(10000));
        req.messages.push(Message::user("Test"));
        assert!(req.validate().is_ok());
    }
}