use std::collections::HashMap;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
/// Serializable request body for an OpenRouter-style `/chat/completions`
/// call, assembled from agent configuration plus per-request parameters.
///
/// All `Option` fields are omitted from the serialized JSON when `None`
/// (`skip_serializing_if`), so only explicitly configured parameters are
/// sent to the provider.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChatCompletionCreateParams {
    /// Full conversation history sent to the model.
    pub messages: Vec<objectiveai_sdk::agent::completions::message::Message>,
    /// Provider routing preferences, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider: Option<super::Provider>,
    /// Model identifier (taken from the agent's base config in `new`).
    pub model: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f64>,
    /// Per-token logit bias, keyed by token id string.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logit_bias: Option<IndexMap<String, i64>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_completion_tokens: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f64>,
    /// Stop sequence(s) that end generation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<objectiveai_sdk::agent::openrouter::Stop>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_p: Option<f64>,
    /// Reasoning/thinking configuration forwarded to the provider.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<objectiveai_sdk::agent::openrouter::Reasoning>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub repetition_penalty: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_a: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub verbosity: Option<objectiveai_sdk::agent::openrouter::Verbosity>,
    /// Whether to return token logprobs; `new` sets this only when a
    /// positive `top_logprobs` is configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_logprobs: Option<u64>,
    /// Native structured-output format. Left `None` by `new` when the
    /// resolved format is a `ToolCall` (handled via `tool_choice` instead).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<super::response_format::ResponseFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub seed: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<super::tool_choice::ToolChoice>,
    /// Tool declarations the model may call.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<super::Tool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prediction: Option<super::Prediction>,
    /// Always `true` in `new`: responses are consumed as a stream.
    pub stream: bool,
    pub stream_options: super::StreamOptions,
    /// Usage accounting request (always included by `new`).
    pub usage: super::Usage,
}
impl ChatCompletionCreateParams {
    /// Builds an OpenRouter chat-completion request from the agent's
    /// configured sampling parameters (`agent.base.*`), the caller's
    /// request `params`, and the accumulated conversation history.
    ///
    /// Messages are concatenated in order: `request_continuation`
    /// messages first, then `messages`, then the local `continuation`
    /// items (assistant state, tool results, user messages).
    ///
    /// Streaming is always enabled, and usage reporting is requested.
    pub fn new(
        agent: &objectiveai_sdk::agent::openrouter::Agent,
        params: &objectiveai_sdk::agent::completions::request::AgentCompletionCreateParams,
        messages: &[objectiveai_sdk::agent::completions::message::Message],
        continuation: Option<&[crate::agent::completions::ContinuationItem<objectiveai_sdk::agent::completions::message::AssistantMessage>]>,
        request_continuation: Option<&objectiveai_sdk::agent::openrouter::Continuation>,
        tool_names: &[String],
        tool_map: &HashMap<String, crate::agent::completions::resolved_tool::ResolvedTool>,
        tools_enabled: bool,
    ) -> Self {
        use crate::agent::completions::ContinuationItem;
        use objectiveai_sdk::agent::completions::message::Message;

        let continuation = continuation.unwrap_or_default();
        let rc_len = request_continuation.map_or(0, |rc| rc.messages.len());

        // Assemble the full message list with a single up-front allocation.
        let mut all_messages =
            Vec::with_capacity(rc_len + messages.len() + continuation.len());
        if let Some(rc) = request_continuation {
            all_messages.extend_from_slice(&rc.messages);
        }
        all_messages.extend_from_slice(messages);
        all_messages.extend(continuation.iter().map(|item| match item {
            ContinuationItem::State(assistant) => Message::Assistant(assistant.clone()),
            ContinuationItem::ToolMessage(tool) => Message::Tool(tool.clone()),
            ContinuationItem::UserMessage(user) => Message::User(user.clone()),
        }));

        // A `ToolCall` response format is not sent as a native
        // `response_format`; it is implemented through `tool_choice` below.
        let resolved_response_format = resolve_response_format(params, agent);
        let (openrouter_response_format, response_format_tool_required) =
            match resolved_response_format {
                Some(objectiveai_sdk::agent::completions::request::ResponseFormat::ToolCall {
                    name,
                    required,
                    ..
                }) => (None, Some((name, required))),
                Some(rf) => (Some(super::response_format::ResponseFormat::new(&rf)), None),
                None => (None, None),
            };

        // Materialize tool declarations, preserving `tool_names` order.
        // Names missing from `tool_map` are silently skipped.
        let final_tools: Vec<super::Tool> = tool_names
            .iter()
            .filter_map(|resolved_name| {
                let resolved = tool_map.get(resolved_name)?;
                Some(match resolved {
                    crate::agent::completions::resolved_tool::ResolvedTool::Mcp { tool, .. } => {
                        super::Tool::new_from_mcp(resolved_name.clone(), tool)
                    }
                    crate::agent::completions::resolved_tool::ResolvedTool::ResponseFormat {
                        description,
                        schema,
                    } => super::Tool::new_from_response_format(
                        resolved_name.clone(),
                        description.clone(),
                        schema.clone(),
                    ),
                })
            })
            .collect();

        // Decide what to advertise and what the model may call:
        // - no tools resolved: send nothing;
        // - tools disabled: still declare them (NOTE(review): presumably so
        //   tool messages already in the history stay valid — confirm) but
        //   set `tool_choice` to `None`, forbidding new calls;
        // - a required response-format tool pins `tool_choice` to it;
        // - otherwise let the model choose (`auto`).
        let (tools, tool_choice) = if final_tools.is_empty() {
            (None, None)
        } else if !tools_enabled {
            (Some(final_tools), Some(super::tool_choice::ToolChoice::None))
        } else if let Some((name, required)) = &response_format_tool_required {
            let choice = if *required == Some(true) {
                super::tool_choice::ToolChoice::Function(
                    super::tool_choice::ToolChoiceFunction::Function {
                        function: super::tool_choice::ToolChoiceFunctionFunction {
                            name: name.clone(),
                        },
                    },
                )
            } else {
                super::tool_choice::ToolChoice::Auto
            };
            (Some(final_tools), Some(choice))
        } else {
            (Some(final_tools), Some(super::tool_choice::ToolChoice::Auto))
        };

        // Request logprobs only when a positive `top_logprobs` is
        // configured; a zero/absent value disables both fields.
        let top_logprobs = agent.base.top_logprobs.filter(|&n| n > 0);

        Self {
            messages: all_messages,
            provider: super::provider::Provider::new(
                params.provider,
                agent.base.provider.as_ref(),
            ),
            model: agent.base.model.clone(),
            frequency_penalty: agent.base.frequency_penalty,
            logit_bias: agent.base.logit_bias.clone(),
            max_completion_tokens: agent.base.max_completion_tokens,
            presence_penalty: agent.base.presence_penalty,
            stop: agent.base.stop.clone(),
            temperature: agent.base.temperature,
            top_p: agent.base.top_p,
            max_tokens: agent.base.max_tokens,
            min_p: agent.base.min_p,
            reasoning: agent.base.reasoning,
            repetition_penalty: agent.base.repetition_penalty,
            top_a: agent.base.top_a,
            top_k: agent.base.top_k,
            verbosity: agent.base.verbosity,
            logprobs: top_logprobs.map(|_| true),
            top_logprobs,
            response_format: openrouter_response_format,
            seed: params.seed,
            tool_choice,
            tools,
            parallel_tool_calls: None,
            prediction: None,
            // Always stream, and ask the provider to attach usage stats.
            stream: true,
            stream_options: super::StreamOptions {
                include_usage: Some(true),
            },
            usage: super::Usage { include: true },
        }
    }
}
/// Selects the response format that applies to `agent` from the request
/// parameters: a `Single` format applies to every agent, while a
/// `PerAgent` map is looked up by the agent's id (yielding `None` when
/// the agent has no entry).
fn resolve_response_format(
    params: &objectiveai_sdk::agent::completions::request::AgentCompletionCreateParams,
    agent: &objectiveai_sdk::agent::openrouter::Agent,
) -> Option<objectiveai_sdk::agent::completions::request::ResponseFormat> {
    use objectiveai_sdk::agent::completions::request::ResponseFormatParam;
    params
        .response_format
        .as_ref()
        .and_then(|param| match param {
            ResponseFormatParam::Single(rf) => Some(rf.clone()),
            ResponseFormatParam::PerAgent(map) => map.get(&agent.id).cloned(),
        })
}