use serde::de::{self, DeserializeOwned};
use serde::{Deserialize, Serialize};
use super::identifiers::InputId;
use crate::connection::AuthBindingRef;
use crate::provider::Provider;
use crate::service::TurnToolOverlay;
use crate::skills::SkillKey;
use crate::types::{HandlingMode, RenderMetadata};
/// Boundary at which an update is applied relative to a run.
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum RunApplyBoundary {
    /// Apply right away.
    Immediate,
    /// Apply when the next run starts.
    RunStart,
    /// Apply at the next run checkpoint.
    RunCheckpoint,
}
/// A piece of renderable content in one of several interchangeable encodings.
/// Serialized internally tagged on `"type"`.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum CoreRenderable {
    /// Plain text.
    Text { text: String },
    /// Structured content blocks.
    Blocks {
        blocks: Vec<crate::types::ContentBlock>,
    },
    /// An arbitrary JSON payload.
    Json { value: serde_json::Value },
    /// A pointer to external content by URI with an optional display label.
    Reference { uri: String, label: Option<String> },
}
/// Role attributed to a conversation entry being appended.
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ConversationAppendRole {
    User,
    Assistant,
    /// System-originated notice (serialized as `system_notice`).
    SystemNotice,
    Tool,
}
/// A single entry to append to a conversation: who said it, and what.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ConversationAppend {
    pub role: ConversationAppendRole,
    pub content: CoreRenderable,
}
/// A keyed context entry to append to a conversation.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ConversationContextAppend {
    /// Identifier for the context slot this content belongs to.
    pub key: String,
    pub content: CoreRenderable,
}
/// Kind of runtime execution a turn represents.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum RuntimeExecutionKind {
    /// A regular content-bearing turn.
    ContentTurn,
    /// Resumption of previously pending work.
    ResumePending,
}
/// What to do with a peer response once it reaches a terminal state.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PeerResponseTerminalApplyIntent {
    /// Append the response as context and then run.
    AppendContextAndRun,
}
/// Newtype wrapper for a model identifier string.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ModelId(String);
impl ModelId {
pub fn new(s: impl Into<String>) -> Self {
Self(s.into())
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl std::fmt::Display for ModelId {
    /// Renders the identifier exactly as its inner string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// How long, and under what mode, something should be kept alive.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct KeepAlivePolicy {
    /// Time-to-live, (de)serialized through the `duration_seconds` serde
    /// helper module (defined elsewhere in this file/crate — not shown here).
    #[serde(with = "duration_seconds")]
    pub ttl: std::time::Duration,
    pub policy: KeepAliveMode,
}
/// Keep-alive behavior selector for `KeepAlivePolicy`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum KeepAliveMode {
    /// Always kept alive.
    Pinned,
    /// Lifetime decided by policy (serialized as `policy_driven`).
    PolicyDriven,
}
/// An additional instruction attached to a turn: its origin plus its text.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TurnInstruction {
    pub kind: TurnInstructionKind,
    pub body: String,
}
/// Origin of a `TurnInstruction`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TurnInstructionKind {
    User,
    System,
    Host,
}
/// A namespaced provider extension whose payload is carried as an opaque
/// serialized string (`body`); used as the escape hatch for provider data
/// this crate does not model.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct StructuredProviderExtension {
    /// Provider namespace, e.g. `"anthropic"` (see `ProviderTag::from_legacy_value`).
    pub namespace: String,
    /// Extension key within the namespace.
    pub key: String,
    /// Serialized JSON payload; defaults to empty when absent.
    #[cfg_attr(feature = "schema", schemars(with = "String"))]
    #[serde(default)]
    pub body: String,
}
/// Provider-specific parameter bundle, internally tagged on `"provider"`.
/// Unrecognized providers round-trip through the opaque `Unknown` bag.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "provider", rename_all = "snake_case")]
pub enum ProviderTag {
    Anthropic(AnthropicProviderTag),
    OpenAi(OpenAiProviderTag),
    Gemini(GeminiProviderTag),
    /// Fallback for providers/extensions this crate does not model.
    Unknown {
        bag: StructuredProviderExtension,
    },
}
impl ProviderTag {
    /// Projects a single legacy `(namespace, key, value)` extension triple
    /// into a typed provider tag.
    ///
    /// Exactly three triples are recognized: `anthropic/thinking` (with a
    /// u32-sized `budget_tokens`), `openai/reasoning_effort` (low/medium/high),
    /// and `gemini/candidate_count` (u32-sized integer). Anything else — or a
    /// recognized triple whose value fails to parse — becomes an `Unknown`
    /// bag that preserves the raw value.
    pub fn from_legacy_value(
        namespace: impl Into<String>,
        key: impl Into<String>,
        value: &serde_json::Value,
    ) -> Self {
        let namespace = namespace.into();
        let key = key.into();
        let recognized = match (namespace.as_str(), key.as_str()) {
            ("anthropic", "thinking") => value
                .get("budget_tokens")
                .and_then(serde_json::Value::as_u64)
                .and_then(|n| u32::try_from(n).ok())
                .map(|budget_tokens| {
                    Self::Anthropic(AnthropicProviderTag {
                        thinking: Some(AnthropicThinkingConfig::Enabled { budget_tokens }),
                        ..Default::default()
                    })
                }),
            ("openai", "reasoning_effort") => value
                .as_str()
                .and_then(|s| match s {
                    "low" => Some(ReasoningEffort::Low),
                    "medium" => Some(ReasoningEffort::Medium),
                    "high" => Some(ReasoningEffort::High),
                    _ => None,
                })
                .map(|effort| {
                    Self::OpenAi(OpenAiProviderTag {
                        reasoning_effort: Some(effort),
                        ..Default::default()
                    })
                }),
            ("gemini", "candidate_count") => value
                .as_u64()
                .and_then(|n| u32::try_from(n).ok())
                .map(|count| {
                    Self::Gemini(GeminiProviderTag {
                        candidate_count: Some(count),
                        ..Default::default()
                    })
                }),
            _ => None,
        };
        recognized.unwrap_or_else(|| Self::Unknown {
            bag: StructuredProviderExtension {
                namespace,
                key,
                body: value.to_string(),
            },
        })
    }
}
/// A JSON payload stored in serialized string form. `#[serde(transparent)]`
/// makes it (de)serialize as the bare string.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(transparent)]
pub struct OpaqueProviderBody {
    #[cfg_attr(feature = "schema", schemars(with = "String"))]
    pub body: String,
}
impl OpaqueProviderBody {
    /// Captures an arbitrary JSON value by storing its serialized form.
    pub fn from_value(v: &serde_json::Value) -> Self {
        let body = v.to_string();
        Self { body }
    }
    /// Parses the stored string back into JSON; a malformed body decodes
    /// as `Null` rather than erroring.
    pub fn as_value(&self) -> serde_json::Value {
        match serde_json::from_str(&self.body) {
            Ok(parsed) => parsed,
            Err(_) => serde_json::Value::Null,
        }
    }
}
/// Anthropic extended-thinking configuration, internally tagged on `"type"`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AnthropicThinkingConfig {
    /// Let the provider choose the thinking budget.
    Adaptive,
    /// Fixed thinking budget in tokens.
    Enabled { budget_tokens: u32 },
}
/// Anthropic effort level; legacy wire spellings via `as_legacy_str`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AnthropicEffort {
    Low,
    Medium,
    High,
    Max,
    /// Serialized as `x_high` by serde but `"xhigh"` in the legacy format.
    XHigh,
}
impl AnthropicEffort {
pub fn as_legacy_str(self) -> &'static str {
match self {
Self::Low => "low",
Self::Medium => "medium",
Self::High => "high",
Self::Max => "max",
Self::XHigh => "xhigh",
}
}
}
/// Inference geography selector, internally tagged on `"kind"`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum AnthropicInferenceGeo {
    Us,
    Global,
    /// Any other region string, preserved verbatim.
    Other {
        region: String,
    },
}
/// Extended context-window selection; corresponds to the legacy `"1m"` value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AnthropicContextWindow {
    OneMegabyte,
}
/// Conversation-compaction configuration, internally tagged on `"kind"`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum AnthropicCompactionConfig {
    /// Built-in compaction policy (legacy string `"auto"`).
    Auto,
    /// Opaque custom configuration, preserved verbatim.
    Custom { edit: OpaqueProviderBody },
}
/// Typed Anthropic provider parameters. All fields are optional and omitted
/// from serialization when `None`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct AnthropicProviderTag {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<AnthropicThinkingConfig>,
    /// Standalone budget; maps to the legacy `thinking_budget` key.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_budget_tokens: Option<u32>,
    /// Opaque web-search configuration, preserved verbatim.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub web_search: Option<OpaqueProviderBody>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub effort: Option<AnthropicEffort>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub structured_output: Option<crate::OutputSchema>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub inference_geo: Option<AnthropicInferenceGeo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub compaction: Option<AnthropicCompactionConfig>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub context: Option<AnthropicContextWindow>,
    /// Internal capability flag; legacy key `__meerkat_supports_temperature`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub supports_temperature_override: Option<bool>,
}
/// Typed OpenAI provider parameters. All fields optional, omitted when `None`.
/// (`PartialEq` only: holds f32 penalties, so no `Eq`.)
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)]
pub struct OpenAiProviderTag {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<ReasoningEffort>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub seed: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    /// Opaque web-search configuration, preserved verbatim.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub web_search: Option<OpaqueProviderBody>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub structured_output: Option<crate::OutputSchema>,
    /// Opaque reasoning config, preserved verbatim.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<OpaqueProviderBody>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub chat_template_kwargs: Option<OpaqueProviderBody>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<OpaqueProviderBody>,
    /// Internal capability flag; legacy key `__meerkat_supports_temperature`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub supports_temperature_override: Option<bool>,
    /// Internal capability flag; legacy key `__meerkat_supports_reasoning`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub supports_reasoning_override: Option<bool>,
}
/// Gemini thinking level; wire spelling via `as_str` matches the serde rename.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum GeminiThinkingLevel {
    Minimal,
    Low,
    Medium,
    High,
}
impl GeminiThinkingLevel {
pub fn as_str(self) -> &'static str {
match self {
Self::Minimal => "minimal",
Self::Low => "low",
Self::Medium => "medium",
Self::High => "high",
}
}
}
/// Gemini thinking configuration; every knob optional, omitted when `None`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct GeminiThinkingConfig {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub include_thoughts: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_level: Option<GeminiThinkingLevel>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_budget: Option<u32>,
}
/// Typed Gemini provider parameters. All fields optional, omitted when `None`.
/// (`PartialEq` only: holds an f32 `top_p`, so no `Eq`.)
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)]
pub struct GeminiProviderTag {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<GeminiThinkingConfig>,
    /// Top-level budget (distinct from `thinking.thinking_budget`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_budget: Option<u32>,
    /// Top-level level (distinct from `thinking.thinking_level`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_level: Option<GeminiThinkingLevel>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub structured_output: Option<crate::OutputSchema>,
    /// Opaque google-search configuration, preserved verbatim.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub google_search: Option<OpaqueProviderBody>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub candidate_count: Option<u32>,
}
impl AnthropicProviderTag {
    /// Parses the legacy free-form `provider_params` JSON object into a typed
    /// Anthropic tag.
    ///
    /// # Errors
    /// - `NotAnObject` when `value` is neither an object nor `null`
    ///   (`null` yields the empty default).
    /// - `UnknownKey` for keys this parser does not recognize.
    /// - `UnknownShape` when a recognized key carries an unexpected value.
    /// - `InvalidStructuredOutput` when `structured_output` fails to decode.
    pub fn from_legacy_value(value: &serde_json::Value) -> Result<Self, LegacyProviderParamsError> {
        let Some(obj) = value.as_object() else {
            if value.is_null() {
                return Ok(Self::default());
            }
            return Err(LegacyProviderParamsError::NotAnObject);
        };
        let mut tag = Self::default();
        for (k, v) in obj {
            match k.as_str() {
                "thinking" => {
                    // `{"type":"adaptive"}` takes priority; otherwise a
                    // u32-sized `budget_tokens` integer is required.
                    if v.get("type").and_then(|t| t.as_str()) == Some("adaptive") {
                        tag.thinking = Some(AnthropicThinkingConfig::Adaptive);
                    } else if let Some(budget) = v
                        .get("budget_tokens")
                        .and_then(serde_json::Value::as_u64)
                        .and_then(|n| u32::try_from(n).ok())
                    {
                        tag.thinking = Some(AnthropicThinkingConfig::Enabled {
                            budget_tokens: budget,
                        });
                    } else {
                        return Err(LegacyProviderParamsError::unknown_shape("thinking"));
                    }
                }
                "thinking_budget" => {
                    // Legacy key `thinking_budget` maps onto the
                    // `thinking_budget_tokens` field.
                    let budget =
                        v.as_u64()
                            .and_then(|n| u32::try_from(n).ok())
                            .ok_or_else(|| {
                                LegacyProviderParamsError::unknown_shape("thinking_budget")
                            })?;
                    tag.thinking_budget_tokens = Some(budget);
                }
                "top_k" => {
                    // Accepts both a JSON number and a numeric string.
                    let top_k = match v {
                        serde_json::Value::Number(n) => n
                            .as_u64()
                            .and_then(|n| u32::try_from(n).ok())
                            .ok_or_else(|| LegacyProviderParamsError::unknown_shape("top_k"))?,
                        serde_json::Value::String(s) => s
                            .parse::<u32>()
                            .map_err(|_| LegacyProviderParamsError::unknown_shape("top_k"))?,
                        _ => return Err(LegacyProviderParamsError::unknown_shape("top_k")),
                    };
                    tag.top_k = Some(top_k);
                }
                "effort" => {
                    let effort = match v.as_str() {
                        Some("low") => AnthropicEffort::Low,
                        Some("medium") => AnthropicEffort::Medium,
                        Some("high") => AnthropicEffort::High,
                        Some("max") => AnthropicEffort::Max,
                        Some("xhigh") => AnthropicEffort::XHigh,
                        _ => return Err(LegacyProviderParamsError::unknown_shape("effort")),
                    };
                    tag.effort = Some(effort);
                }
                "structured_output" => {
                    let schema =
                        serde_json::from_value::<crate::OutputSchema>(v.clone()).map_err(|e| {
                            LegacyProviderParamsError::InvalidStructuredOutput {
                                reason: e.to_string(),
                            }
                        })?;
                    tag.structured_output = Some(schema);
                }
                "inference_geo" => {
                    // Any string other than "us"/"global" is preserved as a
                    // custom region instead of being rejected.
                    let geo = match v.as_str() {
                        Some("us") => AnthropicInferenceGeo::Us,
                        Some("global") => AnthropicInferenceGeo::Global,
                        Some(other) => AnthropicInferenceGeo::Other {
                            region: other.to_string(),
                        },
                        None => {
                            return Err(LegacyProviderParamsError::unknown_shape("inference_geo"));
                        }
                    };
                    tag.inference_geo = Some(geo);
                }
                "compaction" => {
                    // "auto" selects the built-in policy; any object is kept
                    // verbatim as an opaque custom config.
                    if v.as_str() == Some("auto") {
                        tag.compaction = Some(AnthropicCompactionConfig::Auto);
                    } else if v.is_object() {
                        tag.compaction = Some(AnthropicCompactionConfig::Custom {
                            edit: OpaqueProviderBody::from_value(v),
                        });
                    } else {
                        return Err(LegacyProviderParamsError::unknown_shape("compaction"));
                    }
                }
                "context" => match v.as_str() {
                    // "1m" is the only recognized extended-context marker.
                    Some("1m") => tag.context = Some(AnthropicContextWindow::OneMegabyte),
                    _ => return Err(LegacyProviderParamsError::unknown_shape("context")),
                },
                "web_search" => {
                    // Objects, booleans, and null are all preserved opaquely.
                    if v.is_object() || v.is_boolean() || v.is_null() {
                        tag.web_search = Some(OpaqueProviderBody::from_value(v));
                    } else {
                        return Err(LegacyProviderParamsError::unknown_shape("web_search"));
                    }
                }
                "__meerkat_supports_temperature" => {
                    // Internal capability flag; non-bool values yield None.
                    tag.supports_temperature_override = v.as_bool();
                }
                other => {
                    return Err(LegacyProviderParamsError::UnknownKey {
                        key: other.to_string(),
                    });
                }
            }
        }
        Ok(tag)
    }
}
impl OpenAiProviderTag {
    /// Parses the legacy free-form `provider_params` JSON object into a typed
    /// OpenAI tag. `null` yields the empty default; any other non-object is
    /// rejected with `NotAnObject`, and unrecognized keys are hard errors.
    pub fn from_legacy_value(value: &serde_json::Value) -> Result<Self, LegacyProviderParamsError> {
        let Some(obj) = value.as_object() else {
            if value.is_null() {
                return Ok(Self::default());
            }
            return Err(LegacyProviderParamsError::NotAnObject);
        };
        let mut tag = Self::default();
        for (k, v) in obj {
            match k.as_str() {
                "reasoning_effort" => {
                    let effort = match v.as_str() {
                        Some("low") => ReasoningEffort::Low,
                        Some("medium") => ReasoningEffort::Medium,
                        Some("high") => ReasoningEffort::High,
                        _ => {
                            return Err(LegacyProviderParamsError::unknown_shape(
                                "reasoning_effort",
                            ));
                        }
                    };
                    tag.reasoning_effort = Some(effort);
                }
                "seed" => {
                    // NOTE(review): a non-integer seed is silently ignored
                    // (left as None) rather than rejected like other fields —
                    // confirm this leniency is intentional.
                    tag.seed = v.as_i64();
                }
                "frequency_penalty" => {
                    // f64 -> f32 narrowing; precision loss accepted.
                    let f = v.as_f64().ok_or_else(|| {
                        LegacyProviderParamsError::unknown_shape("frequency_penalty")
                    })?;
                    tag.frequency_penalty = Some(f as f32);
                }
                "presence_penalty" => {
                    let f = v.as_f64().ok_or_else(|| {
                        LegacyProviderParamsError::unknown_shape("presence_penalty")
                    })?;
                    tag.presence_penalty = Some(f as f32);
                }
                "web_search" => {
                    // Objects, booleans, and null are preserved opaquely.
                    if v.is_object() || v.is_boolean() || v.is_null() {
                        tag.web_search = Some(OpaqueProviderBody::from_value(v));
                    } else {
                        return Err(LegacyProviderParamsError::unknown_shape("web_search"));
                    }
                }
                "structured_output" => {
                    let schema =
                        serde_json::from_value::<crate::OutputSchema>(v.clone()).map_err(|e| {
                            LegacyProviderParamsError::InvalidStructuredOutput {
                                reason: e.to_string(),
                            }
                        })?;
                    tag.structured_output = Some(schema);
                }
                "reasoning" => {
                    // NOTE(review): non-object values for this key (and the
                    // two below) are dropped without error — confirm intended.
                    if v.is_object() {
                        tag.reasoning = Some(OpaqueProviderBody::from_value(v));
                    }
                }
                "chat_template_kwargs" => {
                    if v.is_object() {
                        tag.chat_template_kwargs = Some(OpaqueProviderBody::from_value(v));
                    }
                }
                "thinking" => {
                    if v.is_object() {
                        tag.thinking = Some(OpaqueProviderBody::from_value(v));
                    }
                }
                "__meerkat_supports_temperature" => {
                    tag.supports_temperature_override = v.as_bool();
                }
                "__meerkat_supports_reasoning" => {
                    tag.supports_reasoning_override = v.as_bool();
                }
                other => {
                    return Err(LegacyProviderParamsError::UnknownKey {
                        key: other.to_string(),
                    });
                }
            }
        }
        Ok(tag)
    }
}
impl GeminiProviderTag {
    /// Parses the legacy free-form `provider_params` JSON object into a typed
    /// Gemini tag. `null` yields the empty default; any other non-object is
    /// rejected with `NotAnObject`, and unrecognized keys are hard errors.
    pub fn from_legacy_value(value: &serde_json::Value) -> Result<Self, LegacyProviderParamsError> {
        let Some(obj) = value.as_object() else {
            if value.is_null() {
                return Ok(Self::default());
            }
            return Err(LegacyProviderParamsError::NotAnObject);
        };
        let mut tag = Self::default();
        for (k, v) in obj {
            match k.as_str() {
                "thinking" => {
                    // Delegates shape validation to serde via GeminiThinkingConfig.
                    let cfg = serde_json::from_value::<GeminiThinkingConfig>(v.clone())
                        .map_err(|_| LegacyProviderParamsError::unknown_shape("thinking"))?;
                    tag.thinking = Some(cfg);
                }
                "thinking_budget" => {
                    let b = v
                        .as_u64()
                        .and_then(|n| u32::try_from(n).ok())
                        .ok_or_else(|| {
                            LegacyProviderParamsError::unknown_shape("thinking_budget")
                        })?;
                    tag.thinking_budget = Some(b);
                }
                "thinking_level" => {
                    let level = serde_json::from_value::<GeminiThinkingLevel>(v.clone())
                        .map_err(|_| LegacyProviderParamsError::unknown_shape("thinking_level"))?;
                    tag.thinking_level = Some(level);
                }
                "top_k" => {
                    let n = v
                        .as_u64()
                        .and_then(|n| u32::try_from(n).ok())
                        .ok_or_else(|| LegacyProviderParamsError::unknown_shape("top_k"))?;
                    tag.top_k = Some(n);
                }
                "top_p" => {
                    // f64 -> f32 narrowing; precision loss accepted.
                    let f = v
                        .as_f64()
                        .ok_or_else(|| LegacyProviderParamsError::unknown_shape("top_p"))?;
                    tag.top_p = Some(f as f32);
                }
                "structured_output" => {
                    let schema =
                        serde_json::from_value::<crate::OutputSchema>(v.clone()).map_err(|e| {
                            LegacyProviderParamsError::InvalidStructuredOutput {
                                reason: e.to_string(),
                            }
                        })?;
                    tag.structured_output = Some(schema);
                }
                "google_search" => {
                    // Objects, booleans, and null are preserved opaquely.
                    if v.is_object() || v.is_boolean() || v.is_null() {
                        tag.google_search = Some(OpaqueProviderBody::from_value(v));
                    } else {
                        return Err(LegacyProviderParamsError::unknown_shape("google_search"));
                    }
                }
                "candidate_count" => {
                    let n = v
                        .as_u64()
                        .and_then(|n| u32::try_from(n).ok())
                        .ok_or_else(|| {
                            LegacyProviderParamsError::unknown_shape("candidate_count")
                        })?;
                    tag.candidate_count = Some(n);
                }
                other => {
                    return Err(LegacyProviderParamsError::UnknownKey {
                        key: other.to_string(),
                    });
                }
            }
        }
        Ok(tag)
    }
}
/// Failure modes when projecting legacy free-form `provider_params` JSON
/// into the typed provider tags.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LegacyProviderParamsError {
    /// The top-level value was neither a JSON object nor `null`.
    NotAnObject,
    /// A key the provider-specific parser does not recognize.
    UnknownKey { key: String },
    /// A recognized key carried a value of an unexpected shape.
    UnknownShape { field: &'static str },
    /// The `structured_output` payload failed to deserialize.
    InvalidStructuredOutput { reason: String },
}
impl LegacyProviderParamsError {
fn unknown_shape(field: &'static str) -> Self {
Self::UnknownShape { field }
}
}
impl std::fmt::Display for LegacyProviderParamsError {
    /// Human-readable rendering of each legacy provider-params failure mode.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NotAnObject => write!(f, "legacy provider-params value is not an object"),
            Self::UnknownKey { key } => write!(f, "unknown legacy provider-params key: {key}"),
            Self::UnknownShape { field } => {
                write!(f, "legacy provider-params shape invalid for field {field}")
            }
            Self::InvalidStructuredOutput { reason } => {
                write!(f, "structured_output deserialize failed: {reason}")
            }
        }
    }
}
impl std::error::Error for LegacyProviderParamsError {}
/// Reasoning effort level; defaults to `Medium`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningEffort {
    Low,
    #[default]
    Medium,
    High,
}
/// Reasoning output mode carried in the scalar overrides
/// (legacy strings: `emit` / `silent` / `off`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningMode {
    Emit,
    Silent,
    Off,
}
/// Per-turn provider parameter overrides: a handful of shared scalar knobs
/// plus a provider-specific tag for everything else. All fields optional and
/// omitted from serialization when `None`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)]
pub struct ProviderParamsOverride {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<ReasoningMode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking_budget_tokens: Option<u32>,
    /// Provider-specific remainder; see `from_legacy_provider_value`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider_tag: Option<ProviderTag>,
}
impl ProviderParamsOverride {
    /// True when no scalar override and no provider tag is present.
    pub fn is_empty(&self) -> bool {
        self.temperature.is_none()
            && self.top_p.is_none()
            && self.max_output_tokens.is_none()
            && self.reasoning.is_none()
            && self.thinking_budget_tokens.is_none()
            && self.provider_tag.is_none()
    }
    /// Splits a legacy `provider_params` JSON object into shared scalar
    /// overrides plus a provider-specific tag built from the leftover keys.
    ///
    /// Scalar keys with an unexpected value shape are not dropped: they fall
    /// through into `remaining` and end up inside the provider tag (or an
    /// opaque `Unknown` bag). `null` maps to the empty default, and any other
    /// non-object value is wrapped whole into an `Unknown` tag.
    pub fn from_legacy_provider_value(provider: &str, value: &serde_json::Value) -> Self {
        let Some(obj) = value.as_object() else {
            if value.is_null() {
                return Self::default();
            }
            return Self {
                provider_tag: Some(unknown_provider_tag(provider, value)),
                ..Default::default()
            };
        };
        let mut remaining = serde_json::Map::new();
        let mut override_params = Self::default();
        for (key, item) in obj {
            match key.as_str() {
                "temperature" => {
                    // f64 -> f32 narrowing; precision loss accepted.
                    if let Some(value) = item.as_f64() {
                        override_params.temperature = Some(value as f32);
                    } else {
                        remaining.insert(key.clone(), item.clone());
                    }
                }
                "top_p" => {
                    if let Some(value) = item.as_f64() {
                        override_params.top_p = Some(value as f32);
                    } else {
                        remaining.insert(key.clone(), item.clone());
                    }
                }
                "max_output_tokens" => {
                    if let Some(value) = item.as_u64().and_then(|value| u32::try_from(value).ok()) {
                        override_params.max_output_tokens = Some(value);
                    } else {
                        remaining.insert(key.clone(), item.clone());
                    }
                }
                "reasoning" => {
                    if let Some(value) = item.as_str().and_then(parse_reasoning_mode) {
                        override_params.reasoning = Some(value);
                    } else {
                        remaining.insert(key.clone(), item.clone());
                    }
                }
                "thinking_budget_tokens" => {
                    if let Some(value) = item.as_u64().and_then(|value| u32::try_from(value).ok()) {
                        override_params.thinking_budget_tokens = Some(value);
                    } else {
                        remaining.insert(key.clone(), item.clone());
                    }
                }
                _ => {
                    // Not a shared scalar: defer to the provider-specific parser.
                    remaining.insert(key.clone(), item.clone());
                }
            }
        }
        if !remaining.is_empty() {
            let provider_value = serde_json::Value::Object(remaining);
            override_params.provider_tag =
                Some(project_legacy_provider_tag(provider, &provider_value));
        }
        override_params
    }
    /// Re-serializes the overrides into the flat legacy `provider_params`
    /// object: scalar fields first, then the provider tag's fields merged in.
    pub fn to_legacy_provider_value(&self) -> serde_json::Value {
        let mut object = serde_json::Map::new();
        if let Some(value) = self.temperature {
            object.insert("temperature".to_string(), serde_json::json!(value));
        }
        if let Some(value) = self.top_p {
            object.insert("top_p".to_string(), serde_json::json!(value));
        }
        if let Some(value) = self.max_output_tokens {
            object.insert("max_output_tokens".to_string(), serde_json::json!(value));
        }
        if let Some(value) = self.reasoning {
            object.insert("reasoning".to_string(), serde_json::json!(value));
        }
        if let Some(value) = self.thinking_budget_tokens {
            object.insert(
                "thinking_budget_tokens".to_string(),
                serde_json::json!(value),
            );
        }
        if let Some(tag) = self.provider_tag.as_ref() {
            insert_provider_tag_legacy_fields(&mut object, tag);
        }
        serde_json::Value::Object(object)
    }
}
/// Maps a legacy reasoning-mode string to its typed equivalent; unknown
/// spellings yield `None` so callers can keep the raw value instead.
fn parse_reasoning_mode(value: &str) -> Option<ReasoningMode> {
    let mode = match value {
        "emit" => ReasoningMode::Emit,
        "silent" => ReasoningMode::Silent,
        "off" => ReasoningMode::Off,
        _ => return None,
    };
    Some(mode)
}
/// Routes leftover legacy params to the provider-specific parser, degrading
/// to an opaque `Unknown` tag when typed parsing fails.
fn project_legacy_provider_tag(provider: &str, value: &serde_json::Value) -> ProviderTag {
    match provider {
        "anthropic" => match AnthropicProviderTag::from_legacy_value(value) {
            Ok(tag) => ProviderTag::Anthropic(tag),
            Err(_) => unknown_provider_tag("anthropic", value),
        },
        "openai" => match OpenAiProviderTag::from_legacy_value(value) {
            Ok(tag) => ProviderTag::OpenAi(tag),
            Err(_) => unknown_provider_tag("openai", value),
        },
        // Both spellings route to the Gemini parser; the failure path
        // normalizes the namespace to "gemini".
        "gemini" | "google" => match GeminiProviderTag::from_legacy_value(value) {
            Ok(tag) => ProviderTag::Gemini(tag),
            Err(_) => unknown_provider_tag("gemini", value),
        },
        other => unknown_provider_tag(other, value),
    }
}
/// Wraps an unparseable (or unrecognized-provider) params value into the
/// opaque `Unknown` tag under the fixed `provider_params` key.
fn unknown_provider_tag(provider: &str, value: &serde_json::Value) -> ProviderTag {
    let bag = StructuredProviderExtension {
        namespace: provider.to_string(),
        key: String::from("provider_params"),
        body: value.to_string(),
    };
    ProviderTag::Unknown { bag }
}
/// Dispatches legacy-field insertion to the provider-specific writer.
fn insert_provider_tag_legacy_fields(
    object: &mut serde_json::Map<String, serde_json::Value>,
    tag: &ProviderTag,
) {
    match tag {
        ProviderTag::Anthropic(inner) => {
            insert_anthropic_provider_tag_legacy_fields(object, inner)
        }
        ProviderTag::OpenAi(inner) => insert_openai_provider_tag_legacy_fields(object, inner),
        ProviderTag::Gemini(inner) => insert_gemini_provider_tag_legacy_fields(object, inner),
        ProviderTag::Unknown { bag } => insert_unknown_provider_tag_legacy_fields(object, bag),
    }
}
/// Serializes `value` to JSON and inserts it under `key`.
///
/// Serialization failures are deliberately swallowed: a field that cannot be
/// represented as JSON is simply omitted from the legacy map.
fn insert_serialized_legacy_field<T: Serialize>(
    object: &mut serde_json::Map<String, serde_json::Value>,
    key: &'static str,
    value: &T,
) {
    let Ok(serialized) = serde_json::to_value(value) else {
        return;
    };
    object.insert(key.to_string(), serialized);
}
/// Writes each populated `AnthropicProviderTag` field back into the flat
/// legacy map, using the historical key names and spellings.
fn insert_anthropic_provider_tag_legacy_fields(
    object: &mut serde_json::Map<String, serde_json::Value>,
    tag: &AnthropicProviderTag,
) {
    if let Some(thinking) = tag.thinking.as_ref() {
        insert_serialized_legacy_field(object, "thinking", thinking);
    }
    // `thinking_budget_tokens` round-trips as the legacy `thinking_budget` key.
    if let Some(value) = tag.thinking_budget_tokens {
        object.insert("thinking_budget".to_string(), serde_json::json!(value));
    }
    if let Some(value) = tag.web_search.as_ref() {
        object.insert("web_search".to_string(), value.as_value());
    }
    if let Some(value) = tag.top_k {
        object.insert("top_k".to_string(), serde_json::json!(value));
    }
    if let Some(value) = tag.effort {
        object.insert(
            "effort".to_string(),
            serde_json::json!(value.as_legacy_str()),
        );
    }
    if let Some(value) = tag.structured_output.as_ref() {
        insert_serialized_legacy_field(object, "structured_output", value);
    }
    if let Some(value) = tag.inference_geo.as_ref() {
        // Custom regions serialize as their bare string, mirroring the parse.
        match value {
            AnthropicInferenceGeo::Us => {
                object.insert("inference_geo".to_string(), serde_json::json!("us"));
            }
            AnthropicInferenceGeo::Global => {
                object.insert("inference_geo".to_string(), serde_json::json!("global"));
            }
            AnthropicInferenceGeo::Other { region } => {
                object.insert("inference_geo".to_string(), serde_json::json!(region));
            }
        }
    }
    if let Some(value) = tag.compaction.as_ref() {
        match value {
            AnthropicCompactionConfig::Auto => {
                object.insert("compaction".to_string(), serde_json::json!("auto"));
            }
            AnthropicCompactionConfig::Custom { edit } => {
                object.insert("compaction".to_string(), edit.as_value());
            }
        }
    }
    if matches!(tag.context, Some(AnthropicContextWindow::OneMegabyte)) {
        object.insert("context".to_string(), serde_json::json!("1m"));
    }
    // Internal capability flag keeps its double-underscore legacy key.
    if let Some(value) = tag.supports_temperature_override {
        object.insert(
            "__meerkat_supports_temperature".to_string(),
            serde_json::json!(value),
        );
    }
}
/// Writes each populated `OpenAiProviderTag` field back into the flat legacy
/// map, using the historical key names.
fn insert_openai_provider_tag_legacy_fields(
    object: &mut serde_json::Map<String, serde_json::Value>,
    tag: &OpenAiProviderTag,
) {
    if let Some(effort) = tag.reasoning_effort {
        insert_serialized_legacy_field(object, "reasoning_effort", &effort);
    }
    if let Some(seed) = tag.seed {
        object.insert(String::from("seed"), serde_json::json!(seed));
    }
    if let Some(penalty) = tag.frequency_penalty {
        object.insert(String::from("frequency_penalty"), serde_json::json!(penalty));
    }
    if let Some(penalty) = tag.presence_penalty {
        object.insert(String::from("presence_penalty"), serde_json::json!(penalty));
    }
    if let Some(search) = tag.web_search.as_ref() {
        object.insert(String::from("web_search"), search.as_value());
    }
    if let Some(schema) = tag.structured_output.as_ref() {
        insert_serialized_legacy_field(object, "structured_output", schema);
    }
    if let Some(body) = tag.reasoning.as_ref() {
        object.insert(String::from("reasoning"), body.as_value());
    }
    if let Some(kwargs) = tag.chat_template_kwargs.as_ref() {
        object.insert(String::from("chat_template_kwargs"), kwargs.as_value());
    }
    if let Some(body) = tag.thinking.as_ref() {
        object.insert(String::from("thinking"), body.as_value());
    }
    // Internal capability flags keep their double-underscore legacy keys.
    if let Some(flag) = tag.supports_temperature_override {
        object.insert(
            String::from("__meerkat_supports_temperature"),
            serde_json::json!(flag),
        );
    }
    if let Some(flag) = tag.supports_reasoning_override {
        object.insert(
            String::from("__meerkat_supports_reasoning"),
            serde_json::json!(flag),
        );
    }
}
/// Writes each populated `GeminiProviderTag` field back into the flat legacy
/// map, using the historical key names.
fn insert_gemini_provider_tag_legacy_fields(
    object: &mut serde_json::Map<String, serde_json::Value>,
    tag: &GeminiProviderTag,
) {
    if let Some(cfg) = tag.thinking.as_ref() {
        insert_serialized_legacy_field(object, "thinking", cfg);
    }
    if let Some(budget) = tag.thinking_budget {
        object.insert(String::from("thinking_budget"), serde_json::json!(budget));
    }
    if let Some(level) = tag.thinking_level {
        object.insert(
            String::from("thinking_level"),
            serde_json::json!(level.as_str()),
        );
    }
    if let Some(top_k) = tag.top_k {
        object.insert(String::from("top_k"), serde_json::json!(top_k));
    }
    // Never clobber a top_p that the scalar overrides already wrote.
    if !object.contains_key("top_p") {
        if let Some(top_p) = tag.top_p {
            object.insert(String::from("top_p"), serde_json::json!(top_p));
        }
    }
    if let Some(schema) = tag.structured_output.as_ref() {
        insert_serialized_legacy_field(object, "structured_output", schema);
    }
    if let Some(search) = tag.google_search.as_ref() {
        object.insert(String::from("google_search"), search.as_value());
    }
    if let Some(count) = tag.candidate_count {
        object.insert(String::from("candidate_count"), serde_json::json!(count));
    }
}
/// Splices an opaque `Unknown` bag back into the legacy map: a whole-params
/// bag (`key == "provider_params"` holding an object) is flattened into the
/// map; anything else round-trips as a single entry under its key.
fn insert_unknown_provider_tag_legacy_fields(
    object: &mut serde_json::Map<String, serde_json::Value>,
    bag: &StructuredProviderExtension,
) {
    // The body is stored serialized; fall back to treating it as a plain
    // JSON string when it does not parse.
    let body = match serde_json::from_str::<serde_json::Value>(&bag.body) {
        Ok(parsed) => parsed,
        Err(_) => serde_json::Value::String(bag.body.clone()),
    };
    match body {
        serde_json::Value::Object(extension) if bag.key == "provider_params" => {
            object.extend(extension);
        }
        other => {
            object.insert(bag.key.clone(), other);
        }
    }
}
/// Error raised when batch turn-metadata scalars conflict during a merge
/// (see the `Display` impl for the rendered message).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TurnMetadataMergeConflict {
    /// Name of the conflicting field.
    pub field: &'static str,
    /// Static description of why the merge failed.
    pub reason: &'static str,
}
impl std::fmt::Display for TurnMetadataMergeConflict {
    /// Renders the conflict as "`field`: reason" with a fixed prefix.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "batch turn-metadata scalar conflict on field `{field}`: {reason}",
            field = self.field,
            reason = self.reason
        )
    }
}
impl std::error::Error for TurnMetadataMergeConflict {}
/// A tri-state field override: explicitly set a value or explicitly clear it
/// (absence is expressed by wrapping in `Option` at the use site).
/// Serializes as `{"action":"set","value":…}` / `{"action":"clear"}`; the
/// hand-written `Deserialize` impl also accepts a bare value as `Set`.
#[derive(Debug, Clone, PartialEq, Serialize)]
#[serde(tag = "action", content = "value", rename_all = "snake_case")]
pub enum TurnMetadataOverride<T> {
    Set(T),
    Clear,
}
impl<T> TurnMetadataOverride<T> {
pub fn set(value: T) -> Self {
Self::Set(value)
}
pub const fn clear() -> Self {
Self::Clear
}
pub fn as_set(&self) -> Option<&T> {
match self {
Self::Set(value) => Some(value),
Self::Clear => None,
}
}
pub fn into_set(self) -> Option<T> {
match self {
Self::Set(value) => Some(value),
Self::Clear => None,
}
}
pub const fn is_clear(&self) -> bool {
matches!(self, Self::Clear)
}
}
impl<'de, T> Deserialize<'de> for TurnMetadataOverride<T>
where
    T: DeserializeOwned,
{
    /// Accepts either the tagged form (`{"action":"set","value":…}` /
    /// `{"action":"clear"}`) or, for backward compatibility, a bare value —
    /// including an object without an `action` key — which deserializes as
    /// `Set`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Buffer into a serde_json::Value first so we can sniff the shape.
        let raw = serde_json::Value::deserialize(deserializer)?;
        if let Some(object) = raw.as_object() {
            // No "action" key: treat the whole object as a bare Set payload.
            let Some(action_value) = object.get("action") else {
                return serde_json::from_value(raw)
                    .map(Self::Set)
                    .map_err(de::Error::custom);
            };
            let action = action_value.as_str().ok_or_else(|| {
                de::Error::custom("turn metadata override action must be a string")
            })?;
            return match action {
                "clear" => {
                    // A clear carrying a value is ambiguous — reject it.
                    if object.contains_key("value") {
                        return Err(de::Error::custom("clear override cannot include value"));
                    }
                    Ok(Self::Clear)
                }
                "set" => {
                    let value = object
                        .get("value")
                        .ok_or_else(|| de::Error::custom("set override is missing value"))?;
                    serde_json::from_value(value.clone())
                        .map(Self::Set)
                        .map_err(de::Error::custom)
                }
                other => Err(de::Error::custom(format!(
                    "unknown turn metadata override action `{other}`"
                ))),
            };
        }
        // Non-object input is always a bare Set payload.
        serde_json::from_value(raw)
            .map(Self::Set)
            .map_err(de::Error::custom)
    }
}
/// Per-turn runtime metadata. Every field is optional and omitted from
/// serialization when `None`; deserialization is hand-written (below in this
/// file) to support legacy `clear_*` boolean keys.
#[derive(Debug, Clone, PartialEq, Serialize, Default)]
pub struct RuntimeTurnMetadata {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub handling_mode: Option<HandlingMode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub skill_references: Option<Vec<SkillKey>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub flow_tool_overlay: Option<TurnToolOverlay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub additional_instructions: Option<Vec<TurnInstruction>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<ModelId>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<Provider>,
    /// Set/clear override for provider params (legacy `clear_provider_params`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider_params: Option<TurnMetadataOverride<ProviderParamsOverride>>,
    /// Set/clear override for the auth binding (legacy `clear_auth_binding`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub auth_binding: Option<TurnMetadataOverride<AuthBindingRef>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub keep_alive: Option<KeepAlivePolicy>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub render_metadata: Option<RenderMetadata>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub execution_kind: Option<RuntimeExecutionKind>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub peer_response_terminal_apply_intent: Option<PeerResponseTerminalApplyIntent>,
}
/// Field-for-field mirror of `RuntimeTurnMetadata` used as the intermediate
/// target during its custom deserialization (all fields defaulted).
#[derive(Deserialize)]
struct RuntimeTurnMetadataFields {
    #[serde(default)]
    handling_mode: Option<HandlingMode>,
    #[serde(default)]
    skill_references: Option<Vec<SkillKey>>,
    #[serde(default)]
    flow_tool_overlay: Option<TurnToolOverlay>,
    #[serde(default)]
    additional_instructions: Option<Vec<TurnInstruction>>,
    #[serde(default)]
    model: Option<ModelId>,
    #[serde(default)]
    provider: Option<Provider>,
    #[serde(default)]
    provider_params: Option<TurnMetadataOverride<ProviderParamsOverride>>,
    #[serde(default)]
    auth_binding: Option<TurnMetadataOverride<AuthBindingRef>>,
    #[serde(default)]
    keep_alive: Option<KeepAlivePolicy>,
    #[serde(default)]
    render_metadata: Option<RenderMetadata>,
    #[serde(default)]
    execution_kind: Option<RuntimeExecutionKind>,
    #[serde(default)]
    peer_response_terminal_apply_intent: Option<PeerResponseTerminalApplyIntent>,
}
impl<'de> Deserialize<'de> for RuntimeTurnMetadata {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut raw = serde_json::Value::deserialize(deserializer)?;
let (clear_provider_params, clear_auth_binding) = if let Some(object) = raw.as_object_mut()
{
(
take_legacy_clear_bool(object, "clear_provider_params", &[])?,
take_legacy_clear_bool(object, "clear_auth_binding", &[])?,
)
} else {
(false, false)
};
let fields: RuntimeTurnMetadataFields =
serde_json::from_value(raw).map_err(de::Error::custom)?;
let provider_params = legacy_override_from_split_fields(
fields.provider_params,
clear_provider_params,
"provider_params",
"clear_provider_params",
)?;
let auth_binding = legacy_override_from_split_fields(
fields.auth_binding,
clear_auth_binding,
"auth_binding",
"clear_auth_binding",
)?;
Ok(Self {
handling_mode: fields.handling_mode,
skill_references: fields.skill_references,
flow_tool_overlay: fields.flow_tool_overlay,
additional_instructions: fields.additional_instructions,
model: fields.model,
provider: fields.provider,
provider_params,
auth_binding,
keep_alive: fields.keep_alive,
render_metadata: fields.render_metadata,
execution_kind: fields.execution_kind,
peer_response_terminal_apply_intent: fields.peer_response_terminal_apply_intent,
})
}
}
impl RuntimeTurnMetadata {
    /// Returns `true` when no field carries an override.
    pub fn is_empty(&self) -> bool {
        // Every field defaults to `None`, so "empty" is exactly the
        // `Default` value.
        *self == Self::default()
    }
    /// Folds `incoming` into `self`, combining overrides from several
    /// inputs batched into one turn.
    ///
    /// Scalar and override fields may be set by at most one side (or by
    /// both with equal values); any disagreement is reported as a
    /// `TurnMetadataMergeConflict` naming the offending field. The list
    /// fields (`skill_references`, `additional_instructions`) are
    /// concatenated instead of compared.
    pub fn merge(&mut self, incoming: Self) -> Result<(), TurnMetadataMergeConflict> {
        merge_scalar(
            &mut self.handling_mode,
            incoming.handling_mode,
            "handling_mode",
        )?;
        merge_scalar(
            &mut self.flow_tool_overlay,
            incoming.flow_tool_overlay,
            "flow_tool_overlay",
        )?;
        merge_scalar(&mut self.model, incoming.model, "model")?;
        merge_scalar(&mut self.provider, incoming.provider, "provider")?;
        merge_override(
            &mut self.provider_params,
            incoming.provider_params,
            "provider_params",
        )?;
        merge_override(&mut self.auth_binding, incoming.auth_binding, "auth_binding")?;
        merge_scalar(&mut self.keep_alive, incoming.keep_alive, "keep_alive")?;
        merge_scalar(
            &mut self.render_metadata,
            incoming.render_metadata,
            "render_metadata",
        )?;
        merge_scalar(
            &mut self.execution_kind,
            incoming.execution_kind,
            "execution_kind",
        )?;
        merge_scalar(
            &mut self.peer_response_terminal_apply_intent,
            incoming.peer_response_terminal_apply_intent,
            "peer_response_terminal_apply_intent",
        )?;
        if let Some(more) = incoming.skill_references {
            match &mut self.skill_references {
                Some(existing) => existing.extend(more),
                slot @ None => *slot = Some(more),
            }
        }
        if let Some(more) = incoming.additional_instructions {
            match &mut self.additional_instructions {
                Some(existing) => existing.extend(more),
                slot @ None => *slot = Some(more),
            }
        }
        Ok(())
    }
}
/// Merges one optional scalar override into another.
///
/// `rhs` absent is a no-op; `lhs` absent adopts `rhs`; equal values are
/// accepted; distinct values conflict.
fn merge_scalar<T: PartialEq>(
    lhs: &mut Option<T>,
    rhs: Option<T>,
    field: &'static str,
) -> Result<(), TurnMetadataMergeConflict> {
    let Some(incoming) = rhs else {
        return Ok(());
    };
    match lhs.as_ref() {
        None => {
            *lhs = Some(incoming);
            Ok(())
        }
        Some(current) if *current == incoming => Ok(()),
        Some(_) => Err(TurnMetadataMergeConflict {
            field,
            reason: "two inputs in one batch set distinct scalar overrides",
        }),
    }
}
/// Merges one optional set-or-clear override into another.
///
/// Same rules as `merge_scalar`, but the conflict message distinguishes
/// set-vs-set disagreements from set-vs-clear ones.
fn merge_override<T: PartialEq>(
    lhs: &mut Option<TurnMetadataOverride<T>>,
    rhs: Option<TurnMetadataOverride<T>>,
    field: &'static str,
) -> Result<(), TurnMetadataMergeConflict> {
    let Some(incoming) = rhs else {
        return Ok(());
    };
    let Some(current) = lhs.as_ref() else {
        *lhs = Some(incoming);
        return Ok(());
    };
    if *current == incoming {
        return Ok(());
    }
    // Both sides present and different: pick the conflict description.
    let reason = match (current, &incoming) {
        (TurnMetadataOverride::Set(_), TurnMetadataOverride::Set(_)) => {
            "two inputs in one batch set distinct scalar overrides"
        }
        _ => "one input sets the field while another clears it",
    };
    Err(TurnMetadataMergeConflict { field, reason })
}
/// Reconciles a modern override field with its legacy clear-flag companion.
///
/// The two may not both be present; the clear flag alone maps to
/// `TurnMetadataOverride::Clear`, otherwise the modern value wins.
fn legacy_override_from_split_fields<T, E>(
    set_value: Option<TurnMetadataOverride<T>>,
    clear: bool,
    set_field: &'static str,
    clear_field: &'static str,
) -> Result<Option<TurnMetadataOverride<T>>, E>
where
    E: de::Error,
{
    match (clear, set_value) {
        (true, Some(_)) => Err(E::custom(format!(
            "{clear_field} cannot be combined with {set_field}"
        ))),
        (true, None) => Ok(Some(TurnMetadataOverride::Clear)),
        (false, value) => Ok(value),
    }
}
/// Removes a legacy boolean flag (and its compatibility aliases) from a raw
/// JSON object, returning its value.
///
/// All occurrences are consumed; any non-boolean occurrence is an error, as
/// is disagreement between the canonical key and an alias. Absence yields
/// `false`.
fn take_legacy_clear_bool<E>(
    object: &mut serde_json::Map<String, serde_json::Value>,
    field: &'static str,
    aliases: &[&'static str],
) -> Result<bool, E>
where
    E: de::Error,
{
    let mut agreed: Option<bool> = None;
    for key in std::iter::once(field).chain(aliases.iter().copied()) {
        let Some(removed) = object.remove(key) else {
            continue;
        };
        let serde_json::Value::Bool(flag) = removed else {
            return Err(E::custom(format!("{key} must be a boolean")));
        };
        match agreed {
            None => agreed = Some(flag),
            Some(previous) if previous == flag => {}
            Some(_) => {
                return Err(E::custom(format!(
                    "{field} and its compatibility aliases disagree"
                )));
            }
        }
    }
    Ok(agreed.unwrap_or(false))
}
/// Serde helpers (for `#[serde(with = "duration_seconds")]`) that encode a
/// `Duration` as a plain integer count of whole seconds.
///
/// NOTE(review): sub-second precision is truncated on serialize and cannot
/// round-trip — presumably fine for second-granularity TTLs; confirm at the
/// field that uses this module.
mod duration_seconds {
    use serde::{Deserialize, Deserializer, Serializer};
    use std::time::Duration;
    // Emits the duration as `u64` seconds (fractional part dropped).
    pub fn serialize<S: Serializer>(value: &Duration, ser: S) -> Result<S::Ok, S::Error> {
        ser.serialize_u64(value.as_secs())
    }
    // Reads a `u64` seconds count back into a `Duration`.
    pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<Duration, D::Error> {
        let secs = u64::deserialize(de)?;
        Ok(Duration::from_secs(secs))
    }
}
/// A batch of conversation/context appends staged for application at a
/// particular `RunApplyBoundary`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct StagedRunInput {
    /// When the staged content is applied.
    pub boundary: RunApplyBoundary,
    /// Conversation messages to append.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub appends: Vec<ConversationAppend>,
    /// Keyed context entries to append.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub context_appends: Vec<ConversationContextAppend>,
    /// Ids of the inputs that produced this staged batch.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub contributing_input_ids: Vec<InputId>,
    /// Optional per-turn metadata overrides.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub turn_metadata: Option<RuntimeTurnMetadata>,
}
/// A unit of run input, tagged as `"primitive_type"` in JSON.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "primitive_type", rename_all = "snake_case")]
#[allow(clippy::large_enum_variant)]
pub enum RunPrimitive {
    /// A staged batch applied at an explicit boundary.
    StagedInput(StagedRunInput),
    /// A single conversation append applied immediately.
    ImmediateAppend(ConversationAppend),
    /// A single context append applied immediately.
    ImmediateContextAppend(ConversationContextAppend),
}
impl RunPrimitive {
pub fn contributing_input_ids(&self) -> &[InputId] {
match self {
RunPrimitive::StagedInput(staged) => &staged.contributing_input_ids,
RunPrimitive::ImmediateAppend(_) | RunPrimitive::ImmediateContextAppend(_) => &[],
}
}
pub fn turn_metadata(&self) -> Option<&RuntimeTurnMetadata> {
match self {
RunPrimitive::StagedInput(staged) => staged.turn_metadata.as_ref(),
RunPrimitive::ImmediateAppend(_) | RunPrimitive::ImmediateContextAppend(_) => None,
}
}
pub fn extract_content_input(&self) -> crate::types::ContentInput {
use crate::types::{ContentBlock, ContentInput};
match self {
RunPrimitive::StagedInput(staged) => {
let mut all_blocks = Vec::new();
for append in &staged.appends {
match &append.content {
CoreRenderable::Text { text } => {
all_blocks.push(ContentBlock::Text { text: text.clone() });
}
CoreRenderable::Blocks { blocks } => {
all_blocks.extend(blocks.iter().cloned());
}
_ => {}
}
}
if all_blocks.is_empty() {
ContentInput::Text(String::new())
} else if all_blocks.len() == 1 {
if let ContentBlock::Text { text } = &all_blocks[0] {
ContentInput::Text(text.clone())
} else {
ContentInput::Blocks(all_blocks)
}
} else {
ContentInput::Blocks(all_blocks)
}
}
RunPrimitive::ImmediateAppend(append) => match &append.content {
CoreRenderable::Text { text } => ContentInput::Text(text.clone()),
CoreRenderable::Blocks { blocks } => ContentInput::Blocks(blocks.clone()),
_ => ContentInput::Text(String::new()),
},
RunPrimitive::ImmediateContextAppend(ctx) => match &ctx.content {
CoreRenderable::Text { text } => ContentInput::Text(text.clone()),
CoreRenderable::Blocks { blocks } => ContentInput::Blocks(blocks.clone()),
_ => ContentInput::Text(String::new()),
},
}
}
pub fn apply_boundary(&self) -> RunApplyBoundary {
match self {
RunPrimitive::StagedInput(staged) => staged.boundary,
RunPrimitive::ImmediateAppend(_) | RunPrimitive::ImmediateContextAppend(_) => {
RunApplyBoundary::Immediate
}
}
}
pub fn peer_response_terminal_apply_intent(&self) -> Option<PeerResponseTerminalApplyIntent> {
self.turn_metadata()
.and_then(|metadata| metadata.peer_response_terminal_apply_intent)
}
pub fn is_peer_response_terminal_context_and_run(&self) -> bool {
matches!(
self.peer_response_terminal_apply_intent(),
Some(PeerResponseTerminalApplyIntent::AppendContextAndRun)
)
}
pub fn peer_response_terminal_apply_intent_violation(&self) -> Option<&'static str> {
if !self.is_peer_response_terminal_context_and_run() {
return None;
}
let RunPrimitive::StagedInput(staged) = self else {
return Some("terminal peer-response apply intent requires a staged primitive");
};
if staged.boundary != RunApplyBoundary::RunStart {
return Some("terminal peer-response apply intent requires RunStart boundary");
}
if !staged.appends.is_empty() || staged.context_appends.is_empty() {
return Some(
"terminal peer-response apply intent requires context-only staged appends",
);
}
if staged
.turn_metadata
.as_ref()
.and_then(|metadata| metadata.execution_kind)
!= Some(RuntimeExecutionKind::ContentTurn)
{
return Some("terminal peer-response apply intent requires ContentTurn execution kind");
}
None
}
pub fn is_context_only_apply_without_turn(&self) -> bool {
matches!(
self,
RunPrimitive::StagedInput(staged)
if staged.appends.is_empty()
&& !staged.context_appends.is_empty()
&& !self.is_peer_response_terminal_context_and_run()
)
}
pub fn is_context_only_immediate(&self) -> bool {
matches!(
self,
RunPrimitive::StagedInput(staged)
if staged.appends.is_empty()
&& !staged.context_appends.is_empty()
&& staged.boundary == RunApplyBoundary::Immediate
)
}
}
#[cfg(test)]
#[allow(clippy::expect_used, clippy::unwrap_used, clippy::panic)]
mod tests {
    use super::*;
    // --- serde round-trips for the simple enums and renderable payloads ---
    #[test]
    fn run_apply_boundary_serde_roundtrip() {
        for boundary in [
            RunApplyBoundary::Immediate,
            RunApplyBoundary::RunStart,
            RunApplyBoundary::RunCheckpoint,
        ] {
            let json = serde_json::to_value(boundary).unwrap();
            let parsed: RunApplyBoundary = serde_json::from_value(json).unwrap();
            assert_eq!(boundary, parsed);
        }
    }
    #[test]
    fn core_renderable_text_serde() {
        let r = CoreRenderable::Text {
            text: "hello".into(),
        };
        let json = serde_json::to_value(&r).unwrap();
        assert_eq!(json["type"], "text");
        assert_eq!(json["text"], "hello");
        let parsed: CoreRenderable = serde_json::from_value(json).unwrap();
        assert_eq!(r, parsed);
    }
    #[test]
    fn core_renderable_json_serde() {
        let r = CoreRenderable::Json {
            value: serde_json::json!({"key": "val"}),
        };
        let json = serde_json::to_value(&r).unwrap();
        assert_eq!(json["type"], "json");
        let parsed: CoreRenderable = serde_json::from_value(json).unwrap();
        assert_eq!(r, parsed);
    }
    /// Convenience: staged primitive at the RunStart boundary carrying the
    /// given conversation appends and nothing else.
    fn make_staged(appends: Vec<ConversationAppend>) -> RunPrimitive {
        RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunStart,
            appends,
            context_appends: vec![],
            contributing_input_ids: vec![],
            turn_metadata: None,
        })
    }
    // --- extract_content_input: flattening and single-text collapsing ---
    #[test]
    fn extract_content_from_staged_text() {
        let p = make_staged(vec![ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Text {
                text: "hello".into(),
            },
        }]);
        assert_eq!(
            p.extract_content_input(),
            crate::types::ContentInput::Text("hello".into())
        );
    }
    #[test]
    fn extract_content_from_staged_blocks() {
        let p = make_staged(vec![ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Blocks {
                blocks: vec![
                    crate::types::ContentBlock::Text { text: "a".into() },
                    crate::types::ContentBlock::Text { text: "b".into() },
                ],
            },
        }]);
        let result = p.extract_content_input();
        assert!(
            matches!(&result, crate::types::ContentInput::Blocks(blocks) if blocks.len() == 2),
            "expected Blocks with 2 elements, got {result:?}"
        );
    }
    #[test]
    fn extract_content_from_staged_empty() {
        let p = make_staged(vec![]);
        assert_eq!(
            p.extract_content_input(),
            crate::types::ContentInput::Text(String::new())
        );
    }
    #[test]
    fn extract_content_single_text_block_collapses() {
        let p = make_staged(vec![ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Blocks {
                blocks: vec![crate::types::ContentBlock::Text {
                    text: "single".into(),
                }],
            },
        }]);
        assert_eq!(
            p.extract_content_input(),
            crate::types::ContentInput::Text("single".into())
        );
    }
    // --- context-only classification helpers on RunPrimitive ---
    #[test]
    fn context_only_immediate_true() {
        let p = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::Immediate,
            appends: vec![],
            context_appends: vec![ConversationContextAppend {
                key: "k".into(),
                content: CoreRenderable::Text { text: "ctx".into() },
            }],
            contributing_input_ids: vec![],
            turn_metadata: None,
        });
        assert!(p.is_context_only_immediate());
    }
    #[test]
    fn context_only_immediate_false_with_appends() {
        let p = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::Immediate,
            appends: vec![ConversationAppend {
                role: ConversationAppendRole::User,
                content: CoreRenderable::Text { text: "hi".into() },
            }],
            context_appends: vec![ConversationContextAppend {
                key: "k".into(),
                content: CoreRenderable::Text { text: "ctx".into() },
            }],
            contributing_input_ids: vec![],
            turn_metadata: None,
        });
        assert!(!p.is_context_only_immediate());
    }
    #[test]
    fn context_only_immediate_false_wrong_boundary() {
        let p = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunCheckpoint,
            appends: vec![],
            context_appends: vec![ConversationContextAppend {
                key: "k".into(),
                content: CoreRenderable::Text { text: "ctx".into() },
            }],
            contributing_input_ids: vec![],
            turn_metadata: None,
        });
        assert!(!p.is_context_only_immediate());
    }
    #[test]
    fn context_only_apply_without_turn_true_for_plain_context() {
        let p = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunCheckpoint,
            appends: vec![],
            context_appends: vec![ConversationContextAppend {
                key: "k".into(),
                content: CoreRenderable::Text { text: "ctx".into() },
            }],
            contributing_input_ids: vec![],
            turn_metadata: Some(RuntimeTurnMetadata {
                execution_kind: Some(RuntimeExecutionKind::ContentTurn),
                ..Default::default()
            }),
        });
        assert!(p.is_context_only_apply_without_turn());
    }
    #[test]
    fn terminal_peer_response_context_and_run_bypasses_context_only_shortcut() {
        let p = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunStart,
            appends: vec![],
            context_appends: vec![ConversationContextAppend {
                key: "peer_response_terminal:analyst-rt:req-123".into(),
                content: CoreRenderable::Text {
                    text: "[SYSTEM NOTICE][PEER_RESPONSE_TERMINAL] done".into(),
                },
            }],
            contributing_input_ids: vec![InputId::new()],
            turn_metadata: Some(RuntimeTurnMetadata {
                execution_kind: Some(RuntimeExecutionKind::ContentTurn),
                peer_response_terminal_apply_intent: Some(
                    PeerResponseTerminalApplyIntent::AppendContextAndRun,
                ),
                ..Default::default()
            }),
        });
        assert!(p.is_peer_response_terminal_context_and_run());
        assert_eq!(p.peer_response_terminal_apply_intent_violation(), None);
        assert!(!p.is_context_only_apply_without_turn());
    }
    #[test]
    fn non_staged_is_not_context_only() {
        let p = RunPrimitive::ImmediateAppend(ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Text { text: "hi".into() },
        });
        assert!(!p.is_context_only_immediate());
    }
    #[test]
    fn core_renderable_reference_serde() {
        let r = CoreRenderable::Reference {
            uri: "file:///tmp/a.txt".into(),
            label: Some("a file".into()),
        };
        let json = serde_json::to_value(&r).unwrap();
        assert_eq!(json["type"], "reference");
        let parsed: CoreRenderable = serde_json::from_value(json).unwrap();
        assert_eq!(r, parsed);
    }
    #[test]
    fn execution_kind_serde_round_trip() {
        for kind in [
            RuntimeExecutionKind::ContentTurn,
            RuntimeExecutionKind::ResumePending,
        ] {
            let json = serde_json::to_value(kind).unwrap();
            let parsed: RuntimeExecutionKind = serde_json::from_value(json.clone()).unwrap();
            assert_eq!(kind, parsed);
        }
        assert_eq!(
            serde_json::to_value(RuntimeExecutionKind::ContentTurn).unwrap(),
            serde_json::Value::String("content_turn".into())
        );
        assert_eq!(
            serde_json::to_value(RuntimeExecutionKind::ResumePending).unwrap(),
            serde_json::Value::String("resume_pending".into())
        );
    }
    #[test]
    fn turn_metadata_execution_kind_defaults_to_none() {
        let meta = RuntimeTurnMetadata::default();
        assert_eq!(meta.execution_kind, None);
    }
    #[test]
    fn turn_metadata_execution_kind_round_trips() {
        let meta = RuntimeTurnMetadata {
            execution_kind: Some(RuntimeExecutionKind::ContentTurn),
            ..Default::default()
        };
        let json = serde_json::to_value(&meta).unwrap();
        assert_eq!(json["execution_kind"], "content_turn");
        let parsed: RuntimeTurnMetadata = serde_json::from_value(json).unwrap();
        assert_eq!(
            parsed.execution_kind,
            Some(RuntimeExecutionKind::ContentTurn)
        );
    }
    // --- legacy provider-params projection (types defined earlier in file) ---
    #[test]
    fn provider_params_override_projects_back_to_legacy_provider_json() {
        let legacy = serde_json::json!({
            "temperature": 0.2,
            "thinking": { "budget_tokens": 10_000 },
            "effort": "xhigh",
            "web_search": null,
        });
        let params = ProviderParamsOverride::from_legacy_provider_value("anthropic", &legacy);
        let projected = params.to_legacy_provider_value();
        assert!(projected.get("provider_tag").is_none());
        let temperature = projected["temperature"].as_f64().expect("temperature");
        assert!(
            (temperature - 0.2).abs() < 0.000_001,
            "unexpected temperature: {temperature}"
        );
        assert_eq!(
            projected["thinking"]["budget_tokens"],
            serde_json::json!(10_000)
        );
        assert_eq!(projected["effort"], serde_json::json!("xhigh"));
        assert!(
            projected
                .as_object()
                .is_some_and(|obj| obj.contains_key("web_search")),
            "explicit provider-native null must not be dropped"
        );
        assert!(projected["web_search"].is_null());
    }
    #[test]
    fn turn_metadata_without_execution_kind_deserializes() {
        let json = serde_json::json!({});
        let parsed: RuntimeTurnMetadata = serde_json::from_value(json).unwrap();
        assert_eq!(parsed.execution_kind, None);
    }
    #[test]
    fn conversation_append_role_serde() {
        for role in [
            ConversationAppendRole::User,
            ConversationAppendRole::Assistant,
            ConversationAppendRole::SystemNotice,
            ConversationAppendRole::Tool,
        ] {
            let json = serde_json::to_value(role).unwrap();
            let parsed: ConversationAppendRole = serde_json::from_value(json).unwrap();
            assert_eq!(role, parsed);
        }
    }
    #[test]
    fn conversation_append_serde() {
        let append = ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Text {
                text: "hello".into(),
            },
        };
        let json = serde_json::to_value(&append).unwrap();
        let parsed: ConversationAppend = serde_json::from_value(json).unwrap();
        assert_eq!(append, parsed);
    }
    #[test]
    fn staged_run_input_serde() {
        let staged = StagedRunInput {
            boundary: RunApplyBoundary::RunStart,
            appends: vec![ConversationAppend {
                role: ConversationAppendRole::User,
                content: CoreRenderable::Text {
                    text: "prompt".into(),
                },
            }],
            context_appends: vec![],
            contributing_input_ids: vec![InputId::new()],
            turn_metadata: Some(RuntimeTurnMetadata {
                keep_alive: Some(KeepAlivePolicy {
                    ttl: std::time::Duration::from_secs(30),
                    policy: KeepAliveMode::Pinned,
                }),
                ..Default::default()
            }),
        };
        let json = serde_json::to_value(&staged).unwrap();
        let parsed: StagedRunInput = serde_json::from_value(json).unwrap();
        assert_eq!(staged, parsed);
    }
    #[test]
    fn run_primitive_staged_input_serde() {
        let primitive = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunStart,
            appends: vec![],
            context_appends: vec![],
            contributing_input_ids: vec![InputId::new(), InputId::new()],
            turn_metadata: None,
        });
        let json = serde_json::to_value(&primitive).unwrap();
        assert_eq!(json["primitive_type"], "staged_input");
        let parsed: RunPrimitive = serde_json::from_value(json).unwrap();
        assert_eq!(primitive, parsed);
    }
    #[test]
    fn run_primitive_immediate_append_serde() {
        let primitive = RunPrimitive::ImmediateAppend(ConversationAppend {
            role: ConversationAppendRole::SystemNotice,
            content: CoreRenderable::Text {
                text: "notice".into(),
            },
        });
        let json = serde_json::to_value(&primitive).unwrap();
        assert_eq!(json["primitive_type"], "immediate_append");
        let parsed: RunPrimitive = serde_json::from_value(json).unwrap();
        assert_eq!(primitive, parsed);
    }
    #[test]
    fn run_primitive_contributing_input_ids() {
        let ids = vec![InputId::new(), InputId::new()];
        let primitive = RunPrimitive::StagedInput(StagedRunInput {
            boundary: RunApplyBoundary::RunStart,
            appends: vec![],
            context_appends: vec![],
            contributing_input_ids: ids.clone(),
            turn_metadata: None,
        });
        assert_eq!(primitive.contributing_input_ids(), &ids);
        let immediate = RunPrimitive::ImmediateAppend(ConversationAppend {
            role: ConversationAppendRole::User,
            content: CoreRenderable::Text { text: "hi".into() },
        });
        assert!(immediate.contributing_input_ids().is_empty());
    }
    #[test]
    fn conversation_context_append_serde() {
        let ctx = ConversationContextAppend {
            key: "peers".into(),
            content: CoreRenderable::Json {
                value: serde_json::json!(["peer1", "peer2"]),
            },
        };
        let json = serde_json::to_value(&ctx).unwrap();
        let parsed: ConversationContextAppend = serde_json::from_value(json).unwrap();
        assert_eq!(ctx, parsed);
    }
    // --- provider-tag projections from legacy JSON blobs ---
    #[test]
    fn anthropic_from_legacy_value_projects_multi_key_blob() {
        let v = serde_json::json!({
            "thinking": {"type": "adaptive"},
            "top_k": 40,
            "effort": "max",
            "inference_geo": "global",
            "context": "1m",
            "compaction": "auto",
            "__meerkat_supports_temperature": true,
        });
        let tag = AnthropicProviderTag::from_legacy_value(&v).expect("projects");
        assert_eq!(tag.thinking, Some(AnthropicThinkingConfig::Adaptive));
        assert_eq!(tag.top_k, Some(40));
        assert_eq!(tag.effort, Some(AnthropicEffort::Max));
        assert_eq!(tag.inference_geo, Some(AnthropicInferenceGeo::Global));
        assert_eq!(tag.context, Some(AnthropicContextWindow::OneMegabyte));
        assert_eq!(tag.compaction, Some(AnthropicCompactionConfig::Auto));
        assert_eq!(tag.supports_temperature_override, Some(true));
    }
    #[test]
    fn anthropic_from_legacy_value_unknown_key_errs() {
        let v = serde_json::json!({"unknown_key": 1});
        let err = AnthropicProviderTag::from_legacy_value(&v).unwrap_err();
        assert!(matches!(err, LegacyProviderParamsError::UnknownKey { .. }));
    }
    #[test]
    fn openai_from_legacy_value_projects_reasoning_and_penalties() {
        let v = serde_json::json!({
            "reasoning_effort": "high",
            "seed": 42,
            "frequency_penalty": 0.5,
            "presence_penalty": 0.3,
        });
        let tag = OpenAiProviderTag::from_legacy_value(&v).expect("projects");
        assert_eq!(tag.reasoning_effort, Some(ReasoningEffort::High));
        assert_eq!(tag.seed, Some(42));
        assert!(matches!(tag.frequency_penalty, Some(v) if (v - 0.5).abs() < 1e-6));
        assert!(matches!(tag.presence_penalty, Some(v) if (v - 0.3).abs() < 1e-6));
    }
    #[test]
    fn gemini_from_legacy_value_projects_thinking_and_top_knobs() {
        let v = serde_json::json!({
            "thinking": {"include_thoughts": true, "thinking_budget": 8000},
            "top_k": 40,
            "top_p": 0.95,
        });
        let tag = GeminiProviderTag::from_legacy_value(&v).expect("projects");
        let thinking = tag.thinking.expect("thinking present");
        assert_eq!(thinking.include_thoughts, Some(true));
        assert_eq!(thinking.thinking_budget, Some(8000));
        assert_eq!(tag.top_k, Some(40));
        assert!(matches!(tag.top_p, Some(v) if (v - 0.95).abs() < 1e-6));
    }
    #[test]
    fn opaque_provider_body_round_trip() {
        let v = serde_json::json!({"max_uses": 5, "allowed_domains": ["example.com"]});
        let body = OpaqueProviderBody::from_value(&v);
        assert_eq!(body.as_value(), v);
    }
}