use serde_json::Value;
use crate::core::providers::unified_provider::ProviderError;
use crate::core::types::thinking::{
ThinkingCapabilities, ThinkingConfig, ThinkingContent, ThinkingEffort, ThinkingUsage,
};
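/// Thinking ("reasoning") support for OpenAI o-series models (o1/o3/o4 families).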
pub mod openai_thinking {
use super::*;
const OPENAI_THINKING_MODELS: &[&str] = &[
"o1",
"o1-preview",
"o1-mini",
"o3",
"o3-mini",
"o4",
"o4-mini",
];
pub fn supports_thinking(model: &str) -> bool {
let model_lower = model.to_lowercase();
OPENAI_THINKING_MODELS
.iter()
.any(|m| model_lower.starts_with(m) || model_lower.contains(&format!("/{}", m)))
}
pub fn capabilities(model: &str) -> ThinkingCapabilities {
if supports_thinking(model) {
ThinkingCapabilities {
supports_thinking: true,
                supports_streaming_thinking: false,
                max_thinking_tokens: Some(20_000),
supported_efforts: vec![
ThinkingEffort::Low,
ThinkingEffort::Medium,
ThinkingEffort::High,
],
thinking_models: OPENAI_THINKING_MODELS
.iter()
.map(|s| s.to_string())
.collect(),
can_return_thinking: true,
thinking_always_on: false,
}
} else {
ThinkingCapabilities::unsupported()
}
}
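    /// Maps a `ThinkingConfig` onto OpenAI-style request parameters
    /// (`max_reasoning_tokens`, `include_reasoning`, `reasoning_effort`),
    /// capping the token budget at the advertised 20k maximum.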
pub fn transform_config(config: &ThinkingConfig, _model: &str) -> Result<Value, ProviderError> {
let mut params = serde_json::Map::new();
if let Some(budget) = config.budget_tokens {
let capped = budget.min(20_000);
params.insert("max_reasoning_tokens".into(), capped.into());
}
if config.include_thinking {
params.insert("include_reasoning".into(), true.into());
}
if let Some(effort) = &config.effort {
let effort_str = match effort {
ThinkingEffort::Low => "low",
ThinkingEffort::Medium => "medium",
ThinkingEffort::High => "high",
};
params.insert("reasoning_effort".into(), effort_str.into());
}
Ok(Value::Object(params))
}
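    /// Reads reasoning text from `choices[0].message.reasoning` when the
    /// upstream response exposes it there (not every OpenAI-compatible
    /// response includes this field).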
pub fn extract_thinking(response: &Value) -> Option<ThinkingContent> {
response
.pointer("/choices/0/message/reasoning")
.and_then(|v| v.as_str())
.map(|text| ThinkingContent::Text {
text: text.to_string(),
signature: None,
})
}
pub fn extract_usage(response: &Value) -> Option<ThinkingUsage> {
response
.pointer("/usage/reasoning_tokens")
.map(|tokens| ThinkingUsage {
thinking_tokens: tokens.as_u64().map(|t| t as u32),
budget_tokens: None,
thinking_cost: None,
provider: Some("openai".to_string()),
})
}
}
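/// Extended-thinking support for Anthropic Claude models, where thinking is
/// returned as dedicated `thinking` blocks in the `content` array.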
pub mod anthropic_thinking {
use super::*;
const ANTHROPIC_THINKING_MODELS: &[&str] = &[
"claude-3-opus",
"claude-3-sonnet",
"claude-3-haiku",
"claude-3-5-sonnet",
"claude-3-5-opus",
"claude-4",
];
pub fn supports_thinking(model: &str) -> bool {
let model_lower = model.to_lowercase();
ANTHROPIC_THINKING_MODELS
.iter()
.any(|m| model_lower.contains(m))
}
pub fn capabilities(model: &str) -> ThinkingCapabilities {
if supports_thinking(model) {
ThinkingCapabilities {
supports_thinking: true,
                supports_streaming_thinking: true,
                max_thinking_tokens: Some(100_000),
                supported_efforts: vec![ThinkingEffort::Medium, ThinkingEffort::High],
thinking_models: ANTHROPIC_THINKING_MODELS
.iter()
.map(|s| s.to_string())
.collect(),
can_return_thinking: true,
thinking_always_on: false,
}
} else {
ThinkingCapabilities::unsupported()
}
}
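    /// Builds the Anthropic `thinking` request object
    /// (`{"type": "enabled", "budget_tokens": ...}`) when thinking is enabled.
    /// Effort levels are not mapped here; only the token budget is forwarded.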
pub fn transform_config(config: &ThinkingConfig, _model: &str) -> Result<Value, ProviderError> {
let mut params = serde_json::Map::new();
if config.enabled {
let mut thinking = serde_json::Map::new();
thinking.insert("type".into(), "enabled".into());
if let Some(budget) = config.budget_tokens {
thinking.insert("budget_tokens".into(), budget.into());
}
params.insert("thinking".into(), Value::Object(thinking));
}
Ok(Value::Object(params))
}
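    /// Scans the response `content` array for the first block with
    /// `type == "thinking"` and returns its text.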
pub fn extract_thinking(response: &Value) -> Option<ThinkingContent> {
response
.pointer("/content")
.and_then(|v| v.as_array())
.and_then(|blocks| {
blocks.iter().find_map(|block| {
if block.get("type")?.as_str()? == "thinking" {
Some(ThinkingContent::Block {
thinking: block.get("thinking")?.as_str()?.to_string(),
block_type: Some("thinking".to_string()),
})
} else {
None
}
})
})
}
pub fn extract_usage(response: &Value) -> Option<ThinkingUsage> {
let thinking_tokens = response
.pointer("/usage/thinking_tokens")
.and_then(|v| v.as_u64())
.map(|t| t as u32);
if thinking_tokens.is_some() {
Some(ThinkingUsage {
thinking_tokens,
budget_tokens: response
.pointer("/usage/thinking_budget_tokens")
.and_then(|v| v.as_u64())
.map(|t| t as u32),
thinking_cost: None,
provider: Some("anthropic".to_string()),
})
} else {
None
}
}
}
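/// Thinking support for DeepSeek reasoner models (R1 family). Reasoning is
/// always on for these models and has no configurable token budget.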
pub mod deepseek_thinking {
use super::*;
const DEEPSEEK_THINKING_MODELS: &[&str] = &["deepseek-r1", "deepseek-reasoner", "r1"];
pub fn supports_thinking(model: &str) -> bool {
let model_lower = model.to_lowercase();
DEEPSEEK_THINKING_MODELS
.iter()
.any(|m| model_lower.contains(m))
}
pub fn capabilities(model: &str) -> ThinkingCapabilities {
if supports_thinking(model) {
ThinkingCapabilities {
supports_thinking: true,
supports_streaming_thinking: true,
                max_thinking_tokens: None,
                supported_efforts: vec![
ThinkingEffort::Low,
ThinkingEffort::Medium,
ThinkingEffort::High,
],
thinking_models: DEEPSEEK_THINKING_MODELS
.iter()
.map(|s| s.to_string())
.collect(),
can_return_thinking: true,
                thinking_always_on: true,
            }
} else {
ThinkingCapabilities::unsupported()
}
}
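    /// DeepSeek reasoning cannot be budgeted here; only an optional
    /// `reasoning_effort` hint is forwarded.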
pub fn transform_config(config: &ThinkingConfig, _model: &str) -> Result<Value, ProviderError> {
let mut params = serde_json::Map::new();
if let Some(effort) = &config.effort {
let effort_str = match effort {
ThinkingEffort::Low => "low",
ThinkingEffort::Medium => "medium",
ThinkingEffort::High => "high",
};
params.insert("reasoning_effort".into(), effort_str.into());
}
Ok(Value::Object(params))
}
pub fn extract_thinking(response: &Value) -> Option<ThinkingContent> {
response
.pointer("/choices/0/message/reasoning_content")
.and_then(|v| v.as_str())
.map(|text| ThinkingContent::Text {
text: text.to_string(),
signature: None,
})
}
pub fn extract_usage(response: &Value) -> Option<ThinkingUsage> {
response
.pointer("/usage/reasoning_tokens")
.map(|tokens| ThinkingUsage {
thinking_tokens: tokens.as_u64().map(|t| t as u32),
budget_tokens: None,
thinking_cost: None,
provider: Some("deepseek".to_string()),
})
}
}
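/// Thinking support for Google Gemini "thinking" / "deep think" model variants.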
pub mod gemini_thinking {
use super::*;
const GEMINI_THINKING_MODELS: &[&str] = &[
"gemini-2.0-flash-thinking",
"gemini-3.0-deep-think",
"gemini-thinking",
];
pub fn supports_thinking(model: &str) -> bool {
let model_lower = model.to_lowercase();
GEMINI_THINKING_MODELS
.iter()
.any(|m| model_lower.contains(m))
|| model_lower.contains("thinking")
|| model_lower.contains("deep-think")
}
pub fn capabilities(model: &str) -> ThinkingCapabilities {
if supports_thinking(model) {
ThinkingCapabilities {
supports_thinking: true,
supports_streaming_thinking: true,
max_thinking_tokens: Some(32_000),
supported_efforts: vec![ThinkingEffort::Medium, ThinkingEffort::High],
thinking_models: GEMINI_THINKING_MODELS
.iter()
.map(|s| s.to_string())
.collect(),
can_return_thinking: true,
thinking_always_on: false,
}
} else {
ThinkingCapabilities::unsupported()
}
}
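    /// Maps the config onto Gemini-style camelCase request parameters
    /// (`enableThinking`, `thinkingBudget`).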
pub fn transform_config(config: &ThinkingConfig, _model: &str) -> Result<Value, ProviderError> {
let mut params = serde_json::Map::new();
if config.enabled {
params.insert("enableThinking".into(), true.into());
if let Some(budget) = config.budget_tokens {
params.insert("thinkingBudget".into(), budget.into());
}
}
Ok(Value::Object(params))
}
pub fn extract_thinking(response: &Value) -> Option<ThinkingContent> {
response
.pointer("/candidates/0/content/thoughts")
.and_then(|v| v.as_str())
.map(|text| ThinkingContent::Text {
text: text.to_string(),
signature: None,
})
.or_else(|| {
response
.pointer("/candidates/0/content/thinking")
.and_then(|v| v.as_str())
.map(|text| ThinkingContent::Text {
text: text.to_string(),
signature: None,
})
})
}
pub fn extract_usage(response: &Value) -> Option<ThinkingUsage> {
response
.pointer("/usageMetadata/thinkingTokenCount")
.map(|tokens| ThinkingUsage {
thinking_tokens: tokens.as_u64().map(|t| t as u32),
budget_tokens: None,
thinking_cost: None,
provider: Some("gemini".to_string()),
})
}
}
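/// Thinking support for OpenRouter, which proxies models from multiple
/// providers. Configuration and extraction are delegated to the matching
/// provider-specific module based on the model name.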
pub mod openrouter_thinking {
use super::*;
pub fn supports_thinking(model: &str) -> bool {
let model_lower = model.to_lowercase();
if model_lower.contains("o1") || model_lower.contains("o3") || model_lower.contains("o4") {
return true;
}
if model_lower.contains("claude") {
return true;
}
if model_lower.contains("deepseek-r1") || model_lower.contains("reasoner") {
return true;
}
if model_lower.contains("gemini") && model_lower.contains("thinking") {
return true;
}
false
}
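    /// Best-effort provider detection from the (possibly prefixed) model name,
    /// e.g. `anthropic/claude-3-5-sonnet` -> `"anthropic"`.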
pub fn detect_provider(model: &str) -> &'static str {
let model_lower = model.to_lowercase();
if model_lower.contains("openai")
|| model_lower.starts_with("o1")
|| model_lower.starts_with("o3")
|| model_lower.starts_with("o4")
{
"openai"
} else if model_lower.contains("anthropic") || model_lower.contains("claude") {
"anthropic"
} else if model_lower.contains("deepseek") {
"deepseek"
} else if model_lower.contains("gemini") || model_lower.contains("google") {
"gemini"
} else {
"unknown"
}
}
pub fn capabilities(model: &str) -> ThinkingCapabilities {
match detect_provider(model) {
"openai" => openai_thinking::capabilities(model),
"anthropic" => anthropic_thinking::capabilities(model),
"deepseek" => deepseek_thinking::capabilities(model),
"gemini" => gemini_thinking::capabilities(model),
_ => ThinkingCapabilities::unsupported(),
}
}
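    /// Delegates to the detected provider's transform; for unknown providers it
    /// falls back to a generic `reasoning` object carrying either an `effort`
    /// level or a `max_tokens` budget (effort takes precedence if both are set).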
pub fn transform_config(config: &ThinkingConfig, model: &str) -> Result<Value, ProviderError> {
match detect_provider(model) {
"openai" => openai_thinking::transform_config(config, model),
"anthropic" => anthropic_thinking::transform_config(config, model),
"deepseek" => deepseek_thinking::transform_config(config, model),
"gemini" => gemini_thinking::transform_config(config, model),
_ => {
let mut params = serde_json::Map::new();
if let Some(effort) = &config.effort {
let effort_str = match effort {
ThinkingEffort::Low => "low",
ThinkingEffort::Medium => "medium",
ThinkingEffort::High => "high",
};
let mut reasoning = serde_json::Map::new();
reasoning.insert("effort".into(), effort_str.into());
params.insert("reasoning".into(), Value::Object(reasoning));
} else if let Some(budget) = config.budget_tokens {
let mut reasoning = serde_json::Map::new();
reasoning.insert("max_tokens".into(), budget.into());
params.insert("reasoning".into(), Value::Object(reasoning));
}
Ok(Value::Object(params))
}
}
}
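    /// Tries each provider's response shape in turn (OpenAI-style `reasoning`,
    /// DeepSeek `reasoning_content`, Anthropic thinking blocks, Gemini thoughts)
    /// and returns the first match.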
pub fn extract_thinking(response: &Value) -> Option<ThinkingContent> {
if let Some(thinking) = openai_thinking::extract_thinking(response) {
return Some(thinking);
}
if let Some(thinking) = deepseek_thinking::extract_thinking(response) {
return Some(thinking);
}
if let Some(thinking) = anthropic_thinking::extract_thinking(response) {
return Some(thinking);
}
if let Some(thinking) = gemini_thinking::extract_thinking(response) {
return Some(thinking);
}
None
}
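    /// Same fallthrough as `extract_thinking`, but for usage; the provider tag
    /// is rewritten to `"openrouter"` since the request went through it.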
pub fn extract_usage(response: &Value) -> Option<ThinkingUsage> {
if let Some(mut usage) = openai_thinking::extract_usage(response) {
usage.provider = Some("openrouter".to_string());
return Some(usage);
}
if let Some(mut usage) = deepseek_thinking::extract_usage(response) {
usage.provider = Some("openrouter".to_string());
return Some(usage);
}
if let Some(mut usage) = anthropic_thinking::extract_usage(response) {
usage.provider = Some("openrouter".to_string());
return Some(usage);
}
if let Some(mut usage) = gemini_thinking::extract_usage(response) {
usage.provider = Some("openrouter".to_string());
return Some(usage);
}
None
}
}
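
// Illustrative sanity checks for the helpers above: model detection, provider
// routing, and extraction from a minimal OpenAI-style response. This is a
// sketch exercising only functions and JSON shapes defined in this file; the
// sample model names and token counts are made up for the tests.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn detects_thinking_models_per_provider() {
        assert!(openai_thinking::supports_thinking("o1-mini"));
        assert!(anthropic_thinking::supports_thinking("claude-3-5-sonnet-latest"));
        assert!(deepseek_thinking::supports_thinking("deepseek-reasoner"));
        assert!(gemini_thinking::supports_thinking("gemini-2.0-flash-thinking-exp"));
        assert!(!openai_thinking::supports_thinking("gpt-4.1"));
    }

    #[test]
    fn routes_openrouter_models_to_the_right_provider() {
        assert_eq!(openrouter_thinking::detect_provider("anthropic/claude-3-5-sonnet"), "anthropic");
        assert_eq!(openrouter_thinking::detect_provider("deepseek/deepseek-r1"), "deepseek");
        assert_eq!(openrouter_thinking::detect_provider("o3-mini"), "openai");
    }

    #[test]
    fn extracts_reasoning_from_an_openai_style_response() {
        let response = serde_json::json!({
            "choices": [{ "message": { "reasoning": "step by step" } }],
            "usage": { "reasoning_tokens": 42 }
        });
        assert!(openai_thinking::extract_thinking(&response).is_some());
        let usage = openai_thinking::extract_usage(&response).expect("usage should be present");
        assert_eq!(usage.thinking_tokens, Some(42));
    }
}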