use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::HashMap;
/// Serde helper for `skip_serializing_if` on flattened `extra` maps:
/// suppresses the field entirely when no extra keys were collected.
fn map_is_empty(value: &HashMap<String, serde_json::Value>) -> bool {
    value.is_empty()
}
/// A single message in a chat conversation.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ChatMessage {
    /// Who authored the message.
    pub role: MessageRole,
    /// The message text.
    pub content: String,
}
/// Author of a [`ChatMessage`]; serialized in lowercase
/// (`"system"`, `"user"`, `"assistant"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum MessageRole {
    System,
    User,
    Assistant,
}
/// Author of an [`OpenAIChatMessage`]; like [`MessageRole`] but with the
/// extra `Tool` role for tool-call results. Serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIMessageRole {
    System,
    User,
    Assistant,
    Tool,
}
/// Content of an OpenAI-style message: either a plain string or a list of
/// typed parts. `untagged` lets serde pick whichever shape the JSON has.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum OpenAIMessageContent {
    Text(String),
    Parts(Vec<OpenAIContentPart>),
}
/// One typed part of multi-part message content, discriminated by a
/// snake_case `"type"` field (`"text"` or `"image_url"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIContentPart {
    Text {
        text: String,
    },
    ImageUrl {
        image_url: OpenAIImageUrl,
    },
}
/// Image reference for an `image_url` content part.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIImageUrl {
    /// URL (or data URI) of the image.
    pub url: String,
    /// Optional detail hint (e.g. resolution level); omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub detail: Option<String>,
}
/// Function invocation inside a tool call. `arguments` is the raw JSON
/// argument string, not a parsed value.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIFunctionCall {
    pub name: String,
    pub arguments: String,
}
/// A tool call emitted by the assistant.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIToolCall {
    pub id: String,
    /// Tool-call kind; `r#` escapes the `type` keyword so it serializes as `"type"`.
    pub r#type: String,
    /// Provider-specific extra payload, passed through untyped; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extra_content: Option<serde_json::Value>,
    pub function: OpenAIFunctionCall,
}
/// Full OpenAI-style chat message, supporting tool calls and multi-part
/// content. All optional fields are omitted from JSON when `None`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIChatMessage {
    pub role: OpenAIMessageRole,
    /// May be absent (e.g. assistant messages that only carry tool calls).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<OpenAIMessageContent>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<OpenAIToolCall>>,
    /// Set on `Tool` role messages to link the result to its originating call.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
/// Backend used for research/search requests; defaults to `Exa`.
/// Serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResearchProvider {
    #[default]
    Exa,
    Tavily,
    /// Let the server pick a provider.
    Auto,
}
/// How thorough a research request should be; defaults to `Basic`.
/// Serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResearchDepth {
    #[default]
    Basic,
    Advanced,
}
/// Request body for a chat completion. Only `model` and `messages` are
/// required; every other field is omitted from the JSON when `None`.
/// Use [`ChatCompletionRequest::new`] plus the `with_*` builders.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    /// Optional routing hint naming the upstream provider.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logit_bias: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_logprobs: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub n: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<Tool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    /// Reasoning/thinking controls (Gemini-style); validated per model.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_config: Option<ThinkingConfig>,
}
/// OpenAI-flavored variant of [`ChatCompletionRequest`]: same parameters but
/// with [`OpenAIChatMessage`] messages and an extra raw `thinking` field.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatCompletionRequest {
    pub model: String,
    pub messages: Vec<OpenAIChatMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logit_bias: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_logprobs: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub n: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<Tool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_config: Option<ThinkingConfig>,
    /// Untyped passthrough for provider-native "thinking" parameters
    /// (e.g. Anthropic-style) — forwarded verbatim.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking: Option<serde_json::Value>,
}
/// Non-streaming chat completion response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (seconds) of creation.
    pub created: u64,
    pub model: String,
    pub choices: Vec<ChatChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}
/// OpenAI-flavored variant of [`ChatCompletionResponse`], carrying
/// [`OpenAIChatChoice`] entries (which support tool calls).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatCompletionResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (seconds) of creation.
    pub created: u64,
    pub model: String,
    pub choices: Vec<OpenAIChatChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}
/// One streamed chunk of a chat completion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionChunk {
    pub id: String,
    pub object: String,
    /// Unix timestamp (seconds) of creation.
    pub created: u64,
    pub model: String,
    pub choices: Vec<ChatCompletionChunkChoice>,
}
/// A choice within a streamed chunk; `finish_reason` is only present on the
/// final chunk of that choice.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionChunkChoice {
    pub index: u32,
    pub delta: ChatCompletionChunkDelta,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub finish_reason: Option<String>,
}
/// Incremental delta carried by a streamed chunk. Any subset of the fields
/// may be present on a given chunk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionChunkDelta {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role: Option<MessageRole>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    /// Incremental reasoning/"thought" text, when the model emits it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thought: Option<String>,
}
/// One completed choice in a non-streaming response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatChoice {
    pub index: u32,
    pub message: ChatMessage,
    pub finish_reason: String,
}
/// One completed choice in an OpenAI-flavored response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatChoice {
    pub index: u32,
    pub message: OpenAIChatMessage,
    pub finish_reason: String,
}
/// Token accounting for a completed request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}
/// Service health report.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthStatus {
    pub status: String,
    pub timestamp: String,
    /// Uptime — units not specified here; presumably seconds. TODO confirm
    /// against the server implementation.
    pub uptime: f64,
    pub services: ServiceStatus,
}
/// Per-dependency health flags inside a [`HealthStatus`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServiceStatus {
    pub database: bool,
    /// Absent when the deployment does not use Redis.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub redis: Option<bool>,
    pub providers: bool,
}
/// Model availability listing, keyed by provider name. All fields default
/// when missing from the payload.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AvailableModels {
    /// Provider name -> model identifiers.
    #[serde(default)]
    pub providers: HashMap<String, Vec<String>>,
    #[serde(default)]
    pub total_models: usize,
    #[serde(default)]
    pub active_providers: Vec<String>,
}
/// Account credit snapshot returned alongside cost estimation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreditInfo {
    pub current_credits: f64,
    pub estimated_cost: f64,
    pub credits_after_request: f64,
    pub reset_date: String,
}
/// Client-side metadata collected about a single request/response cycle
/// (not serialized — typically populated from response headers or the
/// envelope meta). Every field is optional.
#[derive(Debug, Clone)]
pub struct RequestMetadata {
    /// Round-trip time — units not recorded here; presumably milliseconds.
    /// TODO confirm at the call site that fills this in.
    pub response_time: Option<u64>,
    pub provider: Option<String>,
    pub tokens_used: Option<u32>,
    pub credits_used: Option<f64>,
    pub credits_remaining: Option<f64>,
    pub request_id: Option<String>,
    /// Count of compatibility warnings attached to the response.
    pub compat_warnings: Option<u32>,
    pub response_mode: Option<String>,
    pub billing_plan: Option<String>,
    pub rainy_credits_charged: Option<f64>,
    pub rainy_markup_percent: Option<f64>,
    pub rainy_daily_credits_remaining: Option<String>,
}
/// Request body for the Responses API. `input` is untyped JSON so it can be
/// either a plain string or a structured item list. Unknown top-level keys
/// round-trip through `extra` via `#[serde(flatten)]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResponsesRequest {
    pub model: String,
    pub input: serde_json::Value,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_output_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    /// Reasoning options, e.g. `{"effort": "high"}`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<serde_json::Value>,
    /// Arbitrary additional fields, flattened into the top-level object and
    /// omitted entirely when empty.
    #[serde(flatten, skip_serializing_if = "map_is_empty", default)]
    pub extra: HashMap<String, serde_json::Value>,
}
impl ResponsesRequest {
    /// Builds a request for `model` with the given `input` payload; every
    /// optional parameter starts unset.
    pub fn new(model: impl Into<String>, input: serde_json::Value) -> Self {
        Self {
            model: model.into(),
            input,
            stream: None,
            tools: None,
            tool_choice: None,
            response_format: None,
            temperature: None,
            top_p: None,
            max_output_tokens: None,
            user: None,
            prompt_cache_key: None,
            reasoning: None,
            extra: HashMap::new(),
        }
    }
    /// Convenience constructor for a plain-text input.
    pub fn text(model: impl Into<String>, input_text: impl Into<String>) -> Self {
        let input = serde_json::Value::String(input_text.into());
        Self::new(model, input)
    }
    /// Enables or disables streaming.
    pub fn with_stream(self, stream: bool) -> Self {
        Self { stream: Some(stream), ..self }
    }
    /// Sets the full reasoning options object.
    pub fn with_reasoning(self, reasoning: serde_json::Value) -> Self {
        Self { reasoning: Some(reasoning), ..self }
    }
    /// Shorthand for `with_reasoning(json!({"effort": ...}))`.
    pub fn with_reasoning_effort(self, effort: impl Into<String>) -> Self {
        let reasoning = serde_json::json!({ "effort": effort.into() });
        Self { reasoning: Some(reasoning), ..self }
    }
    /// Caps the number of output tokens.
    pub fn with_max_output_tokens(self, max_output_tokens: u32) -> Self {
        Self { max_output_tokens: Some(max_output_tokens), ..self }
    }
    /// Sets the prompt-cache key.
    pub fn with_prompt_cache_key(self, prompt_cache_key: impl Into<String>) -> Self {
        Self { prompt_cache_key: Some(prompt_cache_key.into()), ..self }
    }
    /// Sets the end-user identifier.
    pub fn with_user(self, user: impl Into<String>) -> Self {
        Self { user: Some(user.into()), ..self }
    }
    /// Replaces the tool list.
    pub fn with_tools(self, tools: Vec<serde_json::Value>) -> Self {
        Self { tools: Some(tools), ..self }
    }
    /// Appends a function tool definition, creating the tool list on first use.
    pub fn add_function_tool(
        mut self,
        name: impl Into<String>,
        description: impl Into<String>,
        parameters: serde_json::Value,
    ) -> Self {
        let tool = serde_json::json!({
            "type": "function",
            "name": name.into(),
            "description": description.into(),
            "parameters": parameters
        });
        self.tools.get_or_insert_with(Vec::new).push(tool);
        self
    }
    /// Adds an arbitrary extra key/value that is flattened into the request body.
    pub fn with_extra(mut self, key: impl Into<String>, value: serde_json::Value) -> Self {
        self.extra.insert(key.into(), value);
        self
    }
}
/// Token usage block of a Responses API result. Unknown keys are preserved
/// in `extra` via `#[serde(flatten)]`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ResponsesUsage {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
    /// Provider-specific breakdown, kept untyped.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_tokens_details: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion_tokens_details: Option<serde_json::Value>,
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
/// Loosely-typed Responses API result: every field is optional and unknown
/// keys are preserved in `extra`, so partially-conforming providers still parse.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ResponsesApiResponse {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub object: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// Convenience concatenated text output, when the server provides it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_text: Option<String>,
    /// Raw output items, kept untyped.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<ResponsesUsage>,
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
/// A single compatibility warning attached to a response envelope.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompatWarning {
    pub code: String,
    pub message: String,
    /// Request path the warning refers to, when applicable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
}
/// Which advanced features a request actually exercised. Serialized with
/// camelCase names via per-field renames.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeaturesUsed {
    pub reasoning: bool,
    #[serde(rename = "imageInput")]
    pub image_input: bool,
    pub tools: bool,
    #[serde(rename = "structuredOutput")]
    pub structured_output: bool,
}
/// Metadata about reasoning output in a response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningMeta {
    /// Whether reasoning content was present at all.
    pub present: bool,
    /// Whether a reasoning summary was present.
    pub summary_present: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokens: Option<u32>,
}
/// Billing/diagnostic metadata attached to a [`RainyEnvelope`]. Fields
/// serialize as camelCase but also accept snake_case on input via `alias`;
/// unrecognized keys are preserved in `extra`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RainyEnvelopeMeta {
    #[serde(
        rename = "billingPlan",
        alias = "billing_plan",
        skip_serializing_if = "Option::is_none"
    )]
    pub billing_plan: Option<String>,
    #[serde(
        rename = "creditsCharged",
        alias = "credits_charged",
        skip_serializing_if = "Option::is_none"
    )]
    pub credits_charged: Option<f64>,
    #[serde(
        rename = "markupPercent",
        alias = "markup_percent",
        skip_serializing_if = "Option::is_none"
    )]
    pub markup_percent: Option<f64>,
    #[serde(
        rename = "dailyCreditsRemaining",
        alias = "daily_credits_remaining",
        skip_serializing_if = "Option::is_none"
    )]
    pub daily_credits_remaining: Option<String>,
    #[serde(
        rename = "compatWarnings",
        alias = "compat_warnings",
        skip_serializing_if = "Option::is_none"
    )]
    pub compat_warnings: Option<Vec<CompatWarning>>,
    #[serde(
        rename = "featuresUsed",
        alias = "features_used",
        skip_serializing_if = "Option::is_none"
    )]
    pub features_used: Option<FeaturesUsed>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<ReasoningMeta>,
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
/// Generic success envelope wrapping a typed payload `T` plus optional
/// billing/diagnostic metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RainyEnvelope<T> {
    pub success: bool,
    pub data: T,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub meta: Option<RainyEnvelopeMeta>,
}
/// A single streamed event from the Responses API, kept as untyped JSON.
pub type ResponsesStreamEvent = serde_json::Value;
/// Architecture facts for a catalog model (modalities, tokenizer, etc.).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelArchitecture {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokenizer: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub instruct_type: Option<String>,
}
/// A capability marker that upstream catalogs express either as a boolean or
/// as a descriptive string; `untagged` accepts both JSON shapes.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CapabilityFlag {
    Bool(bool),
    Text(String),
}
/// Legacy (v1) capability flags for a catalog model. Superseded by
/// [`RainyCapabilitiesV2`] where available.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RainyCapabilities {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub image_input: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<CapabilityFlag>,
}
/// Which vendor's reasoning parameter dialect a [`ReasoningProfile`] uses.
/// Serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum ReasoningProvider {
    Openai,
    Google,
    Anthropic,
    Other,
}
/// Allowed token-budget range for budget-based reasoning, plus optional
/// sentinel values for "dynamic" and "disabled" modes.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ThinkingBudget {
    pub min: i32,
    pub max: i32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_value: Option<i32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub disable_value: Option<i32>,
}
/// Which reasoning knobs a model advertises, and their accepted values.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ReasoningControls {
    /// Parameter names observed in the wild for this model.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub observed_parameters: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_toggle: Option<bool>,
    /// Whether an effort-style parameter is supported at all.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<bool>,
    /// Accepted effort values (e.g. low/medium/high), when enumerated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub effort: Option<Vec<String>>,
    /// Accepted thinking-level values, when enumerated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_level: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_budget: Option<ThinkingBudget>,
}
/// Maps a reasoning control onto a concrete request-parameter path
/// (e.g. `"reasoning.effort"` or `"thinking.budget_tokens"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ReasoningProfile {
    pub provider: ReasoningProvider,
    /// Dotted path of the parameter in the request body.
    pub parameter_path: String,
    /// Accepted values for this path, when enumerated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub values: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub notes: Option<String>,
}
/// Parameter names used to switch reasoning on/off for a model.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ReasoningToggle {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub enable_param: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_reasoning_param: Option<String>,
}
/// V2 reasoning capability block: a hard `supported` flag plus optional
/// control metadata and provider-specific parameter profiles.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyReasoningCapabilitiesV2 {
    pub supported: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub controls: Option<ReasoningControls>,
    #[serde(default)]
    pub profiles: Vec<ReasoningProfile>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toggle: Option<ReasoningToggle>,
}
/// Input/output modality lists (e.g. "text", "image") for a v2 catalog entry.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyMultimodalCapabilitiesV2 {
    #[serde(default)]
    pub input: Vec<String>,
    #[serde(default)]
    pub output: Vec<String>,
}
/// Request parameters a v2 catalog entry accepts (e.g. "tools",
/// "response_format").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyParametersCapabilitiesV2 {
    #[serde(default)]
    pub accepted: Vec<String>,
}
/// Full v2 capability record for a catalog model; required by
/// [`select_models`] — entries without it are filtered out.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyCapabilitiesV2 {
    pub multimodal: RainyMultimodalCapabilitiesV2,
    pub reasoning: RainyReasoningCapabilitiesV2,
    pub parameters: RainyParametersCapabilitiesV2,
}
/// Per-token prices as decimal strings, as delivered by the catalog;
/// parsed with [`parse_price`] for sorting.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ModelPricing {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion: Option<String>,
}
/// One entry in the model catalog. Only `id` is required; unknown fields are
/// preserved in `extra` via `#[serde(flatten)]`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelCatalogItem {
    pub id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub context_length: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pricing: Option<ModelPricing>,
    /// Flat parameter list; consulted only when v2 capabilities are absent
    /// (see [`catalog_item_supports`]).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub supported_parameters: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub architecture: Option<ModelArchitecture>,
    /// Legacy v1 capability flags.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rainy_capabilities: Option<RainyCapabilities>,
    /// Structured v2 capabilities; preferred when present.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rainy_capabilities_v2: Option<RainyCapabilitiesV2>,
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
/// Which style of reasoning control the caller wants to use.
/// Serialized in snake_case.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningMode {
    /// OpenAI-style `reasoning.effort`.
    Effort,
    /// Level-based `thinking_config.thinking_level`.
    ThinkingLevel,
    /// Token-budget-based thinking.
    ThinkingBudget,
}
/// Filter criteria for [`select_models`]. Empty/`None` fields impose no
/// constraint.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ModelSelectionCriteria {
    /// Modalities the model must accept (case-insensitive match).
    #[serde(default)]
    pub required_input_modalities: Vec<String>,
    /// Modalities the model must produce (case-insensitive match).
    #[serde(default)]
    pub required_output_modalities: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub require_tools: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub require_structured_output: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_mode: Option<ReasoningMode>,
    /// Specific reasoning value (e.g. an effort level) the model must accept.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_value: Option<String>,
}
/// Caller's requested reasoning configuration, consumed by
/// [`build_reasoning_config`]. `value` applies to effort/level modes;
/// `budget` applies to the budget mode.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ReasoningPreference {
    pub mode: ReasoningMode,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub budget: Option<i32>,
}
/// Parses a catalog price string into an `f64` for sorting. Missing,
/// unparsable, or non-finite prices map to `f64::MAX` so unpriced models
/// sort last.
fn parse_price(value: Option<&str>) -> f64 {
    match value.map(str::parse::<f64>) {
        Some(Ok(price)) if price.is_finite() => price,
        _ => f64::MAX,
    }
}
/// Returns true when every entry of `required` appears in `available`,
/// compared ASCII-case-insensitively. An empty `required` list always
/// matches (vacuous truth of `all` on an empty iterator).
fn has_required_modalities(available: &[String], required: &[String]) -> bool {
    required
        .iter()
        .all(|needed| available.iter().any(|have| have.eq_ignore_ascii_case(needed)))
}
/// Checks whether a model's v2 capabilities satisfy a reasoning request.
///
/// A model qualifies when reasoning is supported at all and the requested
/// `mode` is advertised; when `reasoning_value` is given, it must also be
/// one of the advertised values (ASCII-case-insensitive).
///
/// The original Effort arm used a `.map().filter().map().unwrap_or()` chain;
/// this rewrite expresses the same three conditions with guard clauses.
fn supports_reasoning_preference(
    capabilities: &RainyCapabilitiesV2,
    mode: &ReasoningMode,
    reasoning_value: Option<&str>,
) -> bool {
    if !capabilities.reasoning.supported {
        return false;
    }
    let controls = capabilities.reasoning.controls.as_ref();
    match mode {
        ReasoningMode::Effort => {
            // Effort requires controls to be present.
            let Some(c) = controls else {
                return false;
            };
            // Supported via the boolean flag or a non-empty value list.
            let effort_supported = c.reasoning_effort == Some(true)
                || c.effort.as_ref().is_some_and(|v| !v.is_empty());
            if !effort_supported {
                return false;
            }
            match c.effort.as_ref() {
                // A value list exists: any requested value must be in it.
                Some(values) => reasoning_value.is_none_or(|value| {
                    values
                        .iter()
                        .any(|candidate| candidate.eq_ignore_ascii_case(value))
                }),
                // No list to validate against: only a value-less request passes.
                None => reasoning_value.is_none(),
            }
        }
        ReasoningMode::ThinkingLevel => controls
            .and_then(|c| c.thinking_level.as_ref())
            .is_some_and(|values| {
                reasoning_value.is_none_or(|value| {
                    values
                        .iter()
                        .any(|candidate| candidate.eq_ignore_ascii_case(value))
                })
            }),
        // Budget mode only needs a budget range to be advertised; the actual
        // budget value is range-checked later in build_reasoning_config.
        ReasoningMode::ThinkingBudget => {
            controls.and_then(|c| c.thinking_budget.as_ref()).is_some()
        }
    }
}
/// Returns true when the catalog item accepts request `parameter`.
/// V2 capability metadata, when present, is authoritative; otherwise the
/// legacy flat `supported_parameters` list is consulted.
fn catalog_item_supports(item: &ModelCatalogItem, parameter: &str) -> bool {
    match &item.rainy_capabilities_v2 {
        Some(v2) => v2
            .parameters
            .accepted
            .iter()
            .any(|candidate| candidate == parameter),
        None => item
            .supported_parameters
            .as_ref()
            .is_some_and(|params| params.iter().any(|candidate| candidate == parameter)),
    }
}
/// Filters `models` by `criteria` and returns the survivors ordered by
/// ascending prompt price, then ascending completion price, then descending
/// context length. Models without v2 capability metadata are excluded.
pub fn select_models(
    models: &[ModelCatalogItem],
    criteria: &ModelSelectionCriteria,
) -> Vec<ModelCatalogItem> {
    let wanted_inputs: Vec<String> = criteria
        .required_input_modalities
        .iter()
        .map(|v| v.to_lowercase())
        .collect();
    let wanted_outputs: Vec<String> = criteria
        .required_output_modalities
        .iter()
        .map(|v| v.to_lowercase())
        .collect();
    // Predicate deciding whether a single catalog entry satisfies the criteria.
    let matches = |item: &ModelCatalogItem| -> bool {
        let Some(v2) = item.rainy_capabilities_v2.as_ref() else {
            return false;
        };
        let inputs: Vec<String> = v2
            .multimodal
            .input
            .iter()
            .map(|v| v.to_lowercase())
            .collect();
        let outputs: Vec<String> = v2
            .multimodal
            .output
            .iter()
            .map(|v| v.to_lowercase())
            .collect();
        if !has_required_modalities(&inputs, &wanted_inputs)
            || !has_required_modalities(&outputs, &wanted_outputs)
        {
            return false;
        }
        if criteria.require_tools == Some(true) && !catalog_item_supports(item, "tools") {
            return false;
        }
        if criteria.require_structured_output == Some(true)
            && !catalog_item_supports(item, "response_format")
            && !catalog_item_supports(item, "structured_outputs")
        {
            return false;
        }
        match &criteria.reasoning_mode {
            Some(mode) => {
                supports_reasoning_preference(v2, mode, criteria.reasoning_value.as_deref())
            }
            None => true,
        }
    };
    let mut selected: Vec<ModelCatalogItem> =
        models.iter().filter(|item| matches(item)).cloned().collect();
    let prompt_price =
        |m: &ModelCatalogItem| parse_price(m.pricing.as_ref().and_then(|p| p.prompt.as_deref()));
    let completion_price = |m: &ModelCatalogItem| {
        parse_price(m.pricing.as_ref().and_then(|p| p.completion.as_deref()))
    };
    // Prices are finite (parse_price filters non-finite), so partial_cmp
    // cannot actually fail; Equal is just a defensive fallback.
    selected.sort_by(|a, b| {
        prompt_price(a)
            .partial_cmp(&prompt_price(b))
            .unwrap_or(Ordering::Equal)
            .then_with(|| {
                completion_price(a)
                    .partial_cmp(&completion_price(b))
                    .unwrap_or(Ordering::Equal)
            })
            .then_with(|| {
                b.context_length
                    .unwrap_or_default()
                    .cmp(&a.context_length.unwrap_or_default())
            })
    });
    selected
}
/// Translates a caller's reasoning preference into the provider-specific
/// request-body fragment for `model`, using its v2 capability metadata.
///
/// Returns `None` when the model has no v2 metadata, does not support
/// reasoning, does not advertise the requested control, rejects the
/// requested value/budget, or exposes no known parameter path for the mode.
pub fn build_reasoning_config(
    model: &ModelCatalogItem,
    preference: &ReasoningPreference,
) -> Option<serde_json::Value> {
    let v2 = model.rainy_capabilities_v2.as_ref()?;
    if !v2.reasoning.supported {
        return None;
    }
    let profiles = &v2.reasoning.profiles;
    let controls = v2.reasoning.controls.as_ref();
    match preference.mode {
        ReasoningMode::Effort => {
            let value = preference.value.clone()?;
            // Effort is supported via the boolean flag or a non-empty list
            // of accepted effort values.
            let supports_effort = controls
                .map(|c| {
                    c.reasoning_effort == Some(true)
                        || c.effort.as_ref().is_some_and(|v| !v.is_empty())
                })
                .unwrap_or(false);
            if !supports_effort {
                return None;
            }
            // When an explicit value list exists, the requested value must
            // appear in it (ASCII-case-insensitive).
            if let Some(efforts) = controls.and_then(|c| c.effort.as_ref()) {
                if !efforts.iter().any(|v| v.eq_ignore_ascii_case(&value)) {
                    return None;
                }
            }
            // A profile for the "reasoning.effort" path must exist. (The
            // previous inner `match` on the same path was unreachable dead
            // code, since `find` already guarantees the path.)
            profiles
                .iter()
                .find(|p| p.parameter_path == "reasoning.effort")?;
            Some(serde_json::json!({
                "reasoning": { "effort": value }
            }))
        }
        ReasoningMode::ThinkingLevel => {
            let value = preference.value.clone()?;
            // The level must be advertised by the controls...
            let supports = controls
                .and_then(|c| c.thinking_level.as_ref())
                .map(|levels| levels.iter().any(|v| v.eq_ignore_ascii_case(&value)))
                .unwrap_or(false);
            if !supports {
                return None;
            }
            let level_profile = profiles
                .iter()
                .find(|p| p.parameter_path == "thinking_config.thinking_level")?;
            // ...and, when the profile enumerates values, by the profile too.
            if let Some(values) = &level_profile.values {
                if !values.iter().any(|v| v.eq_ignore_ascii_case(&value)) {
                    return None;
                }
            }
            Some(serde_json::json!({
                "thinking_config": { "thinking_level": value }
            }))
        }
        ReasoningMode::ThinkingBudget => {
            let budget = preference.budget?;
            // Budget must fall within the advertised [min, max] range.
            let supports = controls.and_then(|c| c.thinking_budget.as_ref())?;
            if budget < supports.min || budget > supports.max {
                return None;
            }
            // Two known parameter paths: Anthropic-style `thinking.budget_tokens`
            // and Gemini-style `thinking_config.thinking_budget`.
            let budget_profile = profiles.iter().find(|p| {
                p.parameter_path == "thinking.budget_tokens"
                    || p.parameter_path == "thinking_config.thinking_budget"
            })?;
            if budget_profile.parameter_path == "thinking.budget_tokens" {
                return Some(serde_json::json!({
                    "thinking": { "budget_tokens": budget }
                }));
            }
            if budget_profile.parameter_path == "thinking_config.thinking_budget" {
                return Some(serde_json::json!({
                    "thinking_config": { "thinking_budget": budget }
                }));
            }
            None
        }
    }
}
/// Well-known model identifier strings, grouped by provider. The deprecated
/// aliases keep the older provider-prefixed spellings working.
pub mod model_constants {
    // OpenAI
    pub const OPENAI_GPT_4O: &str = "gpt-4o";
    pub const OPENAI_GPT_5: &str = "gpt-5";
    pub const OPENAI_GPT_5_PRO: &str = "gpt-5-pro";
    pub const OPENAI_O3: &str = "o3";
    pub const OPENAI_O4_MINI: &str = "o4-mini";
    // Google Gemini
    pub const GOOGLE_GEMINI_2_5_PRO: &str = "gemini-2.5-pro";
    pub const GOOGLE_GEMINI_2_5_FLASH: &str = "gemini-2.5-flash";
    pub const GOOGLE_GEMINI_2_5_FLASH_LITE: &str = "gemini-2.5-flash-lite";
    pub const GOOGLE_GEMINI_3_PRO: &str = "gemini-3-pro-preview";
    pub const GOOGLE_GEMINI_3_FLASH: &str = "gemini-3-flash-preview";
    pub const GOOGLE_GEMINI_3_PRO_IMAGE: &str = "gemini-3-pro-image-preview";
    // Groq-hosted
    pub const GROQ_LLAMA_3_1_8B_INSTANT: &str = "llama-3.1-8b-instant";
    pub const GROQ_LLAMA_3_3_70B_VERSATILE: &str = "llama-3.3-70b-versatile";
    // Other hosted models
    pub const KIMI_K2_0925: &str = "moonshotai/kimi-k2-instruct-0905";
    pub const CEREBRAS_LLAMA3_1_8B: &str = "cerebras/llama3.1-8b";
    // In-house models
    pub const ASTRONOMER_1: &str = "astronomer-1";
    pub const ASTRONOMER_1_MAX: &str = "astronomer-1-max";
    pub const ASTRONOMER_1_5: &str = "astronomer-1.5";
    pub const ASTRONOMER_2: &str = "astronomer-2";
    pub const ASTRONOMER_2_PRO: &str = "astronomer-2-pro";
    // Deprecated provider-prefixed aliases, kept for backward compatibility.
    #[deprecated(note = "Use OPENAI_GPT_4O instead for OpenAI compatibility")]
    pub const GPT_4O: &str = "openai/gpt-4o";
    #[deprecated(note = "Use OPENAI_GPT_5 instead for OpenAI compatibility")]
    pub const GPT_5: &str = "openai/gpt-5";
    #[deprecated(note = "Use GOOGLE_GEMINI_2_5_PRO instead for OpenAI compatibility")]
    pub const GEMINI_2_5_PRO: &str = "google/gemini-2.5-pro";
    #[deprecated(note = "Use GOOGLE_GEMINI_2_5_FLASH instead for OpenAI compatibility")]
    pub const GEMINI_2_5_FLASH: &str = "google/gemini-2.5-flash";
    #[deprecated(note = "Use GOOGLE_GEMINI_2_5_FLASH_LITE instead for OpenAI compatibility")]
    pub const GEMINI_2_5_FLASH_LITE: &str = "google/gemini-2.5-flash-lite";
    #[deprecated(note = "Use GROQ_LLAMA_3_1_8B_INSTANT instead for OpenAI compatibility")]
    pub const LLAMA_3_1_8B_INSTANT: &str = "groq/llama-3.1-8b-instant";
    #[deprecated(note = "Use CEREBRAS_LLAMA3_1_8B instead for OpenAI compatibility")]
    pub const LLAMA3_1_8B: &str = "cerebras/llama3.1-8b";
}
/// Canonical provider identifier strings used in routing hints.
pub mod providers {
    pub const OPENAI: &str = "openai";
    pub const ANTHROPIC: &str = "anthropic";
    pub const GROQ: &str = "groq";
    pub const CEREBRAS: &str = "cerebras";
    pub const GEMINI: &str = "gemini";
    pub const ENOSISLABS: &str = "enosislabs";
}
impl ChatCompletionRequest {
    /// Creates a request for `model` with the given conversation `messages`;
    /// every optional parameter starts unset and can be filled in via the
    /// `with_*` builder methods.
    pub fn new(model: impl Into<String>, messages: Vec<ChatMessage>) -> Self {
        Self {
            model: model.into(),
            messages,
            temperature: None,
            max_tokens: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: None,
            user: None,
            provider: None,
            stream: None,
            logit_bias: None,
            logprobs: None,
            top_logprobs: None,
            n: None,
            response_format: None,
            tools: None,
            tool_choice: None,
            thinking_config: None,
        }
    }
pub fn with_temperature(mut self, temperature: f32) -> Self {
self.temperature = Some(temperature.clamp(0.0, 2.0));
self
}
pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
self.max_tokens = Some(max_tokens);
self
}
pub fn with_user(mut self, user: impl Into<String>) -> Self {
self.user = Some(user.into());
self
}
pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
self.provider = Some(provider.into());
self
}
pub fn with_stream(mut self, stream: bool) -> Self {
self.stream = Some(stream);
self
}
pub fn with_logit_bias(mut self, logit_bias: serde_json::Value) -> Self {
self.logit_bias = Some(logit_bias);
self
}
pub fn with_logprobs(mut self, logprobs: bool) -> Self {
self.logprobs = Some(logprobs);
self
}
pub fn with_top_logprobs(mut self, top_logprobs: u32) -> Self {
self.top_logprobs = Some(top_logprobs);
self
}
pub fn with_n(mut self, n: u32) -> Self {
self.n = Some(n);
self
}
pub fn with_response_format(mut self, response_format: ResponseFormat) -> Self {
self.response_format = Some(response_format);
self
}
pub fn with_tools(mut self, tools: Vec<Tool>) -> Self {
self.tools = Some(tools);
self
}
pub fn with_tool_choice(mut self, tool_choice: ToolChoice) -> Self {
self.tool_choice = Some(tool_choice);
self
}
pub fn with_thinking_config(mut self, thinking_config: ThinkingConfig) -> Self {
self.thinking_config = Some(thinking_config);
self
}
pub fn with_include_thoughts(mut self, include_thoughts: bool) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.include_thoughts = Some(include_thoughts);
self.thinking_config = Some(config);
self
}
pub fn with_thinking_level(mut self, thinking_level: ThinkingLevel) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.thinking_level = Some(thinking_level);
self.thinking_config = Some(config);
self
}
pub fn with_thinking_budget(mut self, thinking_budget: i32) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.thinking_budget = Some(thinking_budget);
self.thinking_config = Some(config);
self
}
/// Validates the request's sampling/decoding parameters against the ranges
/// the OpenAI-compatible API accepts.
///
/// Returns `Err` with a human-readable message describing the first
/// violation found, or `Ok(())` when every set parameter is in range.
pub fn validate_openai_compatibility(&self) -> Result<(), String> {
    if let Some(temp) = self.temperature {
        if !(0.0..=2.0).contains(&temp) {
            return Err(format!(
                "Temperature must be between 0.0 and 2.0, got {}",
                temp
            ));
        }
    }
    if let Some(top_p) = self.top_p {
        if !(0.0..=1.0).contains(&top_p) {
            return Err(format!("Top-p must be between 0.0 and 1.0, got {}", top_p));
        }
    }
    if let Some(fp) = self.frequency_penalty {
        if !(-2.0..=2.0).contains(&fp) {
            return Err(format!(
                "Frequency penalty must be between -2.0 and 2.0, got {}",
                fp
            ));
        }
    }
    if let Some(pp) = self.presence_penalty {
        if !(-2.0..=2.0).contains(&pp) {
            return Err(format!(
                "Presence penalty must be between -2.0 and 2.0, got {}",
                pp
            ));
        }
    }
    if let Some(mt) = self.max_tokens {
        if mt == 0 {
            return Err("Max tokens must be greater than 0".to_string());
        }
    }
    if let Some(tlp) = self.top_logprobs {
        if !(0..=20).contains(&tlp) {
            return Err(format!(
                "Top logprobs must be between 0 and 20, got {}",
                tlp
            ));
        }
    }
    if let Some(n) = self.n {
        if n == 0 {
            return Err("n must be greater than 0".to_string());
        }
    }
    if let Some(stop) = &self.stop {
        if stop.len() > 4 {
            return Err("Cannot have more than 4 stop sequences".to_string());
        }
        for seq in stop {
            if seq.is_empty() {
                return Err("Stop sequences cannot be empty".to_string());
            }
            // Count characters, not bytes: `str::len()` returns the UTF-8
            // byte length and would over-count multi-byte sequences, while
            // the error message promises a character limit.
            if seq.chars().count() > 64 {
                return Err("Stop sequences cannot be longer than 64 characters".to_string());
            }
        }
    }
    if let Some(thinking_config) = &self.thinking_config {
        self.validate_thinking_config(thinking_config)?;
    }
    Ok(())
}
/// Validates `config` against the model family named in `self.model`.
///
/// `thinking_level` is a Gemini 3 feature; `thinking_budget` is a Gemini 2.5
/// feature with per-model ranges (-1 means a dynamic budget).
fn validate_thinking_config(&self, config: &ThinkingConfig) -> Result<(), String> {
    // Reject the level+budget combination up front. Previously this check ran
    // last and was unreachable: no model string matches both "gemini-3" and
    // "gemini-2.5", so one of the per-family checks below always produced a
    // less specific error first.
    if config.thinking_level.is_some() && config.thinking_budget.is_some() {
        return Err("Cannot specify both thinking_level (Gemini 3) and thinking_budget (Gemini 2.5) in the same request".to_string());
    }
    let is_gemini_3 = self.model.contains("gemini-3");
    let is_gemini_2_5 = self.model.contains("gemini-2.5");
    let is_gemini_3_pro = self.model.contains("gemini-3-pro");
    if let Some(level) = &config.thinking_level {
        if !is_gemini_3 {
            return Err("thinking_level is only supported for Gemini 3 models".to_string());
        }
        match level {
            // Pro models only accept Low/High.
            ThinkingLevel::Minimal | ThinkingLevel::Medium => {
                if is_gemini_3_pro {
                    return Err(
                        "Gemini 3 Pro only supports 'low' and 'high' thinking levels"
                            .to_string(),
                    );
                }
            }
            _ => {}
        }
    }
    if let Some(budget) = config.thinking_budget {
        if !is_gemini_2_5 {
            return Err("thinking_budget is only supported for Gemini 2.5 models".to_string());
        }
        if self.model.contains("2.5-pro") {
            if budget != -1 && !(128..=32768).contains(&budget) {
                return Err(
                    "Gemini 2.5 Pro thinking budget must be -1 (dynamic) or between 128-32768"
                        .to_string(),
                );
            }
        } else if self.model.contains("2.5-flash")
            && budget != -1
            && !(0..=24576).contains(&budget)
        {
            return Err(
                "Gemini 2.5 Flash thinking budget must be -1 (dynamic) or between 0-24576"
                    .to_string(),
            );
        }
    }
    Ok(())
}
/// True when the configured model belongs to a family this code treats as
/// thinking-capable (Gemini 3 or Gemini 2.5).
pub fn supports_thinking(&self) -> bool {
    ["gemini-3", "gemini-2.5"]
        .iter()
        .any(|family| self.model.contains(family))
}
/// True for Gemini 3 models — the only family this code treats as needing
/// thought signatures.
pub fn requires_thought_signatures(&self) -> bool {
    let model = self.model.as_str();
    model.contains("gemini-3")
}
}
impl OpenAIChatCompletionRequest {
pub fn new(model: impl Into<String>, messages: Vec<OpenAIChatMessage>) -> Self {
Self {
model: model.into(),
messages,
temperature: None,
max_tokens: None,
top_p: None,
frequency_penalty: None,
presence_penalty: None,
stop: None,
user: None,
provider: None,
stream: None,
logit_bias: None,
logprobs: None,
top_logprobs: None,
n: None,
response_format: None,
tools: None,
tool_choice: None,
thinking_config: None,
thinking: None,
}
}
pub fn with_temperature(mut self, temperature: f32) -> Self {
self.temperature = Some(temperature.clamp(0.0, 2.0));
self
}
pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
self.max_tokens = Some(max_tokens);
self
}
pub fn with_user(mut self, user: impl Into<String>) -> Self {
self.user = Some(user.into());
self
}
pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
self.provider = Some(provider.into());
self
}
pub fn with_stream(mut self, stream: bool) -> Self {
self.stream = Some(stream);
self
}
pub fn with_top_p(mut self, top_p: f32) -> Self {
self.top_p = Some(top_p.clamp(0.0, 1.0));
self
}
pub fn with_frequency_penalty(mut self, frequency_penalty: f32) -> Self {
self.frequency_penalty = Some(frequency_penalty.clamp(-2.0, 2.0));
self
}
pub fn with_presence_penalty(mut self, presence_penalty: f32) -> Self {
self.presence_penalty = Some(presence_penalty.clamp(-2.0, 2.0));
self
}
pub fn with_stop(mut self, stop: Vec<String>) -> Self {
self.stop = Some(stop);
self
}
pub fn with_logit_bias(mut self, logit_bias: serde_json::Value) -> Self {
self.logit_bias = Some(logit_bias);
self
}
pub fn with_logprobs(mut self, logprobs: bool) -> Self {
self.logprobs = Some(logprobs);
self
}
pub fn with_top_logprobs(mut self, top_logprobs: u32) -> Self {
self.top_logprobs = Some(top_logprobs);
self
}
pub fn with_n(mut self, n: u32) -> Self {
self.n = Some(n);
self
}
pub fn with_response_format(mut self, response_format: ResponseFormat) -> Self {
self.response_format = Some(response_format);
self
}
pub fn with_tools(mut self, tools: Vec<Tool>) -> Self {
self.tools = Some(tools);
self
}
pub fn with_tool_choice(mut self, tool_choice: ToolChoice) -> Self {
self.tool_choice = Some(tool_choice);
self
}
pub fn with_thinking_config(mut self, thinking_config: ThinkingConfig) -> Self {
self.thinking_config = Some(thinking_config);
self
}
pub fn with_include_thoughts(mut self, include_thoughts: bool) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.include_thoughts = Some(include_thoughts);
self.thinking_config = Some(config);
self
}
pub fn with_thinking_level(mut self, thinking_level: ThinkingLevel) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.thinking_level = Some(thinking_level);
self.thinking_config = Some(config);
self
}
pub fn with_thinking_budget(mut self, thinking_budget: i32) -> Self {
let mut config = self.thinking_config.unwrap_or_default();
config.thinking_budget = Some(thinking_budget);
self.thinking_config = Some(config);
self
}
pub fn with_anthropic_thinking(mut self, budget_tokens: i32) -> Self {
self.thinking =
Some(serde_json::json!({"type": "enabled", "budget_tokens": budget_tokens}));
self
}
pub fn validate_openai_compatibility(&self) -> Result<(), String> {
ChatCompletionRequest {
model: self.model.clone(),
messages: vec![],
temperature: self.temperature,
max_tokens: self.max_tokens,
top_p: self.top_p,
frequency_penalty: self.frequency_penalty,
presence_penalty: self.presence_penalty,
stop: self.stop.clone(),
user: self.user.clone(),
provider: self.provider.clone(),
stream: self.stream,
logit_bias: self.logit_bias.clone(),
logprobs: self.logprobs,
top_logprobs: self.top_logprobs,
n: self.n,
response_format: self.response_format.clone(),
tools: self.tools.clone(),
tool_choice: self.tool_choice.clone(),
thinking_config: self.thinking_config.clone(),
}
.validate_openai_compatibility()
}
pub fn supports_thinking(&self) -> bool {
self.model.contains("gemini-3") || self.model.contains("gemini-2.5")
}
pub fn requires_thought_signatures(&self) -> bool {
self.model.contains("gemini-3")
}
}
impl ChatMessage {
pub fn system(content: impl Into<String>) -> Self {
Self {
role: MessageRole::System,
content: content.into(),
}
}
pub fn user(content: impl Into<String>) -> Self {
Self {
role: MessageRole::User,
content: content.into(),
}
}
pub fn assistant(content: impl Into<String>) -> Self {
Self {
role: MessageRole::Assistant,
content: content.into(),
}
}
}
impl OpenAIMessageContent {
pub fn text(content: impl Into<String>) -> Self {
Self::Text(content.into())
}
pub fn parts(parts: Vec<OpenAIContentPart>) -> Self {
Self::Parts(parts)
}
}
impl OpenAIContentPart {
    /// A text part.
    pub fn text(content: impl Into<String>) -> Self {
        Self::Text {
            text: content.into(),
        }
    }
    /// Internal: builds an image part from already-owned values.
    fn image(url: String, detail: Option<String>) -> Self {
        Self::ImageUrl {
            image_url: OpenAIImageUrl { url, detail },
        }
    }
    /// An image part with no detail hint.
    pub fn image_url(url: impl Into<String>) -> Self {
        Self::image(url.into(), None)
    }
    /// An image part with an explicit detail hint.
    pub fn image_url_with_detail(url: impl Into<String>, detail: impl Into<String>) -> Self {
        Self::image(url.into(), Some(detail.into()))
    }
}
impl OpenAIChatMessage {
    /// A system message carrying `content`.
    pub fn system(content: impl Into<OpenAIMessageContent>) -> Self {
        Self::with_parts(OpenAIMessageRole::System, Some(content.into()), None, None)
    }
    /// A user message carrying `content`.
    pub fn user(content: impl Into<OpenAIMessageContent>) -> Self {
        Self::with_parts(OpenAIMessageRole::User, Some(content.into()), None, None)
    }
    /// An assistant message carrying `content`.
    pub fn assistant(content: impl Into<OpenAIMessageContent>) -> Self {
        Self::with_parts(OpenAIMessageRole::Assistant, Some(content.into()), None, None)
    }
    /// An assistant message that only requests tool invocations (no content).
    pub fn assistant_with_tool_calls(tool_calls: Vec<OpenAIToolCall>) -> Self {
        Self::with_parts(OpenAIMessageRole::Assistant, None, Some(tool_calls), None)
    }
    /// A tool-role message answering the call identified by `tool_call_id`.
    pub fn tool(tool_call_id: impl Into<String>, content: impl Into<OpenAIMessageContent>) -> Self {
        Self::with_parts(
            OpenAIMessageRole::Tool,
            Some(content.into()),
            None,
            Some(tool_call_id.into()),
        )
    }
    /// The fully explicit constructor the helpers above delegate to; `name`
    /// is always left unset.
    pub fn with_parts(
        role: OpenAIMessageRole,
        content: Option<OpenAIMessageContent>,
        tool_calls: Option<Vec<OpenAIToolCall>>,
        tool_call_id: Option<String>,
    ) -> Self {
        Self {
            role,
            content,
            name: None,
            tool_calls,
            tool_call_id,
        }
    }
}
impl From<String> for OpenAIMessageContent {
fn from(value: String) -> Self {
Self::Text(value)
}
}
impl From<&str> for OpenAIMessageContent {
fn from(value: &str) -> Self {
Self::Text(value.to_string())
}
}
use uuid::Uuid;
/// A billing/account record for one end user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User {
/// Internal primary key.
pub id: Uuid,
/// External identifier for the user — presumably from an auth provider;
/// source system not visible in this file.
pub user_id: String,
pub plan_name: String,
/// Remaining credit balance; units are not defined in this file.
pub current_credits: f64,
pub credits_used_this_month: f64,
/// When the monthly credit counter next resets (UTC).
pub credits_reset_date: DateTime<Utc>,
pub is_active: bool,
pub created_at: DateTime<Utc>,
}
/// An API key record belonging to a `User` (via `owner_id`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiKey {
pub id: Uuid,
/// The key material itself — NOTE(review): serialized in full by serde;
/// confirm this struct is never returned in responses where the key should
/// be redacted.
pub key: String,
pub owner_id: Uuid,
pub is_active: bool,
pub created_at: DateTime<Utc>,
/// `None` means the key never expires.
pub expires_at: Option<DateTime<Utc>>,
pub description: Option<String>,
pub last_used_at: Option<DateTime<Utc>>,
}
/// Aggregated usage over a reporting window of `period_days` days.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UsageStats {
pub period_days: u32,
/// One entry per day in the window.
pub daily_usage: Vec<DailyUsage>,
pub recent_transactions: Vec<CreditTransaction>,
pub total_requests: u64,
pub total_tokens: u64,
}
/// Usage totals for a single calendar day.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyUsage {
/// The day as a string; format (e.g. "YYYY-MM-DD") is not enforced here —
/// determined by whatever populates it.
pub date: String,
pub credits_used: f64,
pub requests: u64,
pub tokens: u64,
}
/// A single entry in a user's credit ledger.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreditTransaction {
pub id: Uuid,
pub transaction_type: TransactionType,
/// Amount of credits moved by this transaction; sign convention is not
/// visible in this file — confirm against the writer.
pub credits_amount: f64,
/// The user's balance after applying this transaction.
pub credits_balance_after: f64,
/// Provider/model attribution for usage-type transactions, when known.
pub provider: Option<String>,
pub model: Option<String>,
pub description: String,
pub created_at: DateTime<Utc>,
}
/// Kinds of credit-ledger entries; serialized as lowercase strings
/// ("usage", "reset", "purchase", "refund").
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TransactionType {
Usage,
Reset,
Purchase,
Refund,
}
// Backwards-compatible aliases kept for older call sites.
pub type ChatRole = MessageRole;
pub type ChatUsage = Usage;
pub type HealthCheck = HealthStatus;
/// Per-dependency liveness flags reported by the health endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthServices {
pub database: bool,
pub redis: bool,
pub providers: bool,
}
/// Overall service health states.
///
/// NOTE(review): `rename_all = "lowercase"` serializes `NeedsInit` as
/// "needsinit" (no separator). Confirm consumers expect that rather than
/// "needs_init".
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum HealthStatusEnum {
Healthy,
Degraded,
Unhealthy,
NeedsInit,
}
/// Requested output format for completions.
///
/// NOTE(review): with this externally-tagged layout, `Text` serializes as the
/// bare string "text" and `JsonSchema` as {"json_schema": ...}. The OpenAI
/// wire format uses an object with a "type" field — confirm a conversion
/// happens before this is sent upstream, or consider `#[serde(tag = "type")]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ResponseFormat {
Text,
JsonObject,
JsonSchema {
/// Arbitrary JSON Schema payload, passed through untouched.
json_schema: serde_json::Value,
},
}
/// A tool offered to the model; currently only function tools exist
/// (see `ToolType`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tool {
pub r#type: ToolType,
pub function: FunctionDefinition,
}
/// Tool discriminator; serialized as "function".
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ToolType {
Function,
}
/// Declaration of a callable function tool.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionDefinition {
pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
/// Parameter schema as raw JSON — presumably JSON Schema; not validated
/// in this file.
#[serde(skip_serializing_if = "Option::is_none")]
pub parameters: Option<serde_json::Value>,
}
/// Tool-selection directive for a request.
///
/// NOTE(review): under `#[serde(untagged)]`, the unit variants `None` and
/// `Auto` both serialize to JSON null and will not round-trip the strings
/// "none"/"auto". Verify this matches the wire format providers expect; a
/// custom (de)serializer may be needed.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ToolChoice {
None,
Auto,
Tool {
r#type: ToolType,
function: ToolFunction,
},
}
/// Names the specific function forced by `ToolChoice::Tool`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolFunction {
pub name: String,
}
/// Model "thinking" (reasoning) configuration.
///
/// `thinking_level` targets Gemini 3 and `thinking_budget` targets
/// Gemini 2.5; request validation elsewhere in this file rejects configs
/// that set both.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ThinkingConfig {
/// Whether thought content should be included in responses.
#[serde(skip_serializing_if = "Option::is_none")]
pub include_thoughts: Option<bool>,
/// Gemini 3 reasoning depth.
#[serde(skip_serializing_if = "Option::is_none")]
pub thinking_level: Option<ThinkingLevel>,
/// Gemini 2.5 reasoning token budget; -1 requests a dynamic budget.
#[serde(skip_serializing_if = "Option::is_none")]
pub thinking_budget: Option<i32>,
}
/// Gemini 3 reasoning depth; serialized as lowercase strings. Validation in
/// this file restricts Gemini 3 Pro to `Low` and `High`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ThinkingLevel {
Minimal,
Low,
Medium,
High,
}
/// One part of an `EnhancedChatMessage`: text, a function call, or a
/// function response, optionally flagged as model "thought" content.
/// The constructors in `impl ContentPart` set exactly one payload field.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ContentPart {
#[serde(skip_serializing_if = "Option::is_none")]
pub text: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub function_call: Option<FunctionCall>,
#[serde(skip_serializing_if = "Option::is_none")]
pub function_response: Option<FunctionResponse>,
/// `Some(true)` marks this part as thought content (see `as_thought`).
#[serde(skip_serializing_if = "Option::is_none")]
pub thought: Option<bool>,
/// Opaque signature string attached via `with_thought_signature`.
#[serde(skip_serializing_if = "Option::is_none")]
pub thought_signature: Option<String>,
}
/// A function invocation requested by the model: name plus JSON arguments.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct FunctionCall {
pub name: String,
pub args: serde_json::Value,
}
/// The result of executing a function call: name plus JSON response payload.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct FunctionResponse {
pub name: String,
pub response: serde_json::Value,
}
/// A multi-part chat message (text / function call / function response
/// parts), as opposed to the plain-text `ChatMessage`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct EnhancedChatMessage {
pub role: MessageRole,
pub parts: Vec<ContentPart>,
}
/// Token accounting that additionally reports thinking-token usage when the
/// provider supplies it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnhancedUsage {
pub prompt_tokens: u32,
pub completion_tokens: u32,
pub total_tokens: u32,
/// Tokens spent on "thoughts", when reported by the provider.
#[serde(skip_serializing_if = "Option::is_none")]
pub thoughts_token_count: Option<u32>,
}
impl ThinkingConfig {
    /// Creates an empty config (no level, no budget, thoughts flag unset).
    pub fn new() -> Self {
        Self::default()
    }
    /// Gemini 3 style config: reasoning depth expressed as a `ThinkingLevel`.
    pub fn gemini_3(level: ThinkingLevel, include_thoughts: bool) -> Self {
        Self {
            thinking_level: Some(level),
            include_thoughts: Some(include_thoughts),
            thinking_budget: None,
        }
    }
    /// Gemini 2.5 style config: reasoning depth expressed as a token budget
    /// (-1 requests a dynamic budget).
    pub fn gemini_2_5(budget: i32, include_thoughts: bool) -> Self {
        Self {
            thinking_budget: Some(budget),
            include_thoughts: Some(include_thoughts),
            thinking_level: None,
        }
    }
    /// Preset: maximum reasoning depth with thoughts included.
    ///
    /// Sets only `thinking_level`: `validate_thinking_config` rejects any
    /// config carrying both a level and a budget, so the `thinking_budget`
    /// this preset previously set made it always fail validation. Use
    /// `gemini_2_5(-1, true)` for the budget-based form.
    pub fn high_reasoning() -> Self {
        Self {
            thinking_level: Some(ThinkingLevel::High),
            include_thoughts: Some(true),
            thinking_budget: None,
        }
    }
    /// Preset: minimal reasoning for fast replies, thoughts hidden.
    ///
    /// Sets only `thinking_level` for the same reason as `high_reasoning`;
    /// use `gemini_2_5(512, false)` for the budget-based form.
    pub fn fast_response() -> Self {
        Self {
            thinking_level: Some(ThinkingLevel::Low),
            include_thoughts: Some(false),
            thinking_budget: None,
        }
    }
}
impl ContentPart {
    /// Internal: a part with every field unset.
    fn blank() -> Self {
        Self {
            text: None,
            function_call: None,
            function_response: None,
            thought: None,
            thought_signature: None,
        }
    }
    /// A plain text part.
    pub fn text(content: impl Into<String>) -> Self {
        Self {
            text: Some(content.into()),
            ..Self::blank()
        }
    }
    /// A function-call part with the given name and JSON arguments.
    pub fn function_call(name: impl Into<String>, args: serde_json::Value) -> Self {
        Self {
            function_call: Some(FunctionCall {
                name: name.into(),
                args,
            }),
            ..Self::blank()
        }
    }
    /// A function-response part carrying the result JSON for `name`.
    pub fn function_response(name: impl Into<String>, response: serde_json::Value) -> Self {
        Self {
            function_response: Some(FunctionResponse {
                name: name.into(),
                response,
            }),
            ..Self::blank()
        }
    }
    /// Attaches a thought signature to this part.
    pub fn with_thought_signature(self, signature: impl Into<String>) -> Self {
        Self {
            thought_signature: Some(signature.into()),
            ..self
        }
    }
    /// Marks this part as model "thought" content.
    pub fn as_thought(self) -> Self {
        Self {
            thought: Some(true),
            ..self
        }
    }
}
impl EnhancedChatMessage {
    /// Internal: a message for `role` holding a single text part.
    fn single_text(role: MessageRole, content: impl Into<String>) -> Self {
        Self {
            role,
            parts: vec![ContentPart::text(content)],
        }
    }
    /// A system message with one text part.
    pub fn system(content: impl Into<String>) -> Self {
        Self::single_text(MessageRole::System, content)
    }
    /// A user message with one text part.
    pub fn user(content: impl Into<String>) -> Self {
        Self::single_text(MessageRole::User, content)
    }
    /// An assistant message with one text part.
    pub fn assistant(content: impl Into<String>) -> Self {
        Self::single_text(MessageRole::Assistant, content)
    }
    /// A message with an explicit role and arbitrary parts.
    pub fn with_parts(role: MessageRole, parts: Vec<ContentPart>) -> Self {
        Self { role, parts }
    }
}
/// One streamed chunk of a chat completion (OpenAI-style SSE payload shape).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamResponse {
pub id: String,
pub object: String,
/// Creation time as a Unix timestamp — presumably seconds; confirm against
/// the producer.
pub created: u64,
pub model: String,
pub choices: Vec<ChatCompletionStreamChoice>,
/// Usage totals; typically only present on the final chunk — confirm.
#[serde(skip_serializing_if = "Option::is_none")]
pub usage: Option<Usage>,
}
/// One choice within a streamed chunk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamChoice {
pub index: u32,
/// The incremental delta for this choice.
pub delta: ChatCompletionStreamDelta,
/// Set on the terminating chunk for this choice.
#[serde(skip_serializing_if = "Option::is_none")]
pub finish_reason: Option<String>,
}
/// Incremental message content carried by one streamed chunk; every field is
/// optional because each chunk delivers only what changed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamDelta {
#[serde(skip_serializing_if = "Option::is_none")]
pub role: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub content: Option<String>,
/// Incremental "thought" text, for providers that stream reasoning.
#[serde(skip_serializing_if = "Option::is_none")]
pub thought: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub tool_calls: Option<Vec<ToolCall>>,
}
/// A (possibly partial) tool call within a stream delta; fields other than
/// `index` may arrive across multiple chunks and must be merged by `index`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
pub index: u32,
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub r#type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub function: Option<ToolCallFunction>,
}
/// Function name/arguments fragment of a streamed tool call; `arguments` is a
/// raw JSON string that may arrive in pieces across chunks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCallFunction {
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub arguments: Option<String>,
}