// rainy_sdk — src/search.rs

1//! Web Research Module
2//!
3//! This module provides types and functionality for web research via Rainy API v2.
4//! Supports multiple providers (Exa, Tavily) and configurable search depth.
5
6use crate::models::{ResearchDepth, ResearchProvider};
7use serde::{Deserialize, Serialize};
8
9/// Thinking level for Gemini 3 models
10#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
11#[serde(rename_all = "lowercase")]
12pub enum ThinkingLevel {
13    /// Minimum reasoning depth
14    Minimal,
15    /// Fast reasoning depth
16    Low,
17    /// Balanced reasoning depth
18    Medium,
19    /// Maximum reasoning depth
20    High,
21}
22
23/// Options for configuring a web research request
/// Options for configuring a web research request.
///
/// Every field has a sensible default (see the `Default` impl); prefer
/// constructing via [`ResearchConfig::new`] and chaining the `with_*`
/// builder methods.
///
/// NOTE(review): `thinking_level` is renamed to camelCase on the wire while
/// `max_sources` / `include_images` / `async_mode` are not — confirm the
/// expected field casing for this struct against the API schema.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchConfig {
    /// The search provider to use
    #[serde(default)]
    pub provider: ResearchProvider,
    /// The depth of the search
    #[serde(default)]
    pub depth: ResearchDepth,
    /// Maximum number of sources to include (defaults to 10)
    #[serde(default = "default_max_sources")]
    pub max_sources: u32,
    /// Whether to include images in the results
    #[serde(default)]
    pub include_images: bool,
    /// Process the request asynchronously
    #[serde(default)]
    pub async_mode: bool,
    /// The specific AI model to use for analysis (e.g. "gemini-2.0-flash-exp")
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// The thinking level for Gemini 3 models
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        rename = "thinkingLevel"
    )]
    pub thinking_level: Option<ThinkingLevel>,
}
52
/// Serde default for [`ResearchConfig::max_sources`].
fn default_max_sources() -> u32 {
    10
}
56
57impl Default for ResearchConfig {
58    fn default() -> Self {
59        Self {
60            provider: ResearchProvider::default(),
61            depth: ResearchDepth::default(),
62            max_sources: 10,
63            include_images: false,
64            async_mode: false,
65            model: None,
66            thinking_level: None,
67        }
68    }
69}
70
71impl ResearchConfig {
72    /// Create a new configuration with default settings
73    pub fn new() -> Self {
74        Self::default()
75    }
76
77    /// Set the search provider
78    pub fn with_provider(mut self, provider: ResearchProvider) -> Self {
79        self.provider = provider;
80        self
81    }
82
83    /// Set the search depth
84    pub fn with_depth(mut self, depth: ResearchDepth) -> Self {
85        self.depth = depth;
86        self
87    }
88
89    /// Set maximum sources
90    pub fn with_max_sources(mut self, max: u32) -> Self {
91        self.max_sources = max;
92        self
93    }
94
95    /// Set the request to be processed asynchronously
96    pub fn with_async(mut self, async_mode: bool) -> Self {
97        self.async_mode = async_mode;
98        self
99    }
100
101    /// Set the specific AI model
102    pub fn with_model(mut self, model: impl Into<String>) -> Self {
103        self.model = Some(model.into());
104        self
105    }
106
107    /// Set the thinking level (Gemini 3 only)
108    pub fn with_thinking_level(mut self, level: ThinkingLevel) -> Self {
109        self.thinking_level = Some(level);
110        self
111    }
112}
113
114/// Request body for web research
115#[derive(Debug, Clone, Serialize)]
116pub(crate) struct ResearchRequest {
117    pub topic: String,
118    pub provider: ResearchProvider,
119    pub depth: ResearchDepth,
120    #[serde(rename = "maxSources")]
121    pub max_sources: u32,
122    #[serde(rename = "async")]
123    pub async_mode: bool,
124    #[serde(skip_serializing_if = "Option::is_none")]
125    pub model: Option<String>,
126    #[serde(skip_serializing_if = "Option::is_none", rename = "thinkingLevel")]
127    pub thinking_level: Option<ThinkingLevel>,
128}
129
130impl ResearchRequest {
131    pub fn new(topic: impl Into<String>, config: &ResearchConfig) -> Self {
132        Self {
133            topic: topic.into(),
134            provider: config.provider.clone(),
135            depth: config.depth.clone(),
136            max_sources: config.max_sources,
137            async_mode: config.async_mode,
138            model: config.model.clone(),
139            thinking_level: config.thinking_level.clone(),
140        }
141    }
142}
143
/// Result from a research operation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchResult {
    /// The original research prompt/topic
    pub topic: String,
    /// Comprehensive summary/answer produced by the research run
    pub content: String,
    /// Sources used for the research (empty when absent from the payload)
    #[serde(default)]
    pub sources: Vec<ResearchSource>,
    /// Provider used for the search (e.g. "exa", "tavily")
    pub provider: String,
}
157
/// A single source cited by a research result.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchSource {
    /// The title of the web page or document
    pub title: String,
    /// The URL of the source
    pub url: String,
    /// A short snippet or excerpt from the content
    /// (`None` when the provider omits it)
    #[serde(default)]
    pub snippet: Option<String>,
}
169
170/// Response from the research API
171#[derive(Debug, Clone, Serialize, Deserialize)]
172#[serde(untagged)]
173pub enum ResearchResponse {
174    /// Synchronous response with results
175    Sync {
176        /// Whether the operation was successful
177        success: bool,
178        /// The operation mode ("sync")
179        mode: String,
180        /// The actual research report or answer
181        result: String,
182        /// When the result was generated
183        generated_at: String,
184        /// Metadata about the search provider
185        provider: String,
186    },
187    /// Asynchronous response with task ID
188    Async {
189        /// Whether the operation was successful
190        success: bool,
191        /// The operation mode ("async")
192        mode: String,
193        /// Unique identifier for the background task
194        #[serde(rename = "taskId")]
195        task_id: String,
196        /// Informational message about task status
197        message: String,
198    },
199}
200
// Background: in the server's agents.ts handler, the sync path does
//   const result = await researchNetwork.run(researchPrompt);
//   return c.json({ success: true, mode: "sync", result, ... });
// so `result` is a plain string holding the markdown report, not a
// structured object. `DeepResearchResponse` below gives SDK users a single,
// cleaner struct that covers both the sync and async response shapes.
/// Unified response structure for deep research operations.
///
/// Covers both the synchronous and asynchronous response shapes in one
/// struct: all mode-dependent fields are `Option`s, which serde leaves as
/// `None` when the corresponding key is absent from the JSON payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepResearchResponse {
    /// Whether the operation was successfully initiated or completed
    pub success: bool,
    /// The operation mode ("sync" or "async")
    pub mode: String,
    /// The result of the research (only set for sync mode)
    pub result: Option<serde_json::Value>,
    /// The unique task identifier (only set for async mode)
    #[serde(rename = "taskId")]
    pub task_id: Option<String>,
    /// ISO 8601 timestamp of generation
    #[serde(rename = "generatedAt")]
    pub generated_at: Option<String>,
    /// The provider used for the operation
    pub provider: Option<String>,
    /// Informational message (e.g. error details or task status)
    pub message: Option<String>,
}
231
232pub use crate::search::DeepResearchResponse as ResearchApiResponse;