// rainy_sdk/search.rs
1//! Web Research Module
2//!
3//! This module provides types and functionality for web research via Rainy API v3.
4//! The current SDK implementation maps the legacy deep-research API onto v3 `/api/v1/search`.
5
6use crate::models::{ResearchDepth, ResearchProvider};
7use serde::{Deserialize, Serialize};
8
9/// Thinking level for Gemini 3 models
10#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
11#[serde(rename_all = "lowercase")]
12pub enum ThinkingLevel {
13 /// Minimum reasoning depth
14 Minimal,
15 /// Fast reasoning depth
16 Low,
17 /// Balanced reasoning depth
18 Medium,
19 /// Maximum reasoning depth
20 High,
21}
22
/// Options for configuring a web research request.
///
/// Every field carries a serde default, so a partially-specified JSON
/// payload deserializes cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchConfig {
    /// The search provider to use
    #[serde(default)]
    pub provider: ResearchProvider,
    /// The depth of the search
    #[serde(default)]
    pub depth: ResearchDepth,
    /// Maximum number of sources to include
    /// (falls back to `default_max_sources`, i.e. 10, when absent)
    #[serde(default = "default_max_sources")]
    pub max_sources: u32,
    /// Whether to include images in the results
    #[serde(default)]
    pub include_images: bool,
    /// Process the request asynchronously
    #[serde(default)]
    pub async_mode: bool,
    /// The specific AI model to use for analysis (e.g. "gemini-2.0-flash-exp")
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// The thinking level for Gemini 3 models.
    // NOTE(review): this is the only field renamed to camelCase on the wire;
    // confirm the remaining snake_case names match the v3 API contract.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        rename = "thinkingLevel"
    )]
    pub thinking_level: Option<ThinkingLevel>,
}
52
/// Serde default for [`ResearchConfig::max_sources`].
fn default_max_sources() -> u32 {
    10
}
56
57impl Default for ResearchConfig {
58 fn default() -> Self {
59 Self {
60 provider: ResearchProvider::default(),
61 depth: ResearchDepth::default(),
62 max_sources: 10,
63 include_images: false,
64 async_mode: false,
65 model: None,
66 thinking_level: None,
67 }
68 }
69}
70
71impl ResearchConfig {
72 /// Create a new configuration with default settings
73 pub fn new() -> Self {
74 Self::default()
75 }
76
77 /// Set the search provider
78 pub fn with_provider(mut self, provider: ResearchProvider) -> Self {
79 self.provider = provider;
80 self
81 }
82
83 /// Set the search depth
84 pub fn with_depth(mut self, depth: ResearchDepth) -> Self {
85 self.depth = depth;
86 self
87 }
88
89 /// Set maximum sources
90 pub fn with_max_sources(mut self, max: u32) -> Self {
91 self.max_sources = max;
92 self
93 }
94
95 /// Set the request to be processed asynchronously
96 pub fn with_async(mut self, async_mode: bool) -> Self {
97 self.async_mode = async_mode;
98 self
99 }
100
101 /// Set the specific AI model
102 pub fn with_model(mut self, model: impl Into<String>) -> Self {
103 self.model = Some(model.into());
104 self
105 }
106
107 /// Set the thinking level (Gemini 3 only)
108 pub fn with_thinking_level(mut self, level: ThinkingLevel) -> Self {
109 self.thinking_level = Some(level);
110 self
111 }
112}
113
/// Result from a research operation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchResult {
    /// Original research prompt/topic
    pub topic: String,
    /// Comprehensive summary/answer
    pub content: String,
    /// Sources used for the research; empty when the payload omits them
    /// (`#[serde(default)]`)
    #[serde(default)]
    pub sources: Vec<ResearchSource>,
    /// Provider used for the search
    pub provider: String,
}
127
/// A source used in the research.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResearchSource {
    /// The title of the web page or document
    pub title: String,
    /// The URL of the source
    pub url: String,
    /// A short snippet or excerpt from the content; `None` when the field
    /// is missing from the payload (`#[serde(default)]`)
    #[serde(default)]
    pub snippet: Option<String>,
}
139
140/// Response from the research API
141#[derive(Debug, Clone, Serialize, Deserialize)]
142#[serde(untagged)]
143pub enum ResearchResponse {
144 /// Synchronous response with results
145 Sync {
146 /// Whether the operation was successful
147 success: bool,
148 /// The operation mode ("sync")
149 mode: String,
150 /// The actual research report or answer
151 result: String,
152 /// When the result was generated
153 generated_at: String,
154 /// Metadata about the search provider
155 provider: String,
156 },
157 /// Asynchronous response with task ID
158 Async {
159 /// Whether the operation was successful
160 success: bool,
161 /// The operation mode ("async")
162 mode: String,
163 /// Unique identifier for the background task
164 #[serde(rename = "taskId")]
165 task_id: String,
166 /// Informational message about task status
167 message: String,
168 },
169}
170
// Why `DeepResearchResponse` exists alongside `ResearchResponse`: the server
// handler (agents.ts) runs `researchNetwork.run(researchPrompt)` and responds
// with `{ success: true, mode: "sync", result, ... }`, where `result` is a
// string containing the markdown report. `DeepResearchResponse` below
// flattens the sync/async shapes into one struct so SDK users get a single,
// cleaner type instead of pattern-matching an untagged enum.
/// Unified response structure for deep research operations.
///
/// Flattens the sync/async response shapes into a single struct: which of
/// the optional fields are populated depends on `mode`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepResearchResponse {
    /// Whether the operation was successfully initiated or completed
    pub success: bool,
    /// The operation mode ("sync" or "async")
    pub mode: String,
    /// The result of the research (only set for sync mode).
    // Kept as a raw JSON value rather than `String` — presumably the server
    // may return either a plain markdown string or a structured object;
    // TODO(review): confirm against the API.
    pub result: Option<serde_json::Value>,
    /// The unique task identifier (only set for async mode)
    #[serde(rename = "taskId")]
    pub task_id: Option<String>,
    /// ISO 8601 timestamp of generation
    #[serde(rename = "generatedAt")]
    pub generated_at: Option<String>,
    /// The provider used for the operation
    pub provider: Option<String>,
    /// Informational message (e.g. error details or task status)
    pub message: Option<String>,
}
201
202pub use crate::search::DeepResearchResponse as ResearchApiResponse;