//! Chat completion endpoints for the Rainy SDK (`rainy_sdk/endpoints/chat.rs`).
//!
//! Provides blocking and streaming chat completions, in both the native and
//! OpenAI-compatible request shapes.
use crate::client::RainyClient;
use crate::error::{RainyError, Result};
use crate::models::{
    ChatCompletionRequest, ChatCompletionResponse, ChatCompletionStreamResponse,
    OpenAIChatCompletionRequest, OpenAIChatCompletionResponse,
};
use futures::Stream;
use std::pin::Pin;
9
10impl RainyClient {
11    /// Create a chat completion
12    ///
13    /// This endpoint sends a chat completion request to the Rainy API.
14    ///
15    /// # Arguments
16    ///
17    /// * `request` - The chat completion request parameters
18    ///
19    /// # Returns
20    ///
21    /// Returns the chat completion response from the AI model.
22    ///
23    /// # Example
24    ///
25    /// ```rust,no_run
26    /// # use rainy_sdk::{RainyClient, ChatCompletionRequest, ChatMessage, MessageRole};
27    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
28    /// let client = RainyClient::with_api_key("user-api-key")?;
29    ///
30    /// let messages = vec![
31    ///     ChatMessage::user("Hello, how are you?"),
32    /// ];
33    ///
34    /// let request = ChatCompletionRequest::new("gemini-pro", messages)
35    ///     .with_max_tokens(150)
36    ///     .with_temperature(0.7);
37    ///
38    /// let response = client.create_chat_completion(request).await?;
39    ///
40    /// if let Some(choice) = response.choices.first() {
41    ///     println!("Response: {}", choice.message.content);
42    /// }
43    /// # Ok(())
44    /// # }
45    /// ```
46    pub async fn create_chat_completion(
47        &self,
48        request: ChatCompletionRequest,
49    ) -> Result<ChatCompletionResponse> {
50        let body = serde_json::to_value(request)?;
51        self.make_request(reqwest::Method::POST, "/chat/completions", Some(body))
52            .await
53    }
54
55    /// Create an OpenAI-compatible chat completion with full tool-call replay support.
56    ///
57    /// This variant accepts the complete OpenAI message shape, including:
58    /// assistant `tool_calls`, `tool` role messages, multimodal content parts, and
59    /// provider-specific metadata such as thought signatures.
60    pub async fn create_openai_chat_completion(
61        &self,
62        request: OpenAIChatCompletionRequest,
63    ) -> Result<OpenAIChatCompletionResponse> {
64        let body = serde_json::to_value(request)?;
65        self.make_request(reqwest::Method::POST, "/chat/completions", Some(body))
66            .await
67    }
68
69    /// Create a chat completion with streaming
70    ///
71    /// This method provides streaming support for chat completions.
72    ///
73    /// # Arguments
74    ///
75    /// * `request` - The chat completion request parameters
76    ///
77    /// # Returns
78    ///
79    /// Returns a stream of chat completion responses.
80    ///
81    /// # Example
82    ///
83    /// ```rust,no_run
84    /// # use rainy_sdk::{RainyClient, ChatCompletionRequest, ChatMessage, MessageRole};
85    /// # use futures::StreamExt;
86    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
87    /// let client = RainyClient::with_api_key("user-api-key")?;
88    ///
89    /// let messages = vec![
90    ///     ChatMessage::user("Tell me a story"),
91    /// ];
92    ///
93    /// let request = ChatCompletionRequest::new("llama-3.1-8b-instant", messages)
94    ///     .with_max_tokens(500)
95    ///     .with_temperature(0.8)
96    ///     .with_stream(true);
97    ///
98    /// let mut stream = client.create_chat_completion_stream(request).await?;
99    ///
100    /// while let Some(chunk) = stream.next().await {
101    ///     match chunk {
102    ///         Ok(response) => {
103    ///             if let Some(choice) = response.choices.first() {
104    ///                 if let Some(content) = &choice.delta.content {
105    ///                     print!("{}", content);
106    ///                 }
107    ///             }
108    ///         }
109    ///         Err(e) => eprintln!("Error: {}", e),
110    ///     }
111    /// }
112    /// # Ok(())
113    /// # }
114    /// ```
115    pub async fn create_chat_completion_stream(
116        &self,
117        request: ChatCompletionRequest,
118    ) -> Result<Pin<Box<dyn Stream<Item = Result<ChatCompletionStreamResponse>> + Send>>> {
119        use eventsource_stream::Eventsource;
120        use futures::StreamExt;
121
122        let mut request_with_stream = request;
123        request_with_stream.stream = Some(true);
124
125        let url = format!("{}/api/v1/chat/completions", self.auth_config().base_url);
126        let headers = self.auth_config().build_headers()?;
127
128        let response = self
129            .http_client()
130            .post(&url)
131            .headers(headers)
132            .json(&request_with_stream)
133            .send()
134            .await?;
135
136        if !response.status().is_success() {
137            return Err(self
138                .handle_response::<ChatCompletionStreamResponse>(response)
139                .await
140                .err()
141                .unwrap());
142        }
143
144        let stream = response
145            .bytes_stream()
146            .eventsource()
147            .filter_map(|event| async move {
148                match event {
149                    Ok(event) => {
150                        // Handle the [DONE] marker
151                        if event.data.trim() == "[DONE]" {
152                            return None;
153                        }
154
155                        // Parse the JSON data
156                        match serde_json::from_str::<ChatCompletionStreamResponse>(&event.data) {
157                            Ok(response) => Some(Ok(response)),
158                            Err(e) => Some(Err(RainyError::Serialization {
159                                message: e.to_string(),
160                                source_error: Some(e.to_string()),
161                            })),
162                        }
163                    }
164                    Err(e) => {
165                        // Convert eventsource error to RainyError
166                        Some(Err(RainyError::Network {
167                            message: format!("SSE parsing error: {e}"),
168                            retryable: true,
169                            source_error: Some(e.to_string()),
170                        }))
171                    }
172                }
173            });
174
175        Ok(Box::pin(stream))
176    }
177
178    /// Create a streaming OpenAI-compatible chat completion.
179    ///
180    /// This method uses the same `/api/v1/chat/completions` route but accepts the full
181    /// OpenAI-compatible message format so callers can replay tool history without a
182    /// separate compatibility bridge.
183    pub async fn create_openai_chat_completion_stream(
184        &self,
185        request: OpenAIChatCompletionRequest,
186    ) -> Result<Pin<Box<dyn Stream<Item = Result<ChatCompletionStreamResponse>> + Send>>> {
187        use eventsource_stream::Eventsource;
188        use futures::StreamExt;
189
190        let mut request_with_stream = request;
191        request_with_stream.stream = Some(true);
192
193        let url = format!("{}/api/v1/chat/completions", self.auth_config().base_url);
194        let headers = self.auth_config().build_headers()?;
195
196        let response = self
197            .http_client()
198            .post(&url)
199            .headers(headers)
200            .json(&request_with_stream)
201            .send()
202            .await?;
203
204        if !response.status().is_success() {
205            return Err(self
206                .handle_response::<ChatCompletionStreamResponse>(response)
207                .await
208                .err()
209                .unwrap());
210        }
211
212        let stream = response
213            .bytes_stream()
214            .eventsource()
215            .filter_map(|event| async move {
216                match event {
217                    Ok(event) => {
218                        if event.data.trim() == "[DONE]" {
219                            return None;
220                        }
221
222                        match serde_json::from_str::<ChatCompletionStreamResponse>(&event.data) {
223                            Ok(response) => Some(Ok(response)),
224                            Err(e) => Some(Err(RainyError::Serialization {
225                                message: e.to_string(),
226                                source_error: Some(e.to_string()),
227                            })),
228                        }
229                    }
230                    Err(e) => Some(Err(RainyError::Network {
231                        message: format!("SSE parsing error: {e}"),
232                        retryable: true,
233                        source_error: Some(e.to_string()),
234                    })),
235                }
236            });
237
238        Ok(Box::pin(stream))
239    }
240}