1use serde::{Deserialize, Serialize};
2use std::cmp::Ordering;
3use std::collections::HashMap;
4
/// Returns `true` when the map has no entries.
///
/// Used as a `#[serde(skip_serializing_if = "map_is_empty")]` predicate on
/// the flattened `extra` maps below, so an empty catch-all map is omitted
/// from serialized output entirely.
///
/// Generalized over key/value types (the original was hard-coded to
/// `HashMap<String, serde_json::Value>`); existing call sites resolve
/// unchanged through type inference.
fn map_is_empty<K, V>(value: &HashMap<K, V>) -> bool {
    value.is_empty()
}
8
/// One message in the crate's simplified chat transcript: a speaker role
/// plus plain text content (no multi-part/image content — see
/// [`OpenAIChatMessage`] for the richer wire format).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ChatMessage {
    pub role: MessageRole,
    pub content: String,
}

/// Speaker role for [`ChatMessage`].
///
/// Serialized in lowercase (`"system"`, `"user"`, `"assistant"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum MessageRole {
    System,
    User,
    Assistant,
}
29
/// Speaker role for the OpenAI-style wire format; unlike [`MessageRole`]
/// it also includes `Tool` for tool-result messages.
///
/// Serialized in lowercase (`"system"`, `"user"`, `"assistant"`, `"tool"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIMessageRole {
    System,
    User,
    Assistant,
    Tool,
}

/// Message content in the OpenAI wire format: either a bare string or a
/// list of typed parts (text / image URL).
///
/// `untagged`: deserialization tries `Text` first, then `Parts`, based on
/// the JSON shape alone.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum OpenAIMessageContent {
    Text(String),
    Parts(Vec<OpenAIContentPart>),
}

/// One element of a multi-part message body.
///
/// Serialized with a `"type"` discriminator field in snake_case
/// (`"text"` / `"image_url"`), matching the OpenAI content-part schema.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIContentPart {
    Text {
        text: String,
    },
    ImageUrl {
        image_url: OpenAIImageUrl,
    },
}
69
/// Image reference inside an [`OpenAIContentPart::ImageUrl`] part.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIImageUrl {
    /// Image location (presumably an http(s) or data URL — not validated here).
    pub url: String,
    /// Optional detail hint; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub detail: Option<String>,
}

/// Function invocation payload inside a tool call.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIFunctionCall {
    pub name: String,
    /// Raw argument string as sent on the wire (typically JSON text);
    /// kept unparsed here.
    pub arguments: String,
}

/// A tool call attached to an assistant message.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIToolCall {
    pub id: String,
    /// Tool-call kind; `r#type` because `type` is a Rust keyword.
    pub r#type: String,
    /// Provider-specific extra payload, passed through untyped; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extra_content: Option<serde_json::Value>,
    pub function: OpenAIFunctionCall,
}
102
/// Full OpenAI-style chat message: role plus optional content, name,
/// tool calls, and tool-call-id. All optional fields are omitted from the
/// serialized JSON when `None`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct OpenAIChatMessage {
    pub role: OpenAIMessageRole,
    /// Absent for e.g. assistant messages that carry only `tool_calls`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<OpenAIMessageContent>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<OpenAIToolCall>>,
    /// Links a `Tool`-role message back to the call it answers.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
121
/// Backend used for research/search requests; serialized lowercase.
/// Defaults to `Exa`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResearchProvider {
    #[default]
    Exa,
    Tavily,
    /// Let the server pick a provider — presumably; TODO confirm semantics.
    Auto,
}

/// Search depth for research requests; serialized lowercase.
/// Defaults to `Basic`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResearchDepth {
    #[default]
    Basic,
    Advanced,
}
145
/// Chat-completion request using the crate's simplified [`ChatMessage`]
/// transcript.
///
/// Only `model` and `messages` are required; every other field is optional
/// and omitted from the serialized JSON when `None`. Field names follow the
/// OpenAI chat-completions schema; some fields (`provider`,
/// `provider_options`, `reasoning`, `include_reasoning`, `thinking_config`)
/// appear gateway-specific — TODO confirm against the server's API docs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
    /// Target model identifier.
    pub model: String,
    /// Conversation messages sent to the model.
    pub messages: Vec<ChatMessage>,
    // --- Sampling / decoding controls ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Legacy token cap; see also `max_completion_tokens`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_completion_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    // --- Identity / routing ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    // --- Streaming ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_options: Option<serde_json::Value>,
    // --- Logits / sampling diagnostics ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logit_bias: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_logprobs: Option<u32>,
    /// Number of completions to generate.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub n: Option<u32>,
    // --- Output shaping and tools ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<Tool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub seed: Option<i64>,
    // --- Caching / provider passthrough ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider_options: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_retention: Option<String>,
    // --- Reasoning ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_reasoning: Option<bool>,
    // --- Misc request options ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub store: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub safety_identifier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prediction: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub verbosity: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_search_options: Option<serde_json::Value>,
    // --- Deprecated function-calling API (pre-tools), kept untyped ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub functions: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub function_call: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_config: Option<ThinkingConfig>,
}
308
/// Chat-completion request in the full OpenAI wire format.
///
/// Mirrors [`ChatCompletionRequest`] field-for-field, except `messages`
/// uses the richer [`OpenAIChatMessage`] type and an extra untyped
/// `thinking` passthrough field is appended. All optional fields are
/// omitted from the serialized JSON when `None`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatCompletionRequest {
    /// Target model identifier.
    pub model: String,
    /// Conversation messages in OpenAI wire format.
    pub messages: Vec<OpenAIChatMessage>,
    // --- Sampling / decoding controls ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_completion_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    // --- Identity / routing ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    // --- Streaming ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_options: Option<serde_json::Value>,
    // --- Logits / sampling diagnostics ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logit_bias: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_logprobs: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub n: Option<u32>,
    // --- Output shaping and tools ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<Tool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub seed: Option<i64>,
    // --- Caching / provider passthrough ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider_options: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_retention: Option<String>,
    // --- Reasoning ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_reasoning: Option<bool>,
    // --- Misc request options ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub store: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub safety_identifier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prediction: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub verbosity: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_search_options: Option<serde_json::Value>,
    // --- Deprecated function-calling API (pre-tools), kept untyped ---
    #[serde(skip_serializing_if = "Option::is_none")]
    pub functions: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub function_call: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_config: Option<ThinkingConfig>,
    /// Untyped `thinking` passthrough — presumably an Anthropic-style
    /// extended-thinking block; TODO confirm which provider consumes it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking: Option<serde_json::Value>,
}
472
/// Chat-completion response with simplified [`ChatChoice`] messages.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionResponse {
    pub id: String,
    /// Object type discriminator as reported by the server.
    pub object: String,
    /// Creation time as a Unix timestamp.
    pub created: u64,
    pub model: String,
    pub choices: Vec<ChatChoice>,
    /// Token accounting; omitted when the server does not report it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}

/// Chat-completion response carrying full OpenAI-format messages
/// ([`OpenAIChatChoice`]); otherwise identical to [`ChatCompletionResponse`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatCompletionResponse {
    pub id: String,
    pub object: String,
    /// Creation time as a Unix timestamp.
    pub created: u64,
    pub model: String,
    pub choices: Vec<OpenAIChatChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}
518
/// One generated completion in a [`ChatCompletionResponse`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatChoice {
    /// Position of this choice among the `n` generated completions.
    pub index: u32,
    pub message: ChatMessage,
    /// Why generation stopped (e.g. server-reported reason string; not an enum here).
    pub finish_reason: String,
}

/// One generated completion in an [`OpenAIChatCompletionResponse`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIChatChoice {
    pub index: u32,
    pub message: OpenAIChatMessage,
    pub finish_reason: String,
}

/// Token usage accounting attached to a completion response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}
557
/// Health-check response from the service.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthStatus {
    /// Overall status string as reported by the server.
    pub status: String,
    /// Timestamp string; format is server-defined — TODO confirm (RFC 3339?).
    pub timestamp: String,
    /// Uptime; unit not visible here — presumably seconds, TODO confirm.
    pub uptime: f64,
    pub services: ServiceStatus,
}

/// Per-dependency health flags inside [`HealthStatus`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServiceStatus {
    pub database: bool,
    /// Absent when the deployment has no Redis; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub redis: Option<bool>,
    pub providers: bool,
}
587
/// Model inventory grouped by provider. All fields fall back to their
/// `Default` when missing from the payload (`#[serde(default)]`), so a
/// partial response still deserializes.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AvailableModels {
    /// Provider name -> model identifiers offered by that provider.
    #[serde(default)]
    pub providers: HashMap<String, Vec<String>>,
    #[serde(default)]
    pub total_models: usize,
    #[serde(default)]
    pub active_providers: Vec<String>,
}

/// Account credit snapshot for a prospective request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreditInfo {
    pub current_credits: f64,
    pub estimated_cost: f64,
    pub credits_after_request: f64,
    /// Reset date string; format server-defined — TODO confirm.
    pub reset_date: String,
}
619
/// Client-side metadata collected about a single request/response exchange.
///
/// Not serialized (no serde derives) — internal bookkeeping only. Every
/// field is optional because none is guaranteed to be reported.
/// NOTE(review): the `rainy_*` fields look like they mirror gateway response
/// headers (the string-typed ones especially) — TODO confirm against the
/// code that populates this struct.
#[derive(Debug, Clone)]
pub struct RequestMetadata {
    /// Round-trip time; unit not visible here — presumably milliseconds, TODO confirm.
    pub response_time: Option<u64>,
    pub provider: Option<String>,
    pub tokens_used: Option<u32>,
    pub credits_used: Option<f64>,
    pub credits_remaining: Option<f64>,
    pub request_id: Option<String>,
    /// Count of compatibility warnings attached to the response.
    pub compat_warnings: Option<u32>,
    pub response_mode: Option<String>,
    pub billing_plan: Option<String>,
    pub rainy_credits_charged: Option<f64>,
    /// Kept as a raw string (may be non-numeric, e.g. "unlimited" — unverified).
    pub rainy_daily_credits_remaining: Option<String>,
    pub rainy_sanitized_params: Option<String>,
    pub rainy_billing_adjustment: Option<String>,
    pub rainy_billing_outstanding_credits: Option<f64>,
}
665
/// Request payload for the Responses API.
///
/// Only `model` and `input` are required; all other known fields are
/// optional and omitted from JSON when `None`. Unknown fields round-trip
/// through the flattened `extra` map, so parameters this struct does not
/// model are preserved rather than dropped. Construct via
/// [`ResponsesRequest::new`] / [`ResponsesRequest::text`] and the `with_*`
/// builder methods.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResponsesRequest {
    pub model: String,
    /// Input payload; kept untyped because the API accepts either a bare
    /// string or structured item lists.
    pub input: serde_json::Value,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_output_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_reasoning: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub store: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub safety_identifier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub provider_options: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_cache_retention: Option<String>,
    /// Text output configuration (e.g. format); untyped passthrough.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include: Option<Vec<String>>,
    /// Chains this request onto a prior response for multi-turn state.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub previous_response_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub conversation: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub background: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub context_management: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub truncation: Option<String>,
    /// Catch-all for fields not modeled above: flattened into the top-level
    /// JSON object, skipped entirely when empty.
    #[serde(flatten, skip_serializing_if = "map_is_empty", default)]
    pub extra: HashMap<String, serde_json::Value>,
}
787
impl ResponsesRequest {
    /// Creates a request for `model` with the given raw `input` and every
    /// optional field unset.
    pub fn new(model: impl Into<String>, input: serde_json::Value) -> Self {
        Self {
            model: model.into(),
            input,
            stream: None,
            tools: None,
            tool_choice: None,
            response_format: None,
            temperature: None,
            top_p: None,
            max_output_tokens: None,
            user: None,
            prompt_cache_key: None,
            reasoning: None,
            include_reasoning: None,
            parallel_tool_calls: None,
            metadata: None,
            service_tier: None,
            store: None,
            safety_identifier: None,
            provider_options: None,
            prompt_cache_retention: None,
            text: None,
            instructions: None,
            include: None,
            previous_response_id: None,
            conversation: None,
            prompt: None,
            background: None,
            context_management: None,
            truncation: None,
            extra: HashMap::new(),
        }
    }

    /// Convenience constructor for a plain-text input.
    pub fn text(model: impl Into<String>, input_text: impl Into<String>) -> Self {
        Self::new(model, serde_json::Value::String(input_text.into()))
    }

    /// Enables or disables streaming. Builder-style: consumes and returns `self`.
    pub fn with_stream(mut self, stream: bool) -> Self {
        self.stream = Some(stream);
        self
    }

    /// Sets the raw `reasoning` configuration object.
    pub fn with_reasoning(mut self, reasoning: serde_json::Value) -> Self {
        self.reasoning = Some(reasoning);
        self
    }

    /// Sets whether reasoning content should be included in the response.
    pub fn with_include_reasoning(mut self, include_reasoning: bool) -> Self {
        self.include_reasoning = Some(include_reasoning);
        self
    }

    /// Shorthand for `with_reasoning(json!({"effort": ...}))`.
    /// Note: replaces any previously set `reasoning` value.
    pub fn with_reasoning_effort(mut self, effort: impl Into<String>) -> Self {
        self.reasoning = Some(serde_json::json!({ "effort": effort.into() }));
        self
    }

    /// Caps the number of output tokens.
    pub fn with_max_output_tokens(mut self, max_output_tokens: u32) -> Self {
        self.max_output_tokens = Some(max_output_tokens);
        self
    }

    /// Sets the prompt-cache key.
    pub fn with_prompt_cache_key(mut self, prompt_cache_key: impl Into<String>) -> Self {
        self.prompt_cache_key = Some(prompt_cache_key.into());
        self
    }

    /// Sets the end-user identifier.
    pub fn with_user(mut self, user: impl Into<String>) -> Self {
        self.user = Some(user.into());
        self
    }

    /// Sets provider-specific options (untyped passthrough).
    pub fn with_provider_options(mut self, provider_options: serde_json::Value) -> Self {
        self.provider_options = Some(provider_options);
        self
    }

    /// Sets system-level instructions.
    pub fn with_instructions(mut self, instructions: impl Into<String>) -> Self {
        self.instructions = Some(instructions.into());
        self
    }

    /// Chains this request onto a previous response.
    pub fn with_previous_response_id(mut self, previous_response_id: impl Into<String>) -> Self {
        self.previous_response_id = Some(previous_response_id.into());
        self
    }

    /// Sets the service tier.
    pub fn with_service_tier(mut self, service_tier: impl Into<String>) -> Self {
        self.service_tier = Some(service_tier.into());
        self
    }

    /// Attaches request metadata key/value pairs.
    pub fn with_metadata(mut self, metadata: HashMap<String, String>) -> Self {
        self.metadata = Some(metadata);
        self
    }

    /// Replaces the full tool list with `tools`.
    pub fn with_tools(mut self, tools: Vec<serde_json::Value>) -> Self {
        self.tools = Some(tools);
        self
    }

    /// Appends a single function tool (Responses-API flat schema: `type`,
    /// `name`, `description`, `parameters` at the top level), preserving any
    /// tools added earlier.
    pub fn add_function_tool(
        mut self,
        name: impl Into<String>,
        description: impl Into<String>,
        parameters: serde_json::Value,
    ) -> Self {
        let mut tools = self.tools.unwrap_or_default();
        tools.push(serde_json::json!({
            "type": "function",
            "name": name.into(),
            "description": description.into(),
            "parameters": parameters
        }));
        self.tools = Some(tools);
        self
    }

    /// Inserts an arbitrary top-level field into the flattened `extra` map.
    pub fn with_extra(mut self, key: impl Into<String>, value: serde_json::Value) -> Self {
        self.extra.insert(key.into(), value);
        self
    }
}
932
/// Token usage block of a Responses API result. Every known field is
/// optional; unrecognized fields are preserved in the flattened `extra` map.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ResponsesUsage {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    // Cache accounting (Anthropic-style field names — unverified which providers emit them).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
    // Provider-specific breakdowns kept untyped.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_tokens_details: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion_tokens_details: Option<serde_json::Value>,
    /// Catch-all for unmodeled usage fields.
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}

/// A Responses API result. Deliberately loose: every known field is
/// optional and anything unmodeled lands in the flattened `extra` map, so
/// partial or provider-extended payloads still deserialize.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ResponsesApiResponse {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub object: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// Convenience concatenated text output, when the server provides it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_text: Option<String>,
    /// Structured output items, kept untyped.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output: Option<Vec<serde_json::Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub incomplete_details: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<ResponsesUsage>,
    /// Catch-all for unmodeled response fields.
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
993
/// A single compatibility warning emitted by the gateway (e.g. a parameter
/// it had to drop or rewrite — unverified; see server docs).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompatWarning {
    pub code: String,
    pub message: String,
    /// JSON path of the offending field, when applicable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
}

/// Feature flags describing what a request actually exercised.
/// Serialized with camelCase names via per-field renames.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeaturesUsed {
    pub reasoning: bool,
    #[serde(rename = "imageInput")]
    pub image_input: bool,
    pub tools: bool,
    #[serde(rename = "structuredOutput")]
    pub structured_output: bool,
}

/// Reasoning-output metadata attached to a response envelope.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningMeta {
    /// Whether reasoning content was present in the response.
    pub present: bool,
    /// Whether a reasoning summary was present.
    pub summary_present: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokens: Option<u32>,
}
1032
/// Billing/diagnostic metadata block of a [`RainyEnvelope`].
///
/// Serializes with camelCase names but accepts snake_case on input too
/// (`rename` + `alias` on each field), so both server spellings
/// deserialize. Unmodeled keys are preserved in the flattened `extra` map.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RainyEnvelopeMeta {
    #[serde(
        rename = "billingPlan",
        alias = "billing_plan",
        skip_serializing_if = "Option::is_none"
    )]
    pub billing_plan: Option<String>,
    #[serde(
        rename = "creditsCharged",
        alias = "credits_charged",
        skip_serializing_if = "Option::is_none"
    )]
    pub credits_charged: Option<f64>,
    #[serde(
        rename = "markupPercent",
        alias = "markup_percent",
        skip_serializing_if = "Option::is_none"
    )]
    pub markup_percent: Option<f64>,
    // String, not a number: may carry non-numeric values (unverified).
    #[serde(
        rename = "dailyCreditsRemaining",
        alias = "daily_credits_remaining",
        skip_serializing_if = "Option::is_none"
    )]
    pub daily_credits_remaining: Option<String>,
    #[serde(
        rename = "compatWarnings",
        alias = "compat_warnings",
        skip_serializing_if = "Option::is_none"
    )]
    pub compat_warnings: Option<Vec<CompatWarning>>,
    #[serde(
        rename = "featuresUsed",
        alias = "features_used",
        skip_serializing_if = "Option::is_none"
    )]
    pub features_used: Option<FeaturesUsed>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<ReasoningMeta>,
    /// Catch-all for unmodeled meta fields.
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}

/// Generic success envelope the gateway wraps payloads in:
/// `{ "success": ..., "data": ..., "meta": ... }`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RainyEnvelope<T> {
    pub success: bool,
    pub data: T,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub meta: Option<RainyEnvelopeMeta>,
}
1097
/// A single streaming event from the Responses API, kept as untyped JSON.
pub type ResponsesStreamEvent = serde_json::Value;
1100
/// Architecture description of a catalog model (modalities, tokenizer,
/// instruct format). Everything optional; omitted when `None`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelArchitecture {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_modalities: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokenizer: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub instruct_type: Option<String>,
}

/// A capability value that the catalog reports either as a boolean or as a
/// free-form string; `untagged` so either JSON shape deserializes.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CapabilityFlag {
    Bool(bool),
    Text(String),
}

/// Legacy (v1) per-model capability flags. Superseded by
/// [`RainyCapabilitiesV2`] when that block is present on a catalog item.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RainyCapabilities {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub image_input: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<CapabilityFlag>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<CapabilityFlag>,
}
1144
/// Which provider family a reasoning profile targets; serialized lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum ReasoningProvider {
    Openai,
    Google,
    Anthropic,
    Other,
}

/// Numeric thinking-token budget range for budget-style reasoning control.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ThinkingBudget {
    pub min: i32,
    pub max: i32,
    /// Sentinel meaning "let the model decide" — presumably; TODO confirm.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_value: Option<i32>,
    /// Sentinel that disables thinking — presumably; TODO confirm.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub disable_value: Option<i32>,
}

/// Observed reasoning-control surface of a model: which parameters exist
/// and which discrete values they accept. All fields optional.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ReasoningControls {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub observed_parameters: Option<Vec<String>>,
    /// Whether an on/off reasoning toggle exists.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_toggle: Option<bool>,
    /// Whether a `reasoning_effort`-style parameter exists.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<bool>,
    /// Accepted effort values (e.g. level names), when enumerated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub effort: Option<Vec<String>>,
    /// Accepted thinking-level values, when enumerated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_level: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_budget: Option<ThinkingBudget>,
}
1196
/// Provider-specific mapping for a reasoning parameter: where it lives in
/// the request and which values it accepts.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ReasoningProfile {
    pub provider: ReasoningProvider,
    /// Dotted/JSON path of the parameter in the request body — presumably;
    /// TODO confirm the path syntax used by the catalog.
    pub parameter_path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub values: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub notes: Option<String>,
}

/// Names of the request parameters that switch reasoning on and that
/// request reasoning content back, when the model exposes them.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ReasoningToggle {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub enable_param: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_reasoning_param: Option<String>,
}

/// v2 reasoning capability block: a hard `supported` flag plus optional
/// control details, provider profiles, and toggle parameter names.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyReasoningCapabilitiesV2 {
    pub supported: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub controls: Option<ReasoningControls>,
    /// Defaults to empty when absent from the payload.
    #[serde(default)]
    pub profiles: Vec<ReasoningProfile>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toggle: Option<ReasoningToggle>,
}

/// v2 modality lists; both default to empty when absent.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyMultimodalCapabilitiesV2 {
    #[serde(default)]
    pub input: Vec<String>,
    #[serde(default)]
    pub output: Vec<String>,
}

/// v2 list of request parameters the model accepts; defaults to empty.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyParametersCapabilitiesV2 {
    #[serde(default)]
    pub accepted: Vec<String>,
}

/// Combined v2 capability block attached to a [`ModelCatalogItem`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct RainyCapabilitiesV2 {
    pub multimodal: RainyMultimodalCapabilitiesV2,
    pub reasoning: RainyReasoningCapabilitiesV2,
    pub parameters: RainyParametersCapabilitiesV2,
}
1268
/// Per-token price strings for a model. Kept as strings as delivered by
/// the catalog; `parse_price` converts them when numeric comparison is needed.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ModelPricing {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion: Option<String>,
}

/// One entry of the model catalog. Only `id` is required; all metadata is
/// optional, and unmodeled fields are preserved in the flattened `extra` map.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelCatalogItem {
    pub id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub context_length: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pricing: Option<ModelPricing>,
    /// Legacy flat parameter list; consulted only when the v2 block is absent
    /// (see `catalog_item_supports`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub supported_parameters: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub architecture: Option<ModelArchitecture>,
    /// Legacy (v1) capability flags.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rainy_capabilities: Option<RainyCapabilities>,
    /// v2 capability block; authoritative when present.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub rainy_capabilities_v2: Option<RainyCapabilitiesV2>,
    /// Catch-all for unmodeled catalog fields.
    #[serde(flatten, default)]
    pub extra: HashMap<String, serde_json::Value>,
}
1310
/// Style of reasoning control a caller wants to use; serialized snake_case
/// (`"effort"`, `"thinking_level"`, `"thinking_budget"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningMode {
    Effort,
    ThinkingLevel,
    ThinkingBudget,
}

/// Filter criteria for selecting catalog models (see `select_models`).
/// Modality lists default to empty (= no requirement); the boolean and
/// reasoning requirements only apply when set.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub struct ModelSelectionCriteria {
    #[serde(default)]
    pub required_input_modalities: Vec<String>,
    #[serde(default)]
    pub required_output_modalities: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub require_tools: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub require_structured_output: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_mode: Option<ReasoningMode>,
    /// Specific value required for the chosen mode (e.g. an effort level),
    /// compared case-insensitively against the model's accepted values.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_value: Option<String>,
}

/// A caller's reasoning preference: the mode plus either a discrete value
/// or a numeric budget, depending on the mode.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ReasoningPreference {
    pub mode: ReasoningMode,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub budget: Option<i32>,
}
1358
/// Parses an optional decimal price string into an `f64`.
///
/// A missing value, an unparseable string, or a non-finite parse result
/// (NaN / ±infinity) all collapse to `f64::MAX`, a sentinel that orders
/// after any real price.
fn parse_price(value: Option<&str>) -> f64 {
    match value.map(str::parse::<f64>) {
        Some(Ok(price)) if price.is_finite() => price,
        _ => f64::MAX,
    }
}
1365
/// Returns `true` when every entry of `required` appears in `available`,
/// compared ASCII-case-insensitively. An empty `required` list always
/// passes (vacuous truth of `all` on an empty iterator).
fn has_required_modalities(available: &[String], required: &[String]) -> bool {
    required.iter().all(|needed| {
        available
            .iter()
            .any(|have| have.eq_ignore_ascii_case(needed))
    })
}
1377
1378fn supports_reasoning_preference(
1379 capabilities: &RainyCapabilitiesV2,
1380 mode: &ReasoningMode,
1381 reasoning_value: Option<&str>,
1382) -> bool {
1383 if !capabilities.reasoning.supported {
1384 return false;
1385 }
1386
1387 let controls = capabilities.reasoning.controls.as_ref();
1388 match mode {
1389 ReasoningMode::Effort => controls
1390 .map(|c| {
1391 c.reasoning_effort == Some(true) || c.effort.as_ref().is_some_and(|v| !v.is_empty())
1392 })
1393 .filter(|supported| *supported)
1394 .map(|_| {
1395 controls
1396 .and_then(|c| c.effort.as_ref())
1397 .map(|values| {
1398 reasoning_value.is_none_or(|value| {
1399 values
1400 .iter()
1401 .any(|candidate| candidate.eq_ignore_ascii_case(value))
1402 })
1403 })
1404 .unwrap_or(reasoning_value.is_none())
1405 })
1406 .unwrap_or(false),
1407 ReasoningMode::ThinkingLevel => controls
1408 .and_then(|c| c.thinking_level.as_ref())
1409 .map(|values| {
1410 reasoning_value.is_none_or(|value| {
1411 values
1412 .iter()
1413 .any(|candidate| candidate.eq_ignore_ascii_case(value))
1414 })
1415 })
1416 .unwrap_or(false),
1417 ReasoningMode::ThinkingBudget => {
1418 controls.and_then(|c| c.thinking_budget.as_ref()).is_some()
1419 }
1420 }
1421}
1422
1423fn catalog_item_supports(item: &ModelCatalogItem, parameter: &str) -> bool {
1424 if let Some(v2) = &item.rainy_capabilities_v2 {
1425 return v2
1426 .parameters
1427 .accepted
1428 .iter()
1429 .any(|candidate| candidate == parameter);
1430 }
1431
1432 item.supported_parameters
1433 .as_ref()
1434 .map(|params| params.iter().any(|candidate| candidate == parameter))
1435 .unwrap_or(false)
1436}
1437
1438pub fn select_models(
1440 models: &[ModelCatalogItem],
1441 criteria: &ModelSelectionCriteria,
1442) -> Vec<ModelCatalogItem> {
1443 let required_inputs: Vec<String> = criteria
1444 .required_input_modalities
1445 .iter()
1446 .map(|v| v.to_lowercase())
1447 .collect();
1448 let required_outputs: Vec<String> = criteria
1449 .required_output_modalities
1450 .iter()
1451 .map(|v| v.to_lowercase())
1452 .collect();
1453
1454 let mut filtered: Vec<ModelCatalogItem> = models
1455 .iter()
1456 .filter(|item| {
1457 let Some(v2) = item.rainy_capabilities_v2.as_ref() else {
1458 return false;
1459 };
1460 let input: Vec<String> = v2
1461 .multimodal
1462 .input
1463 .iter()
1464 .map(|v| v.to_lowercase())
1465 .collect();
1466 let output: Vec<String> = v2
1467 .multimodal
1468 .output
1469 .iter()
1470 .map(|v| v.to_lowercase())
1471 .collect();
1472
1473 if !has_required_modalities(&input, &required_inputs) {
1474 return false;
1475 }
1476
1477 if !has_required_modalities(&output, &required_outputs) {
1478 return false;
1479 }
1480
1481 if criteria.require_tools == Some(true) && !catalog_item_supports(item, "tools") {
1482 return false;
1483 }
1484
1485 if criteria.require_structured_output == Some(true)
1486 && !catalog_item_supports(item, "response_format")
1487 && !catalog_item_supports(item, "structured_outputs")
1488 {
1489 return false;
1490 }
1491
1492 if let Some(mode) = &criteria.reasoning_mode {
1493 let reasoning_value = criteria.reasoning_value.as_deref();
1494 if !supports_reasoning_preference(v2, mode, reasoning_value) {
1495 return false;
1496 }
1497 }
1498
1499 true
1500 })
1501 .cloned()
1502 .collect();
1503
1504 filtered.sort_by(|a, b| {
1505 let a_prompt = parse_price(a.pricing.as_ref().and_then(|p| p.prompt.as_deref()));
1506 let b_prompt = parse_price(b.pricing.as_ref().and_then(|p| p.prompt.as_deref()));
1507 let prompt_cmp = a_prompt.partial_cmp(&b_prompt).unwrap_or(Ordering::Equal);
1508 if prompt_cmp != Ordering::Equal {
1509 return prompt_cmp;
1510 }
1511
1512 let a_completion = parse_price(a.pricing.as_ref().and_then(|p| p.completion.as_deref()));
1513 let b_completion = parse_price(b.pricing.as_ref().and_then(|p| p.completion.as_deref()));
1514 let completion_cmp = a_completion
1515 .partial_cmp(&b_completion)
1516 .unwrap_or(Ordering::Equal);
1517 if completion_cmp != Ordering::Equal {
1518 return completion_cmp;
1519 }
1520
1521 let a_context = a.context_length.unwrap_or_default();
1522 let b_context = b.context_length.unwrap_or_default();
1523 b_context.cmp(&a_context)
1524 });
1525
1526 filtered
1527}
1528
1529pub fn build_reasoning_config(
1531 model: &ModelCatalogItem,
1532 preference: &ReasoningPreference,
1533) -> Option<serde_json::Value> {
1534 let v2 = model.rainy_capabilities_v2.as_ref()?;
1535 if !v2.reasoning.supported {
1536 return None;
1537 }
1538
1539 let profiles = &v2.reasoning.profiles;
1540 let controls = v2.reasoning.controls.as_ref();
1541 match preference.mode {
1542 ReasoningMode::Effort => {
1543 let value = preference.value.clone()?;
1544 let supports_effort = controls
1545 .map(|c| {
1546 c.reasoning_effort == Some(true)
1547 || c.effort.as_ref().is_some_and(|v| !v.is_empty())
1548 })
1549 .unwrap_or(false);
1550 if !supports_effort {
1551 return None;
1552 }
1553 if let Some(efforts) = controls.and_then(|c| c.effort.as_ref()) {
1554 if !efforts.iter().any(|v| v.eq_ignore_ascii_case(&value)) {
1555 return None;
1556 }
1557 }
1558
1559 let effort_profile = profiles
1560 .iter()
1561 .find(|p| p.parameter_path == "reasoning.effort")?;
1562 match effort_profile.parameter_path.as_str() {
1563 "reasoning.effort" => Some(serde_json::json!({
1564 "reasoning": { "effort": value }
1565 })),
1566 _ => None,
1567 }
1568 }
1569 ReasoningMode::ThinkingLevel => {
1570 let value = preference.value.clone()?;
1571 let supports = controls
1572 .and_then(|c| c.thinking_level.as_ref())
1573 .map(|levels| levels.iter().any(|v| v.eq_ignore_ascii_case(&value)))
1574 .unwrap_or(false);
1575 if !supports {
1576 return None;
1577 }
1578 let level_profile = profiles
1579 .iter()
1580 .find(|p| p.parameter_path == "thinking_config.thinking_level")?;
1581 if let Some(values) = &level_profile.values {
1582 if !values.iter().any(|v| v.eq_ignore_ascii_case(&value)) {
1583 return None;
1584 }
1585 }
1586 Some(serde_json::json!({
1587 "thinking_config": { "thinking_level": value }
1588 }))
1589 }
1590 ReasoningMode::ThinkingBudget => {
1591 let budget = preference.budget?;
1592 let supports = controls.and_then(|c| c.thinking_budget.as_ref())?;
1593 if budget < supports.min || budget > supports.max {
1594 return None;
1595 }
1596 let budget_profile = profiles.iter().find(|p| {
1597 p.parameter_path == "thinking.budget_tokens"
1598 || p.parameter_path == "thinking_config.thinking_budget"
1599 })?;
1600
1601 if budget_profile.parameter_path == "thinking.budget_tokens" {
1602 return Some(serde_json::json!({
1603 "thinking": { "budget_tokens": budget }
1604 }));
1605 }
1606 if budget_profile.parameter_path == "thinking_config.thinking_budget" {
1607 return Some(serde_json::json!({
1608 "thinking_config": { "thinking_budget": budget }
1609 }));
1610 }
1611 None
1612 }
1613 }
1614}
1615
1616#[cfg(feature = "legacy")]
1617pub mod model_constants;
1618
/// Canonical provider identifier strings, as used in request `provider` fields.
pub mod providers {
    pub const OPENAI: &str = "openai";
    pub const ANTHROPIC: &str = "anthropic";
    pub const GROQ: &str = "groq";
    pub const CEREBRAS: &str = "cerebras";
    pub const GEMINI: &str = "gemini";
    pub const ENOSISLABS: &str = "enosislabs";
}
1634
impl ChatCompletionRequest {
    /// Creates a request for `model` with `messages`; every optional field
    /// starts as `None` and is populated via the `with_*` builders below.
    pub fn new(model: impl Into<String>, messages: Vec<ChatMessage>) -> Self {
        Self {
            model: model.into(),
            messages,
            temperature: None,
            max_tokens: None,
            max_completion_tokens: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: None,
            user: None,
            provider: None,
            stream: None,
            stream_options: None,
            logit_bias: None,
            logprobs: None,
            top_logprobs: None,
            n: None,
            response_format: None,
            tools: None,
            tool_choice: None,
            parallel_tool_calls: None,
            seed: None,
            prompt_cache_key: None,
            provider_options: None,
            prompt_cache_retention: None,
            reasoning: None,
            include_reasoning: None,
            metadata: None,
            service_tier: None,
            store: None,
            safety_identifier: None,
            modalities: None,
            audio: None,
            prediction: None,
            verbosity: None,
            web_search_options: None,
            functions: None,
            function_call: None,
            thinking_config: None,
        }
    }

    /// Sets the sampling temperature, clamped into the valid [0.0, 2.0] range.
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature.clamp(0.0, 2.0));
        self
    }

    /// Sets the (legacy) max output token limit. Not validated here;
    /// `validate_openai_compatibility` rejects 0.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    /// Sets the newer max completion token limit.
    pub fn with_max_completion_tokens(mut self, max_completion_tokens: u32) -> Self {
        self.max_completion_tokens = Some(max_completion_tokens);
        self
    }

    /// Attaches an end-user identifier.
    pub fn with_user(mut self, user: impl Into<String>) -> Self {
        self.user = Some(user.into());
        self
    }

    /// Pins the request to a specific provider (see the `providers` module).
    pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
        self.provider = Some(provider.into());
        self
    }

    /// Enables or disables streaming responses.
    pub fn with_stream(mut self, stream: bool) -> Self {
        self.stream = Some(stream);
        self
    }

    /// Sets provider-specific stream options as raw JSON.
    pub fn with_stream_options(mut self, stream_options: serde_json::Value) -> Self {
        self.stream_options = Some(stream_options);
        self
    }

    /// Sets token logit biases as raw JSON.
    pub fn with_logit_bias(mut self, logit_bias: serde_json::Value) -> Self {
        self.logit_bias = Some(logit_bias);
        self
    }

    /// Enables or disables returning log probabilities.
    pub fn with_logprobs(mut self, logprobs: bool) -> Self {
        self.logprobs = Some(logprobs);
        self
    }

    /// Sets how many top logprobs to return per token (validated to 0..=20).
    pub fn with_top_logprobs(mut self, top_logprobs: u32) -> Self {
        self.top_logprobs = Some(top_logprobs);
        self
    }

    /// Sets the number of completions to generate (validated to be > 0).
    pub fn with_n(mut self, n: u32) -> Self {
        self.n = Some(n);
        self
    }

    /// Sets the response format (text / JSON object / JSON schema).
    pub fn with_response_format(mut self, response_format: ResponseFormat) -> Self {
        self.response_format = Some(response_format);
        self
    }

    /// Sets the tool definitions available to the model.
    pub fn with_tools(mut self, tools: Vec<Tool>) -> Self {
        self.tools = Some(tools);
        self
    }

    /// Sets how the model should choose among the provided tools.
    pub fn with_tool_choice(mut self, tool_choice: ToolChoice) -> Self {
        self.tool_choice = Some(tool_choice);
        self
    }

    /// Replaces the whole Gemini thinking configuration.
    pub fn with_thinking_config(mut self, thinking_config: ThinkingConfig) -> Self {
        self.thinking_config = Some(thinking_config);
        self
    }

    /// Sets a raw reasoning payload (e.g. produced by build_reasoning_config).
    pub fn with_reasoning(mut self, reasoning: serde_json::Value) -> Self {
        self.reasoning = Some(reasoning);
        self
    }

    /// Requests that reasoning traces be included in the response.
    pub fn with_include_reasoning(mut self, include_reasoning: bool) -> Self {
        self.include_reasoning = Some(include_reasoning);
        self
    }

    /// Sets the service tier.
    pub fn with_service_tier(mut self, service_tier: impl Into<String>) -> Self {
        self.service_tier = Some(service_tier.into());
        self
    }

    /// Attaches arbitrary string metadata.
    pub fn with_metadata(mut self, metadata: HashMap<String, String>) -> Self {
        self.metadata = Some(metadata);
        self
    }

    /// Sets only the include_thoughts flag, creating a default thinking
    /// config if none exists yet (other fields are preserved).
    pub fn with_include_thoughts(mut self, include_thoughts: bool) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.include_thoughts = Some(include_thoughts);
        self.thinking_config = Some(config);
        self
    }

    /// Sets only the thinking level (Gemini 3), preserving other config fields.
    pub fn with_thinking_level(mut self, thinking_level: ThinkingLevel) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.thinking_level = Some(thinking_level);
        self.thinking_config = Some(config);
        self
    }

    /// Sets only the thinking budget (Gemini 2.5), preserving other fields.
    pub fn with_thinking_budget(mut self, thinking_budget: i32) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.thinking_budget = Some(thinking_budget);
        self.thinking_config = Some(config);
        self
    }

    /// Validates the numeric/structural constraints of the OpenAI-style
    /// fields, returning the first violation as a human-readable message.
    /// Unset (`None`) fields are always accepted.
    pub fn validate_openai_compatibility(&self) -> Result<(), String> {
        if let Some(temp) = self.temperature {
            if !(0.0..=2.0).contains(&temp) {
                return Err(format!(
                    "Temperature must be between 0.0 and 2.0, got {}",
                    temp
                ));
            }
        }

        if let Some(top_p) = self.top_p {
            if !(0.0..=1.0).contains(&top_p) {
                return Err(format!("Top-p must be between 0.0 and 1.0, got {}", top_p));
            }
        }

        if let Some(fp) = self.frequency_penalty {
            if !(-2.0..=2.0).contains(&fp) {
                return Err(format!(
                    "Frequency penalty must be between -2.0 and 2.0, got {}",
                    fp
                ));
            }
        }

        if let Some(pp) = self.presence_penalty {
            if !(-2.0..=2.0).contains(&pp) {
                return Err(format!(
                    "Presence penalty must be between -2.0 and 2.0, got {}",
                    pp
                ));
            }
        }

        if let Some(mt) = self.max_tokens {
            if mt == 0 {
                return Err("Max tokens must be greater than 0".to_string());
            }
        }

        if let Some(mct) = self.max_completion_tokens {
            if mct == 0 {
                return Err("Max completion tokens must be greater than 0".to_string());
            }
        }

        if let Some(tlp) = self.top_logprobs {
            if !(0..=20).contains(&tlp) {
                return Err(format!(
                    "Top logprobs must be between 0 and 20, got {}",
                    tlp
                ));
            }
        }

        if let Some(n) = self.n {
            if n == 0 {
                return Err("n must be greater than 0".to_string());
            }
        }

        if let Some(stop) = &self.stop {
            if stop.len() > 4 {
                return Err("Cannot have more than 4 stop sequences".to_string());
            }
            for seq in stop {
                if seq.is_empty() {
                    return Err("Stop sequences cannot be empty".to_string());
                }
                // NOTE(review): len() counts bytes, but the message says
                // "characters" — multi-byte UTF-8 sequences hit this earlier
                // than 64 chars. Confirm which limit the API actually imposes.
                if seq.len() > 64 {
                    return Err("Stop sequences cannot be longer than 64 characters".to_string());
                }
            }
        }

        if let Some(thinking_config) = &self.thinking_config {
            self.validate_thinking_config(thinking_config)?;
        }

        Ok(())
    }

    /// Validates Gemini-specific thinking settings against the model name.
    /// Model family detection is substring-based on `self.model`.
    fn validate_thinking_config(&self, config: &ThinkingConfig) -> Result<(), String> {
        let is_gemini_3 = self.model.contains("gemini-3");
        let is_gemini_2_5 = self.model.contains("gemini-2.5");
        // Note: "gemini-3-pro" also matches is_gemini_3 above.
        let is_gemini_3_pro = self.model.contains("gemini-3-pro");

        if let Some(level) = &config.thinking_level {
            if !is_gemini_3 {
                return Err("thinking_level is only supported for Gemini 3 models".to_string());
            }

            match level {
                // Pro models accept only the Low/High levels.
                ThinkingLevel::Minimal | ThinkingLevel::Medium => {
                    if is_gemini_3_pro {
                        return Err(
                            "Gemini 3 Pro only supports 'low' and 'high' thinking levels"
                                .to_string(),
                        );
                    }
                }
                _ => {}
            }
        }

        if let Some(budget) = config.thinking_budget {
            if !is_gemini_2_5 {
                return Err("thinking_budget is only supported for Gemini 2.5 models".to_string());
            }

            // -1 means "dynamic" for both 2.5 variants; otherwise each variant
            // has its own valid range.
            if self.model.contains("2.5-pro") {
                if budget != -1 && !(128..=32768).contains(&budget) {
                    return Err(
                        "Gemini 2.5 Pro thinking budget must be -1 (dynamic) or between 128-32768"
                            .to_string(),
                    );
                }
            } else if self.model.contains("2.5-flash")
                && budget != -1
                && !(0..=24576).contains(&budget)
            {
                return Err(
                    "Gemini 2.5 Flash thinking budget must be -1 (dynamic) or between 0-24576"
                        .to_string(),
                );
            }
        }

        // Level and budget target different model generations; both at once
        // is contradictory.
        if config.thinking_level.is_some() && config.thinking_budget.is_some() {
            return Err("Cannot specify both thinking_level (Gemini 3) and thinking_budget (Gemini 2.5) in the same request".to_string());
        }

        Ok(())
    }

    /// True when the target model family supports thinking at all
    /// (substring match on the model name).
    pub fn supports_thinking(&self) -> bool {
        self.model.contains("gemini-3") || self.model.contains("gemini-2.5")
    }

    /// True when the target model requires thought signatures (Gemini 3 only).
    pub fn requires_thought_signatures(&self) -> bool {
        self.model.contains("gemini-3")
    }
}
2058
impl OpenAIChatCompletionRequest {
    /// Creates a request for `model` with OpenAI-shaped `messages`; all
    /// optional fields start as `None` and are set via the builders below.
    pub fn new(model: impl Into<String>, messages: Vec<OpenAIChatMessage>) -> Self {
        Self {
            model: model.into(),
            messages,
            temperature: None,
            max_tokens: None,
            max_completion_tokens: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: None,
            user: None,
            provider: None,
            stream: None,
            stream_options: None,
            logit_bias: None,
            logprobs: None,
            top_logprobs: None,
            n: None,
            response_format: None,
            tools: None,
            tool_choice: None,
            parallel_tool_calls: None,
            seed: None,
            prompt_cache_key: None,
            provider_options: None,
            prompt_cache_retention: None,
            reasoning: None,
            include_reasoning: None,
            metadata: None,
            service_tier: None,
            store: None,
            safety_identifier: None,
            modalities: None,
            audio: None,
            prediction: None,
            verbosity: None,
            web_search_options: None,
            functions: None,
            function_call: None,
            thinking_config: None,
            thinking: None,
        }
    }

    /// Sets the sampling temperature, clamped into [0.0, 2.0].
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature.clamp(0.0, 2.0));
        self
    }

    /// Sets the (legacy) max output token limit.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    /// Sets the newer max completion token limit.
    pub fn with_max_completion_tokens(mut self, max_completion_tokens: u32) -> Self {
        self.max_completion_tokens = Some(max_completion_tokens);
        self
    }

    /// Attaches an end-user identifier.
    pub fn with_user(mut self, user: impl Into<String>) -> Self {
        self.user = Some(user.into());
        self
    }

    /// Pins the request to a specific provider (see the `providers` module).
    pub fn with_provider(mut self, provider: impl Into<String>) -> Self {
        self.provider = Some(provider.into());
        self
    }

    /// Enables or disables streaming responses.
    pub fn with_stream(mut self, stream: bool) -> Self {
        self.stream = Some(stream);
        self
    }

    /// Sets provider-specific stream options as raw JSON.
    pub fn with_stream_options(mut self, stream_options: serde_json::Value) -> Self {
        self.stream_options = Some(stream_options);
        self
    }

    /// Sets nucleus sampling top-p, clamped into [0.0, 1.0].
    pub fn with_top_p(mut self, top_p: f32) -> Self {
        self.top_p = Some(top_p.clamp(0.0, 1.0));
        self
    }

    /// Sets the frequency penalty, clamped into [-2.0, 2.0].
    pub fn with_frequency_penalty(mut self, frequency_penalty: f32) -> Self {
        self.frequency_penalty = Some(frequency_penalty.clamp(-2.0, 2.0));
        self
    }

    /// Sets the presence penalty, clamped into [-2.0, 2.0].
    pub fn with_presence_penalty(mut self, presence_penalty: f32) -> Self {
        self.presence_penalty = Some(presence_penalty.clamp(-2.0, 2.0));
        self
    }

    /// Sets the stop sequences (validated later: at most 4, each 1..=64 bytes).
    pub fn with_stop(mut self, stop: Vec<String>) -> Self {
        self.stop = Some(stop);
        self
    }

    /// Sets token logit biases as raw JSON.
    pub fn with_logit_bias(mut self, logit_bias: serde_json::Value) -> Self {
        self.logit_bias = Some(logit_bias);
        self
    }

    /// Enables or disables returning log probabilities.
    pub fn with_logprobs(mut self, logprobs: bool) -> Self {
        self.logprobs = Some(logprobs);
        self
    }

    /// Sets how many top logprobs to return per token.
    pub fn with_top_logprobs(mut self, top_logprobs: u32) -> Self {
        self.top_logprobs = Some(top_logprobs);
        self
    }

    /// Sets the number of completions to generate.
    pub fn with_n(mut self, n: u32) -> Self {
        self.n = Some(n);
        self
    }

    /// Sets the response format (text / JSON object / JSON schema).
    pub fn with_response_format(mut self, response_format: ResponseFormat) -> Self {
        self.response_format = Some(response_format);
        self
    }

    /// Sets the tool definitions available to the model.
    pub fn with_tools(mut self, tools: Vec<Tool>) -> Self {
        self.tools = Some(tools);
        self
    }

    /// Sets how the model should choose among the provided tools.
    pub fn with_tool_choice(mut self, tool_choice: ToolChoice) -> Self {
        self.tool_choice = Some(tool_choice);
        self
    }

    /// Replaces the whole Gemini thinking configuration.
    pub fn with_thinking_config(mut self, thinking_config: ThinkingConfig) -> Self {
        self.thinking_config = Some(thinking_config);
        self
    }

    /// Sets a raw reasoning payload (e.g. produced by build_reasoning_config).
    pub fn with_reasoning(mut self, reasoning: serde_json::Value) -> Self {
        self.reasoning = Some(reasoning);
        self
    }

    /// Requests that reasoning traces be included in the response.
    pub fn with_include_reasoning(mut self, include_reasoning: bool) -> Self {
        self.include_reasoning = Some(include_reasoning);
        self
    }

    /// Sets the service tier.
    pub fn with_service_tier(mut self, service_tier: impl Into<String>) -> Self {
        self.service_tier = Some(service_tier.into());
        self
    }

    /// Attaches arbitrary string metadata.
    pub fn with_metadata(mut self, metadata: HashMap<String, String>) -> Self {
        self.metadata = Some(metadata);
        self
    }

    /// Sets only the include_thoughts flag, creating a default thinking
    /// config if none exists yet (other fields are preserved).
    pub fn with_include_thoughts(mut self, include_thoughts: bool) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.include_thoughts = Some(include_thoughts);
        self.thinking_config = Some(config);
        self
    }

    /// Sets only the thinking level (Gemini 3), preserving other config fields.
    pub fn with_thinking_level(mut self, thinking_level: ThinkingLevel) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.thinking_level = Some(thinking_level);
        self.thinking_config = Some(config);
        self
    }

    /// Sets only the thinking budget (Gemini 2.5), preserving other fields.
    pub fn with_thinking_budget(mut self, thinking_budget: i32) -> Self {
        let mut config = self.thinking_config.unwrap_or_default();
        config.thinking_budget = Some(thinking_budget);
        self.thinking_config = Some(config);
        self
    }

    /// Sets the Anthropic-style `thinking` payload with the given token budget.
    pub fn with_anthropic_thinking(mut self, budget_tokens: i32) -> Self {
        self.thinking =
            Some(serde_json::json!({"type": "enabled", "budget_tokens": budget_tokens}));
        self
    }

    /// Delegates validation to `ChatCompletionRequest` by building a
    /// field-by-field copy (messages are irrelevant to validation and are
    /// left empty; the OpenAI-only `thinking` field has no counterpart).
    /// NOTE(review): keep this mapping in sync when fields are added.
    pub fn validate_openai_compatibility(&self) -> Result<(), String> {
        ChatCompletionRequest {
            model: self.model.clone(),
            messages: vec![],
            temperature: self.temperature,
            max_tokens: self.max_tokens,
            max_completion_tokens: self.max_completion_tokens,
            top_p: self.top_p,
            frequency_penalty: self.frequency_penalty,
            presence_penalty: self.presence_penalty,
            stop: self.stop.clone(),
            user: self.user.clone(),
            provider: self.provider.clone(),
            stream: self.stream,
            stream_options: self.stream_options.clone(),
            logit_bias: self.logit_bias.clone(),
            logprobs: self.logprobs,
            top_logprobs: self.top_logprobs,
            n: self.n,
            response_format: self.response_format.clone(),
            tools: self.tools.clone(),
            tool_choice: self.tool_choice.clone(),
            parallel_tool_calls: self.parallel_tool_calls,
            seed: self.seed,
            prompt_cache_key: self.prompt_cache_key.clone(),
            provider_options: self.provider_options.clone(),
            prompt_cache_retention: self.prompt_cache_retention.clone(),
            reasoning: self.reasoning.clone(),
            include_reasoning: self.include_reasoning,
            metadata: self.metadata.clone(),
            service_tier: self.service_tier.clone(),
            store: self.store,
            safety_identifier: self.safety_identifier.clone(),
            modalities: self.modalities.clone(),
            audio: self.audio.clone(),
            prediction: self.prediction.clone(),
            verbosity: self.verbosity.clone(),
            web_search_options: self.web_search_options.clone(),
            functions: self.functions.clone(),
            function_call: self.function_call.clone(),
            thinking_config: self.thinking_config.clone(),
        }
        .validate_openai_compatibility()
    }

    /// True when the target model family supports thinking at all
    /// (substring match on the model name).
    pub fn supports_thinking(&self) -> bool {
        self.model.contains("gemini-3") || self.model.contains("gemini-2.5")
    }

    /// True when the target model requires thought signatures (Gemini 3 only).
    pub fn requires_thought_signatures(&self) -> bool {
        self.model.contains("gemini-3")
    }
}
2334
2335impl ChatMessage {
2336 pub fn system(content: impl Into<String>) -> Self {
2342 Self {
2343 role: MessageRole::System,
2344 content: content.into(),
2345 }
2346 }
2347
2348 pub fn user(content: impl Into<String>) -> Self {
2354 Self {
2355 role: MessageRole::User,
2356 content: content.into(),
2357 }
2358 }
2359
2360 pub fn assistant(content: impl Into<String>) -> Self {
2366 Self {
2367 role: MessageRole::Assistant,
2368 content: content.into(),
2369 }
2370 }
2371}
2372
2373impl OpenAIMessageContent {
2374 pub fn text(content: impl Into<String>) -> Self {
2376 Self::Text(content.into())
2377 }
2378
2379 pub fn parts(parts: Vec<OpenAIContentPart>) -> Self {
2381 Self::Parts(parts)
2382 }
2383}
2384
2385impl OpenAIContentPart {
2386 pub fn text(content: impl Into<String>) -> Self {
2388 Self::Text {
2389 text: content.into(),
2390 }
2391 }
2392
2393 pub fn image_url(url: impl Into<String>) -> Self {
2395 Self::ImageUrl {
2396 image_url: OpenAIImageUrl {
2397 url: url.into(),
2398 detail: None,
2399 },
2400 }
2401 }
2402
2403 pub fn image_url_with_detail(url: impl Into<String>, detail: impl Into<String>) -> Self {
2405 Self::ImageUrl {
2406 image_url: OpenAIImageUrl {
2407 url: url.into(),
2408 detail: Some(detail.into()),
2409 },
2410 }
2411 }
2412}
2413
2414impl OpenAIChatMessage {
2415 pub fn system(content: impl Into<OpenAIMessageContent>) -> Self {
2417 Self {
2418 role: OpenAIMessageRole::System,
2419 content: Some(content.into()),
2420 name: None,
2421 tool_calls: None,
2422 tool_call_id: None,
2423 }
2424 }
2425
2426 pub fn user(content: impl Into<OpenAIMessageContent>) -> Self {
2428 Self {
2429 role: OpenAIMessageRole::User,
2430 content: Some(content.into()),
2431 name: None,
2432 tool_calls: None,
2433 tool_call_id: None,
2434 }
2435 }
2436
2437 pub fn assistant(content: impl Into<OpenAIMessageContent>) -> Self {
2439 Self {
2440 role: OpenAIMessageRole::Assistant,
2441 content: Some(content.into()),
2442 name: None,
2443 tool_calls: None,
2444 tool_call_id: None,
2445 }
2446 }
2447
2448 pub fn assistant_with_tool_calls(tool_calls: Vec<OpenAIToolCall>) -> Self {
2450 Self {
2451 role: OpenAIMessageRole::Assistant,
2452 content: None,
2453 name: None,
2454 tool_calls: Some(tool_calls),
2455 tool_call_id: None,
2456 }
2457 }
2458
2459 pub fn tool(tool_call_id: impl Into<String>, content: impl Into<OpenAIMessageContent>) -> Self {
2461 Self {
2462 role: OpenAIMessageRole::Tool,
2463 content: Some(content.into()),
2464 name: None,
2465 tool_calls: None,
2466 tool_call_id: Some(tool_call_id.into()),
2467 }
2468 }
2469
2470 pub fn with_parts(
2472 role: OpenAIMessageRole,
2473 content: Option<OpenAIMessageContent>,
2474 tool_calls: Option<Vec<OpenAIToolCall>>,
2475 tool_call_id: Option<String>,
2476 ) -> Self {
2477 Self {
2478 role,
2479 content,
2480 name: None,
2481 tool_calls,
2482 tool_call_id,
2483 }
2484 }
2485}
2486
/// Allows passing an owned `String` wherever message content is expected.
impl From<String> for OpenAIMessageContent {
    fn from(value: String) -> Self {
        Self::Text(value)
    }
}
2492
/// Allows passing a string slice wherever message content is expected.
impl From<&str> for OpenAIMessageContent {
    fn from(value: &str) -> Self {
        Self::Text(value.to_string())
    }
}
2498
2499#[cfg(feature = "legacy")]
2500mod legacy_types;
2501#[cfg(feature = "legacy")]
2502pub use legacy_types::{
2503 ApiKey, ChatRole, ChatUsage, CreditTransaction, DailyUsage, HealthCheck, HealthServices,
2504 HealthStatusEnum, TransactionType, UsageStats, User,
2505};
2506
/// Desired shape of the model's output.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ResponseFormat {
    /// Free-form text.
    Text,
    /// Any syntactically valid JSON object.
    JsonObject,
    /// JSON constrained by the supplied schema.
    JsonSchema {
        /// Raw JSON-schema document.
        json_schema: serde_json::Value,
    },
}
2521
/// A tool the model may call; currently only function tools exist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tool {
    /// Tool kind (serialized as the `type` field).
    pub r#type: ToolType,
    /// The callable function's definition.
    pub function: FunctionDefinition,
}
2530
/// Kind discriminator for `Tool`; serializes as "function".
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ToolType {
    Function,
}
2538
/// Declares a callable function: its name plus optional description and
/// JSON-schema parameter specification.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionDefinition {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// JSON schema for the function's arguments, when provided.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
}
2551
/// How the model should pick among the provided tools.
///
/// NOTE(review): with `#[serde(untagged)]`, the unit variants `None` and
/// `Auto` both serialize to JSON `null`, not the strings "none"/"auto" the
/// OpenAI API uses — confirm this matches the wire format callers expect.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ToolChoice {
    None,
    Auto,
    /// Force a call to one specific tool.
    Tool {
        r#type: ToolType,
        function: ToolFunction,
    },
}
2568
/// Names the specific function targeted by `ToolChoice::Tool`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolFunction {
    pub name: String,
}
2575
/// Gemini thinking configuration. `thinking_level` targets Gemini 3 and
/// `thinking_budget` targets Gemini 2.5; validation rejects setting both.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ThinkingConfig {
    /// Whether thought content should be included in responses.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_thoughts: Option<bool>,

    /// Thinking depth for Gemini 3 models.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_level: Option<ThinkingLevel>,

    /// Token budget for Gemini 2.5 models; -1 means dynamic (see validation).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking_budget: Option<i32>,
}
2591
/// Discrete thinking depths for Gemini 3; serialized lowercase.
/// Pro models accept only `Low` and `High` (enforced during validation).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ThinkingLevel {
    Minimal,
    Low,
    Medium,
    High,
}
2605
/// One part of an `EnhancedChatMessage`. Exactly one of the content fields
/// (`text` / `function_call` / `function_response`) is expected to be set by
/// the constructors in `impl ContentPart`, though the type does not enforce it.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ContentPart {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub function_call: Option<FunctionCall>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub function_response: Option<FunctionResponse>,

    /// Marks this part as model "thought" content.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thought: Option<bool>,

    /// Opaque signature accompanying thought parts (required by Gemini 3).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thought_signature: Option<String>,
}
2629
/// A model-issued function call: target name plus JSON arguments.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct FunctionCall {
    pub name: String,
    pub args: serde_json::Value,
}
2638
/// The result returned for a previously issued function call.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct FunctionResponse {
    pub name: String,
    pub response: serde_json::Value,
}
2647
/// A multi-part chat message (text, function calls/responses, thoughts).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct EnhancedChatMessage {
    pub role: MessageRole,
    pub parts: Vec<ContentPart>,
}
2656
/// Token usage totals, optionally including thinking tokens.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnhancedUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
    /// Tokens consumed by thinking, when the provider reports them.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thoughts_token_count: Option<u32>,
}
2670
impl ThinkingConfig {
    /// Empty config; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Preset for Gemini 3: sets a thinking level, leaves budget unset.
    pub fn gemini_3(level: ThinkingLevel, include_thoughts: bool) -> Self {
        Self {
            thinking_level: Some(level),
            include_thoughts: Some(include_thoughts),
            thinking_budget: None,
        }
    }

    /// Preset for Gemini 2.5: sets a token budget, leaves level unset.
    pub fn gemini_2_5(budget: i32, include_thoughts: bool) -> Self {
        Self {
            thinking_budget: Some(budget),
            include_thoughts: Some(include_thoughts),
            thinking_level: None,
        }
    }

    /// Preset: maximum thinking (High level, dynamic budget, thoughts shown).
    /// NOTE(review): sets BOTH thinking_level and thinking_budget, which
    /// `ChatCompletionRequest::validate_thinking_config` rejects ("Cannot
    /// specify both…") — confirm whether these presets are meant to pass
    /// validation or to be pruned per-model before sending.
    pub fn high_reasoning() -> Self {
        Self {
            thinking_level: Some(ThinkingLevel::High),
            include_thoughts: Some(true),
            thinking_budget: Some(-1), }
    }

    /// Preset: minimal thinking for fast replies.
    /// NOTE(review): same level+budget conflict as `high_reasoning` above.
    pub fn fast_response() -> Self {
        Self {
            thinking_level: Some(ThinkingLevel::Low),
            include_thoughts: Some(false),
            thinking_budget: Some(512), }
    }
}
2723
2724impl ContentPart {
2725 pub fn text(content: impl Into<String>) -> Self {
2727 Self {
2728 text: Some(content.into()),
2729 function_call: None,
2730 function_response: None,
2731 thought: None,
2732 thought_signature: None,
2733 }
2734 }
2735
2736 pub fn function_call(name: impl Into<String>, args: serde_json::Value) -> Self {
2738 Self {
2739 text: None,
2740 function_call: Some(FunctionCall {
2741 name: name.into(),
2742 args,
2743 }),
2744 function_response: None,
2745 thought: None,
2746 thought_signature: None,
2747 }
2748 }
2749
2750 pub fn function_response(name: impl Into<String>, response: serde_json::Value) -> Self {
2752 Self {
2753 text: None,
2754 function_call: None,
2755 function_response: Some(FunctionResponse {
2756 name: name.into(),
2757 response,
2758 }),
2759 thought: None,
2760 thought_signature: None,
2761 }
2762 }
2763
2764 pub fn with_thought_signature(mut self, signature: impl Into<String>) -> Self {
2766 self.thought_signature = Some(signature.into());
2767 self
2768 }
2769
2770 pub fn as_thought(mut self) -> Self {
2772 self.thought = Some(true);
2773 self
2774 }
2775}
2776
2777impl EnhancedChatMessage {
2778 pub fn system(content: impl Into<String>) -> Self {
2780 Self {
2781 role: MessageRole::System,
2782 parts: vec![ContentPart::text(content)],
2783 }
2784 }
2785
2786 pub fn user(content: impl Into<String>) -> Self {
2788 Self {
2789 role: MessageRole::User,
2790 parts: vec![ContentPart::text(content)],
2791 }
2792 }
2793
2794 pub fn assistant(content: impl Into<String>) -> Self {
2796 Self {
2797 role: MessageRole::Assistant,
2798 parts: vec![ContentPart::text(content)],
2799 }
2800 }
2801
2802 pub fn with_parts(role: MessageRole, parts: Vec<ContentPart>) -> Self {
2804 Self { role, parts }
2805 }
2806}
2807
/// Usage counters carried by a billing stream event; all fields optional.
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
pub struct RainyBillingUsage {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completion_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub image_units: Option<u32>,
}
2824
/// Billing information emitted on the stream. All fields are optional, so any
/// JSON object deserializes into this — `ChatStreamEvent::from_value` therefore
/// requires at least one field to be present before classifying as billing.
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
pub struct RainyBillingStreamEvent {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub plan_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub charged_credits: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<RainyBillingUsage>,
}
2838
/// One event on a chat completion stream, classified by `from_value`.
#[derive(Debug, Clone)]
pub enum ChatStreamEvent {
    /// A standard completion chunk.
    Chunk(ChatCompletionStreamResponse),
    /// A billing update.
    Billing(RainyBillingStreamEvent),
    /// Anything that matched neither shape, kept verbatim.
    Raw(serde_json::Value),
}
2849
2850impl ChatStreamEvent {
2851 pub fn from_value(value: serde_json::Value) -> Self {
2853 if let Ok(chunk) = serde_json::from_value::<ChatCompletionStreamResponse>(value.clone()) {
2854 return Self::Chunk(chunk);
2855 }
2856
2857 if let Ok(billing) = serde_json::from_value::<RainyBillingStreamEvent>(value.clone()) {
2858 if billing.plan_id.is_some()
2859 || billing.charged_credits.is_some()
2860 || billing.usage.is_some()
2861 {
2862 return Self::Billing(billing);
2863 }
2864 }
2865
2866 Self::Raw(value)
2867 }
2868}
2869
/// One chunk of a streamed chat completion (OpenAI-style wire format).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamResponse {
    /// Identifier of the completion this chunk belongs to.
    pub id: String,
    /// Object type string from the provider (e.g. a chunk marker —
    /// exact value depends on the upstream API).
    pub object: String,
    /// Creation time as a Unix timestamp.
    pub created: u64,
    /// Model that produced this chunk.
    pub model: String,
    /// Incremental choices carried by this chunk.
    pub choices: Vec<ChatCompletionStreamChoice>,
    /// Aggregate usage, typically only present on the final chunk —
    /// NOTE(review): presence semantics depend on the provider; confirm.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}
2887
/// A single choice within a streamed completion chunk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamChoice {
    /// Position of this choice among parallel completions.
    pub index: u32,
    /// Incremental delta to apply to the choice's accumulated message.
    pub delta: ChatCompletionStreamDelta,
    /// Why generation stopped; `None` while the stream is still running.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub finish_reason: Option<String>,
}
2899
/// Incremental message fragment delivered by a stream chunk.
///
/// All fields are optional: a given chunk may carry any subset of them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatCompletionStreamDelta {
    /// Message role, usually sent only on the first chunk of a choice.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub role: Option<String>,
    /// Fragment of the visible content text.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    /// Fragment of reasoning/thought text, when the model emits it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thought: Option<String>,
    /// Incremental tool-call fragments to merge by `index`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,
}
2916
/// A (possibly partial) tool call fragment from a stream delta.
///
/// Streaming splits tool calls across chunks: only `index` is always present;
/// `id`/`type` typically arrive on the first fragment and `function` pieces
/// accumulate across subsequent ones.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    /// Index identifying which tool call this fragment belongs to.
    pub index: u32,
    /// Tool call identifier, when included in this fragment.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Tool call type (raw identifier `type` is reserved in Rust, hence `r#type`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub r#type: Option<String>,
    /// Partial function name/arguments carried by this fragment.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub function: Option<ToolCallFunction>,
}
2932
/// Partial function payload of a streamed tool call.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCallFunction {
    /// Function name, when present in this fragment.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Fragment of the JSON-encoded argument string; concatenate fragments
    /// across chunks to reconstruct the full arguments.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arguments: Option<String>,
}