1pub mod anthropic;
2pub mod copilot;
3pub mod openai;
4
5use std::{future::Future, pin::Pin};
6
7use serde::{Deserialize, Serialize};
8use tokio::sync::mpsc::UnboundedReceiver;
9
/// A single turn in a conversation: who said it and the ordered content
/// blocks that make it up.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Author of the message (user, assistant, or system).
    pub role: Role,
    /// Ordered content blocks — text, images, tool calls/results, etc.
    pub content: Vec<ContentBlock>,
}
15
16#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
17#[serde(rename_all = "lowercase")]
18pub enum Role {
19 User,
20 Assistant,
21 System,
22}
23
/// One unit of message content. Serialized with an internal `"type"` tag
/// in `snake_case` (e.g. `"tool_use"`, `"tool_result"`).
///
/// NOTE(review): serde's internally-tagged representation cannot serialize a
/// newtype variant wrapping a plain `String`, so serializing `Text` (and
/// possibly deserializing it) through this derive would fail at runtime.
/// Presumably the provider modules translate these blocks into their own
/// wire types before serialization — confirm, or change to
/// `Text { text: String }` (an interface-breaking change for callers).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
    /// Plain text content.
    Text(String),
    /// An inline image.
    Image {
        // MIME type of the image, e.g. "image/png" — presumably; confirm
        // against the provider implementations.
        media_type: String,
        // Image bytes; likely base64-encoded — TODO confirm.
        data: String,
    },
    /// The assistant requesting a tool invocation.
    ToolUse {
        // Provider-assigned id correlating this call with its ToolResult.
        id: String,
        // Name of the tool being invoked (matches ToolDefinition::name).
        name: String,
        // Arguments for the tool, shaped by the tool's input schema.
        input: serde_json::Value,
    },
    /// The result of a previously requested tool invocation.
    ToolResult {
        // Matches the `id` of the corresponding ToolUse block.
        tool_use_id: String,
        // Tool output (or error text when `is_error` is true).
        content: String,
        // True when the tool failed and `content` describes the error.
        is_error: bool,
    },
    /// Extended "thinking" content produced by the model.
    Thinking {
        thinking: String,
        // Opaque provider signature over the thinking content — presumably
        // required to replay the block back to the provider; confirm.
        signature: String,
    },
    /// A summary block replacing compacted conversation history.
    Compaction {
        content: String,
    },
}
50
51#[derive(Debug, Clone, Serialize, Deserialize)]
52pub struct ToolDefinition {
53 pub name: String,
54 pub description: String,
55 pub input_schema: serde_json::Value,
56}
57
/// A single event emitted while streaming a model response.
///
/// Currently just wraps [`StreamEventType`]; kept as a struct so
/// event-level metadata can be added without changing the channel type.
#[derive(Debug, Clone)]
pub struct StreamEvent {
    /// The kind of event and its payload.
    pub event_type: StreamEventType,
}
62
/// The kinds of events a provider can emit over the stream channel.
///
/// Deltas arrive incrementally between a `MessageStart` and a `MessageEnd`;
/// `Error` reports a stream-level failure.
#[derive(Debug, Clone)]
pub enum StreamEventType {
    /// A chunk of assistant text.
    TextDelta(String),
    /// A chunk of extended "thinking" text.
    ThinkingDelta(String),
    /// The full thinking block, emitted once thinking finishes.
    ThinkingComplete {
        thinking: String,
        // Opaque provider signature over the thinking content — presumably
        // needed to echo the block back to the provider; confirm.
        signature: String,
    },
    /// The model began a tool call.
    ToolUseStart {
        // Provider-assigned id for correlating the eventual tool result.
        id: String,
        name: String,
    },
    /// A chunk of the tool call's JSON input, streamed as raw text.
    ToolUseInputDelta(String),
    /// The current tool call's input is complete.
    ToolUseEnd,
    /// Server-side compaction finished; payload is the compacted content.
    CompactionComplete(String),
    /// A new message is starting.
    MessageStart,
    /// The message finished, with why it stopped and token accounting.
    MessageEnd {
        stop_reason: StopReason,
        usage: Usage,
    },
    /// A stream-level error, described as text.
    Error(String),
}
85
/// Why the model stopped generating, reported in
/// `StreamEventType::MessageEnd`.
///
/// Fieldless, so `Copy`, `PartialEq`, and `Eq` are derived eagerly —
/// callers can branch on the stop reason with `==`/`matches!` instead of
/// cloning or writing full `match` arms.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StopReason {
    /// The model finished its turn naturally.
    EndTurn,
    /// Generation was cut off by the max-token limit.
    MaxTokens,
    /// The model stopped in order to invoke a tool.
    ToolUse,
    /// A configured stop sequence was encountered.
    StopSequence,
}
93
/// Token accounting for a single request/response exchange.
///
/// All fields are plain `u32` counters, so `Copy`, `PartialEq`, and `Eq`
/// are derived eagerly — the struct is a cheap value type that callers can
/// pass around and compare without cloning.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Usage {
    /// Tokens consumed by the request input.
    pub input_tokens: u32,
    /// Tokens produced in the response.
    pub output_tokens: u32,
    /// Input tokens served from the provider's prompt cache.
    pub cache_read_tokens: u32,
    /// Input tokens written into the provider's prompt cache.
    pub cache_write_tokens: u32,
}
101
/// Common interface implemented by each backend (see the [`anthropic`],
/// [`copilot`], and [`openai`] modules).
///
/// The async methods return hand-boxed `Pin<Box<dyn Future ...>>` rather
/// than `async fn` so the trait stays dyn-compatible (usable as
/// `dyn Provider`).
pub trait Provider: Send + Sync {
    /// Identifier for this provider (e.g. for display or config).
    fn name(&self) -> &str;
    /// Identifier of the currently selected model.
    fn model(&self) -> &str;
    /// Select the model used by subsequent requests.
    fn set_model(&mut self, model: String);
    /// Models currently known to this provider.
    // Synchronous, unlike `fetch_models` — presumably returns a cached or
    // hard-coded list; confirm against the implementations.
    fn available_models(&self) -> Vec<String>;
    /// Context window size of the current model, in tokens.
    fn context_window(&self) -> u32;
    /// Whether the backend can compact conversation history server-side.
    /// Defaults to `false`; providers opt in by overriding.
    fn supports_server_compaction(&self) -> bool {
        false
    }
    /// Asynchronously determine the current model's context window, in
    /// tokens.
    fn fetch_context_window(
        &self,
    ) -> Pin<Box<dyn Future<Output = anyhow::Result<u32>> + Send + '_>>;
    /// Whether image content blocks are supported.
    /// Defaults to `true`; providers without vision override this.
    fn supports_vision(&self) -> bool {
        true
    }
    /// Asynchronously fetch the list of available model identifiers.
    fn fetch_models(
        &self,
    ) -> Pin<Box<dyn Future<Output = anyhow::Result<Vec<String>>> + Send + '_>>;
    /// Start streaming a completion for `messages`.
    ///
    /// * `system` — optional system prompt.
    /// * `tools` — tool definitions the model may invoke.
    /// * `max_tokens` — response token limit.
    /// * `thinking_budget` — token budget for extended "thinking"
    ///   (provider-specific interpretation — see implementations).
    ///
    /// On success, yields a receiver of [`StreamEvent`]s for this response.
    fn stream(
        &self,
        messages: &[Message],
        system: Option<&str>,
        tools: &[ToolDefinition],
        max_tokens: u32,
        thinking_budget: u32,
    ) -> Pin<Box<dyn Future<Output = anyhow::Result<UnboundedReceiver<StreamEvent>>> + Send + '_>>;

    /// Like [`Provider::stream`], but targeting a specific `model` for this
    /// request only.
    ///
    /// The default implementation ignores `model` and delegates to
    /// [`Provider::stream`] (i.e. uses the currently selected model);
    /// providers supporting per-request model overrides should override it.
    fn stream_with_model(
        &self,
        model: &str,
        messages: &[Message],
        system: Option<&str>,
        tools: &[ToolDefinition],
        max_tokens: u32,
        thinking_budget: u32,
    ) -> Pin<Box<dyn Future<Output = anyhow::Result<UnboundedReceiver<StreamEvent>>> + Send + '_>>
    {
        // Explicitly discard `model`: the default path has no way to apply it.
        let _ = model;
        self.stream(messages, system, tools, max_tokens, thinking_budget)
    }
}