1use async_trait::async_trait;
2use futures_util::Stream;
3use models::*;
4use reqwest::header::HeaderMap;
5use rmcp::model::Content;
6use stakpak_shared::models::integrations::openai::{
7 ChatCompletionResponse, ChatCompletionStreamResponse, ChatMessage, Tool,
8};
9use uuid::Uuid;
10
11pub mod client;
12pub mod error;
13pub mod local;
14pub mod models;
15pub mod stakpak;
16pub mod storage;
17
18pub use client::{
20 AgentClient, AgentClientConfig, DEFAULT_STAKPAK_ENDPOINT, ModelOptions, StakpakConfig,
21};
22
23pub use stakai::{Model, ModelCost, ModelLimit};
25
26pub use storage::{
28 BoxedSessionStorage, Checkpoint, CheckpointState, CheckpointSummary, CreateCheckpointRequest,
29 CreateSessionRequest as StorageCreateSessionRequest, CreateSessionResult, ListCheckpointsQuery,
30 ListCheckpointsResult, ListSessionsQuery, ListSessionsResult, LocalStorage, Session,
31 SessionStats, SessionStatus, SessionStorage, SessionSummary, SessionVisibility, StakpakStorage,
32 StorageError, UpdateSessionRequest as StorageUpdateSessionRequest,
33};
34
35pub fn find_model(model_str: &str, use_stakpak: bool) -> Option<Model> {
43 const PROVIDERS: &[&str] = &["anthropic", "openai", "google"];
44
45 let (provider_hint, model_id) = parse_model_string(model_str);
46
47 let model = provider_hint
49 .and_then(|p| find_in_provider(p, model_id))
50 .or_else(|| {
51 PROVIDERS
52 .iter()
53 .find_map(|&p| find_in_provider(p, model_id))
54 })?;
55
56 Some(if use_stakpak {
57 transform_for_stakpak(model)
58 } else {
59 model
60 })
61}
62
/// Split a model string into an optional provider hint and the model id.
///
/// `"anthropic/claude-3"` yields `(Some("anthropic"), "claude-3")`; a string
/// with no `/` yields `(None, s)`. The legacy `"gemini"` provider name is
/// normalized to `"google"`.
///
/// Uses `str::split_once`, so no manual byte slicing (and no
/// `clippy::string_slice` allow) is needed.
fn parse_model_string(s: &str) -> (Option<&str>, &str) {
    match s.split_once('/') {
        Some((provider, model_id)) => {
            // Normalize the historical "gemini" alias to its canonical name.
            let normalized = match provider {
                "gemini" => "google",
                p => p,
            };
            (Some(normalized), model_id)
        }
        None => (None, s),
    }
}
79
80fn find_in_provider(provider_id: &str, model_id: &str) -> Option<Model> {
82 let models = stakai::load_models_for_provider(provider_id).ok()?;
83
84 if let Some(model) = models.iter().find(|m| m.id == model_id) {
86 return Some(model.clone());
87 }
88
89 let mut best_match: Option<&Model> = None;
92 let mut best_len = 0;
93
94 for model in &models {
95 if model_id.starts_with(&model.id) && model.id.len() > best_len {
96 best_match = Some(model);
97 best_len = model.id.len();
98 }
99 }
100
101 best_match.cloned()
102}
103
104fn transform_for_stakpak(model: Model) -> Model {
106 Model {
107 id: format!("{}/{}", model.provider, model.id),
108 provider: "stakpak".into(),
109 name: model.name,
110 reasoning: model.reasoning,
111 cost: model.cost,
112 limit: model.limit,
113 release_date: model.release_date,
114 }
115}
116
#[async_trait]
/// Backend-agnostic contract for an agent provider.
///
/// Implementors also supply [`SessionStorage`], so a provider covers both
/// conversational inference and session persistence. All fallible methods
/// report errors as plain `String`s.
pub trait AgentProvider: SessionStorage + Send + Sync {
    /// Fetch the account of the currently authenticated user.
    async fn get_my_account(&self) -> Result<GetMyAccountResponse, String>;
    /// Fetch billing information for the account identified by
    /// `account_username`.
    async fn get_billing_info(
        &self,
        account_username: &str,
    ) -> Result<stakpak_shared::models::billing::BillingResponse, String>;

    /// List all rulebooks visible to the current account.
    async fn list_rulebooks(&self) -> Result<Vec<ListRuleBook>, String>;
    /// Fetch a single rulebook by its URI.
    async fn get_rulebook_by_uri(&self, uri: &str) -> Result<RuleBook, String>;
    /// Create a rulebook. `visibility` of `None` presumably falls back to a
    /// backend default — confirm against the implementations.
    async fn create_rulebook(
        &self,
        uri: &str,
        description: &str,
        content: &str,
        tags: Vec<String>,
        visibility: Option<RuleBookVisibility>,
    ) -> Result<CreateRuleBookResponse, String>;
    /// Delete the rulebook identified by `uri`.
    async fn delete_rulebook(&self, uri: &str) -> Result<(), String>;

    /// Run a non-streaming chat completion. `session_id` and `metadata`
    /// are optional request context passed through to the backend.
    async fn chat_completion(
        &self,
        model: Model,
        messages: Vec<ChatMessage>,
        tools: Option<Vec<Tool>>,
        session_id: Option<Uuid>,
        metadata: Option<serde_json::Value>,
    ) -> Result<ChatCompletionResponse, String>;
    /// Run a streaming chat completion. Returns the stream of response
    /// chunks together with an optional `String` — presumably a request id
    /// usable with [`AgentProvider::cancel_stream`]; verify against the
    /// implementations. `headers` allows callers to attach extra HTTP
    /// headers to the request.
    async fn chat_completion_stream(
        &self,
        model: Model,
        messages: Vec<ChatMessage>,
        tools: Option<Vec<Tool>>,
        headers: Option<HeaderMap>,
        session_id: Option<Uuid>,
        metadata: Option<serde_json::Value>,
    ) -> Result<
        (
            std::pin::Pin<
                Box<dyn Stream<Item = Result<ChatCompletionStreamResponse, ApiStreamError>> + Send>,
            >,
            Option<String>,
        ),
        String,
    >;
    /// Cancel an in-flight stream identified by `request_id`.
    async fn cancel_stream(&self, request_id: String) -> Result<(), String>;

    /// Search documentation; results are returned as MCP [`Content`] items.
    async fn search_docs(&self, input: &SearchDocsRequest) -> Result<Vec<Content>, String>;

    /// Persist the session state at `checkpoint_id` into long-term memory.
    async fn memorize_session(&self, checkpoint_id: Uuid) -> Result<(), String>;
    /// Search previously memorized content; results as MCP [`Content`] items.
    async fn search_memory(&self, input: &SearchMemoryRequest) -> Result<Vec<Content>, String>;

    /// Read messages from a Slack channel (details are defined by the
    /// request type).
    async fn slack_read_messages(
        &self,
        input: &SlackReadMessagesRequest,
    ) -> Result<Vec<Content>, String>;
    /// Read replies in a Slack thread.
    async fn slack_read_replies(
        &self,
        input: &SlackReadRepliesRequest,
    ) -> Result<Vec<Content>, String>;
    /// Send a message to Slack.
    async fn slack_send_message(
        &self,
        input: &SlackSendMessageRequest,
    ) -> Result<Vec<Content>, String>;

    /// List the models this provider can serve. Infallible by signature.
    async fn list_models(&self) -> Vec<Model>;
}