// oli_tui/apis/api_client.rs

1use anyhow::Result;
2use serde::{Deserialize, Serialize};
3use std::sync::Arc;
4
/// A single chat message exchanged with an LLM provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Role of the author: "system", "user", or "assistant" (see the constructors below).
    pub role: String,
    /// Text content of the message.
    pub content: String,
}
10
11impl Message {
12    pub fn system(content: String) -> Self {
13        Self {
14            role: "system".to_string(),
15            content,
16        }
17    }
18
19    pub fn user(content: String) -> Self {
20        Self {
21            role: "user".to_string(),
22            content,
23        }
24    }
25
26    pub fn assistant(content: String) -> Self {
27        Self {
28            role: "assistant".to_string(),
29            content,
30        }
31    }
32}
33
34/// Manages the conversation session with history of messages
35#[derive(Debug, Clone)]
36pub struct SessionManager {
37    /// History of messages for the current session
38    pub messages: Vec<Message>,
39    /// Maximum number of messages to keep in the session
40    pub max_messages: usize,
41    /// System message to prepend to all conversations
42    pub system_message: Option<Message>,
43}
44
45impl Default for SessionManager {
46    fn default() -> Self {
47        Self {
48            messages: Vec::new(),
49            max_messages: 100,
50            system_message: None,
51        }
52    }
53}
54
55impl SessionManager {
56    /// Create a new session manager with a specific message capacity
57    pub fn new(max_messages: usize) -> Self {
58        Self {
59            messages: Vec::new(),
60            max_messages,
61            system_message: None,
62        }
63    }
64
65    /// Add a system message that will be prepended to the conversation
66    pub fn with_system_message(mut self, content: String) -> Self {
67        self.system_message = Some(Message::system(content));
68        self
69    }
70
71    /// Add a user message to the conversation
72    pub fn add_user_message(&mut self, content: String) {
73        self.add_message(Message::user(content));
74    }
75
76    /// Add an assistant message to the conversation
77    pub fn add_assistant_message(&mut self, content: String) {
78        self.add_message(Message::assistant(content));
79    }
80
81    /// Add a message to the conversation
82    pub fn add_message(&mut self, message: Message) {
83        self.messages.push(message);
84        self.trim_if_needed();
85    }
86
87    /// Replace all messages with a single summary message
88    pub fn replace_with_summary(&mut self, summary: String) {
89        self.messages.clear();
90        self.add_message(Message::system(format!(
91            "Previous conversation summary: {}",
92            summary
93        )));
94    }
95
96    /// Get all messages for the API call, including the system message if present
97    pub fn get_messages_for_api(&self) -> Vec<Message> {
98        let mut api_messages = Vec::new();
99
100        // Add system message if present
101        if let Some(sys_message) = &self.system_message {
102            api_messages.push(sys_message.clone());
103        }
104
105        // Add conversation messages
106        api_messages.extend(self.messages.clone());
107
108        api_messages
109    }
110
111    /// Clear all messages in the session
112    pub fn clear(&mut self) {
113        self.messages.clear();
114    }
115
116    /// Get the current number of messages
117    pub fn message_count(&self) -> usize {
118        self.messages.len()
119    }
120
121    /// Trim messages if the count exceeds max_messages
122    fn trim_if_needed(&mut self) {
123        if self.messages.len() > self.max_messages {
124            let to_remove = self.messages.len() - self.max_messages;
125            self.messages.drain(0..to_remove);
126        }
127    }
128}
129
/// Definition of a tool the model may request to call.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    /// Name the model uses to invoke the tool.
    pub name: String,
    /// Human-readable description of what the tool does.
    pub description: String,
    /// JSON value describing the tool's parameters
    /// (presumably a JSON schema — confirm against provider clients).
    pub parameters: serde_json::Value,
}
136
/// A tool invocation requested by the model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    /// Call identifier. Required for OpenAI to map tool results back to calls.
    pub id: Option<String>,
    /// Name of the tool to invoke.
    pub name: String,
    /// Arguments for the call as a JSON value.
    pub arguments: serde_json::Value,
}
143
/// The output produced by executing one requested tool call.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
    /// Identifier of the `ToolCall` this result answers.
    pub tool_call_id: String,
    /// Textual output of the tool execution.
    pub output: String,
}
149
/// Options controlling a completion request (see `Default` for default values).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionOptions {
    /// Sampling temperature; `None` leaves the provider default.
    pub temperature: Option<f32>,
    /// Nucleus-sampling threshold; `None` leaves the provider default.
    pub top_p: Option<f32>,
    /// Upper bound on generated tokens; `None` leaves the provider default.
    pub max_tokens: Option<u32>,
    /// Tools made available to the model, if any.
    pub tools: Option<Vec<ToolDefinition>>,
    /// Optional JSON schema for structured output
    /// (stored as a string — presumably serialized schema; confirm at call sites).
    pub json_schema: Option<String>,
    /// If true, the model is required to use a tool rather than reply with text.
    pub require_tool_use: bool,
}
159
160impl Default for CompletionOptions {
161    fn default() -> Self {
162        Self {
163            temperature: Some(0.7),
164            top_p: Some(0.9),
165            max_tokens: Some(2048),
166            tools: None,
167            json_schema: None,
168            require_tool_use: false,
169        }
170    }
171}
172
/// Common async interface implemented by each LLM provider client.
///
/// Note: per the original author, this trait cannot be made into a dyn trait
/// because it has async methods; `ApiClientEnum` is used for dispatch instead.
#[async_trait::async_trait]
pub trait ApiClient: Send + Sync {
    /// Basic completion without tool usage; returns the completion text.
    #[allow(dead_code)]
    async fn complete(&self, messages: Vec<Message>, options: CompletionOptions) -> Result<String>;

    /// Completion that may use tools. `tool_results` carries outputs of
    /// previously requested calls; returns the completion text plus any
    /// tool calls the model requested.
    async fn complete_with_tools(
        &self,
        messages: Vec<Message>,
        options: CompletionOptions,
        tool_results: Option<Vec<ToolResult>>,
    ) -> Result<(String, Option<Vec<ToolCall>>)>;
}
187
/// Provider dispatch. Instead of using a trait object, an enum wraps each
/// concrete client so calls are matched per variant.
#[derive(Clone)]
pub enum ApiClientEnum {
    /// Client for the Anthropic API.
    Anthropic(Arc<crate::apis::anthropic::AnthropicClient>),
    /// Client for the OpenAI API.
    OpenAi(Arc<crate::apis::openai::OpenAIClient>),
    /// Client for the Ollama API.
    Ollama(Arc<crate::apis::ollama::OllamaClient>),
}
195
196impl ApiClientEnum {
197    #[allow(dead_code)]
198    pub async fn complete(
199        &self,
200        messages: Vec<Message>,
201        options: CompletionOptions,
202    ) -> Result<String> {
203        match self {
204            Self::Anthropic(client) => client.complete(messages, options).await,
205            Self::OpenAi(client) => client.complete(messages, options).await,
206            Self::Ollama(client) => client.complete(messages, options).await,
207        }
208    }
209
210    pub async fn complete_with_tools(
211        &self,
212        messages: Vec<Message>,
213        options: CompletionOptions,
214        tool_results: Option<Vec<ToolResult>>,
215    ) -> Result<(String, Option<Vec<ToolCall>>)> {
216        match self {
217            Self::Anthropic(client) => {
218                client
219                    .complete_with_tools(messages, options, tool_results)
220                    .await
221            }
222            Self::OpenAi(client) => {
223                client
224                    .complete_with_tools(messages, options, tool_results)
225                    .await
226            }
227            Self::Ollama(client) => {
228                client
229                    .complete_with_tools(messages, options, tool_results)
230                    .await
231            }
232        }
233    }
234}
235
/// Alias for the provider-dispatch enum.
// NOTE(review): the name suggests this replaced an earlier `dyn ApiClient`
// type — confirm before removing.
pub type DynApiClient = ApiClientEnum;