// oxi/lib.rs
1//! oxi: CLI coding harness
2//!
3//! This crate provides the main application logic for the oxi CLI.
4
5pub mod export;
6pub mod extensions;
7pub mod interactive;
8pub mod packages;
9pub mod print_mode;
10pub mod session;
11pub mod settings;
12pub mod skills;
13pub mod templates;
14pub mod tui_interactive;
15pub mod tui_components;
16
17// Utility modules
18pub mod event_bus;
19pub mod model_resolver;
20pub mod cli;
21pub mod git_utils;
22pub mod keybindings;
23pub mod output_guard;
24pub mod messages;
25pub mod auth_storage;
26pub mod bash_executor;
27pub mod diagnostics;
28pub mod resource_loader;
29pub mod footer_data;
30
31use anyhow::{Error, Result};
32use oxi_agent::{Agent, AgentConfig, AgentEvent};
33use oxi_ai::{get_model, get_provider};
34use parking_lot::RwLock;
35use settings::{Settings, ThinkingLevel};
36use skills::SkillManager;
37use std::sync::Arc;
38use tokio::sync::mpsc;
39use uuid::Uuid;
40
/// Application state and entry point.
///
/// Owns the agent handle, the user settings snapshot taken at construction,
/// and the loaded/active skill bookkeeping used to rebuild the system prompt.
pub struct App {
    // Shared agent; handed out to callers via `App::agent()`.
    agent: Arc<Agent>,
    // Settings captured at construction; read-only thereafter in this file.
    settings: Settings,
    // Loaded skill definitions, behind a lock for shared read access.
    skills: RwLock<SkillManager>,
    // Lowercased names of currently active skills (see `activate_skill`).
    active_skills: RwLock<Vec<String>>,
}
48
/// Chat message for display.
///
/// A display-oriented record of one turn; session persistence uses
/// `session::SessionEntry` instead (see `InteractiveSession`).
#[derive(Debug, Clone)]
pub struct ChatMessage {
    // Role string: the constructors below produce "user" or "assistant".
    pub role: String,
    pub content: String,
    // Stamped with `chrono::Utc::now()` at construction time.
    pub timestamp: chrono::DateTime<chrono::Utc>,
}
56
57impl ChatMessage {
58    pub fn user(content: String) -> Self {
59        Self {
60            role: "user".to_string(),
61            content,
62            timestamp: chrono::Utc::now(),
63        }
64    }
65
66    pub fn assistant(content: String) -> Self {
67        Self {
68            role: "assistant".to_string(),
69            content,
70            timestamp: chrono::Utc::now(),
71        }
72    }
73}
74
/// Interactive session state.
///
/// Holds both the display transcript (`messages`) and the persistence log
/// (`entries`); the two are appended in lock-step by the `add_*` methods.
pub struct InteractiveSession {
    // Display transcript, oldest first.
    pub messages: Vec<ChatMessage>,
    // True while an agent response is in flight (set by `InteractiveLoop`).
    pub thinking: bool,
    // Partial streamed response, promoted to a message by `finish_response`.
    pub current_response: String,
    // None until a persisted session is associated — TODO confirm who sets this.
    pub session_id: Option<Uuid>,
    // Persistence log mirroring `messages` (used for tree navigation/branching).
    pub entries: Vec<session::SessionEntry>,
}
83
84impl Default for InteractiveSession {
85    fn default() -> Self {
86        Self {
87            messages: Vec::new(),
88            thinking: false,
89            current_response: String::new(),
90            session_id: None,
91            entries: Vec::new(),
92        }
93    }
94}
95
96impl InteractiveSession {
97    pub fn new() -> Self {
98        Self::default()
99    }
100
101    pub fn add_user_message(&mut self, content: String) {
102        self.messages.push(ChatMessage::user(content.clone()));
103        // Also add to entries for session persistence
104        let entry = session::SessionEntry::new(session::AgentMessage::User { content });
105        self.entries.push(entry);
106    }
107
108    pub fn add_assistant_message(&mut self, content: String) {
109        self.messages.push(ChatMessage::assistant(content.clone()));
110        // Also add to entries for session persistence
111        let entry = session::SessionEntry::new(session::AgentMessage::Assistant { content });
112        self.entries.push(entry);
113        self.current_response.clear();
114    }
115
116    pub fn append_to_response(&mut self, text: &str) {
117        self.current_response.push_str(text);
118    }
119
120    pub fn finish_response(&mut self) {
121        if !self.current_response.is_empty() {
122            let response = std::mem::take(&mut self.current_response);
123            self.add_assistant_message(response);
124        }
125    }
126
127    /// Get all entries in the session
128    pub fn entries(&self) -> &[session::SessionEntry] {
129        &self.entries
130    }
131
132    /// Get entry at a specific index
133    pub fn get_entry(&self, index: usize) -> Option<&session::SessionEntry> {
134        self.entries.get(index)
135    }
136
137    /// Get entry by ID
138    pub fn get_entry_by_id(&self, id: Uuid) -> Option<&session::SessionEntry> {
139        self.entries.iter().find(|e| e.id == id)
140    }
141
142    /// Truncate entries at a given index (for branching)
143    pub fn truncate_at(&mut self, index: usize) {
144        self.entries.truncate(index + 1);
145    }
146}
147
148/// Build the system prompt based on thinking level and active skills
149fn build_system_prompt(
150    thinking_level: ThinkingLevel,
151    skill_contents: &[String],
152) -> String {
153    let mut prompt = match thinking_level {
154        ThinkingLevel::None => String::from(
155            "You are a helpful AI assistant. Provide direct, concise answers.",
156        ),
157        ThinkingLevel::Minimal => String::from(
158            "You are a helpful AI assistant. Provide clear and helpful answers.",
159        ),
160        ThinkingLevel::Standard => String::from(
161            "You are a helpful AI coding assistant. Think through problems \
162             step by step when helpful, but keep responses focused and actionable.",
163        ),
164        ThinkingLevel::Thorough => String::from(
165            "You are an expert AI coding assistant. Take time to thoroughly \
166             analyze problems, consider edge cases, and provide comprehensive \
167             solutions with explanations. Think deeply before responding.",
168        ),
169    };
170
171    // Append active skill content
172    for content in skill_contents {
173        prompt.push_str("\n\n---\n# Active Skill\n\n");
174        prompt.push_str(content);
175    }
176
177    prompt
178}
179
impl App {
    /// Create a new App instance.
    ///
    /// Resolves the configured model/provider pair, loads skills from disk,
    /// builds the initial system prompt from the settings' thinking level,
    /// and constructs the underlying [`Agent`].
    ///
    /// # Errors
    /// Returns an error if the resolved model or provider cannot be found.
    pub async fn new(settings: Settings) -> Result<Self> {
        let model_id = settings.effective_model(None);
        let provider_name = settings.effective_provider(None);

        // Parse model ID to get provider and model. A "provider/model" id
        // overrides the provider from settings; a bare id keeps the settings
        // provider. `parts[1..].join("/")` preserves extra slashes that are
        // part of the model name itself.
        let parts: Vec<&str> = model_id.split('/').collect();
        let (provider_name, model_name) = if parts.len() >= 2 {
            (parts[0].to_string(), parts[1..].join("/"))
        } else {
            (provider_name.clone(), model_id.clone())
        };

        // Validate that the model exists. The handle itself is unused — the
        // agent below is configured with the string `model_id` instead.
        let _model = get_model(&provider_name, &model_name)
            .ok_or_else(|| Error::msg(format!("Model '{}' not found", model_id)))?;

        // Create a provider for this model
        let provider = get_provider(&provider_name)
            .ok_or_else(|| Error::msg(format!("Provider '{}' not found", provider_name)))?;

        // Load skills from the canonical skills dir, falling back to
        // ~/.oxi/skills when the canonical location can't be determined.
        let skills_dir = SkillManager::skills_dir().unwrap_or_else(|_| {
            dirs::home_dir()
                .unwrap_or_default()
                .join(".oxi")
                .join("skills")
        });
        let skills = SkillManager::load_from_dir(&skills_dir).unwrap_or_else(|e| {
            tracing::debug!("Skills not loaded: {}", e);
            // NOTE(review): this fallback relies on `load_from_dir` returning
            // Ok (an empty manager) for a nonexistent path; if it ever errors
            // in that case the `unwrap()` panics at startup. Confirm, and
            // consider a `SkillManager::default()`-style fallback instead.
            SkillManager::load_from_dir(std::path::Path::new("/nonexistent")).unwrap()
        });

        // Build agent config from settings. No skills are active yet, so the
        // initial prompt carries no skill content.
        let system_prompt = build_system_prompt(settings.thinking_level, &[]);
        let compaction_strategy = if settings.auto_compaction {
            // Compact once the context is ~80% full.
            oxi_ai::CompactionStrategy::Threshold(0.8)
        } else {
            oxi_ai::CompactionStrategy::Disabled
        };
        let config = AgentConfig {
            name: "oxi".to_string(),
            description: Some("oxi CLI agent".to_string()),
            model_id: model_id.clone(),
            system_prompt: Some(system_prompt),
            max_iterations: 10,
            timeout_seconds: settings.tool_timeout_seconds,
            temperature: settings.effective_temperature(),
            max_tokens: settings.effective_max_tokens(),
            compaction_strategy,
            compaction_instruction: None,
            // NOTE(review): hard-coded context window; presumably this should
            // come from the resolved model's metadata — confirm.
            context_window: 128_000,
        };

        let agent = Arc::new(Agent::new(Arc::from(provider), config));

        Ok(Self {
            agent,
            settings,
            skills: RwLock::new(skills),
            active_skills: RwLock::new(Vec::new()),
        })
    }

    /// Get the current settings
    pub fn settings(&self) -> &Settings {
        &self.settings
    }

    /// Get a shared handle to the underlying agent.
    pub fn agent(&self) -> Arc<Agent> {
        Arc::clone(&self.agent)
    }

    /// Get the tool registry (for registering extension tools)
    pub fn agent_tools(&self) -> Arc<oxi_agent::ToolRegistry> {
        self.agent.tools()
    }

    /// Get a read guard on the skill manager. Callers must drop the guard
    /// before calling methods that take the write lock.
    pub fn skills(&self) -> parking_lot::RwLockReadGuard<'_, SkillManager> {
        self.skills.read()
    }

    /// Activate a skill by name. Returns an error string if not found.
    ///
    /// Names are stored lowercased; activating an already-active skill only
    /// rebuilds the prompt.
    pub fn activate_skill(&self, name: &str) -> Result<(), String> {
        // Existence check under a short-lived read lock before mutating.
        {
            let skills = self.skills.read();
            if skills.get(name).is_none() {
                return Err(format!("Skill '{}' not found", name));
            }
        }
        let name_lower = name.to_lowercase();
        {
            let mut active = self.active_skills.write();
            if !active.contains(&name_lower) {
                active.push(name_lower);
            }
        }
        // Both locks are released before rebuilding: `rebuild_system_prompt`
        // re-acquires its own read locks.
        self.rebuild_system_prompt();
        Ok(())
    }

    /// Deactivate a skill by name. Silently does nothing if it wasn't active.
    pub fn deactivate_skill(&self, name: &str) {
        let name_lower = name.to_lowercase();
        {
            let mut active = self.active_skills.write();
            active.retain(|n| n != &name_lower);
        }
        self.rebuild_system_prompt();
    }

    /// List currently active skill names
    pub fn active_skills(&self) -> Vec<String> {
        self.active_skills.read().clone()
    }

    /// Rebuild the system prompt with current active skills
    fn rebuild_system_prompt(&self) {
        let active = self.active_skills.read();
        let skills = self.skills.read();
        // Active names that no longer resolve to a loaded skill are
        // silently skipped.
        let contents: Vec<String> = active
            .iter()
            .filter_map(|name| skills.get(name).map(|s| s.content.clone()))
            .collect();
        let prompt = build_system_prompt(self.settings.thinking_level, &contents);
        self.agent.set_system_prompt(prompt);
    }

    /// Get a clone of the current agent state
    pub fn agent_state(&self) -> oxi_agent::AgentState {
        self.agent.state()
    }

    /// Run a single prompt and return the response
    pub async fn run_prompt(&self, prompt: String) -> Result<String> {
        let (response, _events) = self.agent.run(prompt).await?;
        Ok(response.content)
    }

    /// Run a prompt, streaming events to `on_event`.
    ///
    /// Returns the text of the most recent assistant message found in the
    /// agent state after the run, or an empty string if there is none.
    pub async fn run_prompt_with_events<F>(&self, prompt: String, on_event: F) -> Result<String>
    where
        F: FnMut(AgentEvent) + Send + 'static,
    {
        self.agent.run_streaming(prompt, on_event).await?;
        // Get the last assistant message's text content
        let state = self.agent_state();
        for msg in state.messages.iter().rev() {
            if let oxi_ai::Message::Assistant(a) = msg {
                return Ok(a.text_content());
            }
        }
        Ok(String::new())
    }

    /// Start an interactive session backed by this app.
    pub async fn run_interactive(&self) -> Result<InteractiveLoop<'_>> {
        let session = InteractiveSession::new();
        Ok(InteractiveLoop {
            app: self,
            session,
        })
    }

    /// Reset the conversation
    pub fn reset(&self) {
        self.agent.reset();
    }

    /// Switch the model used for future LLM calls.
    ///
    /// See [`Agent::switch_model`] for details.
    pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
        self.agent.switch_model(model_id)
    }

    /// Get the current model ID
    pub fn model_id(&self) -> String {
        self.agent.model_id()
    }
}
364
/// Interactive loop handle.
///
/// Borrows the [`App`] and owns the per-conversation [`InteractiveSession`];
/// dropped when the interactive session ends.
pub struct InteractiveLoop<'a> {
    app: &'a App,
    session: InteractiveSession,
}
370
371impl<'a> InteractiveLoop<'a> {
372    /// Add a user message and get the assistant response
373    pub async fn send_message(&mut self, prompt: String) -> Result<()> {
374        // Add user message
375        self.session.add_user_message(prompt.clone());
376        self.session.thinking = true;
377
378        // Run agent with channel
379        let (tx, mut rx) = mpsc::channel::<AgentEvent>(100);
380
381        // Run the agent — we execute inline instead of spawning because
382        // the agent's internal RwLockReadGuard is not Send-safe across
383        // await points. We use a select-like approach: run the agent in a
384        // local task that doesn't require Send.
385        let agent = Arc::clone(&self.app.agent);
386
387        // Use LocalSet to spawn a non-Send future
388        let local = tokio::task::LocalSet::new();
389        local.spawn_local(async move {
390            let _ = agent.run_with_channel(prompt, tx).await;
391        });
392
393        // Collect events
394        while let Some(event) = rx.recv().await {
395            match event {
396                AgentEvent::TextChunk { text } => {
397                    self.session.append_to_response(&text);
398                }
399                AgentEvent::Thinking => {
400                    // Thinking state
401                }
402                AgentEvent::Complete { .. } => {
403                    self.session.finish_response();
404                    self.session.thinking = false;
405                }
406                AgentEvent::Error { message } => {
407                    self.session.append_to_response(&format!("[Error: {}]", message));
408                    self.session.finish_response();
409                    self.session.thinking = false;
410                }
411                _ => {}
412            }
413        }
414
415        // Run local set to completion (drain remaining agent work)
416        local.await;
417
418        Ok(())
419    }
420
421    /// Get current messages
422    pub fn messages(&self) -> &[ChatMessage] {
423        &self.session.messages
424    }
425
426    /// Get the current partial response (while thinking)
427    pub fn current_response(&self) -> &str {
428        &self.session.current_response
429    }
430
431    /// Check if currently thinking
432    pub fn is_thinking(&self) -> bool {
433        self.session.thinking
434    }
435
436    /// Get session entries for tree navigation
437    pub fn entries(&self) -> &[session::SessionEntry] {
438        self.session.entries()
439    }
440
441    /// Get entry by ID
442    pub fn get_entry(&self, id: Uuid) -> Option<&session::SessionEntry> {
443        self.session.get_entry_by_id(id)
444    }
445
446    /// Switch the model used for future LLM calls
447    pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
448        self.app.switch_model(model_id)
449    }
450
451    /// Get the current model ID
452    pub fn model_id(&self) -> String {
453        self.app.model_id()
454    }
455}