// oxi/lib.rs — crate root
#![warn(missing_docs)]
#![warn(clippy::unwrap_used)]
#![allow(unknown_lints)]

//! oxi: CLI coding harness
//!
//! This crate provides the main application logic for the oxi CLI.

// ─── Root-level entry modules ───────────────────────────────────────────────
// cli must be pub for main.rs binary
pub mod cli;
pub mod print_mode;
pub mod setup_wizard;

// ─── Directory groups ───────────────────────────────────────────────────────
pub(crate) mod app;
pub(crate) mod context;
pub mod extensions; // public for main.rs
pub(crate) mod infra;
pub(crate) mod media;
pub(crate) mod prompt;
pub(crate) mod rpc_mode;
pub(crate) mod skills;
pub mod storage; // public for main.rs (packages)
                 // Re-exports from storage for main.rs
pub use storage::packages::PackageManager;
pub use storage::packages::ResourceKind;
pub mod tui; // public for main.rs
pub(crate) mod ui;
pub(crate) mod util;

// ─── oxi-store re-exports (shared persistent state) ─────────────────────────
// Re-exported so downstream code (main.rs, tests) can use oxi-store types
// without depending on the oxi-store crate directly.
pub use oxi_store::{
    auth_guidance, auth_storage, model_registry, model_resolver, session, session_cwd,
    session_navigation, settings, settings_validation, AgentMessage, AssistantContentBlock,
    AuthStorage, ContentBlock, ContentValue, ModelRegistry, SessionEntry, SessionManager,
    SessionTreeNode, Settings, ValidationReport,
};

/// Context for compaction operations, passed to extension hooks
#[derive(Debug, Clone)]
pub struct CompactionContext {
    /// Messages being compacted
    pub messages_count: usize,
    /// Estimated tokens before compaction
    pub tokens_before: usize,
    /// Target token count after compaction
    pub target_tokens: usize,
    /// Strategy being used
    pub strategy: String,
}

impl CompactionContext {
    /// Build a new compaction context from raw counts and a strategy label.
    pub fn new(
        messages_count: usize,
        tokens_before: usize,
        target_tokens: usize,
        strategy: impl Into<String>,
    ) -> Self {
        let strategy = strategy.into();
        Self {
            messages_count,
            tokens_before,
            target_tokens,
            strategy,
        }
    }

    /// Expected compression ratio (`target / before`).
    ///
    /// Returns `1.0` when there were no tokens to begin with, avoiding a
    /// division by zero.
    pub fn compression_ratio(&self) -> f32 {
        match self.tokens_before {
            0 => 1.0,
            before => self.target_tokens as f32 / before as f32,
        }
    }
}

// ─── Module-level imports ────────────────────────────────────────────────────
use anyhow::{Error, Result};
use oxi_agent::{Agent, AgentConfig, AgentEvent};
use oxi_sdk::OxiBuilder;
use parking_lot::RwLock;
use skills::SkillManager;
use std::sync::Arc;
use uuid::Uuid;

// ─── Application state ───────────────────────────────────────────────────────

/// Application state and entry point
pub struct App {
    /// SDK engine for provider/model resolution
    #[allow(dead_code)] // reachable via the `engine()` accessor; kept for callers outside this file
    engine: oxi_sdk::Oxi,
    /// Agent driving all LLM interactions; shared via `Arc` with spawned tasks.
    agent: Arc<Agent>,
    /// Settings captured at construction time.
    settings: Settings,
    /// Skill definitions loaded from the skills directory.
    skills: RwLock<SkillManager>,
    /// Lowercased names of currently active skills (see `activate_skill`).
    active_skills: RwLock<Vec<String>>,
    /// Optional WASM extension manager, injected after construction via `set_wasm_ext`.
    wasm_ext: Option<std::sync::Arc<crate::extensions::WasmExtensionManager>>,
    /// Bridge for the questionnaire tool; `Some` once `App::new` has run.
    questionnaire_bridge:
        Option<std::sync::Arc<oxi_agent::tools::questionnaire::QuestionnaireBridge>>,
}
102
103/// Chat message for display
104#[derive(Debug, Clone, serde::Serialize)]
105pub struct ChatMessage {
106    /// Role of the message sender (e.g. "user" or "assistant").
107    pub role: String,
108    /// Text content of the message.
109    pub content: String,
110    /// Timestamp when the message was created.
111    pub timestamp: chrono::DateTime<chrono::Utc>,
112}
113
114impl ChatMessage {
115    /// Create a new user chat message.
116    pub fn user(content: String) -> Self {
117        Self {
118            role: "user".to_string(),
119            content,
120            timestamp: chrono::Utc::now(),
121        }
122    }
123
124    /// Create a new assistant chat message.
125    pub fn assistant(content: String) -> Self {
126        Self {
127            role: "assistant".to_string(),
128            content,
129            timestamp: chrono::Utc::now(),
130        }
131    }
132}

/// Interactive session state
///
/// Holds the display-oriented chat history alongside the raw
/// [`SessionEntry`] list used for persistence and tree navigation.
#[derive(Debug, Clone, Default)]
pub struct InteractiveSession {
    /// Chat messages exchanged so far.
    pub messages: Vec<ChatMessage>,
    /// Whether the assistant is currently generating a response.
    pub thinking: bool,
    /// Partial response text accumulated during streaming.
    pub current_response: String,
    /// Unique session identifier.
    pub session_id: Option<Uuid>,
    /// Optional human-readable session name.
    pub name: Option<String>,
    /// Raw session entries for persistence and tree navigation.
    pub entries: Vec<SessionEntry>,
}
150
151impl InteractiveSession {
152    /// Create a new empty interactive session.
153    pub fn new() -> Self {
154        Self::default()
155    }
156
157    /// Add a user message to the session.
158    pub fn add_user_message(&mut self, content: String) {
159        self.messages.push(ChatMessage::user(content.clone()));
160        let entry = SessionEntry::new(AgentMessage::User {
161            content: ContentValue::String(content),
162        });
163        self.entries.push(entry);
164    }
165
166    /// Add an assistant message to the session.
167    pub fn add_assistant_message(&mut self, content: String) {
168        self.messages.push(ChatMessage::assistant(content.clone()));
169        let entry = SessionEntry::new(AgentMessage::Assistant {
170            content: vec![AssistantContentBlock::Text { text: content }],
171            provider: None,
172            model_id: None,
173            usage: None,
174            stop_reason: None,
175        });
176        self.entries.push(entry);
177        self.current_response.clear();
178    }
179
180    /// Append text to the current partial streaming response.
181    pub fn append_to_response(&mut self, text: &str) {
182        self.current_response.push_str(text);
183    }
184
185    /// Finalize the current streaming response into a full assistant message.
186    pub fn finish_response(&mut self) {
187        if !self.current_response.is_empty() {
188            let response = std::mem::take(&mut self.current_response);
189            self.add_assistant_message(response);
190        }
191    }
192
193    /// Get all entries in the session
194    pub fn entries(&self) -> &[SessionEntry] {
195        &self.entries
196    }
197
198    /// Get entry at a specific index
199    pub fn get_entry(&self, index: usize) -> Option<&SessionEntry> {
200        self.entries.get(index)
201    }
202
203    /// Get entry by ID
204    pub fn get_entry_by_id(&self, id: &str) -> Option<&SessionEntry> {
205        self.entries.iter().find(|e| e.id == id)
206    }
207
208    /// Truncate entries at a given index (for branching)
209    pub fn truncate_at(&mut self, index: usize) {
210        self.entries.truncate(index + 1);
211    }
212}
213
214// ─── System prompt builder ───────────────────────────────────────────────────
215
216// TODO: This build_system_prompt duplicates the one in
217// app/agent_session_runtime.rs. Both delegate to prompt::system_prompt::build_system_prompt
218// but with different options (this one passes skills; the other passes tool_snippets).
219// Unify into a single shared utility that accepts all options.
220fn build_system_prompt(
221    thinking_level: oxi_store::settings::ThinkingLevel,
222    skill_contents: &[String],
223) -> String {
224    let custom_prompt = match thinking_level {
225        oxi_store::settings::ThinkingLevel::Off => Some(String::from(
226            "You are a helpful AI assistant. Provide direct, concise answers.",
227        )),
228        oxi_store::settings::ThinkingLevel::Minimal => Some(String::from(
229            "You are a helpful AI assistant. Provide clear and helpful answers.",
230        )),
231        oxi_store::settings::ThinkingLevel::Low => Some(String::from(
232            "You are a helpful AI assistant. Provide brief, actionable responses.",
233        )),
234        oxi_store::settings::ThinkingLevel::Medium => Some(String::from(
235            "You are a helpful AI coding assistant. Think through problems \
236             step by step when helpful, but keep responses focused and actionable.",
237        )),
238        oxi_store::settings::ThinkingLevel::High => Some(String::from(
239            "You are an expert AI coding assistant. Take time to thoroughly \
240             analyze problems, consider edge cases, and provide comprehensive \
241             solutions with explanations. Think deeply before responding.",
242        )),
243        oxi_store::settings::ThinkingLevel::XHigh => Some(String::from(
244            "You are an expert AI coding assistant. Use maximum reasoning depth. \
245             Consider all alternatives, edge cases, and potential implications. \
246             Provide the most thorough, comprehensive analysis possible.",
247        )),
248    };
249
250    let skills: Vec<prompt::system_prompt::Skill> = skill_contents
251        .iter()
252        .enumerate()
253        .map(|(i, content)| prompt::system_prompt::Skill {
254            name: format!("skill-{}", i),
255            content: content.clone(),
256        })
257        .collect();
258
259    let options = prompt::system_prompt::BuildSystemPromptOptions {
260        custom_prompt,
261        skills,
262        cwd: std::env::current_dir()
263            .map(|p| p.to_string_lossy().to_string())
264            .unwrap_or_default(),
265        ..Default::default()
266    };
267
268    prompt::system_prompt::build_system_prompt(&options)
269}
270
271// ─── App implementation ─────────────────────────────────────────────────────
272
273impl App {
274    /// Create a new App instance
275    pub async fn new(settings: Settings) -> Result<Self> {
276        let model_id = settings.effective_model(None).unwrap_or_default();
277        let provider_name = settings
278            .effective_provider(None)
279            .unwrap_or_else(|| model_id.split('/').next().unwrap_or("").to_string());
280
281        let (provider_name, model_name) = if model_id.contains('/') {
282            let parts: Vec<&str> = model_id.split('/').collect();
283            (parts[0].to_string(), parts[1..].join("/"))
284        } else if !model_id.is_empty() {
285            (provider_name.clone(), model_id.clone())
286        } else {
287            (String::new(), String::new())
288        };
289
290        // Build SDK engine with built-in providers and models
291        let engine = OxiBuilder::new().with_builtins().build();
292
293        // Resolve model via SDK (validation only)
294        if !provider_name.is_empty() && !model_name.is_empty() {
295            let _ = engine.resolve_model(&format!("{}/{}", provider_name, model_name));
296        }
297
298        // Resolve provider via SDK
299        let provider: Arc<dyn oxi_ai::Provider> = if !provider_name.is_empty() {
300            engine
301                .create_provider(&provider_name)
302                .map_err(|e| Error::msg(format!("{}", e)))?
303        } else {
304            engine
305                .create_provider("anthropic")
306                .map_err(|e| Error::msg(format!("{}", e)))?
307        };
308
309        let skills_dir = SkillManager::skills_dir().unwrap_or_else(|_| {
310            dirs::home_dir()
311                .unwrap_or_default()
312                .join(".oxi")
313                .join("skills")
314        });
315        let skills = SkillManager::load_from_dir(&skills_dir).unwrap_or_else(|e| {
316            tracing::debug!("Skills not loaded: {}", e);
317            SkillManager::new()
318        });
319
320        let system_prompt = build_system_prompt(settings.thinking_level, &[]);
321        let compaction_strategy = if settings.auto_compaction {
322            oxi_ai::CompactionStrategy::Threshold(0.8)
323        } else {
324            oxi_ai::CompactionStrategy::Disabled
325        };
326        let auth = oxi_store::auth_storage::shared_auth_storage();
327        let api_key = auth.get_api_key(&provider_name);
328
329        let config = AgentConfig {
330            name: "oxi".to_string(),
331            description: Some("oxi CLI agent".to_string()),
332            model_id: model_id.clone(),
333            system_prompt: Some(system_prompt),
334            max_iterations: 10,
335            timeout_seconds: settings.tool_timeout_seconds,
336            temperature: settings.effective_temperature(),
337            max_tokens: settings.effective_max_tokens(),
338            compaction_strategy,
339            compaction_instruction: None,
340            context_window: 128_000,
341            api_key,
342            workspace_dir: Some(
343                std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from(".")),
344            ),
345            output_mode: None,
346        };
347
348        let agent = Arc::new(Agent::new(
349            provider,
350            config,
351            Arc::new(oxi_agent::ToolRegistry::new()),
352        ));
353
354        let bridge =
355            std::sync::Arc::new(oxi_agent::tools::questionnaire::QuestionnaireBridge::new());
356        let questionnaire_tool =
357            oxi_agent::tools::questionnaire::QuestionnaireTool::new(bridge.clone());
358        agent
359            .tools()
360            .register_arc(std::sync::Arc::new(questionnaire_tool));
361
362        Ok(Self {
363            engine,
364            agent,
365            settings,
366            skills: RwLock::new(skills),
367            active_skills: RwLock::new(Vec::new()),
368            wasm_ext: None,
369            questionnaire_bridge: Some(bridge),
370        })
371    }
372
373    /// Access the SDK engine (for provider/model resolution)
374    #[allow(dead_code)]
375    pub(crate) fn engine(&self) -> &oxi_sdk::Oxi {
376        &self.engine
377    }
378
379    /// Get the current settings
380    pub fn settings(&self) -> &Settings {
381        &self.settings
382    }
383
384    /// Set the WASM extension manager
385    pub fn set_wasm_ext(
386        &mut self,
387        ext: Option<std::sync::Arc<crate::extensions::WasmExtensionManager>>,
388    ) {
389        self.wasm_ext = ext;
390    }
391
392    /// Get the WASM extension manager
393    pub fn wasm_ext(&self) -> Option<&std::sync::Arc<crate::extensions::WasmExtensionManager>> {
394        self.wasm_ext.as_ref()
395    }
396
397    /// Get a reference to the underlying agent.
398    pub fn agent(&self) -> Arc<Agent> {
399        Arc::clone(&self.agent)
400    }
401
402    /// Get the tool registry (for registering extension tools)
403    pub fn agent_tools(&self) -> Arc<oxi_agent::ToolRegistry> {
404        self.agent.tools()
405    }
406
407    /// Get the questionnaire bridge, if initialized.
408    pub fn questionnaire_bridge(
409        &self,
410    ) -> Option<&std::sync::Arc<oxi_agent::tools::questionnaire::QuestionnaireBridge>> {
411        self.questionnaire_bridge.as_ref()
412    }
413
414    /// Get a reference to the skill manager
415    pub fn skills(&self) -> parking_lot::RwLockReadGuard<'_, SkillManager> {
416        self.skills.read()
417    }
418
419    /// Activate a skill by name. Returns an error string if not found.
420    pub fn activate_skill(&self, name: &str) -> Result<(), String> {
421        {
422            let skills = self.skills.read();
423            if skills.get(name).is_none() {
424                return Err(format!("Skill '{}' not found", name));
425            }
426        }
427        let name_lower = name.to_lowercase();
428        {
429            let mut active = self.active_skills.write();
430            if !active.contains(&name_lower) {
431                active.push(name_lower);
432            }
433        }
434        self.rebuild_system_prompt();
435        Ok(())
436    }
437
438    /// Deactivate a skill by name.
439    pub fn deactivate_skill(&self, name: &str) {
440        let name_lower = name.to_lowercase();
441        {
442            let mut active = self.active_skills.write();
443            active.retain(|n| n != &name_lower);
444        }
445        self.rebuild_system_prompt();
446    }
447
448    /// List currently active skill names
449    pub fn active_skills(&self) -> Vec<String> {
450        self.active_skills.read().clone()
451    }
452
453    /// Rebuild the system prompt with current active skills
454    fn rebuild_system_prompt(&self) {
455        let active = self.active_skills.read();
456        let skills = self.skills.read();
457        let contents: Vec<String> = active
458            .iter()
459            .filter_map(|name| skills.get(name).map(|s| s.content.clone()))
460            .collect();
461        let prompt = build_system_prompt(self.settings.thinking_level, &contents);
462        self.agent.set_system_prompt(prompt);
463    }
464
465    /// Get a clone of the current state
466    pub fn agent_state(&self) -> oxi_agent::AgentState {
467        self.agent.state()
468    }
469
470    /// Run a single prompt and return the response
471    pub async fn run_prompt(&self, prompt: String) -> Result<String> {
472        let (response, _events) = self.agent.run(prompt).await?;
473        Ok(response.content)
474    }
475
476    /// Run a prompt with event callback
477    pub async fn run_prompt_with_events<F>(&self, prompt: String, on_event: F) -> Result<String>
478    where
479        F: FnMut(AgentEvent) + Send + 'static,
480    {
481        self.agent.run_streaming(prompt, on_event).await?;
482        let state = self.agent_state();
483        for msg in state.messages.iter().rev() {
484            if let oxi_ai::Message::Assistant(a) = msg {
485                return Ok(a.text_content());
486            }
487        }
488        Ok(String::new())
489    }
490
491    /// Run in interactive mode, returning an event stream
492    pub async fn run_interactive(&self) -> Result<InteractiveLoop<'_>> {
493        let session = InteractiveSession::new();
494        Ok(InteractiveLoop { app: self, session })
495    }
496
497    /// Reset the conversation
498    pub fn reset(&self) {
499        self.agent.reset();
500    }
501
502    /// Switch the model used for future LLM calls.
503    pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
504        self.agent.switch_model(model_id)
505    }
506
507    /// Get the current model ID
508    pub fn model_id(&self) -> String {
509        self.agent.model_id()
510    }
511}

/// Interactive loop handle
///
/// Borrows the [`App`] it was created from and owns the in-memory
/// [`InteractiveSession`] state for one conversation.
pub struct InteractiveLoop<'a> {
    /// The application whose agent services this loop.
    app: &'a App,
    /// Conversation state accumulated across `send_message` calls.
    session: InteractiveSession,
}
518
519impl<'a> InteractiveLoop<'a> {
520    /// Add a user message and get the assistant response
521    pub async fn send_message(&mut self, prompt: String) -> Result<()> {
522        self.session.add_user_message(prompt.clone());
523        self.session.thinking = true;
524
525        let (tx, rx) = std::sync::mpsc::channel::<AgentEvent>();
526        let agent = Arc::clone(&self.app.agent);
527
528        let local = tokio::task::LocalSet::new();
529        local.spawn_local(async move {
530            let _ = agent.run_with_channel(prompt, tx).await;
531        });
532
533        while let Ok(event) = rx.recv() {
534            match event {
535                AgentEvent::TextChunk { text } => {
536                    self.session.append_to_response(&text);
537                }
538                AgentEvent::Thinking => {}
539                AgentEvent::Complete { .. } => {
540                    self.session.finish_response();
541                    self.session.thinking = false;
542                }
543                AgentEvent::Error { message, .. } => {
544                    self.session
545                        .append_to_response(&format!("[Error: {}]", message));
546                    self.session.finish_response();
547                    self.session.thinking = false;
548                }
549                _ => {}
550            }
551        }
552
553        local.await;
554        Ok(())
555    }
556
557    /// Get current messages
558    pub fn messages(&self) -> &[ChatMessage] {
559        &self.session.messages
560    }
561
562    /// Get the current partial response (while thinking)
563    pub fn current_response(&self) -> &str {
564        &self.session.current_response
565    }
566
567    /// Check if currently thinking
568    pub fn is_thinking(&self) -> bool {
569        self.session.thinking
570    }
571
572    /// Get session entries for tree navigation
573    pub fn entries(&self) -> &[SessionEntry] {
574        self.session.entries()
575    }
576
577    /// Get entry by ID
578    pub fn get_entry(&self, id: Uuid) -> Option<&SessionEntry> {
579        self.session.get_entry_by_id(&id.to_string())
580    }
581
582    /// Switch the model used for future LLM calls
583    pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
584        self.app.switch_model(model_id)
585    }
586
587    /// Get the current model ID
588    pub fn model_id(&self) -> String {
589        self.app.model_id()
590    }
591}