1#![warn(missing_docs)]
2#![warn(clippy::unwrap_used)]
3#![allow(unknown_lints)]
4
5pub mod cli;
12pub mod print_mode;
13pub mod setup_wizard;
14
15pub(crate) mod app;
17pub(crate) mod context;
18pub mod extensions; pub(crate) mod infra;
20pub(crate) mod media;
21pub(crate) mod prompt;
22pub(crate) mod rpc_mode;
23pub(crate) mod skills;
24pub mod storage; pub use storage::packages::PackageManager;
27pub use storage::packages::ResourceKind;
28pub mod tui; pub(crate) mod ui;
30pub(crate) mod util;
31
32pub use oxi_store::{
34 auth_guidance, auth_storage, model_registry, model_resolver, session, session_cwd,
35 session_navigation, settings, settings_validation, AgentMessage, AssistantContentBlock,
36 AuthStorage, ContentBlock, ContentValue, ModelRegistry, SessionEntry, SessionManager,
37 SessionTreeNode, Settings, ValidationReport,
38};
39
/// Metadata describing a single context-compaction pass: how large the
/// conversation history is, what it is being shrunk to, and by which
/// strategy.
#[derive(Debug, Clone)]
pub struct CompactionContext {
    /// Number of messages in the history when compaction starts.
    pub messages_count: usize,
    /// Token count of the history before compaction.
    pub tokens_before: usize,
    /// Token budget compaction aims to shrink the history to.
    pub target_tokens: usize,
    /// Human-readable name of the strategy performing the compaction.
    pub strategy: String,
}

impl CompactionContext {
    /// Builds a compaction context from raw counts and a strategy label.
    pub fn new(
        messages_count: usize,
        tokens_before: usize,
        target_tokens: usize,
        strategy: impl Into<String>,
    ) -> Self {
        let strategy = strategy.into();
        CompactionContext {
            messages_count,
            tokens_before,
            target_tokens,
            strategy,
        }
    }

    /// Ratio of the target token budget to the pre-compaction token count.
    ///
    /// Returns `1.0` (no compression) when `tokens_before` is zero, which
    /// avoids a division by zero.
    pub fn compression_ratio(&self) -> f32 {
        match self.tokens_before {
            0 => 1.0,
            before => self.target_tokens as f32 / before as f32,
        }
    }
}
77
78use anyhow::{Error, Result};
80use oxi_agent::{Agent, AgentConfig, AgentEvent};
81use oxi_sdk::OxiBuilder;
82use parking_lot::RwLock;
83use skills::SkillManager;
84use std::sync::Arc;
85use uuid::Uuid;
86
/// Top-level application state shared by the CLI's interactive and one-shot
/// modes: the SDK engine, the active agent, user settings, loaded skills,
/// and optional extension/questionnaire integrations.
pub struct App {
    // Held so the SDK engine stays alive for the agent's lifetime even
    // though it is not read after construction (see `engine()` accessor).
    #[allow(dead_code)]
    engine: oxi_sdk::Oxi,
    /// The conversational agent; `Arc` so handles can be shared with tasks.
    agent: Arc<Agent>,
    /// User settings captured at construction time.
    settings: Settings,
    /// Loaded skill definitions, behind a lock for interior mutability.
    skills: RwLock<SkillManager>,
    /// Lowercased names of the currently activated skills.
    active_skills: RwLock<Vec<String>>,
    /// Optional WASM extension manager, when extensions are enabled.
    wasm_ext: Option<std::sync::Arc<crate::extensions::WasmExtensionManager>>,
    /// Bridge the questionnaire tool uses to exchange questions/answers
    /// with the UI; `Some` once the tool is registered in `App::new`.
    questionnaire_bridge:
        Option<std::sync::Arc<oxi_agent::tools::questionnaire::QuestionnaireBridge>>,
}
102
/// A single message in an interactive chat transcript, used for display and
/// serialization.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ChatMessage {
    /// Author role; the constructors below use "user" or "assistant".
    pub role: String,
    /// The message text.
    pub content: String,
    /// UTC time at which the message was recorded.
    pub timestamp: chrono::DateTime<chrono::Utc>,
}
113
114impl ChatMessage {
115 pub fn user(content: String) -> Self {
117 Self {
118 role: "user".to_string(),
119 content,
120 timestamp: chrono::Utc::now(),
121 }
122 }
123
124 pub fn assistant(content: String) -> Self {
126 Self {
127 role: "assistant".to_string(),
128 content,
129 timestamp: chrono::Utc::now(),
130 }
131 }
132}
133
/// Mutable state for one interactive chat session: display messages, the
/// durable entry log, and in-flight streaming progress.
#[derive(Debug, Clone, Default)]
pub struct InteractiveSession {
    /// Messages for display, oldest first.
    pub messages: Vec<ChatMessage>,
    /// True while the agent is producing a response.
    pub thinking: bool,
    /// Partial assistant text accumulated while streaming.
    pub current_response: String,
    /// Persistent session id, once one has been assigned.
    pub session_id: Option<Uuid>,
    /// Optional human-readable session name.
    pub name: Option<String>,
    /// Durable log entries mirroring `messages`.
    pub entries: Vec<SessionEntry>,
}
150
151impl InteractiveSession {
152 pub fn new() -> Self {
154 Self::default()
155 }
156
157 pub fn add_user_message(&mut self, content: String) {
159 self.messages.push(ChatMessage::user(content.clone()));
160 let entry = SessionEntry::new(AgentMessage::User {
161 content: ContentValue::String(content),
162 });
163 self.entries.push(entry);
164 }
165
166 pub fn add_assistant_message(&mut self, content: String) {
168 self.messages.push(ChatMessage::assistant(content.clone()));
169 let entry = SessionEntry::new(AgentMessage::Assistant {
170 content: vec![AssistantContentBlock::Text { text: content }],
171 provider: None,
172 model_id: None,
173 usage: None,
174 stop_reason: None,
175 });
176 self.entries.push(entry);
177 self.current_response.clear();
178 }
179
180 pub fn append_to_response(&mut self, text: &str) {
182 self.current_response.push_str(text);
183 }
184
185 pub fn finish_response(&mut self) {
187 if !self.current_response.is_empty() {
188 let response = std::mem::take(&mut self.current_response);
189 self.add_assistant_message(response);
190 }
191 }
192
193 pub fn entries(&self) -> &[SessionEntry] {
195 &self.entries
196 }
197
198 pub fn get_entry(&self, index: usize) -> Option<&SessionEntry> {
200 self.entries.get(index)
201 }
202
203 pub fn get_entry_by_id(&self, id: &str) -> Option<&SessionEntry> {
205 self.entries.iter().find(|e| e.id == id)
206 }
207
208 pub fn truncate_at(&mut self, index: usize) {
210 self.entries.truncate(index + 1);
211 }
212}
213
214fn build_system_prompt(
221 thinking_level: oxi_store::settings::ThinkingLevel,
222 skill_contents: &[String],
223) -> String {
224 let custom_prompt = match thinking_level {
225 oxi_store::settings::ThinkingLevel::Off => Some(String::from(
226 "You are a helpful AI assistant. Provide direct, concise answers.",
227 )),
228 oxi_store::settings::ThinkingLevel::Minimal => Some(String::from(
229 "You are a helpful AI assistant. Provide clear and helpful answers.",
230 )),
231 oxi_store::settings::ThinkingLevel::Low => Some(String::from(
232 "You are a helpful AI assistant. Provide brief, actionable responses.",
233 )),
234 oxi_store::settings::ThinkingLevel::Medium => Some(String::from(
235 "You are a helpful AI coding assistant. Think through problems \
236 step by step when helpful, but keep responses focused and actionable.",
237 )),
238 oxi_store::settings::ThinkingLevel::High => Some(String::from(
239 "You are an expert AI coding assistant. Take time to thoroughly \
240 analyze problems, consider edge cases, and provide comprehensive \
241 solutions with explanations. Think deeply before responding.",
242 )),
243 oxi_store::settings::ThinkingLevel::XHigh => Some(String::from(
244 "You are an expert AI coding assistant. Use maximum reasoning depth. \
245 Consider all alternatives, edge cases, and potential implications. \
246 Provide the most thorough, comprehensive analysis possible.",
247 )),
248 };
249
250 let skills: Vec<prompt::system_prompt::Skill> = skill_contents
251 .iter()
252 .enumerate()
253 .map(|(i, content)| prompt::system_prompt::Skill {
254 name: format!("skill-{}", i),
255 content: content.clone(),
256 })
257 .collect();
258
259 let options = prompt::system_prompt::BuildSystemPromptOptions {
260 custom_prompt,
261 skills,
262 cwd: std::env::current_dir()
263 .map(|p| p.to_string_lossy().to_string())
264 .unwrap_or_default(),
265 ..Default::default()
266 };
267
268 prompt::system_prompt::build_system_prompt(&options)
269}
270
impl App {
    /// Builds the application: resolves the configured model and provider,
    /// constructs the SDK engine and provider, loads skills from disk, and
    /// assembles the agent with its tool registry.
    ///
    /// # Errors
    /// Fails when the provider cannot be created (e.g. unknown provider
    /// name).
    pub async fn new(settings: Settings) -> Result<Self> {
        let model_id = settings.effective_model(None).unwrap_or_default();
        // Provider defaults to the "provider/..." prefix of the model id
        // when not configured explicitly.
        let provider_name = settings
            .effective_provider(None)
            .unwrap_or_else(|| model_id.split('/').next().unwrap_or("").to_string());

        // Split "provider/model" ids; everything after the first '/' is the
        // model name (which may itself contain slashes).
        let (provider_name, model_name) = if model_id.contains('/') {
            let parts: Vec<&str> = model_id.split('/').collect();
            (parts[0].to_string(), parts[1..].join("/"))
        } else if !model_id.is_empty() {
            (provider_name.clone(), model_id.clone())
        } else {
            (String::new(), String::new())
        };

        let engine = OxiBuilder::new().with_builtins().build();

        // Best-effort model resolution; failure is deliberately ignored.
        if !provider_name.is_empty() && !model_name.is_empty() {
            let _ = engine.resolve_model(&format!("{}/{}", provider_name, model_name));
        }

        // Fall back to the "anthropic" provider when none is configured.
        let provider: Arc<dyn oxi_ai::Provider> = if !provider_name.is_empty() {
            engine
                .create_provider(&provider_name)
                .map_err(|e| Error::msg(format!("{}", e)))?
        } else {
            engine
                .create_provider("anthropic")
                .map_err(|e| Error::msg(format!("{}", e)))?
        };

        // Skills are optional: fall back to ~/.oxi/skills for the directory
        // and, on any load error, to an empty manager.
        let skills_dir = SkillManager::skills_dir().unwrap_or_else(|_| {
            dirs::home_dir()
                .unwrap_or_default()
                .join(".oxi")
                .join("skills")
        });
        let skills = SkillManager::load_from_dir(&skills_dir).unwrap_or_else(|e| {
            tracing::debug!("Skills not loaded: {}", e);
            SkillManager::new()
        });

        // Initial prompt carries no skills; they are added via
        // `activate_skill`, which rebuilds the prompt.
        let system_prompt = build_system_prompt(settings.thinking_level, &[]);
        let compaction_strategy = if settings.auto_compaction {
            oxi_ai::CompactionStrategy::Threshold(0.8)
        } else {
            oxi_ai::CompactionStrategy::Disabled
        };
        let auth = oxi_store::auth_storage::shared_auth_storage();
        let api_key = auth.get_api_key(&provider_name);

        let config = AgentConfig {
            name: "oxi".to_string(),
            description: Some("oxi CLI agent".to_string()),
            model_id: model_id.clone(),
            system_prompt: Some(system_prompt),
            max_iterations: 10,
            timeout_seconds: settings.tool_timeout_seconds,
            temperature: settings.effective_temperature(),
            max_tokens: settings.effective_max_tokens(),
            compaction_strategy,
            compaction_instruction: None,
            // NOTE(review): hard-coded 128k context window — presumably a
            // conservative default; confirm whether it should track the
            // selected model.
            context_window: 128_000,
            api_key,
            workspace_dir: Some(
                std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from(".")),
            ),
            output_mode: None,
        };

        let agent = Arc::new(Agent::new(
            provider,
            config,
            Arc::new(oxi_agent::ToolRegistry::new()),
        ));

        // Register the questionnaire tool and keep its bridge so the UI can
        // answer questions the agent asks at runtime.
        let bridge =
            std::sync::Arc::new(oxi_agent::tools::questionnaire::QuestionnaireBridge::new());
        let questionnaire_tool =
            oxi_agent::tools::questionnaire::QuestionnaireTool::new(bridge.clone());
        agent
            .tools()
            .register_arc(std::sync::Arc::new(questionnaire_tool));

        Ok(Self {
            engine,
            agent,
            settings,
            skills: RwLock::new(skills),
            active_skills: RwLock::new(Vec::new()),
            wasm_ext: None,
            questionnaire_bridge: Some(bridge),
        })
    }

    /// Borrows the underlying SDK engine.
    #[allow(dead_code)]
    pub(crate) fn engine(&self) -> &oxi_sdk::Oxi {
        &self.engine
    }

    /// The settings this app was constructed with.
    pub fn settings(&self) -> &Settings {
        &self.settings
    }

    /// Installs (or clears) the WASM extension manager.
    pub fn set_wasm_ext(
        &mut self,
        ext: Option<std::sync::Arc<crate::extensions::WasmExtensionManager>>,
    ) {
        self.wasm_ext = ext;
    }

    /// The WASM extension manager, if one has been installed.
    pub fn wasm_ext(&self) -> Option<&std::sync::Arc<crate::extensions::WasmExtensionManager>> {
        self.wasm_ext.as_ref()
    }

    /// A shared handle to the agent.
    pub fn agent(&self) -> Arc<Agent> {
        Arc::clone(&self.agent)
    }

    /// The agent's tool registry.
    pub fn agent_tools(&self) -> Arc<oxi_agent::ToolRegistry> {
        self.agent.tools()
    }

    /// The questionnaire bridge registered in [`App::new`], if any.
    pub fn questionnaire_bridge(
        &self,
    ) -> Option<&std::sync::Arc<oxi_agent::tools::questionnaire::QuestionnaireBridge>> {
        self.questionnaire_bridge.as_ref()
    }

    /// Read access to the loaded skills.
    pub fn skills(&self) -> parking_lot::RwLockReadGuard<'_, SkillManager> {
        self.skills.read()
    }

    /// Activates a skill by name and rebuilds the system prompt to include
    /// its content. Names are stored lowercased.
    ///
    /// # Errors
    /// Returns an error message when no skill with that name is loaded.
    pub fn activate_skill(&self, name: &str) -> Result<(), String> {
        // Scope the read guard so it is released before the write lock below.
        {
            let skills = self.skills.read();
            if skills.get(name).is_none() {
                return Err(format!("Skill '{}' not found", name));
            }
        }
        let name_lower = name.to_lowercase();
        {
            let mut active = self.active_skills.write();
            if !active.contains(&name_lower) {
                active.push(name_lower);
            }
        }
        self.rebuild_system_prompt();
        Ok(())
    }

    /// Deactivates a skill by name (matched lowercased) and rebuilds the
    /// system prompt without it; a no-op when the skill was not active.
    pub fn deactivate_skill(&self, name: &str) {
        let name_lower = name.to_lowercase();
        {
            let mut active = self.active_skills.write();
            active.retain(|n| n != &name_lower);
        }
        self.rebuild_system_prompt();
    }

    /// Names of the currently active skills (lowercased).
    pub fn active_skills(&self) -> Vec<String> {
        self.active_skills.read().clone()
    }

    // Recomputes the system prompt from the settings' thinking level and the
    // contents of the active skills, then installs it on the agent.
    fn rebuild_system_prompt(&self) {
        let active = self.active_skills.read();
        let skills = self.skills.read();
        let contents: Vec<String> = active
            .iter()
            .filter_map(|name| skills.get(name).map(|s| s.content.clone()))
            .collect();
        let prompt = build_system_prompt(self.settings.thinking_level, &contents);
        self.agent.set_system_prompt(prompt);
    }

    /// A snapshot of the agent's current state.
    pub fn agent_state(&self) -> oxi_agent::AgentState {
        self.agent.state()
    }

    /// Runs a single prompt to completion and returns the response content.
    pub async fn run_prompt(&self, prompt: String) -> Result<String> {
        let (response, _events) = self.agent.run(prompt).await?;
        Ok(response.content)
    }

    /// Runs a prompt, streaming events to `on_event`, then returns the text
    /// of the most recent assistant message (empty when none was produced).
    pub async fn run_prompt_with_events<F>(&self, prompt: String, on_event: F) -> Result<String>
    where
        F: FnMut(AgentEvent) + Send + 'static,
    {
        self.agent.run_streaming(prompt, on_event).await?;
        let state = self.agent_state();
        // The final assistant message is the last one in history.
        for msg in state.messages.iter().rev() {
            if let oxi_ai::Message::Assistant(a) = msg {
                return Ok(a.text_content());
            }
        }
        Ok(String::new())
    }

    /// Starts a fresh interactive conversation loop borrowing this app.
    pub async fn run_interactive(&self) -> Result<InteractiveLoop<'_>> {
        let session = InteractiveSession::new();
        Ok(InteractiveLoop { app: self, session })
    }

    /// Clears the agent's conversation state.
    pub fn reset(&self) {
        self.agent.reset();
    }

    /// Switches the agent to a different model id.
    pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
        self.agent.switch_model(model_id)
    }

    /// The agent's current model id.
    pub fn model_id(&self) -> String {
        self.agent.model_id()
    }
}
512
/// A borrowed driver for an interactive conversation with an [`App`]'s
/// agent: sends user messages and accumulates the resulting session state.
pub struct InteractiveLoop<'a> {
    /// The owning application (provides the agent and settings).
    app: &'a App,
    /// Per-conversation state (messages, entries, streaming progress).
    session: InteractiveSession,
}
518
519impl<'a> InteractiveLoop<'a> {
520 pub async fn send_message(&mut self, prompt: String) -> Result<()> {
522 self.session.add_user_message(prompt.clone());
523 self.session.thinking = true;
524
525 let (tx, rx) = std::sync::mpsc::channel::<AgentEvent>();
526 let agent = Arc::clone(&self.app.agent);
527
528 let local = tokio::task::LocalSet::new();
529 local.spawn_local(async move {
530 let _ = agent.run_with_channel(prompt, tx).await;
531 });
532
533 while let Ok(event) = rx.recv() {
534 match event {
535 AgentEvent::TextChunk { text } => {
536 self.session.append_to_response(&text);
537 }
538 AgentEvent::Thinking => {}
539 AgentEvent::Complete { .. } => {
540 self.session.finish_response();
541 self.session.thinking = false;
542 }
543 AgentEvent::Error { message, .. } => {
544 self.session
545 .append_to_response(&format!("[Error: {}]", message));
546 self.session.finish_response();
547 self.session.thinking = false;
548 }
549 _ => {}
550 }
551 }
552
553 local.await;
554 Ok(())
555 }
556
557 pub fn messages(&self) -> &[ChatMessage] {
559 &self.session.messages
560 }
561
562 pub fn current_response(&self) -> &str {
564 &self.session.current_response
565 }
566
567 pub fn is_thinking(&self) -> bool {
569 self.session.thinking
570 }
571
572 pub fn entries(&self) -> &[SessionEntry] {
574 self.session.entries()
575 }
576
577 pub fn get_entry(&self, id: Uuid) -> Option<&SessionEntry> {
579 self.session.get_entry_by_id(&id.to_string())
580 }
581
582 pub fn switch_model(&self, model_id: &str) -> anyhow::Result<()> {
584 self.app.switch_model(model_id)
585 }
586
587 pub fn model_id(&self) -> String {
589 self.app.model_id()
590 }
591}