// ricecoder_cli/commands/chat.rs

1// Interactive chat mode
2// Adapted from automation/src/cli/prompts.rs
3
4use super::Command;
5use crate::chat::ChatSession;
6use crate::error::{CliError, CliResult};
7use crate::output::OutputStyle;
8use ricecoder_storage::PathResolver;
9
/// Interactive chat mode command.
///
/// Carries the optional one-shot message and the provider/model overrides
/// parsed from the command line. `None` fields fall back to defaults when
/// the command executes ("openai" / "gpt-4").
#[derive(Debug, Clone)]
pub struct ChatCommand {
    /// Optional initial message; when `None`, an interactive REPL is started.
    pub message: Option<String>,
    /// Provider name override; validated against the supported list at run time.
    pub provider: Option<String>,
    /// Model identifier override; defaults to "gpt-4" when unset.
    pub model: Option<String>,
}
16
17impl ChatCommand {
18    pub fn new(message: Option<String>, provider: Option<String>, model: Option<String>) -> Self {
19        Self {
20            message,
21            provider,
22            model,
23        }
24    }
25
26    /// Validate provider is supported
27    fn validate_provider(&self) -> CliResult<String> {
28        let provider = self.provider.as_deref().unwrap_or("openai");
29
30        // List of supported providers
31        let supported = ["openai", "anthropic", "local"];
32
33        if !supported.contains(&provider) {
34            return Err(CliError::Provider(format!(
35                "Unsupported provider: {}. Supported providers: {}",
36                provider,
37                supported.join(", ")
38            )));
39        }
40
41        Ok(provider.to_string())
42    }
43
44    /// Validate model is specified or use default
45    fn get_model(&self) -> String {
46        self.model.as_deref().unwrap_or("gpt-4").to_string()
47    }
48
49    /// Load project specs for context
50    fn load_project_context(&self) -> CliResult<Vec<String>> {
51        let style = OutputStyle::default();
52        let mut specs = Vec::new();
53
54        // Resolve project path using PathResolver
55        let project_path = PathResolver::resolve_project_path();
56        let specs_path = project_path.join("specs");
57
58        // Check for .agent/specs directory (project-level specs)
59        if specs_path.exists() {
60            println!("{}", style.info("Loading project specs..."));
61            // TODO: Actually load specs from .agent/specs/ directory
62            specs.push("specs_loaded".to_string());
63            println!("{}", style.success("Specs loaded"));
64        }
65
66        Ok(specs)
67    }
68
69    /// Load knowledge base
70    fn load_knowledge_base(&self) -> CliResult<Vec<String>> {
71        let style = OutputStyle::default();
72        let mut kb = Vec::new();
73
74        // TODO: Load knowledge base from global location
75        println!("{}", style.info("Loading knowledge base..."));
76        kb.push("kb_loaded".to_string());
77        println!("{}", style.success("Knowledge base loaded"));
78
79        Ok(kb)
80    }
81
82    /// Process initial message
83    fn process_initial_message(&self, message: &str, session: &mut ChatSession) -> CliResult<()> {
84        let style = OutputStyle::default();
85
86        // Add user message to history
87        session.add_message("user".to_string(), message.to_string());
88
89        println!();
90        println!("{}", style.prompt("r["));
91        println!("{}", message);
92        println!("{}", style.info("Processing message..."));
93
94        // TODO: Send to AI provider and get response
95        let response = "This is a placeholder response. Full AI integration coming soon.";
96        session.add_message("assistant".to_string(), response.to_string());
97
98        println!("{}", style.success(response));
99        println!();
100
101        Ok(())
102    }
103
104    /// Enter interactive chat loop
105    fn run_chat_loop(&self, session: &mut ChatSession) -> CliResult<()> {
106        let style = OutputStyle::default();
107
108        // If initial message provided, process it
109        if let Some(msg) = &self.message {
110            self.process_initial_message(msg, session)?;
111        } else {
112            // Interactive mode
113            println!();
114            println!("{}", style.header("RiceCoder Chat Mode"));
115            println!("{}", style.info("Type 'exit' to quit, 'help' for commands"));
116            println!();
117
118            // Use the chat session's built-in REPL
119            session.start()?;
120        }
121
122        Ok(())
123    }
124}
125
126impl Command for ChatCommand {
127    fn execute(&self) -> CliResult<()> {
128        let style = OutputStyle::default();
129
130        // Validate provider
131        let provider = self.validate_provider()?;
132        let model = self.get_model();
133
134        println!("{}", style.section("Chat Configuration"));
135        println!("{}", style.key_value("Provider", &provider));
136        println!("{}", style.key_value("Model", &model));
137        println!();
138
139        // Load project context
140        let _specs = self.load_project_context()?;
141        let _kb = self.load_knowledge_base()?;
142
143        // Create chat session
144        let mut session = ChatSession::new(provider, model);
145
146        // Run chat loop
147        self.run_chat_loop(&mut session)?;
148
149        println!();
150        println!("{}", style.success("Chat session ended"));
151
152        Ok(())
153    }
154}