// ricecoder_cli/commands/chat.rs
use super::Command;
5use crate::chat::ChatSession;
6use crate::error::{CliError, CliResult};
7use crate::output::OutputStyle;
8use ricecoder_storage::PathResolver;
9
/// CLI `chat` command: opens an interactive chat session (or processes a
/// single message) against a selected AI provider/model.
///
/// All fields come straight from the command line and are `None` when the
/// user did not pass the corresponding flag; defaults are applied later
/// during validation.
#[derive(Debug, Clone)]
pub struct ChatCommand {
    // One-shot message; when `None`, an interactive loop is started instead.
    pub message: Option<String>,
    // Provider name (e.g. "openai"); validated against the supported set.
    pub provider: Option<String>,
    // Model identifier; falls back to a provider default when absent.
    pub model: Option<String>,
}
16
17impl ChatCommand {
18 pub fn new(message: Option<String>, provider: Option<String>, model: Option<String>) -> Self {
19 Self {
20 message,
21 provider,
22 model,
23 }
24 }
25
26 fn validate_provider(&self) -> CliResult<String> {
28 let provider = self.provider.as_deref().unwrap_or("openai");
29
30 let supported = ["openai", "anthropic", "local"];
32
33 if !supported.contains(&provider) {
34 return Err(CliError::Provider(format!(
35 "Unsupported provider: {}. Supported providers: {}",
36 provider,
37 supported.join(", ")
38 )));
39 }
40
41 Ok(provider.to_string())
42 }
43
44 fn get_model(&self) -> String {
46 self.model.as_deref().unwrap_or("gpt-4").to_string()
47 }
48
49 fn load_project_context(&self) -> CliResult<Vec<String>> {
51 let style = OutputStyle::default();
52 let mut specs = Vec::new();
53
54 let project_path = PathResolver::resolve_project_path();
56 let specs_path = project_path.join("specs");
57
58 if specs_path.exists() {
60 println!("{}", style.info("Loading project specs..."));
61 specs.push("specs_loaded".to_string());
63 println!("{}", style.success("Specs loaded"));
64 }
65
66 Ok(specs)
67 }
68
69 fn load_knowledge_base(&self) -> CliResult<Vec<String>> {
71 let style = OutputStyle::default();
72 let mut kb = Vec::new();
73
74 println!("{}", style.info("Loading knowledge base..."));
76 kb.push("kb_loaded".to_string());
77 println!("{}", style.success("Knowledge base loaded"));
78
79 Ok(kb)
80 }
81
82 fn process_initial_message(&self, message: &str, session: &mut ChatSession) -> CliResult<()> {
84 let style = OutputStyle::default();
85
86 session.add_message("user".to_string(), message.to_string());
88
89 println!();
90 println!("{}", style.prompt("r["));
91 println!("{}", message);
92 println!("{}", style.info("Processing message..."));
93
94 let response = "This is a placeholder response. Full AI integration coming soon.";
96 session.add_message("assistant".to_string(), response.to_string());
97
98 println!("{}", style.success(response));
99 println!();
100
101 Ok(())
102 }
103
104 fn run_chat_loop(&self, session: &mut ChatSession) -> CliResult<()> {
106 let style = OutputStyle::default();
107
108 if let Some(msg) = &self.message {
110 self.process_initial_message(msg, session)?;
111 } else {
112 println!();
114 println!("{}", style.header("RiceCoder Chat Mode"));
115 println!("{}", style.info("Type 'exit' to quit, 'help' for commands"));
116 println!();
117
118 session.start()?;
120 }
121
122 Ok(())
123 }
124}
125
126impl Command for ChatCommand {
127 fn execute(&self) -> CliResult<()> {
128 let style = OutputStyle::default();
129
130 let provider = self.validate_provider()?;
132 let model = self.get_model();
133
134 println!("{}", style.section("Chat Configuration"));
135 println!("{}", style.key_value("Provider", &provider));
136 println!("{}", style.key_value("Model", &model));
137 println!();
138
139 let _specs = self.load_project_context()?;
141 let _kb = self.load_knowledge_base()?;
142
143 let mut session = ChatSession::new(provider, model);
145
146 self.run_chat_loop(&mut session)?;
148
149 println!();
150 println!("{}", style.success("Chat session ended"));
151
152 Ok(())
153 }
154}