// mermaid_cli/runtime/orchestrator.rs
use anyhow::Result;
2use std::path::PathBuf;
3
4use crate::{
5 app::{load_config, persist_last_model, Config},
6 cli::{handle_command, Cli},
7 models::ModelFactory,
8 ollama::{ensure_model as ensure_ollama_model, require_any_model},
9 session::{select_conversation, ConversationManager},
10 tui::{run_ui, App},
11 utils::{check_ollama_available, log_error, log_info, log_progress, log_warn},
12};
13
/// Drives application startup end-to-end: subcommand handling, model
/// selection and initialization, optional session restore, and launching
/// the TUI.
pub struct Orchestrator {
    // Parsed command-line arguments.
    cli: Cli,
    // Loaded configuration; falls back to defaults if the file failed to load.
    config: Config,
}
19
20impl Orchestrator {
21 pub fn new(cli: Cli) -> Result<Self> {
23 let config = match load_config() {
25 Ok(cfg) => cfg,
26 Err(e) => {
27 log_warn("CONFIG", format!("Config load failed: {:#}. Using defaults.", e));
28 Config::default()
29 },
30 };
31
32 Ok(Self {
33 cli,
34 config,
35 })
36 }
37
38 pub async fn run(self) -> Result<()> {
40 let total_steps = 6; let mut current_step = 0;
43
44 current_step += 1;
46 log_progress(current_step, total_steps, "Processing commands");
47 if let Some(command) = &self.cli.command {
48 if handle_command(command).await? {
49 return Ok(()); }
51 }
53
54 current_step += 1;
56 log_progress(current_step, total_steps, "Configuring model");
57
58 let cli_model_provided = self.cli.model.is_some();
59 let model_id = if let Some(model) = &self.cli.model {
60 model.clone()
62 } else if let Some(last_model) = &self.config.last_used_model {
63 last_model.clone()
65 } else if !self.config.default_model.provider.is_empty()
66 && !self.config.default_model.name.is_empty()
67 {
68 format!(
70 "{}/{}",
71 self.config.default_model.provider, self.config.default_model.name
72 )
73 } else {
74 let available = require_any_model().await?;
76 format!("ollama/{}", available[0])
78 };
79
80 log_info(
81 "MERMAID",
82 format!("Starting Mermaid with model: {}", model_id),
83 );
84
85 current_step += 1;
87 if is_local_model(&model_id) {
88 log_progress(current_step, total_steps, "Checking Ollama availability");
89 let ollama_check = check_ollama_available(
90 &self.config.ollama.host,
91 self.config.ollama.port,
92 ).await;
93
94 if !ollama_check.available {
95 log_error("OLLAMA", &ollama_check.message);
96 std::process::exit(1);
97 }
98 } else {
99 log_progress(current_step, total_steps, "Using API provider");
100 }
101
102 current_step += 1;
104 log_progress(current_step, total_steps, "Checking model availability");
105 ensure_ollama_model(&model_id, true).await?;
106
107 if cli_model_provided {
109 if let Err(e) = persist_last_model(&model_id) {
110 log_warn("CONFIG", format!("Failed to persist model choice: {}", e));
111 }
112 }
113
114 current_step += 1;
116 log_progress(current_step, total_steps, "Initializing model");
117 let backend = if self.config.behavior.backend == "auto" {
119 None
120 } else {
121 Some(self.config.behavior.backend.as_str())
122 };
123 let model = match ModelFactory::create_with_backend(
124 &model_id,
125 Some(&self.config),
126 backend,
127 )
128 .await
129 {
130 Ok(m) => m,
131 Err(e) => {
132 log_error("ERROR", format!("Failed to initialize model: {}", e));
133 log_error(
134 "",
135 "Make sure the model is available and properly configured.",
136 );
137 std::process::exit(1);
138 },
139 };
140
141 let project_path = self.cli.path.clone().unwrap_or_else(|| PathBuf::from("."));
143
144 current_step += 1;
146 log_progress(current_step, total_steps, "Starting UI");
147 let mut app = App::new(model, model_id.clone());
148
149 if self.cli.continue_session || self.cli.sessions {
154 let conversation_manager = ConversationManager::new(&project_path)?;
155
156 if self.cli.sessions {
157 let conversations = conversation_manager.list_conversations()?;
159 if !conversations.is_empty() {
160 if let Some(selected) = select_conversation(conversations)? {
161 log_info(
162 "RESUME",
163 format!("Resuming conversation: {}", selected.title),
164 );
165 app.load_conversation(selected);
166 }
167 } else {
168 log_info("INFO", "No previous conversations found in this directory");
169 }
170 } else {
171 if let Some(last_conv) = conversation_manager.load_last_conversation()? {
173 log_info(
174 "RESUME",
175 format!("Resuming: {}", last_conv.title),
176 );
177 app.load_conversation(last_conv);
178 } else {
179 log_info("INFO", "No previous conversation to continue");
180 }
181 }
182 }
183
184 run_ui(app).await
186 }
187}
188
/// Returns true when the model id refers to a model served by the local
/// Ollama daemon.
///
/// Model ids use the `provider/name` format (e.g. `ollama/llama3`, as
/// constructed in `Orchestrator::run`); an id without a provider prefix is
/// treated as a bare local model name. The previous implementation was a
/// stub that always returned true, which made the API-provider startup
/// branch unreachable.
fn is_local_model(model_id: &str) -> bool {
    match model_id.split_once('/') {
        Some((provider, _)) => provider.eq_ignore_ascii_case("ollama"),
        None => true,
    }
}