// mermaid_cli/runtime/orchestrator.rs
use anyhow::Result;
2use std::path::PathBuf;
3
4use crate::{
5 app::{load_config, persist_last_model, Config},
6 cli::{handle_command, Cli},
7 models::ModelFactory,
8 ollama::{ensure_model as ensure_ollama_model, require_any_model},
9 session::{select_conversation, ConversationManager},
10 tui::{run_ui, App},
11 utils::{check_ollama_available, log_error, log_info, log_progress, log_warn},
12};
13
/// Drives application startup: resolves which model to run, verifies the
/// Ollama server and model are available, optionally restores a previous
/// conversation, and finally launches the TUI.
pub struct Orchestrator {
    /// Parsed command-line arguments.
    cli: Cli,
    /// Loaded user configuration (defaults are used if loading failed).
    config: Config,
}
19
20impl Orchestrator {
21 pub fn new(cli: Cli) -> Result<Self> {
23 let config = match load_config() {
25 Ok(cfg) => cfg,
26 Err(e) => {
27 log_warn("CONFIG", format!("Config load failed: {:#}. Using defaults.", e));
28 Config::default()
29 },
30 };
31
32 Ok(Self {
33 cli,
34 config,
35 })
36 }
37
38 pub async fn run(self) -> Result<()> {
40 let total_steps = 6; let mut current_step = 0;
43
44 current_step += 1;
46 log_progress(current_step, total_steps, "Processing commands");
47 if let Some(command) = &self.cli.command
48 && handle_command(command).await? {
49 return Ok(()); }
51 current_step += 1;
55 log_progress(current_step, total_steps, "Configuring model");
56
57 let cli_model_provided = self.cli.model.is_some();
58 let model_id = if let Some(model) = &self.cli.model {
59 model.clone()
61 } else if let Some(last_model) = &self.config.last_used_model {
62 last_model.clone()
64 } else if !self.config.default_model.provider.is_empty()
65 && !self.config.default_model.name.is_empty()
66 {
67 format!(
69 "{}/{}",
70 self.config.default_model.provider, self.config.default_model.name
71 )
72 } else {
73 let available = require_any_model().await?;
75 format!("ollama/{}", available[0])
77 };
78
79 log_info(
80 "MERMAID",
81 format!("Starting Mermaid with model: {}", model_id),
82 );
83
84 current_step += 1;
86 log_progress(current_step, total_steps, "Checking Ollama availability");
87 let ollama_check = check_ollama_available(
88 &self.config.ollama.host,
89 self.config.ollama.port,
90 ).await;
91
92 if !ollama_check.available {
93 log_error("OLLAMA", &ollama_check.message);
94 std::process::exit(1);
95 }
96
97 current_step += 1;
99 log_progress(current_step, total_steps, "Checking model availability");
100 ensure_ollama_model(&model_id).await?;
101
102 if cli_model_provided
104 && let Err(e) = persist_last_model(&model_id) {
105 log_warn("CONFIG", format!("Failed to persist model choice: {}", e));
106 }
107
108 current_step += 1;
110 log_progress(current_step, total_steps, "Initializing model");
111 let backend = if self.config.behavior.backend == "auto" {
113 None
114 } else {
115 Some(self.config.behavior.backend.as_str())
116 };
117 let model = match ModelFactory::create_with_backend(
118 &model_id,
119 Some(&self.config),
120 backend,
121 )
122 .await
123 {
124 Ok(m) => m,
125 Err(e) => {
126 log_error("ERROR", format!("Failed to initialize model: {}", e));
127 log_error(
128 "",
129 "Make sure the model is available and properly configured.",
130 );
131 std::process::exit(1);
132 },
133 };
134
135 let project_path = self.cli.path.clone().unwrap_or_else(|| PathBuf::from("."));
137
138 current_step += 1;
140 log_progress(current_step, total_steps, "Starting UI");
141 let mut app = App::new(model, model_id.clone());
142
143 app.model_state.temperature = self.config.default_model.temperature;
145 app.model_state.max_tokens = self.config.default_model.max_tokens;
146
147 if self.cli.continue_session || self.cli.sessions {
152 let conversation_manager = ConversationManager::new(&project_path)?;
153
154 if self.cli.sessions {
155 let conversations = conversation_manager.list_conversations()?;
157 if !conversations.is_empty() {
158 if let Some(selected) = select_conversation(conversations)? {
159 log_info(
160 "RESUME",
161 format!("Resuming conversation: {}", selected.title),
162 );
163 app.load_conversation(selected);
164 }
165 } else {
166 log_info("INFO", "No previous conversations found in this directory");
167 }
168 } else {
169 if let Some(last_conv) = conversation_manager.load_last_conversation()? {
171 log_info(
172 "RESUME",
173 format!("Resuming: {}", last_conv.title),
174 );
175 app.load_conversation(last_conv);
176 } else {
177 log_info("INFO", "No previous conversation to continue");
178 }
179 }
180 }
181
182 run_ui(app).await
184 }
185}
186