use crate::agent::core::Agent;
use crate::apis::api_client::{ApiClient, SessionManager};
use crate::app::history::ConversationSummary;
use crate::app::logger::{format_log_with_color, LogLevel};
use crate::models;
use crate::models::ModelConfig;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::time::{Instant, SystemTime, UNIX_EPOCH};
use tokio::runtime::Runtime;
use uuid::Uuid;

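/// High-level UI states the application moves through, from initial setup to an active chat session.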
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum AppState {
    Setup,
    ApiKeyInput,
    Error(String),
    Ready,
    Chat,
}

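/// Lifecycle of a single user task, carrying usage statistics once the task completes.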
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TaskStatus {
    InProgress,
    Completed {
        duration_secs: u64,
        tool_uses: u32,
        input_tokens: u32,
        output_tokens: u32,
    },
    Failed(String),
}

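/// One tracked unit of work (a single user query) with timestamps and token/tool accounting.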
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Task {
    pub id: String,
    pub description: String,
    pub status: TaskStatus,
    pub created_at: u64,
    pub updated_at: u64,
    pub tool_count: u32,
    pub input_tokens: u32,
    pub output_tokens: u32,
}

impl Task {
    pub fn new(description: &str) -> Self {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();

        Self {
            id: format!("{}", Uuid::new_v4().simple()),
            description: description.to_string(),
            status: TaskStatus::InProgress,
            created_at: now,
            updated_at: now,
            tool_count: 0,
            input_tokens: 0,
            output_tokens: 0,
        }
    }

    pub fn complete(&mut self, output_tokens: u32) {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();

        // Guard against clock skew so a later `created_at` can never underflow.
        let duration_secs = now.saturating_sub(self.created_at);

        self.output_tokens = output_tokens;

        self.status = TaskStatus::Completed {
            duration_secs,
            tool_uses: self.tool_count,
            input_tokens: self.input_tokens,
            output_tokens: self.output_tokens,
        };
        self.updated_at = now;
    }

    pub fn fail(&mut self, error: &str) {
        self.status = TaskStatus::Failed(error.to_string());
        self.updated_at = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
    }

    pub fn add_tool_use(&mut self) {
        self.tool_count += 1;
        self.updated_at = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
    }

    pub fn add_input_tokens(&mut self, tokens: u32) {
        self.input_tokens += tokens;
        self.updated_at = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
    }

    pub fn is_in_progress(&self) -> bool {
        matches!(self.status, TaskStatus::InProgress)
    }
}

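/// Outcome of an individual tool invocation as reported to the UI.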
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ToolExecutionStatus {
    Running,
    Success,
    Error,
}

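/// A single tool invocation tied to a task, with timing, a progress message, and free-form metadata.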
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolExecution {
    pub id: String,
    pub task_id: String,
    pub name: String,
    pub status: ToolExecutionStatus,
    pub start_time: u64,
    pub end_time: Option<u64>,
    pub message: String,
    pub metadata: HashMap<String, serde_json::Value>,
}

impl ToolExecution {
    pub fn new(task_id: &str, name: &str) -> Self {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;

        Self {
            id: format!("tool-{}-{}", name, Uuid::new_v4().simple()),
            task_id: task_id.to_string(),
            name: name.to_string(),
            status: ToolExecutionStatus::Running,
            start_time: now,
            end_time: None,
            message: format!("Starting {}", name),
            metadata: HashMap::new(),
        }
    }

    pub fn complete(&mut self, message: &str) {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;

        self.status = ToolExecutionStatus::Success;
        self.end_time = Some(now);
        self.message = message.to_string();
    }

    pub fn fail(&mut self, error: &str) {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;

        self.status = ToolExecutionStatus::Error;
        self.end_time = Some(now);
        self.message = format!("Error: {}", error);
    }

    pub fn update_progress(&mut self, message: &str) {
        self.message = message.to_string();
    }

    pub fn add_metadata(&mut self, key: &str, value: serde_json::Value) {
        self.metadata.insert(key.to_string(), value);
    }
}

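/// Central application state: UI status, chat history, model configuration, the async runtime,
/// and bookkeeping for tasks and tool executions.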
pub struct App {
    pub state: AppState,
    pub messages: Vec<String>,
    pub logs: Vec<String>,
    pub available_models: Vec<ModelConfig>,
    pub error_message: Option<String>,
    pub last_query_time: Instant,
    pub use_agent: bool,
    pub agent: Option<Agent>,
    pub tokio_runtime: Option<Runtime>,
    pub api_key: Option<String>,
    pub current_working_dir: Option<String>,
    pub tasks: Vec<Task>,
    pub current_task_id: Option<String>,
    pub conversation_summaries: Vec<ConversationSummary>,
    pub session_manager: Option<SessionManager>,
    pub session_id: String,
    pub tool_executions: HashMap<String, ToolExecution>,
}

impl App {
    pub fn new() -> Self {
        let _ = dotenv::dotenv();

        let tokio_runtime = Runtime::new().ok();

        let current_working_dir = std::env::current_dir()
            .ok()
            .map(|p| p.to_string_lossy().to_string());

        let session_manager = Some(
            SessionManager::new(100)
                .with_system_message(crate::prompts::DEFAULT_SESSION_PROMPT.to_string()),
        );

        let session_id = Uuid::new_v4().to_string();

        Self {
            state: AppState::Setup,
            messages: vec![],
            logs: vec![],
            available_models: models::get_available_models(),
            error_message: None,
            last_query_time: Instant::now(),
            use_agent: false,
            agent: None,
            tokio_runtime,
            api_key: None,
            current_working_dir,
            tasks: Vec::new(),
            current_task_id: None,
            conversation_summaries: Vec::new(),
            session_manager,
            session_id,
            tool_executions: HashMap::new(),
        }
    }

    pub fn current_model(&self, index: usize) -> Result<&ModelConfig> {
        self.available_models
            .get(index)
            .ok_or_else(|| anyhow::anyhow!("Invalid model index"))
    }

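    /// Sends `prompt` to the currently selected model (index 0), either through the agent
    /// pipeline or a direct API client, records the exchange in the session, and returns the
    /// assistant's reply.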
    pub fn query_model(&mut self, prompt: &str) -> Result<String> {
        let task_id = self.create_task(prompt);

        eprintln!(
            "{}",
            format_log_with_color(LogLevel::Info, &format!("Processing query: '{}'", prompt))
        );

        self.last_query_time = Instant::now();

        self.messages.push(format!("[user] {}", prompt));

        if self.tokio_runtime.is_none() {
            return Err(anyhow::anyhow!("Async runtime not available"));
        }

        let model_index = 0;
        let model = match self.available_models.get(model_index) {
            Some(m) => m,
            None => return Err(anyhow::anyhow!("No models available")),
        };

        let model_name = model.name.clone();
        let model_file_name = model.file_name.clone();
        let supports_agent = model.has_agent_support();

        eprintln!(
            "{}",
            format_log_with_color(LogLevel::Info, &format!("Using model: {}", model_name))
        );

        let api_key = self.api_key.clone().unwrap_or_else(|| {
            std::env::var("ANTHROPIC_API_KEY")
                .or_else(|_| std::env::var("OPENAI_API_KEY"))
                .unwrap_or_default()
        });

        if api_key.is_empty() {
            return Err(anyhow::anyhow!(
                "No API key available. Please set ANTHROPIC_API_KEY or OPENAI_API_KEY environment variable."
            ));
        }

        if self.session_manager.is_none() {
            return Err(anyhow::anyhow!("Session manager not available"));
        }

        if let Some(session) = &mut self.session_manager {
            session.add_user_message(prompt.to_string());
        }

        let messages = match &self.session_manager {
            Some(session) => session.get_messages_for_api(),
            None => return Err(anyhow::anyhow!("Session manager not available")),
        };

        let model_name_lower = model_name.to_lowercase();
        let unrecognized = !model_name_lower.contains("claude")
            && !model_name_lower.contains("gpt")
            && !model_name_lower.contains("local");

        if unrecognized {
            eprintln!(
                "{}",
                format_log_with_color(
                    LogLevel::Warning,
                    &format!("Warning: Unrecognized model type: {}", model_name)
                )
            );
        }

        let runtime = self.tokio_runtime.as_ref().unwrap();
        let options = crate::apis::api_client::CompletionOptions {
            temperature: Some(0.7),
            top_p: Some(0.9),
            max_tokens: Some(2048),
            ..Default::default()
        };

        // Forward progress messages from worker threads to the RPC client as
        // `processing_progress` events.
        let (progress_tx, progress_rx) = std::sync::mpsc::channel();

        let task_id_clone = task_id.clone();
        std::thread::spawn(move || {
            while let Ok(message) = progress_rx.recv() {
                if let Some(rpc_server) = crate::communication::rpc::get_global_rpc_server() {
                    let _ = rpc_server.event_sender().send((
                        "processing_progress".to_string(),
                        serde_json::json!({
                            "task_id": task_id_clone,
                            "message": message
                        }),
                    ));
                }
            }
        });

        if supports_agent && self.use_agent {
            let _working_dir = self.current_working_dir.clone().unwrap_or_else(|| {
                std::env::current_dir()
                    .map(|p| p.to_string_lossy().to_string())
                    .unwrap_or_else(|_| ".".to_string())
            });

            let has_anthropic_key =
                !api_key.is_empty() && std::env::var("ANTHROPIC_API_KEY").is_ok();
            let has_openai_key = !api_key.is_empty() && std::env::var("OPENAI_API_KEY").is_ok();

            use crate::agent::core::LLMProvider;

            let provider = match model_name_lower.as_str() {
                name if name.contains("claude") => {
                    if has_anthropic_key {
                        Some(LLMProvider::Anthropic)
                    } else {
                        None
                    }
                }
                name if name.contains("gpt") => {
                    if has_openai_key {
                        Some(LLMProvider::OpenAI)
                    } else {
                        None
                    }
                }
                name if name.contains("local") => Some(LLMProvider::Ollama),
                _ => {
                    if has_anthropic_key {
                        Some(LLMProvider::Anthropic)
                    } else if has_openai_key {
                        Some(LLMProvider::OpenAI)
                    } else {
                        None
                    }
                }
            }
            .ok_or_else(|| anyhow::anyhow!("Could not determine provider for agent"))?;

            let agent_model = match model_name_lower.as_str() {
                name if name.contains("claude") => {
                    if has_anthropic_key {
                        Some("claude-3-7-sonnet-20250219".to_string())
                    } else {
                        None
                    }
                }
                name if name.contains("gpt") => {
                    if has_openai_key {
                        Some("gpt-4o".to_string())
                    } else {
                        None
                    }
                }
                name if name.contains("local") => Some(model_file_name.clone()),
                _ => None,
            }
            .ok_or_else(|| anyhow::anyhow!("Could not determine model for agent"))?;

            let mut agent = crate::agent::core::Agent::new(provider);
            agent = agent.with_model(agent_model);

            let (progress_tx_sender, mut progress_rx_receiver) =
                tokio::sync::mpsc::channel::<String>(100);

            agent = agent.with_progress_sender(progress_tx_sender);

            let progress_tx_clone = progress_tx.clone();
            let task_id_clone2 = task_id.clone();

            // Bridge the agent's async progress messages onto the std channel above,
            // emitting tool_status notifications for recognizable tool output along the way.
            std::thread::spawn(move || {
                let rt = tokio::runtime::Runtime::new().unwrap();
                rt.block_on(async {
                    while let Some(message) = progress_rx_receiver.recv().await {
                        // Heuristic: a first line of the form "NNN | ..." indicates View tool output.
                        let is_view_output = message
                            .lines()
                            .next()
                            .map(|first_line| {
                                first_line.contains(" | ")
                                    && first_line.trim().chars().take(5).all(|c| {
                                        c.is_ascii_digit() || c.is_whitespace() || c == '|'
                                    })
                            })
                            .unwrap_or(false);

                        if is_view_output {
                            if let Some(rpc_server) =
                                crate::communication::rpc::get_global_rpc_server()
                            {
                                let line_count = message.lines().count();

                                let tool_id = format!(
                                    "{}.view-{}",
                                    task_id,
                                    std::time::SystemTime::now()
                                        .duration_since(std::time::UNIX_EPOCH)
                                        .unwrap_or_default()
                                        .as_millis()
                                );

                                let tool_status = serde_json::json!({
                                    "type": "updated",
                                    "execution": {
                                        "id": tool_id,
                                        "task_id": task_id,
                                        "name": "View",
                                        "status": "success",
                                        "startTime": std::time::SystemTime::now()
                                            .duration_since(std::time::UNIX_EPOCH)
                                            .unwrap_or_default()
                                            .as_millis(),
                                        "endTime": std::time::SystemTime::now()
                                            .duration_since(std::time::UNIX_EPOCH)
                                            .unwrap_or_default()
                                            .as_millis() + 100,
                                        "message": format!("Read {} lines", line_count),
                                        "metadata": {
                                            "lines": line_count,
                                            "description": format!("Read {} lines", line_count),
                                            "file_path": "view-result"
                                        }
                                    }
                                });

                                rpc_server.send_notification("tool_status", tool_status).ok();
                            }
                        }

                        // Messages of the form "[ToolName] details" announce tool activity;
                        // parse out the tool name, status, file path, and line count.
                        if message.starts_with('[') && message.contains(']') {
                            if let Some(rpc_server) =
                                crate::communication::rpc::get_global_rpc_server()
                            {
                                let parts: Vec<&str> = message.splitn(2, ']').collect();
                                if parts.len() == 2 {
                                    let tool_name = parts[0].trim_start_matches('[').trim();
                                    let tool_message = parts[1].trim();

                                    eprintln!(
                                        "Detected tool message: [{}] {}",
                                        tool_name, tool_message
                                    );

                                    let status = if message.contains("[error]")
                                        || message.contains("ERROR")
                                    {
                                        "error"
                                    } else if message.contains("[completed]")
                                        || message.contains("completed")
                                        || message.contains("success")
                                    {
                                        "success"
                                    } else {
                                        "running"
                                    };

                                    let file_path = if tool_message.contains("file_path:") {
                                        let path_parts: Vec<&str> =
                                            tool_message.split("file_path:").collect();
                                        if path_parts.len() > 1 {
                                            let path_with_quotes = path_parts[1].trim();
                                            if path_with_quotes.starts_with('"')
                                                && path_with_quotes.contains('"')
                                            {
                                                let end_quote_pos = path_with_quotes[1..]
                                                    .find('"')
                                                    .map(|pos| pos + 1);
                                                end_quote_pos
                                                    .map(|pos| path_with_quotes[1..pos].to_string())
                                            } else {
                                                Some(
                                                    path_with_quotes
                                                        .split_whitespace()
                                                        .next()
                                                        .unwrap_or("")
                                                        .to_string(),
                                                )
                                            }
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    };

                                    let lines = if tool_message.contains("lines") {
                                        let line_parts: Vec<&str> =
                                            tool_message.split("lines").collect();
                                        if line_parts.len() > 1 {
                                            let numbers: Vec<&str> = line_parts[0]
                                                .split_whitespace()
                                                .chain(line_parts[1].split_whitespace())
                                                .filter(|word| word.parse::<usize>().is_ok())
                                                .collect();

                                            numbers
                                                .first()
                                                .and_then(|num| num.parse::<usize>().ok())
                                        } else {
                                            tool_message
                                                .split_whitespace()
                                                .find(|word| word.parse::<usize>().is_ok())
                                                .and_then(|num| num.parse::<usize>().ok())
                                        }
                                    } else {
                                        None
                                    };

                                    let description = match tool_name {
                                        "View" => {
                                            if let Some(_path) = &file_path {
                                                if let Some(line_count) = lines {
                                                    format!(
                                                        "Read {} lines (ctrl+r to expand)",
                                                        line_count
                                                    )
                                                } else {
                                                    "Reading file contents (ctrl+r to expand)"
                                                        .to_string()
                                                }
                                            } else {
                                                "Reading file".to_string()
                                            }
                                        }
                                        "GlobTool" => "Finding files by pattern".to_string(),
                                        "GrepTool" => "Searching code for pattern".to_string(),
                                        "LS" => "Listing directory contents".to_string(),
                                        "Edit" => "Modifying file".to_string(),
                                        "Replace" => "Replacing file contents".to_string(),
                                        "Bash" => "Executing command".to_string(),
                                        _ => "Executing tool".to_string(),
                                    };

                                    let tool_id = format!(
                                        "tool-{}-{}",
                                        tool_name,
                                        uuid::Uuid::new_v4().simple()
                                    );

                                    let now = std::time::SystemTime::now()
                                        .duration_since(std::time::UNIX_EPOCH)
                                        .unwrap_or_default()
                                        .as_millis() as u64;

                                    let tool_execution = ToolExecution {
                                        id: tool_id.clone(),
                                        task_id: task_id_clone2.clone(),
                                        name: tool_name.to_string(),
                                        status: match status {
                                            "running" => ToolExecutionStatus::Running,
                                            "success" => ToolExecutionStatus::Success,
                                            "error" => ToolExecutionStatus::Error,
                                            _ => ToolExecutionStatus::Running,
                                        },
                                        start_time: now,
                                        end_time: if status != "running" {
                                            Some(now)
                                        } else {
                                            None
                                        },
                                        message: tool_message.to_string(),
                                        metadata: {
                                            let mut meta = std::collections::HashMap::new();
                                            if let Some(path) = &file_path {
                                                meta.insert(
                                                    "file_path".to_string(),
                                                    serde_json::Value::String(path.clone()),
                                                );
                                            }
                                            if let Some(line_count) = lines {
                                                meta.insert(
                                                    "lines".to_string(),
                                                    serde_json::Value::Number(
                                                        serde_json::Number::from(line_count),
                                                    ),
                                                );
                                            }
                                            meta.insert(
                                                "description".to_string(),
                                                serde_json::Value::String(description.clone()),
                                            );
                                            meta
                                        },
                                    };

                                    eprintln!(
                                        "Created tool execution: {} ({}) - task_id={}, status={:?}, message={}",
                                        tool_execution.id,
                                        tool_execution.name,
                                        tool_execution.task_id,
                                        tool_execution.status,
                                        tool_execution.message
                                    );

                                    let _ = rpc_server.send_notification(
                                        "tool_status",
                                        serde_json::json!({
                                            "type": "started",
                                            "execution": tool_execution
                                        }),
                                    );

                                    let _ = rpc_server.event_sender().send((
                                        "tool_execution".to_string(),
                                        serde_json::json!({
                                            "task_id": task_id_clone2,
                                            "tool": tool_name,
                                            "message": tool_message,
                                            "status": status,
                                            "description": description,
                                            "file_path": file_path,
                                            "lines": lines,
                                            "timestamp": now
                                        }),
                                    ));
                                }
                            }
                        }

                        let _ = progress_tx_clone.send(message.to_string());

                        eprintln!(
                            "{}",
                            format_log_with_color(LogLevel::Debug, &format!("Agent: {}", message))
                        );
                    }
                });
            });

            runtime.block_on(async { agent.initialize_with_api_key(api_key.clone()).await })?;

            let response = runtime.block_on(async { agent.execute(prompt).await })?;

            if let Some(task) = self.current_task_mut() {
                task.tool_count = 1;
            }

            if let Some(session) = &mut self.session_manager {
                session.add_assistant_message(response.clone());
            }

            self.messages.push(format!("[assistant] {}", response));

            let estimated_tokens = (response.len() as f64 / 4.0).ceil() as u32;
            self.complete_current_task(estimated_tokens);

            eprintln!(
                "{}",
                format_log_with_color(
                    LogLevel::Info,
                    &format!(
                        "Agent query completed, received approximately {} tokens",
                        estimated_tokens
                    )
                )
            );

            Ok(response)
        } else {
            let response = if model_name_lower.contains("claude") {
                runtime.block_on(async {
                    let client = crate::apis::anthropic::AnthropicClient::with_api_key(
                        api_key.clone(),
                        Some(model_file_name.clone()),
                    )?;

                    let _ = progress_tx.send(format!("Sending request to {}", model_name));

                    client.complete(messages.clone(), options).await
                })?
            } else if model_name_lower.contains("gpt") {
                runtime.block_on(async {
                    let client = crate::apis::openai::OpenAIClient::with_api_key(
                        api_key.clone(),
                        Some(model_file_name.clone()),
                    )?;

                    let _ = progress_tx.send(format!("Sending request to {}", model_name));

                    client.complete(messages.clone(), options).await
                })?
            } else if model_name_lower.contains("local") {
                runtime.block_on(async {
                    let client =
                        crate::apis::ollama::OllamaClient::new(Some(model_file_name.clone()))?;

                    let _ = progress_tx.send(format!(
                        "Sending request to local model {}",
                        model_file_name
                    ));

                    client.complete(messages.clone(), options).await
                })?
            } else {
                format!(
                    "I couldn't send your message to a language model. The model '{}' is not currently supported.",
                    model_name
                )
            };

            if let Some(session) = &mut self.session_manager {
                session.add_assistant_message(response.clone());
            }

            self.messages.push(format!("[assistant] {}", response));

            let estimated_tokens = (response.len() as f64 / 4.0).ceil() as u32;
            self.complete_current_task(estimated_tokens);

            eprintln!(
                "{}",
                format_log_with_color(
                    LogLevel::Info,
                    &format!(
                        "Query completed, received approximately {} tokens",
                        estimated_tokens
                    )
                )
            );

            Ok(response)
        }
    }

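    /// Returns true while any task is still in progress.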
    pub fn has_active_tasks(&self) -> bool {
        self.tasks.iter().any(|task| task.is_in_progress())
    }

    pub fn get_task_statuses(&self) -> Vec<serde_json::Value> {
        self.tasks
            .iter()
            .map(|task| {
                let status = match &task.status {
                    TaskStatus::InProgress => "in_progress",
                    TaskStatus::Completed { .. } => "completed",
                    TaskStatus::Failed(_) => "failed",
                };

                serde_json::json!({
                    "id": task.id,
                    "description": task.description,
                    "status": status,
                    "tool_count": task.tool_count,
                    "input_tokens": task.input_tokens,
                    "output_tokens": task.output_tokens,
                    "created_at": task.created_at
                })
            })
            .collect()
    }

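    /// Creates an in-progress task for `description`, makes it the current task, and returns its id.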
    pub fn create_task(&mut self, description: &str) -> String {
        let task = Task::new(description);
        let task_id = task.id.clone();
        self.tasks.push(task);
        self.current_task_id = Some(task_id.clone());
        task_id
    }

    pub fn current_task(&self) -> Option<&Task> {
        if let Some(id) = &self.current_task_id {
            self.tasks.iter().find(|t| &t.id == id)
        } else {
            None
        }
    }

    pub fn current_task_mut(&mut self) -> Option<&mut Task> {
        if let Some(id) = &self.current_task_id {
            let id_clone = id.clone();
            self.tasks.iter_mut().find(|t| t.id == id_clone)
        } else {
            None
        }
    }

    pub fn add_tool_use(&mut self) {
        if let Some(task) = self.current_task_mut() {
            task.add_tool_use();
        }
    }

    pub fn add_input_tokens(&mut self, tokens: u32) {
        if let Some(task) = self.current_task_mut() {
            task.add_input_tokens(tokens);
        }
    }

    pub fn complete_current_task(&mut self, tokens: u32) {
        if let Some(task) = self.current_task_mut() {
            task.complete(tokens);
        }
        self.current_task_id = None;
    }

    pub fn fail_current_task(&mut self, error: &str) {
        if let Some(task) = self.current_task_mut() {
            task.fail(error);
        }
        self.current_task_id = None;
    }

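    /// Registers a new tool execution against the current task (if any), notifies the RPC
    /// client that it started, and returns the execution id.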
    pub fn start_tool_execution(&mut self, name: &str) -> Option<String> {
        if let Some(task_id) = &self.current_task_id {
            let tool_execution = ToolExecution::new(task_id, name);
            let tool_id = tool_execution.id.clone();

            self.tool_executions.insert(tool_id.clone(), tool_execution);

            if let Some(task) = self.current_task_mut() {
                task.add_tool_use();
            }

            if let Some(rpc_server) = crate::communication::rpc::get_global_rpc_server() {
                eprintln!(
                    "Sending tool_status started notification for tool {}: {}",
                    name, tool_id
                );

                let tool_exec = self.tool_executions.get(&tool_id).cloned();

                if let Some(exec) = tool_exec {
                    let result = rpc_server.send_notification(
                        "tool_status",
                        serde_json::json!({
                            "type": "started",
                            "execution": exec
                        }),
                    );

                    if let Err(e) = result {
                        eprintln!("Error sending tool_status notification: {}", e);
                    }
                } else {
                    eprintln!("Tool execution not found for ID: {}", tool_id);
                }
            } else {
                eprintln!("No RPC server available to send tool_status notification");
            }

            Some(tool_id)
        } else {
            None
        }
    }

    pub fn update_tool_progress(
        &mut self,
        tool_id: &str,
        message: &str,
        metadata: Option<HashMap<String, serde_json::Value>>,
    ) {
        if let Some(tool) = self.tool_executions.get_mut(tool_id) {
            tool.update_progress(message);

            if let Some(meta) = metadata {
                for (key, value) in meta {
                    tool.add_metadata(&key, value);
                }
            }

            if let Some(rpc_server) = crate::communication::rpc::get_global_rpc_server() {
                let _ = rpc_server.send_notification(
                    "tool_status",
                    serde_json::json!({
                        "type": "updated",
                        "execution": tool
                    }),
                );
            }
        }
    }

    pub fn complete_tool_execution(
        &mut self,
        tool_id: &str,
        message: &str,
        metadata: Option<HashMap<String, serde_json::Value>>,
    ) {
        if let Some(tool) = self.tool_executions.get_mut(tool_id) {
            tool.complete(message);

            if let Some(meta) = metadata {
                for (key, value) in meta {
                    tool.add_metadata(&key, value);
                }
            }

            if let Some(rpc_server) = crate::communication::rpc::get_global_rpc_server() {
                let _ = rpc_server.send_notification(
                    "tool_status",
                    serde_json::json!({
                        "type": "updated",
                        "execution": tool
                    }),
                );
            }
        }
    }

    pub fn fail_tool_execution(&mut self, tool_id: &str, error: &str) {
        if let Some(tool) = self.tool_executions.get_mut(tool_id) {
            tool.fail(error);

            if let Some(rpc_server) = crate::communication::rpc::get_global_rpc_server() {
                let _ = rpc_server.send_notification(
                    "tool_status",
                    serde_json::json!({
                        "type": "updated",
                        "execution": tool
                    }),
                );
            }
        }
    }

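    /// Drops finished tool executions whose end time is more than ten minutes in the past.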
    pub fn cleanup_old_tool_executions(&mut self) {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;

        let ten_minutes_ms = 10 * 60 * 1000;

        let old_tool_ids: Vec<String> = self
            .tool_executions
            .iter()
            .filter(|(_, tool)| {
                if let Some(end_time) = tool.end_time {
                    match tool.status {
                        ToolExecutionStatus::Running => false,
                        // saturating_sub keeps this safe if the clock moves backwards.
                        _ => now.saturating_sub(end_time) > ten_minutes_ms,
                    }
                } else {
                    false
                }
            })
            .map(|(id, _)| id.clone())
            .collect();

        for id in old_tool_ids {
            self.tool_executions.remove(&id);
        }
    }

    #[deprecated(
        since = "0.2.0",
        note = "Use eprintln with format_log_with_color instead"
    )]
    pub fn log(&mut self, _message: &str) {}
}

impl Default for App {
    fn default() -> Self {
        Self::new()
    }
}