use super::super::{get_thread_working_directory, McpToolCall};
use crate::mcp::fs::{directory, file_ops, text_editing};
use crate::utils::truncation::format_extracted_content_smart;
use anyhow::{anyhow, bail, Result};
use serde_json::Value;
use std::collections::HashMap;
use std::path::Path;
use std::sync::Mutex;
use std::sync::OnceLock;
use tokio::fs as tokio_fs;
/// Resolve a user-supplied path string against the thread's working directory.
///
/// Absolute paths are returned unchanged; relative paths are joined onto the
/// thread's working directory.
pub fn resolve_path(path_str: &str) -> std::path::PathBuf {
    let candidate = Path::new(path_str);
    if candidate.is_relative() {
        get_thread_working_directory().join(candidate)
    } else {
        candidate.to_path_buf()
    }
}
/// Translate a 1-indexed, possibly negative (Python-style) line index into a
/// concrete 1-indexed position within a file of `total_lines` lines.
///
/// Positive `index` selects that line directly; negative `index` counts from
/// the end (`-1` is the last line). `0` is rejected.
///
/// # Errors
/// Returns a human-readable message when `index` is 0 or out of range.
fn resolve_line_index(index: i64, total_lines: usize) -> Result<usize, String> {
    if index == 0 {
        return Err("Line numbers are 1-indexed, use 1 for first line".to_string());
    }
    if index > 0 {
        let pos_index = index as usize;
        if pos_index > total_lines {
            return Err(format!(
                "Line {index} exceeds file length ({total_lines} lines)"
            ));
        }
        Ok(pos_index)
    } else {
        // `unsigned_abs` avoids the overflow panic that `-index` would hit
        // for `index == i64::MIN` in debug builds; compare in u64 so the
        // magnitude is never truncated before the bounds check.
        let from_end = index.unsigned_abs();
        if from_end > total_lines as u64 {
            return Err(format!(
                "Negative index {index} exceeds file length ({total_lines} lines)"
            ));
        }
        Ok(total_lines - from_end as usize + 1)
    }
}
/// Resolve a pair of (possibly negative) 1-indexed line numbers into a
/// concrete, forward-ordered `(start, end)` range.
///
/// # Errors
/// Propagates index-resolution failures and rejects ranges whose resolved
/// start falls after the resolved end.
fn resolve_line_range(start: i64, end: i64, total_lines: usize) -> Result<(usize, usize), String> {
    let bounds = (
        resolve_line_index(start, total_lines)?,
        resolve_line_index(end, total_lines)?,
    );
    if bounds.0 > bounds.1 {
        Err(format!(
            "Start line ({start}) cannot be greater than end line ({end})"
        ))
    } else {
        Ok(bounds)
    }
}
// Process-wide, in-memory undo history: maps a file path (lossy string form)
// to a stack of previous file contents, most recent last. The cap of 10
// entries per file is enforced by `save_file_history`.
static FILE_HISTORY: OnceLock<Mutex<HashMap<String, Vec<String>>>> = OnceLock::new();
/// Lazily initialize and return the global file-history map shared by
/// `save_file_history` and `undo_edit`.
pub fn get_file_history() -> &'static Mutex<HashMap<String, Vec<String>>> {
    FILE_HISTORY.get_or_init(|| Mutex::new(HashMap::new()))
}
/// Snapshot the current contents of `path` into the in-memory undo history.
///
/// Keeps at most 10 snapshots per file, discarding the oldest first. A file
/// that does not exist yet is silently skipped — there is nothing to
/// snapshot, and `undo_edit` simply has no history for it.
///
/// # Errors
/// Fails if the file cannot be read or the history lock is poisoned.
pub async fn save_file_history(path: &Path) -> Result<()> {
    if path.exists() {
        let content = tokio_fs::read_to_string(path).await?;
        let path_str = path.to_string_lossy().to_string();
        // Scope the guard so the std Mutex is released before returning;
        // it must never be held across an `.await`.
        {
            let mut history_guard = get_file_history()
                .lock()
                .map_err(|_| anyhow!("Failed to acquire lock on file history"))?;
            let history = history_guard.entry(path_str).or_default();
            // Cap at 10 snapshots per file: evict the oldest before pushing.
            if history.len() >= 10 {
                history.remove(0);
            }
            history.push(content);
        }
    }
    Ok(())
}
/// Restore the most recent snapshot of `path` from the undo history and
/// write it back to disk atomically.
///
/// # Errors
/// Fails when the history lock is poisoned, the write fails, or no snapshots
/// remain for this file.
pub async fn undo_edit(path: &Path) -> Result<String> {
    let key = path.to_string_lossy().to_string();
    // Pop the snapshot inside this single expression; the lock guard is a
    // temporary and is dropped before any async I/O happens below.
    let previous_content = get_file_history()
        .lock()
        .map_err(|_| anyhow!("Failed to acquire lock on file history"))?
        .get_mut(&key)
        .and_then(|history| history.pop());
    match previous_content {
        Some(prev_content) => {
            text_editing::atomic_write(path, &prev_content).await?;
            Ok(format!(
                "Successfully undid the last edit to {}",
                path.to_string_lossy()
            ))
        }
        None => {
            bail!("No more undo history for this file (up to 10 levels are stored per file).")
        }
    }
}
/// Map a file extension to the language tag used for syntax highlighting.
///
/// Unrecognized extensions fall back to `"text"`.
pub fn detect_language(ext: &str) -> &str {
    // Flat extension → language table; linear scan is fine for ~20 entries.
    const TABLE: &[(&str, &str)] = &[
        ("rs", "rust"),
        ("py", "python"),
        ("js", "javascript"),
        ("ts", "typescript"),
        ("jsx", "jsx"),
        ("tsx", "tsx"),
        ("html", "html"),
        ("css", "css"),
        ("json", "json"),
        ("md", "markdown"),
        ("go", "go"),
        ("java", "java"),
        // C headers and C/C++ sources all highlight as "cpp".
        ("c", "cpp"),
        ("h", "cpp"),
        ("cpp", "cpp"),
        ("toml", "toml"),
        ("yaml", "yaml"),
        ("yml", "yaml"),
        ("php", "php"),
        ("xml", "xml"),
        ("sh", "bash"),
    ];
    TABLE
        .iter()
        .find(|(extension, _)| *extension == ext)
        .map_or("text", |(_, language)| language)
}
pub async fn execute_text_editor(call: &McpToolCall) -> Result<String> {
let command = match call.parameters.get("command") {
Some(Value::String(cmd)) => cmd.clone(),
Some(_) => {
bail!("Command parameter must be a string");
}
None => {
bail!("Missing required 'command' parameter");
}
};
match command.as_str() {
"create" => {
let path = match call.parameters.get("path") {
Some(Value::String(p)) => p.clone(),
_ => {
bail!("Missing or invalid 'path' parameter for create command");
}
};
let content = match call.parameters.get("content") {
Some(Value::String(txt)) => txt.clone(),
_ => {
bail!("Missing or invalid 'content' parameter for create command");
}
};
file_ops::create_file_spec(&resolve_path(&path), &content).await
}
"str_replace" => {
let path = match call.parameters.get("path") {
Some(Value::String(p)) => p.clone(),
_ => {
bail!("Missing or invalid 'path' parameter for str_replace command");
}
};
let old_text = match call.parameters.get("old_text") {
Some(Value::String(s)) => s.clone(),
_ => {
bail!("Missing or invalid 'old_text' parameter");
}
};
let new_text = match call.parameters.get("new_text") {
Some(Value::String(s)) => s.clone(),
_ => {
bail!("Missing or invalid 'new_text' parameter");
}
};
text_editing::str_replace_spec(&resolve_path(&path), &old_text, &new_text).await
}
"undo_edit" => {
let path = match call.parameters.get("path") {
Some(Value::String(p)) => p.clone(),
_ => {
bail!("Missing or invalid 'path' parameter for undo_edit command");
}
};
undo_edit(&resolve_path(&path)).await
}
_ => bail!(
"Invalid command: {command}. Allowed commands are: create, str_replace, undo_edit"
),
}
}
/// View one or more files, a directory listing, or a slice of a file.
///
/// `paths` accepts either a single string or an array of path strings
/// (1..=50). Multiple paths delegate to `view_many_files_spec`; a directory
/// delegates to `directory::list_directory`. For a single file, a non-empty
/// `content` pattern switches to in-file search (with optional `context`
/// lines), and an optional `lines` pair — either 1-indexed integers
/// (negative counts from the end) or line-hash strings — restricts the view.
///
/// # Errors
/// Fails on missing/invalid parameters, unreadable files (in the hash path),
/// or malformed/reversed line ranges.
pub async fn execute_view(call: &McpToolCall) -> Result<String> {
    // Accept both a bare string and an array of strings for `paths`.
    let paths: Vec<String> = match call.parameters.get("paths") {
        Some(Value::Array(arr)) => {
            let path_strings: Result<Vec<String>, _> = arr
                .iter()
                .map(|p| {
                    p.as_str()
                        .ok_or_else(|| anyhow!("Invalid path in array"))
                        .map(|s| s.to_string())
                })
                .collect();
            path_strings?
        }
        Some(Value::String(s)) => vec![s.clone()],
        _ => {
            bail!("Missing or invalid 'paths' parameter.");
        }
    };
    if paths.is_empty() {
        bail!("'paths' must contain at least one element.");
    }
    if paths.len() > 50 {
        bail!("Too many files requested. Maximum 50 files per request.");
    }
    // Multi-file requests ignore `content`/`lines` and use the bulk viewer.
    if paths.len() > 1 {
        return file_ops::view_many_files_spec(&paths).await;
    }
    let path = &paths[0];
    let resolved = resolve_path(path);
    if resolved.is_dir() {
        return directory::list_directory(call, path).await;
    }
    // A non-empty `content` pattern means "search within this file".
    if let Some(content_pattern) = call.parameters.get("content").and_then(|v| v.as_str()) {
        if !content_pattern.trim().is_empty() {
            let context_lines = call
                .parameters
                .get("context")
                .and_then(|v| v.as_i64())
                .unwrap_or(0) as usize;
            return file_ops::view_file_with_content_search(
                &resolved,
                content_pattern,
                context_lines,
            )
            .await;
        }
    }
    // Resolve the optional `lines` range. The tuple is (usize, i64) —
    // presumably the shape `view_file_spec` expects; TODO confirm downstream.
    let lines = match call.parameters.get("lines") {
        Some(Value::Array(arr)) if arr.len() == 2 => {
            if let (Some(start), Some(end)) = (arr[0].as_i64(), arr[1].as_i64()) {
                // Read the file to learn its length so negative indices can
                // be resolved; a read failure is folded into "0 lines" here.
                let total_lines = match tokio_fs::read_to_string(&resolved).await {
                    Ok(c) => c.lines().count(),
                    Err(_) => 0,
                };
                if total_lines > 0 {
                    match resolve_line_range(start, end, total_lines) {
                        Ok((s, e)) => Some((s, e as i64)),
                        Err(err) => {
                            bail!("Invalid lines parameter: {err}");
                        }
                    }
                } else {
                    // NOTE(review): for an empty/unreadable file the raw
                    // values pass through unvalidated, and a negative `start`
                    // wraps via `as usize` — confirm view_file_spec tolerates
                    // this.
                    Some((start as usize, end))
                }
            } else if let (Some(start_hash), Some(end_hash)) = (arr[0].as_str(), arr[1].as_str()) {
                // Hash mode: both endpoints are line-content hashes resolved
                // to concrete 1-indexed line numbers.
                let content = tokio_fs::read_to_string(&resolved)
                    .await
                    .map_err(|e| anyhow!("Cannot read file for hash resolution: {}", e))?;
                let file_lines: Vec<&str> = content.lines().collect();
                let start = crate::utils::line_hash::resolve_hash_to_line(start_hash, &file_lines)
                    .map_err(|e| anyhow!("Invalid start hash: {}", e))?;
                let end = crate::utils::line_hash::resolve_hash_to_line(end_hash, &file_lines)
                    .map_err(|e| anyhow!("Invalid end hash: {}", e))?;
                if start > end {
                    bail!(
                        "Start hash '{}' (line {}) is after end hash '{}' (line {}) — range must go forward",
                        start_hash, start, end_hash, end
                    );
                }
                Some((start, end as i64))
            } else {
                bail!("lines array elements must be integers or hash strings");
            }
        }
        Some(Value::Array(_)) => {
            bail!("lines must be an array with exactly 2 elements");
        }
        Some(_) => {
            bail!("lines must be an array");
        }
        None => None,
    };
    file_ops::view_file_spec(&resolved, lines).await
}
/// Extract a line range from one file and insert it into another.
///
/// Tool-call parameters:
/// - `from_path`: source file (must exist; it is read, never modified —
///   lines are copied, not moved).
/// - `from_range`: `[start, end]` — 1-indexed integers (negative counts from
///   the end) or a pair of line-hash strings.
/// - `append_path`: target file (created, with parent dirs, if missing).
/// - `append_line`: insert position — `0` prepends, `-1` appends, `n`
///   inserts after line `n`; a hash string is resolved against the target.
///
/// # Errors
/// Fails on missing/invalid parameters, unreadable files, out-of-range or
/// reversed ranges, and write failures.
pub async fn execute_extract_lines(call: &McpToolCall) -> Result<String> {
    let from_path = match call.parameters.get("from_path") {
        Some(Value::String(p)) => {
            if p.trim().is_empty() {
                bail!("Parameter 'from_path' cannot be empty");
            }
            p.clone()
        }
        Some(_) => {
            bail!("Parameter 'from_path' must be a string");
        }
        None => {
            bail!("Missing required parameter 'from_path'");
        }
    };
    // The source range may be given as line numbers or content-hash strings.
    enum FromRange {
        Lines(i64, i64),
        Hashes(String, String),
    }
    let from_range_raw = match call.parameters.get("from_range") {
        Some(Value::Array(arr)) => {
            if arr.len() != 2 {
                bail!("Parameter 'from_range' must be an array with exactly 2 elements");
            }
            // If either element is a string, both must be hash strings.
            if arr[0].is_string() || arr[1].is_string() {
                let start = arr[0]
                    .as_str()
                    .ok_or_else(|| {
                        anyhow::anyhow!("from_range elements must both be hash strings")
                    })?
                    .to_string();
                let end = arr[1]
                    .as_str()
                    .ok_or_else(|| {
                        anyhow::anyhow!("from_range elements must both be hash strings")
                    })?
                    .to_string();
                FromRange::Hashes(start, end)
            } else {
                let start = match arr[0].as_i64() {
                    Some(0) => bail!("Line numbers are 1-indexed, use 1 for first line"),
                    Some(n) => n,
                    None => bail!("Start line number must be an integer"),
                };
                let end = match arr[1].as_i64() {
                    Some(0) => bail!("Line numbers are 1-indexed, use 1 for first line"),
                    Some(n) => n,
                    None => bail!("End line number must be an integer"),
                };
                FromRange::Lines(start, end)
            }
        }
        Some(_) => {
            bail!("Parameter 'from_range' must be an array");
        }
        None => {
            bail!("Missing required parameter 'from_range'");
        }
    };
    let append_path = match call.parameters.get("append_path") {
        Some(Value::String(p)) => {
            if p.trim().is_empty() {
                bail!("Parameter 'append_path' cannot be empty");
            }
            p.clone()
        }
        Some(_) => {
            bail!("Parameter 'append_path' must be a string");
        }
        None => {
            bail!("Missing required parameter 'append_path'");
        }
    };
    // Insert position: a concrete line number or a target-file line hash.
    enum AppendLine {
        Position(i64),
        Hash(String),
    }
    let append_line_raw = match call.parameters.get("append_line") {
        Some(Value::Number(n)) => match n.as_i64() {
            Some(line) => AppendLine::Position(line),
            None => bail!("Parameter 'append_line' must be an integer"),
        },
        Some(Value::String(h)) => AppendLine::Hash(h.clone()),
        Some(_) => {
            bail!("Parameter 'append_line' must be an integer or hash string");
        }
        None => {
            bail!("Missing required parameter 'append_line'");
        }
    };
    let from_path_obj = resolve_path(&from_path);
    if !from_path_obj.exists() {
        bail!("Source file does not exist: {from_path}");
    }
    let source_content = match tokio_fs::read_to_string(&from_path_obj).await {
        Ok(content) => content,
        Err(e) => {
            bail!("Failed to read source file '{from_path}': {e}");
        }
    };
    let source_lines: Vec<&str> = source_content.lines().collect();
    let total_lines = source_lines.len();
    // Resolve the raw range to a concrete 1-indexed (start, end), inclusive.
    let from_range = match from_range_raw {
        FromRange::Hashes(start_hash, end_hash) => {
            let start = crate::utils::line_hash::resolve_hash_to_line(&start_hash, &source_lines)
                .map_err(|e| anyhow::anyhow!("Invalid from_range start: {e}"))?;
            let end = crate::utils::line_hash::resolve_hash_to_line(&end_hash, &source_lines)
                .map_err(|e| anyhow::anyhow!("Invalid from_range end: {e}"))?;
            if start > end {
                bail!(
                    "Hash range is reversed: '{}' is line {} but '{}' is line {} (which comes before it). Did you mean from_range: [\"{}\", \"{}\"]?",
                    start_hash, start, end_hash, end, end_hash, start_hash
                );
            }
            (start, end)
        }
        FromRange::Lines(start_raw, end_raw) => {
            match resolve_line_range(start_raw, end_raw, total_lines) {
                Ok(range) => range,
                Err(err) => bail!("Invalid from_range: {err}"),
            }
        }
    };
    // Bounds were validated above, so these slices cannot panic.
    let extracted_lines: Vec<&str> = source_lines[(from_range.0 - 1)..from_range.1].to_vec();
    let extracted_hashes: Option<Vec<String>> = if crate::utils::line_hash::is_hash_mode() {
        let all_hashes = crate::utils::line_hash::compute_line_hashes(&source_lines);
        Some(all_hashes[(from_range.0 - 1)..from_range.1].to_vec())
    } else {
        None
    };
    let extracted_content_display = format_extracted_content_smart(
        &extracted_lines,
        from_range.0,
        Some(30),
        extracted_hashes.as_deref(),
    );
    // Preserve the source's trailing-newline behavior when the extracted
    // range includes the final line.
    let source_ends_with_newline = source_content.ends_with('\n');
    let extracting_last_line = from_range.1 == total_lines;
    let extracted_content =
        if extracted_lines.len() == 1 && extracting_last_line && !source_ends_with_newline {
            extracted_lines[0].to_string()
        } else if extracting_last_line && source_ends_with_newline {
            format!("{}\n", extracted_lines.join("\n"))
        } else {
            extracted_lines.join("\n")
        };
    let append_path_obj = resolve_path(&append_path);
    if let Some(parent) = append_path_obj.parent() {
        if let Err(e) = tokio_fs::create_dir_all(parent).await {
            bail!("Failed to create parent directories for '{append_path}': {e}");
        }
    }
    let target_content = if append_path_obj.exists() {
        match tokio_fs::read_to_string(&append_path_obj).await {
            Ok(content) => content,
            Err(e) => {
                bail!("Failed to read target file '{append_path}': {e}");
            }
        }
    } else {
        String::new()
    };
    let append_line: i64 = match append_line_raw {
        AppendLine::Position(n) => n,
        AppendLine::Hash(hash) => {
            if target_content.is_empty() {
                bail!("Cannot use hash identifier for append_line on an empty or non-existent target file");
            }
            let target_lines: Vec<&str> = target_content.lines().collect();
            let line = crate::utils::line_hash::resolve_hash_to_line(&hash, &target_lines)
                .map_err(|e| anyhow::anyhow!("Invalid append_line: {e}"))?;
            line as i64
        }
    };
    // Assemble the new target content: 0 = prepend, -1 = append, n = insert
    // after line n.
    let final_content = if append_line == 0 {
        if target_content.is_empty() {
            extracted_content.clone()
        } else {
            if extracted_content.ends_with('\n') {
                format!("{extracted_content}{target_content}")
            } else {
                format!("{extracted_content}\n{target_content}")
            }
        }
    } else if append_line == -1 {
        if target_content.is_empty() {
            extracted_content.clone()
        } else if target_content.ends_with('\n') {
            format!("{target_content}{extracted_content}")
        } else {
            format!("{target_content}\n{extracted_content}")
        }
    } else {
        let target_lines: Vec<&str> = target_content.lines().collect();
        // NOTE(review): positions below -1 wrap to huge values via `as usize`
        // and are rejected by the bound check below, though with a confusing
        // message — consider validating negative positions explicitly.
        let insert_after = append_line as usize;
        if insert_after > target_lines.len() {
            bail!(
                "Insert position {insert_after} exceeds target file length ({} lines) in '{append_path}'",
                target_lines.len()
            );
        }
        let mut new_lines = Vec::new();
        new_lines.extend(target_lines[..insert_after].iter().map(|s| s.to_string()));
        new_lines.extend(extracted_lines.iter().map(|s| s.to_string()));
        if insert_after < target_lines.len() {
            new_lines.extend(target_lines[insert_after..].iter().map(|s| s.to_string()));
        }
        let target_ends_with_newline = target_content.ends_with('\n');
        if target_ends_with_newline {
            format!("{}\n", new_lines.join("\n"))
        } else {
            new_lines.join("\n")
        }
    };
    if let Err(e) = tokio_fs::write(&append_path_obj, &final_content).await {
        bail!("Failed to write to target file '{append_path}': {e}");
    }
    let lines_extracted = from_range.1 - from_range.0 + 1;
    let position_desc = match append_line {
        0 => "beginning of file".to_string(),
        -1 => "end of file".to_string(),
        n => format!("after line {n}"),
    };
    Ok(format!(
        "Successfully extracted {lines_extracted} lines (lines {}-{}) from '{from_path}' and appended to '{append_path}' at {position_desc}.\n\nExtracted content:\n{extracted_content_display}",
        from_range.0,
        from_range.1
    ))
}
pub async fn execute_batch_edit(call: &McpToolCall) -> Result<String> {
let (operations_vec, ai_format_warning) = match call.parameters.get("operations") {
Some(Value::Array(ops)) => {
if ops.len() > 50 {
bail!("Too many operations in batch. Maximum 50 operations allowed.");
}
(ops.clone(), false)
}
Some(Value::String(ops_str)) => {
match serde_json::from_str::<Vec<Value>>(ops_str) {
Ok(parsed_ops) => {
if parsed_ops.len() > 50 {
bail!("Too many operations in batch. Maximum 50 operations allowed.");
}
tracing::debug!("AI passed operations as JSON string instead of array - parsing defensively");
(parsed_ops, true)
}
Err(_) => {
bail!("Invalid 'operations' parameter for batch_edit - must be an array or valid JSON array string");
}
}
}
_ => {
bail!("Missing or invalid 'operations' parameter for batch_edit - must be an array");
}
};
let mut modified_call = call.clone();
if ai_format_warning {
modified_call
.parameters
.as_object_mut()
.unwrap()
.insert("_ai_format_warning".to_string(), Value::Bool(true));
}
text_editing::batch_edit_spec(&modified_call, &operations_vec).await
}