use super::text_edit::{
is_whitespace_significant_file, normalize_blank_lines, strip_orphaned_doc_comment,
};
use super::{FinalizeEditParams, InsertEdge};
use crate::server::helpers::{
check_occ, check_sandbox_access, io_error_data, parse_semantic_path, require_symbol_target,
};
use crate::server::types::{
DeleteSymbolParams, EditResponse, InsertAfterParams, InsertBeforeParams, ReplaceBodyParams,
ReplaceFullParams, ValidateOnlyParams,
};
use pathfinder_common::indent::dedent_then_reindent;
use pathfinder_common::normalize::normalize_for_full_replace;
use pathfinder_common::types::VersionHash;
use rmcp::handler::server::wrapper::Json;
use rmcp::model::ErrorData;
use tracing::instrument;
impl crate::server::PathfinderServer {
/// MCP tool entry point for `replace_body`: replaces the body of the symbol
/// addressed by `params.semantic_path` with `params.new_code`.
///
/// Flow: parse the semantic path, enforce the sandbox, resolve the edited
/// file content, run the optimistic-concurrency check against
/// `params.base_version`, then hand off to `finalize_edit` (write, validate,
/// respond).
///
/// # Errors
/// Propagates parse, sandbox, content-resolution, and OCC failures as
/// `ErrorData`.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn replace_body_impl(
    &self,
    params: ReplaceBodyParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "replace_body",
        semantic_path = %params.semantic_path,
        "replace_body: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "replace_body",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "replace_body",
            Some(&params.new_code),
        )
        .await?;
    // OCC: reject the edit if the file changed since the caller read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    // Resolution time is reported separately from total edit time.
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "replace_body",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
    })
    .await
}
/// MCP tool entry point for `replace_full`: replaces the entire symbol (or,
/// for a bare file path, the whole file) addressed by
/// `params.semantic_path` with `params.new_code`.
///
/// Flow mirrors the other edit tools: parse path, sandbox check, resolve
/// new content, OCC check against `params.base_version`, then
/// `finalize_edit`.
///
/// # Errors
/// Propagates parse, sandbox, content-resolution, and OCC failures as
/// `ErrorData`.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn replace_full_impl(
    &self,
    params: ReplaceFullParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "replace_full",
        semantic_path = %params.semantic_path,
        "replace_full: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "replace_full",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "replace_full",
            Some(&params.new_code),
        )
        .await?;
    // OCC: reject the edit if the file changed since the caller read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "replace_full",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
    })
    .await
}
/// MCP tool entry point for `insert_before`: inserts `params.new_code`
/// immediately before the symbol (or at the start of a bare file) addressed
/// by `params.semantic_path`.
///
/// Flow: parse path, sandbox check, resolve new content (separator and
/// indentation handling lives in `resolve_edit_content`), OCC check, then
/// `finalize_edit`.
///
/// # Errors
/// Propagates parse, sandbox, content-resolution, and OCC failures as
/// `ErrorData`.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn insert_before_impl(
    &self,
    params: InsertBeforeParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "insert_before",
        semantic_path = %params.semantic_path,
        "insert_before: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "insert_before",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "insert_before",
            Some(&params.new_code),
        )
        .await?;
    // OCC: reject the edit if the file changed since the caller read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "insert_before",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
    })
    .await
}
/// MCP tool entry point for `insert_after`: inserts `params.new_code`
/// immediately after the symbol (or at the end of a bare file) addressed by
/// `params.semantic_path`.
///
/// Flow: parse path, sandbox check, resolve new content (separator and
/// indentation handling lives in `resolve_edit_content`), OCC check, then
/// `finalize_edit`.
///
/// # Errors
/// Propagates parse, sandbox, content-resolution, and OCC failures as
/// `ErrorData`.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn insert_after_impl(
    &self,
    params: InsertAfterParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "insert_after",
        semantic_path = %params.semantic_path,
        "insert_after: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "insert_after",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "insert_after",
            Some(&params.new_code),
        )
        .await?;
    // OCC: reject the edit if the file changed since the caller read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "insert_after",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
    })
    .await
}
/// Resolves where an `insert_before`/`insert_after` edit should splice into
/// the target file.
///
/// Returns `(insert_byte, indent_column, file_bytes, version_hash)`:
/// the byte offset at which to insert, the column the new code should be
/// re-indented to, the current file contents, and their version hash.
pub(crate) async fn resolve_insert_position(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    edge: InsertEdge,
) -> Result<(usize, usize, std::sync::Arc<[u8]>, VersionHash), ErrorData> {
    // Bare file target: insert at the very start or the very end of the
    // raw file bytes; there is no symbol, so the indent column is 0.
    if semantic_path.is_bare_file() {
        let absolute_path = self.workspace_root.resolve(&semantic_path.file_path);
        let contents = tokio::fs::read(&absolute_path)
            .await
            .map_err(|e| io_error_data(format!("failed to read file: {e}")))?;
        let version = VersionHash::compute(&contents);
        let position = if matches!(edge, InsertEdge::Before) {
            0
        } else {
            contents.len()
        };
        return Ok((position, 0, std::sync::Arc::from(contents), version));
    }

    // Symbol target: anchor the insertion at the symbol's start or end byte
    // and inherit the symbol's own indentation column.
    let (symbol_range, source, version) = self
        .surgeon
        .resolve_symbol_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    let position = match edge {
        InsertEdge::Before => symbol_range.start_byte,
        InsertEdge::After => symbol_range.end_byte,
    };
    Ok((position, symbol_range.indent_column, source, version))
}
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn delete_symbol_impl(
&self,
params: DeleteSymbolParams,
) -> Result<Json<EditResponse>, ErrorData> {
let start = std::time::Instant::now();
tracing::info!(
tool = "delete_symbol",
semantic_path = %params.semantic_path,
"delete_symbol: start"
);
let semantic_path = parse_semantic_path(¶ms.semantic_path)?;
check_sandbox_access(
&self.sandbox,
&semantic_path.file_path,
"delete_symbol",
¶ms.semantic_path,
)?;
let (source, current_hash, new_bytes) = self
.resolve_edit_content(&semantic_path, ¶ms.semantic_path, "delete", None)
.await?;
if !params.ignore_validation_failures {
if let Some(symbol_chain) = &semantic_path.symbol_chain {
if let Some(symbol) = symbol_chain.segments.last() {
let symbol_name = &symbol.name;
let workspace_path = self.workspace_root.path().to_string_lossy().to_string();
let absolute_target = self
.workspace_root
.path()
.join(&semantic_path.file_path)
.to_string_lossy()
.to_string();
let mut cmd = tokio::process::Command::new("rg");
cmd.arg("-l")
.arg("-w")
.arg(symbol_name)
.arg(&workspace_path)
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::null());
if let Ok(out) = cmd.output().await {
if out.status.success() {
let stdout = String::from_utf8_lossy(&out.stdout);
let mut reference_count = 0u32;
for line in stdout.lines() {
let line = line.trim();
if line.is_empty() {
continue;
}
if line != absolute_target {
reference_count += 1;
}
}
if reference_count > 0 {
let err =
pathfinder_common::error::PathfinderError::InvalidTarget {
semantic_path: params.semantic_path.clone(),
reason: format!(
"Symbol '{symbol_name}' is still referenced in \
{reference_count} other file(s). Delete or update \
those references first, or pass \
'ignore_validation_failures: true' to force deletion."
),
edit_index: None,
valid_edit_types: None,
};
return Err(crate::server::helpers::pathfinder_to_error_data(&err));
}
}
}
}
}
}
check_occ(
¶ms.base_version,
¤t_hash,
semantic_path.file_path.clone(),
)?;
let resolve_ms = start.elapsed().as_millis();
self.finalize_edit(FinalizeEditParams {
tool_name: "delete_symbol",
semantic_path: &semantic_path,
raw_semantic_path_str: ¶ms.semantic_path,
source: &source,
original_hash: ¤t_hash,
new_content: new_bytes,
ignore_validation_failures: params.ignore_validation_failures,
start_time: start,
resolve_ms,
})
.await
}
/// MCP tool entry point for `validate_only`: computes the edited content
/// for `params.edit_type` and runs validation against it, but never writes
/// anything to disk.
///
/// The response therefore always carries `new_version_hash: None` and
/// `formatted: false`.
///
/// # Errors
/// Propagates parse, sandbox, content-resolution, and OCC failures as
/// `ErrorData`.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path, edit_type = %params.edit_type))]
pub(crate) async fn validate_only_impl(
    &self,
    params: ValidateOnlyParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "validate_only",
        semantic_path = %params.semantic_path,
        edit_type = %params.edit_type,
        "validate_only: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "validate_only",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            &params.edit_type,
            params.new_code.as_deref(),
        )
        .await?;
    // OCC still applies: validating against a stale base version is an error.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    // Lossy fallback: non-UTF-8 content is validated as an empty string
    // rather than failing the request.
    let original_str = std::str::from_utf8(&source).unwrap_or("");
    let new_str = std::str::from_utf8(&new_bytes).unwrap_or("");
    let validation_outcome = self
        .run_lsp_validation(&semantic_path.file_path, original_str, new_str, false)
        .await;
    let duration_ms = start.elapsed().as_millis();
    tracing::info!(
        tool = "validate_only",
        semantic_path = %params.semantic_path,
        duration_ms,
        engines_used = ?if validation_outcome.skipped { vec!["tree-sitter"] } else { vec!["tree-sitter", "lsp"] },
        "validate_only: complete"
    );
    Ok(Json(EditResponse {
        success: true,
        // Nothing was written, so there is no new version and no formatting.
        new_version_hash: None,
        formatted: false,
        validation: validation_outcome.validation,
        validation_skipped: validation_outcome.skipped,
        validation_skipped_reason: validation_outcome.skipped_reason,
    }))
}
/// Computes the fully-edited file content for an edit request without
/// touching the disk.
///
/// Returns `(original_source, original_version_hash, new_file_bytes)` so
/// callers can run OCC checks against the hash and then finalize the edit.
///
/// `edit_type` selects the splice strategy: `replace_body`,
/// `replace_full`, `insert_before`, `insert_after`, or `delete`; any other
/// value yields an `InvalidTarget` error listing the supported types.
/// `new_code` is required by all strategies except `delete` (missing code
/// is treated as the empty string).
#[allow(clippy::too_many_lines)]
pub(crate) async fn resolve_edit_content(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    raw_semantic_path: &str,
    edit_type: &str,
    new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    match edit_type {
        "replace_body" => {
            // Body replacement needs a symbol target; bare files have no body.
            require_symbol_target(semantic_path, raw_semantic_path)?;
            let new_code = new_code.unwrap_or_default();
            let (body_range, source, current_hash) = self
                .surgeon
                .resolve_body_range(self.workspace_root.path(), semantic_path)
                .await
                .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
            // Normalize line endings/blank lines, then re-indent the new body
            // to the column where the old body sat.
            let normalized = pathfinder_common::normalize::normalize_for_body_replace(new_code);
            let indented = pathfinder_common::indent::dedent_then_reindent(
                &normalized,
                body_range.body_indent_column,
            );
            let new_content =
                super::text_edit::build_body_replacement(&source, &body_range, &indented)?;
            Ok((source, current_hash, new_content.as_bytes().to_vec()))
        }
        "replace_full" => {
            let new_code = new_code.unwrap_or_default();
            if semantic_path.is_bare_file() {
                // Whole-file replacement: the new bytes are used verbatim.
                let absolute_path = self.workspace_root.resolve(&semantic_path.file_path);
                let source = tokio::fs::read(&absolute_path)
                    .await
                    .map_err(|e| io_error_data(format!("failed to read file: {e}")))?;
                let current_hash = VersionHash::compute(&source);
                let new_bytes = new_code.as_bytes().to_vec();
                // Advisory parse check: tree-sitter errors only log a
                // warning, they never block a bare-file replacement.
                if let Ok(new_str) = std::str::from_utf8(&new_bytes) {
                    if let Some(lang) =
                        pathfinder_treesitter::language::SupportedLanguage::detect(
                            &semantic_path.file_path,
                        )
                    {
                        match pathfinder_treesitter::parser::AstParser::parse_source(
                            &semantic_path.file_path,
                            lang,
                            new_str.as_bytes(),
                        ) {
                            Ok(tree) => {
                                if tree.root_node().has_error() {
                                    tracing::warn!(
                                        file = %semantic_path.file_path.display(),
                                        "replace_full: bare file content has parse errors"
                                    );
                                }
                            }
                            Err(e) => {
                                tracing::warn!(error = %e, "replace_full: tree-sitter error");
                            }
                        }
                    }
                }
                Ok((std::sync::Arc::from(source), current_hash, new_bytes))
            } else {
                // Symbol replacement: splice the re-indented code over the
                // symbol's full byte range.
                let (full_range, source, current_hash) = self
                    .surgeon
                    .resolve_full_range(self.workspace_root.path(), semantic_path)
                    .await
                    .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
                let normalized = normalize_for_full_replace(new_code);
                let indented = dedent_then_reindent(&normalized, full_range.indent_column);
                let before = &source[..full_range.start_byte];
                let after = &source[full_range.end_byte..];
                let mut new_bytes =
                    Vec::with_capacity(before.len() + indented.len() + after.len());
                new_bytes.extend_from_slice(before);
                new_bytes.extend_from_slice(indented.as_bytes());
                new_bytes.extend_from_slice(after);
                Ok((source, current_hash, new_bytes))
            }
        }
        "insert_before" => {
            let new_code = new_code.unwrap_or_default();
            let (insert_byte, indent_column, source, current_hash) = self
                .resolve_insert_position(semantic_path, InsertEdge::Before)
                .await?;
            let normalized = normalize_for_full_replace(new_code);
            let indented = dedent_then_reindent(&normalized, indent_column);
            let before = &source[..insert_byte];
            let after = &source[insert_byte..];
            // Separator heuristic: add nothing if a blank line already
            // surrounds the insertion point, one newline if the target
            // starts on a fresh line, otherwise a blank line.
            let sep = if before.ends_with(b"\n\n")
                || after.starts_with(b"\n\n")
                || (before.ends_with(b"\n") && after.starts_with(b"\n"))
            {
                ""
            } else if after.starts_with(b"\n") {
                "\n"
            } else {
                "\n\n"
            };
            // Make sure the inserted code itself ends with a newline.
            let trailing = if indented.ends_with('\n') { "" } else { "\n" };
            let mut new_bytes = Vec::with_capacity(
                before.len() + indented.len() + sep.len() + trailing.len() + after.len(),
            );
            // NOTE(review): append order is before, code, trailing, sep,
            // after — `trailing` terminates the code line and `sep` then
            // separates it from the target symbol; confirm this ordering
            // is intentional.
            new_bytes.extend_from_slice(before);
            new_bytes.extend_from_slice(indented.as_bytes());
            new_bytes.extend_from_slice(trailing.as_bytes());
            new_bytes.extend_from_slice(sep.as_bytes());
            new_bytes.extend_from_slice(after);
            // Collapse excess blank lines unless blank lines are meaningful
            // for this file type.
            if !is_whitespace_significant_file(std::path::Path::new(&semantic_path.file_path)) {
                new_bytes = normalize_blank_lines(&new_bytes);
            }
            Ok((source, current_hash, new_bytes))
        }
        "insert_after" => {
            let new_code = new_code.unwrap_or_default();
            let (insert_byte, indent_column, source, current_hash) = self
                .resolve_insert_position(semantic_path, InsertEdge::After)
                .await?;
            let normalized = normalize_for_full_replace(new_code);
            let indented = dedent_then_reindent(&normalized, indent_column);
            let before = &source[..insert_byte];
            let after = &source[insert_byte..];
            // Separator between the target symbol and the inserted code:
            // nothing if a blank line is already present, one newline if the
            // symbol already ends its line, otherwise a blank line.
            let before_sep = if before.ends_with(b"\n\n")
                || after.starts_with(b"\n\n")
                || (before.ends_with(b"\n") && after.starts_with(b"\n"))
            {
                ""
            } else if before.ends_with(b"\n") {
                "\n"
            } else {
                "\n\n"
            };
            // Ensure the inserted code is newline-terminated.
            let after_sep = if indented.ends_with('\n') { "" } else { "\n" };
            let mut new_bytes = Vec::with_capacity(
                before.len()
                    + before_sep.len()
                    + indented.len()
                    + after_sep.len()
                    + after.len(),
            );
            new_bytes.extend_from_slice(before);
            new_bytes.extend_from_slice(before_sep.as_bytes());
            new_bytes.extend_from_slice(indented.as_bytes());
            new_bytes.extend_from_slice(after_sep.as_bytes());
            new_bytes.extend_from_slice(after);
            // Collapse excess blank lines unless blank lines are meaningful
            // for this file type.
            if !is_whitespace_significant_file(std::path::Path::new(&semantic_path.file_path)) {
                new_bytes = normalize_blank_lines(&new_bytes);
            }
            Ok((source, current_hash, new_bytes))
        }
        "delete" => {
            // Deletion needs a symbol target; deleting a bare file is not
            // an edit.
            require_symbol_target(semantic_path, raw_semantic_path)?;
            let (full_range, source, current_hash) = self
                .surgeon
                .resolve_full_range(self.workspace_root.path(), semantic_path)
                .await
                .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
            // Also remove any doc comment that would be left orphaned above
            // the deleted symbol.
            let before_end = strip_orphaned_doc_comment(&source, full_range.start_byte);
            // Trim whitespace backwards from the cut start ...
            let mut b_end = before_end;
            while b_end > 0 && source[b_end - 1].is_ascii_whitespace() {
                b_end -= 1;
            }
            // ... and forwards from the cut end, so the join is clean.
            let mut a_start = full_range.end_byte;
            while a_start < source.len() && source[a_start].is_ascii_whitespace() {
                a_start += 1;
            }
            let before = &source[..b_end];
            let after = &source[a_start..];
            // Rejoin with a single newline at a file edge, or a blank line
            // between two remaining chunks of code.
            let sep = if before.is_empty() || after.is_empty() {
                b"\n" as &[u8]
            } else {
                b"\n\n"
            };
            let mut new_bytes = Vec::with_capacity(before.len() + sep.len() + after.len());
            new_bytes.extend_from_slice(before);
            new_bytes.extend_from_slice(sep);
            new_bytes.extend_from_slice(after);
            Ok((source, current_hash, new_bytes))
        }
        unknown => {
            // Unsupported edit type: reject with the list of valid options.
            let err = pathfinder_common::error::PathfinderError::InvalidTarget {
                semantic_path: raw_semantic_path.to_owned(),
                reason: format!(
                    "unsupported edit type: '{unknown}'. Must be one of: replace_body, replace_full, insert_before, insert_after, delete."
                ),
                edit_index: None,
                valid_edit_types: None,
            };
            Err(crate::server::helpers::pathfinder_to_error_data(&err))
        }
    }
}
}