use super::text_edit::{
is_whitespace_significant_file, normalize_blank_lines, strip_orphaned_doc_comment,
};
use super::{FinalizeEditParams, InsertEdge};
use crate::server::helpers::{
check_occ, check_sandbox_access, io_error_data, parse_semantic_path, require_symbol_target,
};
use crate::server::types::{
DeleteSymbolParams, EditResponse, InsertAfterParams, InsertBeforeParams, ReplaceBodyParams,
ReplaceFullParams, ValidateOnlyParams,
};
use pathfinder_common::indent::dedent_then_reindent;
use pathfinder_common::normalize::normalize_for_full_replace;
use pathfinder_common::types::VersionHash;
use rmcp::handler::server::wrapper::Json;
use rmcp::model::ErrorData;
use tracing::instrument;
impl crate::server::PathfinderServer {
/// MCP `replace_body` tool: replaces the body of the symbol addressed by
/// `params.semantic_path` with `params.new_code`.
///
/// Flow: parse path -> sandbox check -> resolve edited bytes -> optimistic
/// concurrency check against `params.base_version` -> finalize (validate,
/// write, respond).
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn replace_body_impl(
    &self,
    params: ReplaceBodyParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "replace_body",
        semantic_path = %params.semantic_path,
        "replace_body: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "replace_body",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "replace_body",
            Some(&params.new_code),
        )
        .await?;
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "replace_body",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning: None,
    })
    .await
}
/// MCP `replace_full` tool: replaces the entire span of the addressed
/// target (a whole file, or a symbol including its signature) with
/// `params.new_code`.
///
/// Flow: parse path -> sandbox check -> resolve edited bytes -> optimistic
/// concurrency check against `params.base_version` -> finalize (validate,
/// write, respond).
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn replace_full_impl(
    &self,
    params: ReplaceFullParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "replace_full",
        semantic_path = %params.semantic_path,
        "replace_full: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "replace_full",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "replace_full",
            Some(&params.new_code),
        )
        .await?;
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "replace_full",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning: None,
    })
    .await
}
/// MCP `insert_before` tool: inserts `params.new_code` immediately before
/// the addressed target (symbol start, or start of file for a bare file).
///
/// Flow: parse path -> sandbox check -> resolve edited bytes -> optimistic
/// concurrency check against `params.base_version` -> finalize (validate,
/// write, respond).
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn insert_before_impl(
    &self,
    params: InsertBeforeParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "insert_before",
        semantic_path = %params.semantic_path,
        "insert_before: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "insert_before",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "insert_before",
            Some(&params.new_code),
        )
        .await?;
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "insert_before",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning: None,
    })
    .await
}
/// MCP `insert_after` tool: inserts `params.new_code` immediately after the
/// addressed target (symbol end, or end of file for a bare file).
///
/// Flow: parse path -> sandbox check -> resolve edited bytes -> optimistic
/// concurrency check against `params.base_version` -> finalize (validate,
/// write, respond).
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn insert_after_impl(
    &self,
    params: InsertAfterParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "insert_after",
        semantic_path = %params.semantic_path,
        "insert_after: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "insert_after",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            "insert_after",
            Some(&params.new_code),
        )
        .await?;
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "insert_after",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning: None,
    })
    .await
}
/// MCP `insert_into` tool: inserts `params.new_code` at the end of the body
/// of the addressed container symbol.
///
/// Emits a warning (without failing) when the target is a Rust struct,
/// since methods belong in impl blocks rather than struct bodies.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn insert_into_impl(
    &self,
    params: crate::server::types::InsertIntoParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "insert_into",
        semantic_path = %params.semantic_path,
        "insert_into: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    // insert_into needs a container symbol; a bare file is rejected here.
    require_symbol_target(&semantic_path, &params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "insert_into",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes, container_kind) = self
        .resolve_insert_into(&semantic_path, Some(&params.new_code))
        .await?;
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let is_rust_struct_target = semantic_path
        .file_path
        .extension()
        .is_some_and(|ext| ext == "rs")
        && container_kind == pathfinder_treesitter::surgeon::SymbolKind::Struct;
    let warning = if is_rust_struct_target {
        tracing::warn!(
            tool = "insert_into",
            semantic_path = %params.semantic_path,
            "insert_into targeting a Rust struct — methods should go in an impl block"
        );
        Some(
            "Target appears to be a Rust struct. Methods should be inserted into \
             an impl block (e.g., 'file.rs::impl MyStruct'), not the struct body. \
             Structs contain fields; methods belong in impl blocks."
                .to_owned(),
        )
    } else {
        None
    };
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "insert_into",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning,
    })
    .await
}
/// Computes the byte offset and indent column at which new code should be
/// inserted relative to the target, along with the current file contents
/// and their version hash.
pub(crate) async fn resolve_insert_position(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    edge: InsertEdge,
) -> Result<(usize, usize, std::sync::Arc<[u8]>, VersionHash), ErrorData> {
    // A bare file has no symbol to anchor on: insert at the very start or
    // very end of the file, with no indentation.
    if semantic_path.is_bare_file() {
        let absolute_path = self.workspace_root.resolve(&semantic_path.file_path);
        let contents = tokio::fs::read(&absolute_path)
            .await
            .map_err(|e| io_error_data(format!("failed to read file: {e}")))?;
        let version = VersionHash::compute(&contents);
        let position = if matches!(edge, InsertEdge::Before) {
            0
        } else {
            contents.len()
        };
        return Ok((position, 0, std::sync::Arc::from(contents), version));
    }
    // Symbol target: anchor on the resolved symbol span and reuse its
    // indentation for the inserted code.
    let (symbol_range, resolved) = self
        .surgeon
        .resolve_symbol_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    let anchor = match edge {
        InsertEdge::Before => symbol_range.start_byte,
        InsertEdge::After => symbol_range.end_byte,
    };
    Ok((
        anchor,
        symbol_range.indent_column,
        resolved.source,
        resolved.version_hash,
    ))
}
/// MCP `delete_symbol` tool: removes the addressed symbol from its file.
///
/// Unless `ignore_validation_failures` is set, the deletion is refused when
/// the symbol's name still appears in other files (best-effort text search).
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path))]
pub(crate) async fn delete_symbol_impl(
    &self,
    params: DeleteSymbolParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "delete_symbol",
        semantic_path = %params.semantic_path,
        "delete_symbol: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "delete_symbol",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(&semantic_path, &params.semantic_path, "delete", None)
        .await?;
    if !params.ignore_validation_failures {
        self.ensure_symbol_unreferenced(&semantic_path, &params.semantic_path)
            .await?;
    }
    // Reject the edit if the file changed since the caller last read it.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    let resolve_ms = start.elapsed().as_millis();
    self.finalize_edit(FinalizeEditParams {
        tool_name: "delete_symbol",
        semantic_path: &semantic_path,
        raw_semantic_path_str: &params.semantic_path,
        source: &source,
        original_hash: &current_hash,
        new_content: new_bytes,
        ignore_validation_failures: params.ignore_validation_failures,
        start_time: start,
        resolve_ms,
        warning: None,
    })
    .await
}
/// Pre-delete safety check: errors when the last symbol-chain segment's
/// name still appears in files other than the target file. A failed search
/// is ignored rather than blocking the delete (best-effort), matching the
/// original inline behavior.
async fn ensure_symbol_unreferenced(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    raw_semantic_path: &str,
) -> Result<(), ErrorData> {
    // No symbol chain (or an empty one) means nothing to cross-reference.
    let Some(symbol) = semantic_path
        .symbol_chain
        .as_ref()
        .and_then(|chain| chain.segments.last())
    else {
        return Ok(());
    };
    let symbol_name = &symbol.name;
    let search_result = self
        .scout
        .search(&pathfinder_search::SearchParams {
            workspace_root: self.workspace_root.path().to_path_buf(),
            query: symbol_name.clone(),
            is_regex: false,
            path_glob: "**/*".to_owned(),
            exclude_glob: String::default(),
            max_results: 50,
            offset: 0,
            context_lines: 0,
        })
        .await;
    if let Ok(result) = search_result {
        // Matches in the target file itself do not count as references.
        let target_file = semantic_path.file_path.to_string_lossy().to_string();
        let reference_count = result
            .matches
            .iter()
            .filter(|m| m.file != target_file)
            .count();
        if reference_count > 0 {
            let err = pathfinder_common::error::PathfinderError::InvalidTarget {
                semantic_path: raw_semantic_path.to_owned(),
                reason: format!(
                    "Symbol '{symbol_name}' is still referenced in \
                     {reference_count} other file(s). Delete or update \
                     those references first, or pass \
                     'ignore_validation_failures: true' to force deletion."
                ),
                edit_index: None,
                valid_edit_types: None,
            };
            return Err(crate::server::helpers::pathfinder_to_error_data(&err));
        }
    }
    Ok(())
}
/// MCP `validate_only` tool: performs an edit dry-run — resolves the edited
/// bytes for `params.edit_type` and validates them (LSP when available,
/// tree-sitter fallback otherwise) without writing anything to disk.
#[instrument(skip(self, params), fields(semantic_path = %params.semantic_path, edit_type = %params.edit_type))]
pub(crate) async fn validate_only_impl(
    &self,
    params: ValidateOnlyParams,
) -> Result<Json<EditResponse>, ErrorData> {
    let start = std::time::Instant::now();
    tracing::info!(
        tool = "validate_only",
        semantic_path = %params.semantic_path,
        edit_type = %params.edit_type,
        "validate_only: start"
    );
    let semantic_path = parse_semantic_path(&params.semantic_path)?;
    check_sandbox_access(
        &self.sandbox,
        &semantic_path.file_path,
        "validate_only",
        &params.semantic_path,
    )?;
    let (source, current_hash, new_bytes) = self
        .resolve_edit_content(
            &semantic_path,
            &params.semantic_path,
            &params.edit_type,
            params.new_code.as_deref(),
        )
        .await?;
    // Even a dry-run honors optimistic concurrency so callers learn about
    // stale base versions before attempting the real edit.
    check_occ(
        &params.base_version,
        &current_hash,
        semantic_path.file_path.clone(),
    )?;
    // Non-UTF-8 content degrades to empty strings for validation purposes.
    let original_str = std::str::from_utf8(&source).unwrap_or("");
    let new_str = std::str::from_utf8(&new_bytes).unwrap_or("");
    let validation_outcome = self
        .run_lsp_validation(&semantic_path.file_path, original_str, new_str, false)
        .await;
    // When LSP validation was skipped, fall back to a tree-sitter-only pass.
    let validation_outcome = if validation_outcome.skipped {
        Self::apply_treesitter_fallback(&semantic_path.file_path, new_str, validation_outcome)
    } else {
        validation_outcome
    };
    let duration_ms = start.elapsed().as_millis();
    let engines_used = if validation_outcome.skipped {
        vec!["tree-sitter"]
    } else {
        vec!["tree-sitter", "lsp"]
    };
    tracing::info!(
        tool = "validate_only",
        semantic_path = %params.semantic_path,
        duration_ms,
        engines_used = ?engines_used,
        "validate_only: complete"
    );
    Ok(Json(EditResponse {
        success: true,
        // Nothing was written, so there is no new version hash and no
        // formatting was applied.
        new_version_hash: None,
        formatted: false,
        validation: validation_outcome.validation,
        validation_skipped: validation_outcome.skipped,
        validation_skipped_reason: validation_outcome.skipped_reason,
        warning: None,
    }))
}
/// Dispatches an edit request to the resolver for `edit_type`, returning
/// the original source, its version hash, and the edited bytes.
pub(crate) async fn resolve_edit_content(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    raw_semantic_path: &str,
    edit_type: &str,
    new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    match edit_type {
        "replace_body" => {
            self.resolve_replace_body(semantic_path, raw_semantic_path, new_code)
                .await
        }
        "replace_full" => self.resolve_replace_full(semantic_path, new_code).await,
        "insert_before" | "insert_after" => {
            let edge = if edit_type == "insert_before" {
                InsertEdge::Before
            } else {
                InsertEdge::After
            };
            self.resolve_insert(semantic_path, new_code, edge).await
        }
        "insert_into" => {
            // insert_into also reports the container kind; this generic
            // path only needs the edited bytes, so the kind is dropped.
            self.resolve_insert_into(semantic_path, new_code)
                .await
                .map(|(source, hash, bytes, _kind)| (source, hash, bytes))
        }
        "delete" => self.resolve_delete(semantic_path, raw_semantic_path).await,
        other => Err(Self::unsupported_edit_type_error(raw_semantic_path, other)),
    }
}
/// Resolves a `replace_body` edit: swaps the body of the addressed symbol
/// for `new_code`, re-indented to the body's original indent column.
async fn resolve_replace_body(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    raw_semantic_path: &str,
    new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    // Body replacement only makes sense on a symbol, never a bare file.
    require_symbol_target(semantic_path, raw_semantic_path)?;
    let replacement = new_code.unwrap_or_default();
    let (body_range, resolved) = self
        .surgeon
        .resolve_body_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    // Normalize then re-indent so the caller can pass flush-left code.
    let reindented = pathfinder_common::indent::dedent_then_reindent(
        &pathfinder_common::normalize::normalize_for_body_replace(replacement),
        body_range.body_indent_column,
    );
    let new_content =
        super::text_edit::build_body_replacement(&resolved.source, &body_range, &reindented)?;
    Ok((
        resolved.source,
        resolved.version_hash,
        new_content.into_bytes(),
    ))
}
/// Resolves a `replace_full` edit, delegating on whether the target is a
/// whole file or a symbol within one.
async fn resolve_replace_full(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    let code = new_code.unwrap_or_default();
    if semantic_path.is_bare_file() {
        return self
            .resolve_replace_full_bare_file(semantic_path, code)
            .await;
    }
    self.resolve_replace_full_symbol(semantic_path, code).await
}
/// Resolves a whole-file `replace_full` edit: the new content replaces the
/// file verbatim, with a best-effort tree-sitter syntax check that warns
/// (but never fails) on unparseable content.
async fn resolve_replace_full_bare_file(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: &str,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    let absolute_path = self.workspace_root.resolve(&semantic_path.file_path);
    let original = tokio::fs::read(&absolute_path)
        .await
        .map_err(|e| io_error_data(format!("failed to read file: {e}")))?;
    let current_hash = VersionHash::compute(&original);
    let new_bytes = new_code.as_bytes().to_vec();
    // Only attempt the sanity parse for valid UTF-8 in a recognized language.
    if let Ok(new_str) = std::str::from_utf8(&new_bytes) {
        let detected = pathfinder_treesitter::language::SupportedLanguage::detect(
            &semantic_path.file_path,
        );
        if let Some(lang) = detected {
            let parse = pathfinder_treesitter::parser::AstParser::parse_source(
                &semantic_path.file_path,
                lang,
                new_str.as_bytes(),
            );
            match parse {
                Ok(tree) if tree.root_node().has_error() => {
                    tracing::warn!(
                        file = %semantic_path.file_path.display(),
                        "replace_full: bare file content has parse errors"
                    );
                }
                Ok(_) => {}
                Err(e) => {
                    tracing::warn!(error = %e, "replace_full: tree-sitter error");
                }
            }
        }
    }
    Ok((std::sync::Arc::from(original), current_hash, new_bytes))
}
/// Resolves a symbol-targeted `replace_full` edit: swaps the symbol's
/// entire span for `new_code`, re-indented to the symbol's indent column.
async fn resolve_replace_full_symbol(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: &str,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    let (full_range, resolved) = self
        .surgeon
        .resolve_full_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    let indented = dedent_then_reindent(
        &normalize_for_full_replace(new_code),
        full_range.indent_column,
    );
    // Stitch together: prefix, replacement, suffix.
    let suffix = &resolved.source[full_range.end_byte..];
    let mut new_bytes =
        Vec::with_capacity(full_range.start_byte + indented.len() + suffix.len());
    new_bytes.extend_from_slice(&resolved.source[..full_range.start_byte]);
    new_bytes.extend_from_slice(indented.as_bytes());
    new_bytes.extend_from_slice(suffix);
    Ok((resolved.source, resolved.version_hash, new_bytes))
}
/// Routes an insert edit to the edge-specific resolver.
async fn resolve_insert(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: Option<&str>,
    edge: InsertEdge,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    if matches!(edge, InsertEdge::Before) {
        self.resolve_insert_before(semantic_path, new_code).await
    } else {
        self.resolve_insert_after(semantic_path, new_code).await
    }
}
/// Resolves an `insert_before` edit: places `new_code` immediately before
/// the target (symbol start, or byte 0 for a bare file), re-indented to the
/// target's indent column.
async fn resolve_insert_before(
&self,
semantic_path: &pathfinder_common::types::SemanticPath,
new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
// A missing payload is treated as empty input rather than an error.
let new_code = new_code.unwrap_or_default();
let (insert_byte, indent_column, source, current_hash) = self
.resolve_insert_position(semantic_path, InsertEdge::Before)
.await?;
let normalized = normalize_for_full_replace(new_code);
let indented = dedent_then_reindent(&normalized, indent_column);
let before = &source[..insert_byte];
let after = &source[insert_byte..];
// Blank-line separator toward the following target: none when a blank
// boundary already exists, one newline when the target already starts on
// a fresh line, otherwise a full blank line.
let sep = if before.ends_with(b"\n\n")
|| after.starts_with(b"\n\n")
|| (before.ends_with(b"\n") && after.starts_with(b"\n"))
{
""
} else if after.starts_with(b"\n") {
"\n"
} else {
"\n\n"
};
// Ensure the inserted code terminates its final line.
let trailing = if indented.ends_with('\n') { "" } else { "\n" };
let mut new_bytes = Vec::with_capacity(
before.len() + indented.len() + sep.len() + trailing.len() + after.len(),
);
// NOTE(review): `trailing` is appended before `sep`, so the separator ends
// up between the inserted code's final newline and the original target.
// Presumably the normalize_blank_lines pass below makes the exact order
// irrelevant for most files — confirm against whitespace-significant files,
// which skip that pass.
new_bytes.extend_from_slice(before);
new_bytes.extend_from_slice(indented.as_bytes());
new_bytes.extend_from_slice(trailing.as_bytes());
new_bytes.extend_from_slice(sep.as_bytes());
new_bytes.extend_from_slice(after);
// Collapse excess blank lines unless whitespace matters for this file type.
if !is_whitespace_significant_file(std::path::Path::new(&semantic_path.file_path)) {
new_bytes = normalize_blank_lines(&new_bytes);
}
Ok((source, current_hash, new_bytes))
}
/// Resolves an `insert_after` edit: places `new_code` right after the
/// target (symbol end, or end-of-file for a bare file), re-indented to the
/// target's indent column, with blank-line separation chosen from context.
async fn resolve_insert_after(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: Option<&str>,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    let payload = new_code.unwrap_or_default();
    let (insert_byte, indent_column, source, current_hash) = self
        .resolve_insert_position(semantic_path, InsertEdge::After)
        .await?;
    let indented = dedent_then_reindent(&normalize_for_full_replace(payload), indent_column);
    let (head, tail) = source.split_at(insert_byte);
    // Inserted doc comments change the amount of leading separation so
    // they end up attached to whatever follows, not the preceding item.
    let starts_doc_comment = ["///", "//!", "/**", "/*!"]
        .iter()
        .any(|prefix| indented.starts_with(prefix));
    let blank_boundary = head.ends_with(b"\n\n")
        || tail.starts_with(b"\n\n")
        || (head.ends_with(b"\n") && tail.starts_with(b"\n"));
    let lead = if blank_boundary {
        if starts_doc_comment && !head.ends_with(b"\n\n") {
            "\n"
        } else {
            ""
        }
    } else if head.ends_with(b"\n") {
        if starts_doc_comment {
            "\n\n"
        } else {
            "\n"
        }
    } else {
        "\n\n"
    };
    // Ensure the inserted code terminates its final line.
    let trail = if indented.ends_with('\n') { "" } else { "\n" };
    let mut new_bytes = Vec::with_capacity(
        head.len() + lead.len() + indented.len() + trail.len() + tail.len(),
    );
    for piece in [
        head,
        lead.as_bytes(),
        indented.as_bytes(),
        trail.as_bytes(),
        tail,
    ] {
        new_bytes.extend_from_slice(piece);
    }
    // Collapse excess blank lines unless whitespace matters for this file type.
    if !is_whitespace_significant_file(std::path::Path::new(&semantic_path.file_path)) {
        new_bytes = normalize_blank_lines(&new_bytes);
    }
    Ok((source, current_hash, new_bytes))
}
/// Resolves an `insert_into` edit: appends `new_code` just before the end
/// of the container's body, re-indented to the body's indent column. Also
/// returns the container's symbol kind so callers can warn on odd targets.
async fn resolve_insert_into(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    new_code: Option<&str>,
) -> Result<
    (
        std::sync::Arc<[u8]>,
        VersionHash,
        Vec<u8>,
        pathfinder_treesitter::surgeon::SymbolKind,
    ),
    ErrorData,
> {
    let payload = new_code.unwrap_or_default();
    let (body_end, resolved) = self
        .surgeon
        .resolve_body_end_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    let indented = dedent_then_reindent(
        &normalize_for_full_replace(payload),
        body_end.body_indent_column,
    );
    let (head, tail) = resolved.source.split_at(body_end.insert_byte);
    // No leading newline when the insertion point already follows a blank
    // line or an opening brace on its own line.
    let lead = if head.ends_with(b"\n\n") || head.ends_with(b"{\n") {
        ""
    } else {
        "\n"
    };
    // Ensure the inserted code terminates its final line.
    let trail = if indented.ends_with('\n') { "" } else { "\n" };
    let mut new_bytes = Vec::with_capacity(
        head.len() + lead.len() + indented.len() + trail.len() + tail.len(),
    );
    for piece in [
        head,
        lead.as_bytes(),
        indented.as_bytes(),
        trail.as_bytes(),
        tail,
    ] {
        new_bytes.extend_from_slice(piece);
    }
    Ok((
        resolved.source,
        resolved.version_hash,
        new_bytes,
        body_end.container_kind,
    ))
}
/// Resolves a `delete` edit: removes the symbol's full span (widened over
/// any doc comment that would be orphaned — see
/// `strip_orphaned_doc_comment`) plus the surrounding whitespace, leaving a
/// single separator between the remaining halves.
async fn resolve_delete(
    &self,
    semantic_path: &pathfinder_common::types::SemanticPath,
    raw_semantic_path: &str,
) -> Result<(std::sync::Arc<[u8]>, VersionHash, Vec<u8>), ErrorData> {
    // Deletion only makes sense on a symbol, never a bare file.
    require_symbol_target(semantic_path, raw_semantic_path)?;
    let (full_range, resolved) = self
        .surgeon
        .resolve_full_range(self.workspace_root.path(), semantic_path)
        .await
        .map_err(crate::server::helpers::treesitter_error_to_error_data)?;
    let doc_adjusted_start = strip_orphaned_doc_comment(&resolved.source, full_range.start_byte);
    // Trim trailing whitespace before the cut...
    let head_end = resolved.source[..doc_adjusted_start]
        .iter()
        .rposition(|b| !b.is_ascii_whitespace())
        .map_or(0, |i| i + 1);
    // ...and skip leading whitespace after the deleted span.
    let tail_offset = resolved.source[full_range.end_byte..]
        .iter()
        .position(|b| !b.is_ascii_whitespace())
        .unwrap_or(resolved.source.len() - full_range.end_byte);
    let head = &resolved.source[..head_end];
    let tail = &resolved.source[full_range.end_byte + tail_offset..];
    // One newline at a file edge; a blank line between two surviving halves.
    let sep: &[u8] = if head.is_empty() || tail.is_empty() {
        b"\n"
    } else {
        b"\n\n"
    };
    let mut new_bytes = Vec::with_capacity(head.len() + sep.len() + tail.len());
    new_bytes.extend_from_slice(head);
    new_bytes.extend_from_slice(sep);
    new_bytes.extend_from_slice(tail);
    Ok((resolved.source, resolved.version_hash, new_bytes))
}
fn unsupported_edit_type_error(raw_semantic_path: &str, edit_type: &str) -> ErrorData {
let err = pathfinder_common::error::PathfinderError::InvalidTarget {
semantic_path: raw_semantic_path.to_owned(),
reason: format!(
"unsupported edit type: '{edit_type}'. Must be one of: replace_body, replace_full, insert_before, insert_after, insert_into, delete."
),
edit_index: None,
valid_edit_types: None,
};
crate::server::helpers::pathfinder_to_error_data(&err)
}
}