mod backup;
mod batch_loader;
pub mod pattern;
use crate::error::{Diagnostic, DiagnosticLevel, Result, SpliceError};
use crate::symbol::Language as SymbolLanguage;
use crate::validate::{self, AnalyzerMode};
use crate::verify;
use ropey::Rope;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::Write;
use std::path::{Path, PathBuf};
use tempfile::TempDir;
pub use backup::{restore_from_manifest, BackupManifest, BackupWriter};
pub use batch_loader::load_batches_from_file;
pub use pattern::{
apply_pattern_replace, find_pattern_in_files, PatternReplaceConfig, PatternReplaceResult,
};
/// A single span-based edit: replace bytes `start..end` of `file` with `content`.
#[derive(Debug, Clone, Serialize)]
pub struct SpanReplacement {
    /// File the replacement applies to.
    pub file: PathBuf,
    /// Byte offset (inclusive) where the replaced span begins.
    pub start: usize,
    /// Byte offset (exclusive) where the replaced span ends.
    pub end: usize,
    /// Replacement text spliced in place of the original span.
    pub content: String,
}
impl SpanReplacement {
pub fn new(file: PathBuf, start: usize, end: usize, content: String) -> Self {
Self {
file,
start,
end,
content,
}
}
}
/// An ordered collection of span replacements applied together.
#[derive(Debug, Clone)]
pub struct SpanBatch {
    // Kept private; mutate via `push` and read via `replacements()`.
    replacements: Vec<SpanReplacement>,
}
impl SpanBatch {
pub fn new(replacements: Vec<SpanReplacement>) -> Self {
Self { replacements }
}
pub fn replacements(&self) -> &[SpanReplacement] {
&self.replacements
}
pub fn push(&mut self, replacement: SpanReplacement) {
self.replacements.push(replacement);
}
pub fn is_empty(&self) -> bool {
self.replacements.is_empty()
}
}
/// Before/after SHA-256 digests recorded for one patched file.
#[derive(Debug, Clone, Serialize)]
pub struct FilePatchSummary {
    /// The patched file.
    pub file: PathBuf,
    /// SHA-256 hex digest of the file contents before patching.
    pub before_hash: String,
    /// SHA-256 hex digest of the file contents after patching.
    pub after_hash: String,
}
/// Line/byte delta summary for a previewed patch (built by
/// `compute_preview_report`); line numbers are 1-based.
#[derive(Debug, Clone, Serialize)]
pub struct PreviewReport {
    /// Display path of the file the patch targets.
    pub file: String,
    /// 1-based first line touched by the replaced span.
    pub line_start: usize,
    /// 1-based last line touched (equals `line_start` for pure insertions).
    pub line_end: usize,
    /// Number of lines in the replacement text.
    pub lines_added: usize,
    /// Number of lines covered by the replaced span.
    pub lines_removed: usize,
    /// Byte length of the replacement text.
    pub bytes_added: usize,
    /// Byte length of the replaced span.
    pub bytes_removed: usize,
}
/// Replaces bytes `start..end` of `file_path` with `new_content`, gated by
/// pre-verification, post-patch validation (tree-sitter reparse, compiler
/// check, rust-analyzer for Rust), and automatic rollback when validation
/// fails.
///
/// Returns `(before_hash, after_hash)` — SHA-256 hex digests of the file
/// contents before and after the patch.
///
/// # Errors
/// Fails when a blocking pre-verification check trips, the span is reversed,
/// out of bounds, or not valid UTF-8, or any validation gate rejects the
/// patched file (in which case the original bytes are restored first).
pub fn apply_patch_with_validation(
    file_path: &Path,
    start: usize,
    end: usize,
    new_content: &str,
    workspace_dir: &Path,
    language: SymbolLanguage,
    analyzer_mode: AnalyzerMode,
    strict: bool,
    skip: bool,
) -> Result<(String, String)> {
    let db_path = workspace_dir.join(".magellan/magellan.db");
    let pre_checks =
        verify::pre_verify_patch(file_path, None, workspace_dir, &db_path, strict, skip)?;
    // Blocking failures abort before anything is written.
    for check in &pre_checks {
        if check.is_blocking() {
            return Err(SpliceError::PreVerificationFailed {
                check: format!("{:?}", check),
            });
        }
    }
    for check in &pre_checks {
        if check.is_warning() {
            log::warn!("Pre-verification warning: {:?}", check);
        }
    }
    // Purely advisory: surface CFG complexity of the function being patched.
    log_patch_complexity(&db_path, file_path, new_content);
    let replaced = std::fs::read(file_path)?;
    let before_hash = compute_hash(&replaced);
    if start > end || end > replaced.len() {
        return Err(SpliceError::InvalidSpan {
            file: file_path.to_path_buf(),
            start,
            end,
            file_size: replaced.len(),
        });
    }
    // The replaced span itself must be valid UTF-8.
    std::str::from_utf8(&replaced[start..end]).map_err(|_| SpliceError::InvalidSpan {
        file: file_path.to_path_buf(),
        start,
        end,
        file_size: replaced.len(),
    })?;
    let mut rope = Rope::from_str(std::str::from_utf8(&replaced)?);
    let start_char = rope.byte_to_char(start);
    let end_char = rope.byte_to_char(end);
    rope.remove(start_char..end_char);
    rope.insert(start_char, new_content);
    let patched_content = rope.to_string();
    let patched_bytes = patched_content.into_bytes();
    write_atomic(file_path, &patched_bytes, "patch")?;
    match run_validation_gates(file_path, workspace_dir, language, analyzer_mode) {
        Ok(_) => {}
        Err(e) => {
            // Restore the original bytes, then surface the validation error.
            log::warn!("Validation failed, rolling back patch: {:?}", e);
            if let Err(rollback_err) = write_atomic(file_path, &replaced, "rollback") {
                log::error!(
                    "Failed to restore {} during rollback: {}",
                    file_path.display(),
                    rollback_err
                );
            }
            return Err(e);
        }
    }
    let refreshed_bytes = std::fs::read(file_path)?;
    let after_hash = compute_hash(&refreshed_bytes);
    let mut post_verify = verify::verify_after_patch(file_path, workspace_dir, &before_hash)?;
    // Localized-change verification is advisory: failures become warnings,
    // never errors, since the patch has already passed the hard gates.
    let localized = verify::verify_localized_change(file_path, &replaced, (start, end));
    match &localized {
        Ok(true) => {
            log::info!("Localized change verification passed");
        }
        Ok(false) => {
            log::warn!("Localized change verification detected modifications outside target span");
            post_verify.add_warning("File modified outside target span");
        }
        Err(e) => {
            log::warn!("Localized change verification failed: {}", e);
            post_verify.add_warning(format!("Could not verify localized change: {}", e));
        }
    }
    for warning in &post_verify.warnings {
        log::warn!("Post-verification warning: {}", warning);
    }
    for error in &post_verify.errors {
        log::error!("Post-verification error: {}", error);
    }
    log::info!(
        "Post-verification: syntax={}, compiler={}, semantic={}, changed={}",
        post_verify.syntax_ok,
        post_verify.compiler_ok,
        post_verify.semantic_ok,
        post_verify.file_changed(),
    );
    Ok((before_hash, after_hash))
}

/// Best-effort: looks up CFG complexity for the function named in the patch
/// text and logs it at a severity matching its risk level. Never fails —
/// missing function name or analysis errors are silently skipped.
fn log_patch_complexity(db_path: &Path, file_path: &Path, new_content: &str) {
    let function_name = match extract_function_name_from_patch(new_content) {
        Some(name) => name,
        None => return,
    };
    let complexity =
        match crate::cfg_analysis::check_function_complexity(db_path, &function_name, file_path) {
            Ok(c) => c,
            Err(_) => return,
        };
    match complexity.risk_level {
        crate::cfg_analysis::RiskLevel::VeryHigh => {
            log::warn!(
                "VERY HIGH COMPLEXITY: Function '{}' has branch distance={}, dominator depth={}, loop nesting={}. \
                Consider manual review before automated refactoring.",
                function_name,
                complexity.max_branch_distance,
                complexity.max_dominator_depth,
                complexity.max_loop_nesting
            );
        }
        crate::cfg_analysis::RiskLevel::High => {
            log::warn!(
                "HIGH COMPLEXITY: Function '{}' has branch distance={}, dominator depth={}. \
                Automated refactoring may be risky.",
                function_name,
                complexity.max_branch_distance,
                complexity.max_dominator_depth
            );
        }
        crate::cfg_analysis::RiskLevel::Medium => {
            log::info!(
                "Medium complexity: Function '{}' (branch distance={}, dominator depth={})",
                function_name,
                complexity.max_branch_distance,
                complexity.max_dominator_depth
            );
        }
        crate::cfg_analysis::RiskLevel::Low => {
            log::debug!(
                "Low complexity: Function '{}' (branch distance={})",
                function_name,
                complexity.max_branch_distance
            );
        }
    }
}
/// Applies every `SpanBatch` across the workspace, grouping replacements by
/// file, with all-or-nothing semantics: if any write or validation gate
/// fails, every file already patched is rolled back to its original bytes.
///
/// Files whose pre-verification check is blocking are skipped with a warning
/// rather than treated as fatal. Returns one `FilePatchSummary` per
/// successfully patched file.
pub fn apply_batch_with_validation(
    batches: &[SpanBatch],
    workspace_dir: &Path,
    language: SymbolLanguage,
    analyzer_mode: AnalyzerMode,
) -> Result<Vec<FilePatchSummary>> {
    if batches.is_empty() {
        return Ok(Vec::new());
    }
    // Group replacements per file; BTreeMap gives a deterministic file order.
    let mut grouped: BTreeMap<PathBuf, Vec<SpanReplacement>> = BTreeMap::new();
    for batch in batches {
        for replacement in batch.replacements() {
            grouped
                .entry(replacement.file.clone())
                .or_default()
                .push(replacement.clone());
        }
    }
    let mut applied = Vec::new();
    for (file_path, mut replacements) in grouped {
        if replacements.is_empty() {
            continue;
        }
        let pre_check = verify::verify_file_ready(&file_path, None, workspace_dir);
        if pre_check.is_blocking() {
            log::warn!(
                "Skipping {:?}: pre-verification failed: {:?}",
                file_path,
                pre_check
            );
            continue;
        }
        // Apply in descending start order so earlier byte offsets stay valid
        // while later spans are spliced (see `apply_replacements`).
        replacements.sort_by_key(|r| std::cmp::Reverse(r.start));
        let (replaced, before_hash) = read_with_hash(&file_path)?;
        validate_replacements(&file_path, &replacements, &replaced)?;
        let patched_bytes = apply_replacements(&replaced, &replacements)?;
        let after_hash = compute_hash(&patched_bytes);
        if let Err(write_err) = write_atomic(&file_path, &patched_bytes, "batch") {
            // A failed write invalidates the whole batch: undo prior files.
            rollback_files(&applied);
            return Err(write_err);
        }
        // Keep the original bytes so a later failure can restore this file.
        applied.push(AppliedFile {
            file: file_path,
            replaced,
            before_hash,
            after_hash,
        });
    }
    // Validation runs once over all patched files; any failure undoes them all.
    let validation = run_batch_validations(&applied, workspace_dir, language, analyzer_mode);
    if let Err(err) = validation {
        rollback_files(&applied);
        return Err(err);
    }
    Ok(applied
        .into_iter()
        .map(|file| FilePatchSummary {
            file: file.file,
            before_hash: file.before_hash,
            after_hash: file.after_hash,
        })
        .collect())
}
/// Test-applies a patch inside a temporary clone of the workspace and
/// reports the before/after hashes plus a line/byte delta summary; the real
/// file is never modified.
pub fn preview_patch(
    file_path: &Path,
    start: usize,
    end: usize,
    new_content: &str,
    workspace_root: &Path,
    language: SymbolLanguage,
    analyzer_mode: AnalyzerMode,
) -> Result<(FilePatchSummary, PreviewReport)> {
    let sandbox = clone_workspace_for_preview(workspace_root)?;
    let relative = file_path
        .strip_prefix(workspace_root)
        .map_err(|_| SpliceError::Other("File not under workspace root".to_string()))?;
    let sandbox_file = sandbox.path().join(relative);
    // Full validation pipeline runs against the sandbox copy; the sandbox
    // pre-verification uses strict=false, skip=true.
    let (before_hash, after_hash) = apply_patch_with_validation(
        &sandbox_file,
        start,
        end,
        new_content,
        sandbox.path(),
        language,
        analyzer_mode,
        false,
        true,
    )?;
    let summary = FilePatchSummary {
        file: file_path.to_path_buf(),
        before_hash,
        after_hash,
    };
    // The delta report is computed from the untouched original file.
    let report = compute_preview_report(file_path, start, end, new_content)?;
    Ok((summary, report))
}
/// Like `preview_patch`, but canonicalizes both input paths first and also
/// returns the sandbox file's full contents before and after the patch.
///
/// Returns `(summary, report, before_content, after_content)`; the real
/// workspace file is never modified.
pub fn preview_patch_with_content(
    file_path: &Path,
    start: usize,
    end: usize,
    new_content: &str,
    workspace_root: &Path,
    language: SymbolLanguage,
    analyzer_mode: AnalyzerMode,
) -> Result<(FilePatchSummary, PreviewReport, String, String)> {
    // Canonicalize so strip_prefix works even when the caller mixes
    // relative, absolute, or symlinked paths.
    let file_path = std::fs::canonicalize(file_path).map_err(|e| SpliceError::Io {
        path: file_path.to_path_buf(),
        source: e,
    })?;
    let workspace_root = std::fs::canonicalize(workspace_root).map_err(|e| SpliceError::Io {
        path: workspace_root.to_path_buf(),
        source: e,
    })?;
    let preview_workspace = clone_workspace_for_preview(&workspace_root)?;
    let relative = file_path.strip_prefix(&workspace_root).map_err(|_| {
        SpliceError::Other(format!(
            "File {} not under workspace root {}",
            file_path.display(),
            workspace_root.display()
        ))
    })?;
    let preview_file = preview_workspace.path().join(relative);
    let before_content = std::fs::read_to_string(&preview_file)?;
    // Sandbox pre-verification runs with strict=false, skip=true.
    let (before_hash, after_hash) = apply_patch_with_validation(
        &preview_file,
        start,
        end,
        new_content,
        preview_workspace.path(),
        language,
        analyzer_mode,
        false,
        true,
    )?;
    let after_content = std::fs::read_to_string(&preview_file)?;
    // Delta report is computed from the untouched original file.
    let preview_report = compute_preview_report(&file_path, start, end, new_content)?;
    Ok((
        FilePatchSummary {
            file: file_path.to_path_buf(),
            before_hash,
            after_hash,
        },
        preview_report,
        before_content,
        after_content,
    ))
}
fn run_validation_gates(
file_path: &Path,
workspace_dir: &Path,
language: SymbolLanguage,
analyzer_mode: AnalyzerMode,
) -> Result<()> {
gate_tree_sitter_reparse(file_path, language)?;
gate_compiler_validation(file_path, workspace_dir, language)?;
if language == SymbolLanguage::Rust {
use crate::validate::gate_rust_analyzer;
gate_rust_analyzer(workspace_dir, analyzer_mode)?;
}
Ok(())
}
/// Gate: the patched file must re-parse without syntax errors under the
/// tree-sitter grammar for `language`.
fn gate_tree_sitter_reparse(file_path: &Path, language: SymbolLanguage) -> Result<()> {
    let source = std::fs::read(file_path)?;
    let grammar = get_tree_sitter_language(language);
    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&grammar)
        .map_err(|e| SpliceError::Parse {
            file: file_path.to_path_buf(),
            message: format!("Failed to set language: {:?}", e),
        })?;
    let tree = match parser.parse(&source, None) {
        Some(tree) => tree,
        None => {
            return Err(SpliceError::ParseValidationFailed {
                file: file_path.to_path_buf(),
                message: "Parse failed - no tree returned".to_string(),
            });
        }
    };
    if !tree.root_node().has_error() {
        return Ok(());
    }
    Err(SpliceError::ParseValidationFailed {
        file: file_path.to_path_buf(),
        message: format!(
            "Tree-sitter detected syntax errors in patched {} file",
            language.as_str()
        ),
    })
}
fn get_tree_sitter_language(language: SymbolLanguage) -> tree_sitter::Language {
match language {
SymbolLanguage::Rust => tree_sitter_rust::language(),
SymbolLanguage::Python => tree_sitter_python::language(),
SymbolLanguage::C => tree_sitter_c::language(),
SymbolLanguage::Cpp => tree_sitter_cpp::language(),
SymbolLanguage::Java => tree_sitter_java::language(),
SymbolLanguage::JavaScript => tree_sitter_javascript::language(),
SymbolLanguage::TypeScript => tree_sitter_typescript::language_typescript(),
}
}
fn gate_compiler_validation(
file_path: &Path,
workspace_dir: &Path,
language: SymbolLanguage,
) -> Result<()> {
match language {
SymbolLanguage::Rust => {
gate_cargo_check(workspace_dir)?;
}
_ => {
use crate::validate::gates::validate_file;
let outcome = validate_file(file_path)?;
let tool_metadata = tool_invocation_for_language(language)
.map(|inv| validate::collect_tool_metadata(inv.binary, inv.version_args));
if !outcome.is_valid {
if !outcome.tool_available {
log::warn!(
"Compiler validation tool not available for {}, skipping validation",
language.as_str()
);
return Ok(());
}
let mut diagnostics = Vec::new();
let tool_name = format!("{}-compiler", language.as_str());
for err in outcome.errors {
let remediation = err
.code
.as_deref()
.and_then(validate::remediation_link_for_code);
diagnostics.push(
Diagnostic::new(&tool_name, DiagnosticLevel::Error, err.message)
.with_file(file_for_diagnostic(&err.file, file_path))
.with_position(nonzero(err.line), nonzero(err.column))
.with_code(err.code.clone())
.with_note(err.note.clone())
.with_tool_metadata(tool_metadata.as_ref())
.with_remediation(remediation),
);
}
for warn in outcome.warnings {
let remediation = warn
.code
.as_deref()
.and_then(validate::remediation_link_for_code);
diagnostics.push(
Diagnostic::new(&tool_name, DiagnosticLevel::Warning, warn.message)
.with_file(file_for_diagnostic(&warn.file, file_path))
.with_position(nonzero(warn.line), nonzero(warn.column))
.with_code(warn.code.clone())
.with_note(warn.note.clone())
.with_tool_metadata(tool_metadata.as_ref())
.with_remediation(remediation),
);
}
return Err(SpliceError::CompilerValidationFailed {
file: file_path.to_path_buf(),
language: language.as_str().to_string(),
diagnostics,
});
}
}
}
Ok(())
}
/// Resolves the file to attach to a diagnostic: the tool-reported path when
/// present, otherwise the file that was validated.
fn file_for_diagnostic(reported: &str, fallback: &Path) -> PathBuf {
    match reported {
        "" => fallback.to_path_buf(),
        path => PathBuf::from(path),
    }
}
/// Maps a 1-based position to `Some`, treating 0 as "not reported".
fn nonzero(value: usize) -> Option<usize> {
    match value {
        0 => None,
        n => Some(n),
    }
}
/// How to invoke a language's validator binary to capture its version.
struct ToolInvocation {
    /// Executable name resolved via PATH.
    binary: &'static str,
    /// Arguments that make the binary print its version.
    version_args: &'static [&'static str],
}
/// Version-probe invocation for each language's single-file validator tool.
///
/// Rust returns `None` explicitly — it is validated workspace-wide through
/// `cargo check`, not a per-file tool. (The original used a `_` catch-all,
/// which would silently swallow any future enum variant instead of forcing
/// an exhaustiveness error here.)
fn tool_invocation_for_language(language: SymbolLanguage) -> Option<ToolInvocation> {
    match language {
        SymbolLanguage::Rust => None,
        SymbolLanguage::Python => Some(ToolInvocation {
            binary: "python",
            version_args: &["--version"],
        }),
        SymbolLanguage::C => Some(ToolInvocation {
            binary: "gcc",
            version_args: &["--version"],
        }),
        SymbolLanguage::Cpp => Some(ToolInvocation {
            binary: "g++",
            version_args: &["--version"],
        }),
        SymbolLanguage::Java => Some(ToolInvocation {
            binary: "javac",
            version_args: &["-version"],
        }),
        SymbolLanguage::JavaScript => Some(ToolInvocation {
            binary: "node",
            version_args: &["--version"],
        }),
        SymbolLanguage::TypeScript => Some(ToolInvocation {
            binary: "tsc",
            version_args: &["--version"],
        }),
    }
}
/// Gate: `cargo check` must succeed for the whole workspace.
///
/// The command runs on a worker thread so it can be abandoned after a
/// 120-second timeout. On failure, diagnostics are parsed from stderr; if
/// none parse, the raw combined output is attached as one diagnostic.
fn gate_cargo_check(workspace_dir: &Path) -> Result<()> {
    use std::process::Command;
    use std::thread;
    use std::time::Duration;
    let workspace_path = workspace_dir.to_path_buf();
    let (tx, rx) = std::sync::mpsc::channel();
    thread::spawn(move || {
        let output = Command::new("cargo")
            .arg("check")
            .current_dir(&workspace_path)
            .output();
        // The receiver may already be gone after a timeout; ignore send errors.
        let _ = tx.send(output);
    });
    let output = match rx.recv_timeout(Duration::from_secs(120)) {
        Ok(result) => result?,
        Err(_) => {
            // NOTE(review): on timeout the spawned thread (and its cargo
            // child process) is left running detached — confirm acceptable.
            return Err(SpliceError::Other(
                "cargo check timed out after 120 seconds".to_string(),
            ));
        }
    };
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    let combined = format!("{}{}", stderr, stdout);
    if output.status.success() {
        return Ok(());
    }
    // Only stderr is parsed for structured diagnostics; stdout is kept just
    // for the raw failure output below.
    let compiler_errors = validate::parse_cargo_output(&stderr);
    let mut diagnostics = Vec::new();
    let cargo_meta = validate::collect_tool_metadata("cargo", &["--version"]);
    if compiler_errors.is_empty() {
        // Nothing parseable: attach the raw output as a single diagnostic.
        diagnostics.push(
            Diagnostic::new("cargo-check", DiagnosticLevel::Error, combined.clone())
                .with_file(workspace_dir.to_path_buf())
                .with_tool_metadata(Some(&cargo_meta)),
        );
    } else {
        for err in compiler_errors {
            let remediation = err
                .code
                .as_deref()
                .and_then(validate::remediation_link_for_code);
            diagnostics.push(
                Diagnostic::new("cargo-check", DiagnosticLevel::from(err.level), err.message)
                    .with_file(PathBuf::from(err.file))
                    .with_position(nonzero(err.line), nonzero(err.column))
                    .with_code(err.code.clone())
                    .with_note(err.note.clone())
                    .with_tool_metadata(Some(&cargo_meta))
                    .with_remediation(remediation),
            );
        }
    }
    Err(SpliceError::CargoCheckFailed {
        workspace: workspace_dir.to_path_buf(),
        output: combined,
        diagnostics,
    })
}
/// SHA-256 of `bytes`, rendered as lowercase hex.
fn compute_hash(bytes: &[u8]) -> String {
    format!("{:x}", Sha256::digest(bytes))
}
/// Replaces bytes `start..end` of `file_path` with `new_content`, with no
/// validation gates and no atomic-write machinery (plain `fs::write`).
///
/// # Errors
/// Returns `SpliceError::InvalidSpan` when the span is reversed or extends
/// past the end of the file; propagates I/O errors from read/write.
pub fn replace_span(file_path: &Path, start: usize, end: usize, new_content: &str) -> Result<()> {
    let replaced = std::fs::read_to_string(file_path)?;
    let file_size = replaced.len();
    // Single bounds check — the original repeated this exact test twice.
    if start > end || end > file_size {
        return Err(SpliceError::InvalidSpan {
            file: file_path.to_path_buf(),
            start,
            end,
            file_size,
        });
    }
    let mut rope = Rope::from_str(&replaced);
    let start_char = rope.byte_to_char(start);
    let end_char = rope.byte_to_char(end);
    rope.remove(start_char..end_char);
    rope.insert(start_char, new_content);
    std::fs::write(file_path, rope.to_string())?;
    Ok(())
}
fn run_batch_validations(
files: &[AppliedFile],
workspace_dir: &Path,
language: SymbolLanguage,
analyzer_mode: AnalyzerMode,
) -> Result<()> {
if files.is_empty() {
return Ok(());
}
let mut requires_rust_validation = false;
for file in files {
gate_tree_sitter_reparse(&file.file, language)?;
if language == SymbolLanguage::Rust {
requires_rust_validation = true;
} else {
gate_compiler_validation(&file.file, workspace_dir, language)?;
}
}
if requires_rust_validation {
gate_cargo_check(workspace_dir)?;
if language == SymbolLanguage::Rust {
if analyzer_mode != AnalyzerMode::Off {
use crate::validate::gate_rust_analyzer;
gate_rust_analyzer(workspace_dir, analyzer_mode)?;
}
}
}
Ok(())
}
/// Rejects a replacement set that is out of bounds, splits UTF-8, or
/// contains overlapping spans. Spans are examined in ascending start order;
/// touching spans (previous end == next start) are allowed.
fn validate_replacements(
    file_path: &Path,
    replacements: &[SpanReplacement],
    replaced: &[u8],
) -> Result<()> {
    if replacements.is_empty() {
        return Ok(());
    }
    let file_len = replaced.len();
    let mut ordered = replacements.to_vec();
    ordered.sort_by_key(|r| r.start);
    // `start` is unsigned, so the first iteration can never trip the
    // overlap check against 0.
    let mut last_end = 0usize;
    for item in &ordered {
        let in_bounds = item.start <= item.end && item.end <= file_len;
        let utf8_ok =
            in_bounds && std::str::from_utf8(&replaced[item.start..item.end]).is_ok();
        if !utf8_ok {
            return Err(SpliceError::InvalidSpan {
                file: file_path.to_path_buf(),
                start: item.start,
                end: item.end,
                file_size: file_len,
            });
        }
        if item.start < last_end {
            return Err(SpliceError::Other(format!(
                "Overlapping replacements detected in {}",
                file_path.display()
            )));
        }
        last_end = item.end;
    }
    Ok(())
}
/// Splices `replacements` into `replaced` and returns the patched bytes.
///
/// NOTE(review): byte offsets are resolved against the evolving rope, so the
/// caller must supply replacements sorted by descending `start` (as
/// `apply_batch_with_validation` does) for earlier offsets to stay valid.
fn apply_replacements(replaced: &[u8], replacements: &[SpanReplacement]) -> Result<Vec<u8>> {
    // The original bytes must be valid UTF-8 to build the rope at all.
    let content = std::str::from_utf8(replaced)?;
    let mut rope = Rope::from_str(content);
    for replacement in replacements {
        let start_char = rope.byte_to_char(replacement.start);
        let end_char = rope.byte_to_char(replacement.end);
        rope.remove(start_char..end_char);
        rope.insert(start_char, &replacement.content);
    }
    Ok(rope.to_string().into_bytes())
}
/// Reads a file and returns its bytes paired with their SHA-256 hex digest.
fn read_with_hash(path: &Path) -> Result<(Vec<u8>, String)> {
    let bytes = std::fs::read(path)?;
    let digest = compute_hash(&bytes);
    Ok((bytes, digest))
}
fn rollback_files(files: &[AppliedFile]) {
for file in files.iter().rev() {
if let Err(err) = write_atomic(&file.file, &file.replaced, "rollback") {
log::error!(
"Rollback failed for {}: {}",
file.file.display(),
err.to_string()
);
}
}
}
/// Writes `content` to `file_path` atomically: write + fsync a hidden
/// sibling temp file, then rename it over the target.
///
/// On any failure the temp file is removed, so aborted writes no longer
/// leave `.<name>.<suffix>.tmp` litter next to the target (the original
/// leaked the temp file on error).
fn write_atomic(file_path: &Path, content: &[u8], suffix: &str) -> Result<()> {
    let temp_path = temp_path_for(file_path, suffix)?;
    let result: Result<()> = (|| {
        let mut temp_file = File::create(&temp_path)?;
        temp_file.write_all(content)?;
        // fsync before rename so a crash cannot leave a truncated target.
        temp_file.sync_all()?;
        std::fs::rename(&temp_path, file_path)?;
        Ok(())
    })();
    if result.is_err() {
        // Best-effort cleanup; the original error is what matters.
        let _ = std::fs::remove_file(&temp_path);
    }
    result
}
fn temp_path_for(file_path: &Path, suffix: &str) -> Result<PathBuf> {
let file_dir = file_path
.parent()
.ok_or_else(|| SpliceError::Other("File has no parent directory".to_string()))?;
let file_name = file_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("tmp");
Ok(file_dir.join(format!(".{}.{}.tmp", file_name, suffix)))
}
/// Bookkeeping for a file already patched during a batch, kept so the
/// original bytes can be restored on rollback.
struct AppliedFile {
    /// The patched file.
    file: PathBuf,
    /// Original file bytes prior to patching (used for rollback).
    replaced: Vec<u8>,
    /// SHA-256 hex digest before patching.
    before_hash: String,
    /// SHA-256 hex digest after patching.
    after_hash: String,
}
/// Clones the workspace into a temp dir so a patch can be test-applied
/// without touching the real tree.
///
/// Local `path = ".."` dependencies (and sibling workspace members) are
/// copied next to the clone so relative `Cargo.toml` paths keep resolving.
/// Failures to copy a dependency are logged and skipped; a later
/// `cargo check` will report the real problem if it matters.
fn clone_workspace_for_preview(workspace_root: &Path) -> Result<TempDir> {
    let preview_dir = TempDir::new()?;
    let preview_path = preview_dir.path();
    copy_dir_recursive(workspace_root, preview_path)?;
    if let Ok(local_deps) = extract_local_path_dependencies(workspace_root) {
        let preview_parent = preview_path.parent().unwrap_or(preview_path);
        for dep_path in local_deps {
            // When the dependency's parent directory sits directly under the
            // workspace's own parent (a sibling crate grouping), mirror that
            // parent directory so the relative layout matches; otherwise
            // copy the dependency directory itself. The original spelled
            // this out as four near-identical nested branches.
            let sibling_parent = match (dep_path.parent(), workspace_root.parent()) {
                (Some(dep_parent), Some(workspace_parent))
                    if dep_parent.parent() == Some(workspace_parent) =>
                {
                    Some(dep_parent)
                }
                _ => None,
            };
            let (source_path, target_name) = match sibling_parent {
                Some(dep_parent) => (
                    dep_parent.to_path_buf(),
                    dependency_dir_name(dep_parent, "dependency parent")?,
                ),
                None => (
                    dep_path.clone(),
                    dependency_dir_name(&dep_path, "dependency")?,
                ),
            };
            let dep_dest = preview_parent.join(&target_name);
            if dep_dest.exists() || source_path == workspace_root {
                continue;
            }
            if let Err(e) = copy_dir_recursive(&source_path, &dep_dest) {
                log::warn!(
                    "Failed to copy local dependency {:?} to {:?}: {}",
                    source_path,
                    dep_dest,
                    e
                );
            }
        }
    }
    Ok(preview_dir)
}

/// Last path component as UTF-8, or an `Other` error naming the offending
/// path. `label` distinguishes the "dependency" / "dependency parent" error
/// messages that the original duplicated four times inline.
fn dependency_dir_name(path: &Path, label: &str) -> Result<String> {
    path.file_name()
        .and_then(|n| n.to_str())
        .map(str::to_string)
        .ok_or_else(|| SpliceError::Other(format!("Invalid {} path: {:?}", label, path)))
}
/// Collects canonicalized paths of local crate dependencies by line-scanning
/// `Cargo.toml` for inline `path = ".."` entries, plus any workspace members
/// listed in the parent directory's `Cargo.toml`.
///
/// NOTE(review): this is string scanning, not a TOML parse — multi-line
/// dependency tables or unusual spacing around `path = "…"` will be missed.
fn extract_local_path_dependencies(workspace_root: &Path) -> Result<Vec<PathBuf>> {
    let cargo_toml_path = workspace_root.join("Cargo.toml");
    let cargo_content = fs::read_to_string(&cargo_toml_path)?;
    let mut local_deps = Vec::new();
    // Canonical paths de-duplicate entries found by both scans below.
    let mut seen_deps = std::collections::HashSet::new();
    for line in cargo_content.lines() {
        let line = line.trim();
        // Only inline tables like `foo = { path = "../foo" }` are handled.
        if line.contains("{") && line.contains("path") {
            if let Some(start) = line.find("path = \"") {
                let start_idx = start + 8; // 8 == "path = \"".len()
                if let Some(end) = line[start_idx..].find('"') {
                    let rel_path = &line[start_idx..start_idx + end];
                    // Only parent-relative paths count as "local".
                    if rel_path.starts_with("..") {
                        let dep_path = workspace_root.join(rel_path);
                        if dep_path.exists() {
                            if let Ok(canonical) = dep_path.canonicalize() {
                                if !seen_deps.contains(&canonical) {
                                    seen_deps.insert(canonical.clone());
                                    local_deps.push(canonical);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    // Also treat sibling workspace members (from the parent Cargo.toml's
    // `members = [...]` list) as local dependencies.
    if let Some(parent) = workspace_root.parent() {
        let workspace_cargo = parent.join("Cargo.toml");
        if workspace_cargo.exists() {
            if let Ok(ws_content) = fs::read_to_string(&workspace_cargo) {
                if let Some(start) = ws_content.find("members = [") {
                    let members_start = start + 11; // 11 == "members = [".len()
                    if let Some(end) = ws_content[members_start..].find(']') {
                        let members_str = &ws_content[members_start..members_start + end];
                        for member in members_str.split(',') {
                            let member = member.trim().trim_matches('"').trim_matches('\'');
                            let member_path = parent.join(member);
                            if member_path.exists() && member_path != workspace_root {
                                if let Ok(canonical) = member_path.canonicalize() {
                                    if !seen_deps.contains(&canonical) {
                                        seen_deps.insert(canonical.clone());
                                        local_deps.push(canonical);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    Ok(local_deps)
}
/// Recursively copies `src` into `dst`, skipping VCS/build/database entries
/// (see `should_skip_entry`).
///
/// NOTE(review): `DirEntry::file_type` does not follow symlinks, so symlink
/// entries are neither "dir" nor "file" here and are silently skipped —
/// confirm that is intended for preview workspaces.
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        if should_skip_entry(&entry.file_name()) {
            continue;
        }
        let dest = dst.join(entry.file_name());
        let file_type = entry.file_type()?;
        if file_type.is_dir() {
            copy_dir_recursive(&entry.path(), &dest)?;
        } else if file_type.is_file() {
            // `dest`'s parent is `dst`, created above, so the original's
            // per-file `create_dir_all(dest.parent())` was redundant.
            fs::copy(entry.path(), &dest)?;
        }
    }
    Ok(())
}
/// Directory entries excluded from preview-workspace copies: VCS metadata,
/// build output, and the tool's own database/backup files.
fn should_skip_entry(name: &OsStr) -> bool {
    const SKIPPED: &[&str] = &[
        ".git",
        ".splice-backup",
        "target",
        "node_modules",
        ".splice_graph.db",
        ".splice_graph.db-shm",
        ".splice_graph.db-wal",
        "codegraph.db",
        "magellan.db",
        "operations.db",
        "splice_map.db",
        "syncore_code_graph.db",
        "syncore_code_graph.db-shm",
        "syncore_code_graph.db-wal",
    ];
    let name = name.to_string_lossy();
    SKIPPED.iter().any(|skip| *skip == name.as_ref())
}
/// Computes the line/byte delta summary for replacing `start..end` of
/// `file_path` with `new_content`, based on the file's current contents.
///
/// Line numbers in the returned report are 1-based.
pub fn compute_preview_report(
    file_path: &Path,
    start: usize,
    end: usize,
    new_content: &str,
) -> Result<PreviewReport> {
    let replaced = fs::read(file_path)?;
    let source = std::str::from_utf8(&replaced)?;
    let rope = Rope::from_str(source);
    let start_line = rope.byte_to_line(start);
    let end_line = if end == start {
        // Pure insertion: the span covers no lines.
        start_line
    } else if end == replaced.len() {
        // Span runs to EOF: clamp to the last real line index.
        rope.len_lines().saturating_sub(1)
    } else {
        rope.byte_to_line(end)
    };
    let lines_removed = if end > start {
        (&source[start..end]).lines().count()
    } else {
        0
    };
    let lines_added = if new_content.is_empty() {
        0
    } else {
        new_content.lines().count()
    };
    let bytes_removed = end.saturating_sub(start);
    let bytes_added = new_content.as_bytes().len();
    Ok(PreviewReport {
        file: file_path.to_string_lossy().into_owned(),
        // Convert 0-based rope line indices to the 1-based numbers shown to
        // users; a pure insertion reports line_end == line_start.
        line_start: start_line + 1,
        line_end: if lines_removed == 0 {
            start_line + 1
        } else {
            end_line + 1
        },
        lines_added,
        lines_removed,
        bytes_added,
        bytes_removed,
    })
}
/// Checks that `start..end` is a valid, in-bounds span of `source` whose
/// endpoints fall on UTF-8 character boundaries.
///
/// # Errors
/// Returns `SpliceError::InvalidSpan` when the span is reversed, runs past
/// the end of the string, or either endpoint splits a multi-byte UTF-8
/// character. (The original indexed `&source[start..end]`, which panicked on
/// non-boundary offsets instead of reporting them as invalid.)
pub fn validate_utf8_span(source: &str, start: usize, end: usize) -> Result<()> {
    // `str::get` returns None for reversed/out-of-bounds ranges AND for
    // offsets inside a code point, so one check covers every invalid case.
    if source.get(start..end).is_none() {
        return Err(SpliceError::InvalidSpan {
            file: std::path::PathBuf::from("<unknown>"),
            start,
            end,
            file_size: source.len(),
        });
    }
    Ok(())
}
/// Best-effort extraction of the first function name defined in patch text,
/// used only for advisory complexity logging.
///
/// The original pattern anchored at column 0 with no leading whitespace and
/// required `(` immediately after the name, so indented methods
/// (`    pub fn foo(`) and generic functions (`fn foo<T>(`) never matched.
/// Leading whitespace is now allowed, the paren requirement is dropped (the
/// `\w+` capture already ends at the name boundary), and `pub(...)`,
/// `default`, `const`, and `extern "..."` qualifiers are accepted.
fn extract_function_name_from_patch(patch_content: &str) -> Option<String> {
    use regex::Regex;
    let fn_regex = Regex::new(
        r#"(?m)^\s*(?:pub(?:\([^)]*\))?\s+)?(?:default\s+)?(?:const\s+)?(?:async\s+)?(?:unsafe\s+)?(?:extern\s+"[^"]*"\s+)?fn\s+(\w+)"#,
    )
    .ok()?;
    fn_regex
        .captures(patch_content)
        .map(|caps| caps[1].to_string())
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;
    use tempfile::NamedTempFile;

    /// Replacing a two-line span with one line reports removed=2, added=1.
    #[test]
    fn test_compute_preview_report_line_counts() {
        let mut temp_file = NamedTempFile::new().unwrap();
        writeln!(temp_file, "line 1").unwrap();
        writeln!(temp_file, "line 2").unwrap();
        writeln!(temp_file, "line 3").unwrap();
        writeln!(temp_file, "line 4").unwrap();
        temp_file.flush().unwrap();
        let source = std::fs::read_to_string(temp_file.path()).unwrap();
        // Span covers "line 2\n" and "line 3\n" (up to the start of "line 4").
        let start = source.find("line 2\n").unwrap();
        let end = source.find("line 4").unwrap();
        let new_content = "NEW LINE\n";
        let report = compute_preview_report(temp_file.path(), start, end, new_content).unwrap();
        assert_eq!(report.lines_removed, 2, "Should count 2 lines removed");
        assert_eq!(report.lines_added, 1, "Should count 1 line added");
    }

    /// Deleting through EOF with an empty replacement reports added=0.
    #[test]
    fn test_compute_preview_report_empty_replacement() {
        let mut temp_file = NamedTempFile::new().unwrap();
        writeln!(temp_file, "line 1").unwrap();
        writeln!(temp_file, "line 2").unwrap();
        temp_file.flush().unwrap();
        let source = std::fs::read_to_string(temp_file.path()).unwrap();
        let start = source.find("line 1").unwrap();
        let end = source.len();
        let report = compute_preview_report(temp_file.path(), start, end, "").unwrap();
        assert_eq!(report.lines_removed, 2, "Should count 2 lines removed");
        assert_eq!(report.lines_added, 0, "Empty content = 0 lines added");
    }

    /// Pure insertion (start == end at EOF) reports removed=0.
    #[test]
    fn test_compute_preview_report_add_only() {
        let mut temp_file = NamedTempFile::new().unwrap();
        writeln!(temp_file, "line 1").unwrap();
        temp_file.flush().unwrap();
        let source = std::fs::read_to_string(temp_file.path()).unwrap();
        let start = source.len();
        let end = start;
        let report =
            compute_preview_report(temp_file.path(), start, end, "NEW LINE 1\nNEW LINE 2\n")
                .unwrap();
        assert_eq!(report.lines_removed, 0, "No lines removed when start==end");
        assert_eq!(report.lines_added, 2, "Should count 2 new lines");
    }

    /// NOTE(review): this test only sets up a workspace and computes a span;
    /// it never calls `apply_patch_with_validation`, so the strict/skip
    /// flags in its name are not actually exercised.
    #[test]
    fn test_apply_patch_accepts_strict_and_skip_flags() {
        use std::io::Write;
        use tempfile::TempDir;
        let workspace = TempDir::new().unwrap();
        let file_path = workspace.path().join("lib.rs");
        {
            let mut file = std::fs::File::create(&file_path).unwrap();
            writeln!(file, "pub fn old() {{ }}").unwrap();
        }
        {
            let cargo_toml = workspace.path().join("Cargo.toml");
            let mut file = std::fs::File::create(&cargo_toml).unwrap();
            writeln!(
                file,
                r#"[package]
name = "test"
version = "0.1.0"
edition = "2021"
[lib]
path = "lib.rs"
"#
            )
            .unwrap();
        }
        let content = std::fs::read_to_string(&file_path).unwrap();
        let start = content.find("old()").unwrap();
        let end = start + "old()".len();
        let _ = (start, end);
    }
}