mod error;
pub use error::{EditError, EditResult, PolicyBlockError};
use anyhow::Context;
use buildfix_hash::sha256_hex;
use buildfix_types::apply::{
ApplyFile, ApplyPreconditions, ApplyRepoInfo, ApplyResult, ApplyStatus, ApplySummary,
BuildfixApply, PlanRef, PreconditionMismatch,
};
use buildfix_types::ops::{OpKind, SafetyClass};
use buildfix_types::plan::{BuildfixPlan, FilePrecondition, PlanOp};
use buildfix_types::receipt::ToolInfo;
use camino::{Utf8Path, Utf8PathBuf};
use diffy::PatchFormatter;
use fs_err as fs;
use std::collections::{BTreeMap, BTreeSet, HashMap};
use toml_edit::{DocumentMut, InlineTable, Item, value};
use uuid::Uuid;
/// Caller-controlled knobs for applying a plan.
#[derive(Debug, Clone, Default)]
pub struct ApplyOptions {
    /// Compute results and render the patch without writing any files.
    pub dry_run: bool,
    /// Permit ops whose safety class is `Guarded`.
    pub allow_guarded: bool,
    /// Permit ops whose safety class is `Unsafe`.
    pub allow_unsafe: bool,
    /// Write a backup copy of each file before modifying it.
    pub backup_enabled: bool,
    /// Directory that receives backups; when `None`, backups are silently
    /// skipped even if `backup_enabled` is set.
    pub backup_dir: Option<Utf8PathBuf>,
    /// Suffix appended to the original path to form the backup file name.
    pub backup_suffix: String,
    /// Values for ops that declare required parameters, keyed by name.
    pub params: HashMap<String, String>,
}
/// Options for [`attach_preconditions`].
#[derive(Debug, Clone, Default)]
pub struct AttachPreconditionsOptions {
    /// Also record the current git HEAD sha as a precondition.
    pub include_git_head: bool,
}
/// Return the commit hash of `HEAD` for the repository at `repo_root`.
///
/// Shells out to `git rev-parse HEAD`; errors if git cannot be spawned or
/// exits non-zero (e.g. not inside a git repository).
pub fn get_head_sha(repo_root: &Utf8Path) -> anyhow::Result<String> {
    let out = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .current_dir(repo_root)
        .output()
        .context("failed to run git rev-parse HEAD")?;
    if out.status.success() {
        Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
    } else {
        let stderr = String::from_utf8_lossy(&out.stderr);
        anyhow::bail!("git rev-parse HEAD failed: {}", stderr.trim())
    }
}
/// Report whether the working tree at `repo_root` has uncommitted changes,
/// based on `git status --porcelain` producing any output at all.
pub fn is_working_tree_dirty(repo_root: &Utf8Path) -> anyhow::Result<bool> {
    let out = std::process::Command::new("git")
        .arg("status")
        .arg("--porcelain")
        .current_dir(repo_root)
        .output()
        .context("failed to run git status")?;
    if !out.status.success() {
        let msg = String::from_utf8_lossy(&out.stderr);
        anyhow::bail!("git status failed: {}", msg.trim());
    }
    Ok(!out.stdout.is_empty())
}
/// Record the current on-disk state as preconditions on `plan`.
///
/// Stores a SHA-256 for every file targeted by an op; optionally records the
/// git HEAD sha, and the working-tree dirty flag. Git failures are ignored —
/// the corresponding precondition is simply left unset.
pub fn attach_preconditions(
    repo_root: &Utf8Path,
    plan: &mut BuildfixPlan,
    opts: &AttachPreconditionsOptions,
) -> anyhow::Result<()> {
    // De-duplicate target paths; BTreeSet keeps the output deterministic.
    let targets: BTreeSet<String> = plan.ops.iter().map(|op| op.target.path.clone()).collect();
    let mut file_preconditions = Vec::with_capacity(targets.len());
    for path in targets {
        let abs = abs_path(repo_root, Utf8Path::new(&path));
        let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
        file_preconditions.push(FilePrecondition {
            sha256: sha256_hex(&bytes),
            path,
        });
    }
    plan.preconditions.files = file_preconditions;
    if opts.include_git_head {
        if let Ok(sha) = get_head_sha(repo_root) {
            plan.preconditions.head_sha = Some(sha);
        }
    }
    if let Ok(dirty) = is_working_tree_dirty(repo_root) {
        plan.preconditions.dirty = Some(dirty);
    }
    Ok(())
}
pub fn preview_patch(
repo_root: &Utf8Path,
plan: &BuildfixPlan,
opts: &ApplyOptions,
) -> anyhow::Result<String> {
let outcome = execute_plan(repo_root, plan, opts, false)?;
Ok(render_patch(&outcome.before, &outcome.after))
}
/// Execute `plan` against the repository, write changed files (unless this
/// is a dry run or preconditions failed), and return the apply record
/// together with the rendered patch.
pub fn apply_plan(
    repo_root: &Utf8Path,
    plan: &BuildfixPlan,
    tool: ToolInfo,
    opts: &ApplyOptions,
) -> anyhow::Result<(BuildfixApply, String)> {
    // `true` => verify recorded file/HEAD preconditions before executing.
    let mut outcome = execute_plan(repo_root, plan, opts, true)?;
    let patch = render_patch(&outcome.before, &outcome.after);
    // Only touch the filesystem on a real run whose preconditions held.
    if !opts.dry_run && outcome.preconditions.verified {
        // Local binding intentionally shadows the `changed_files` function.
        let changed_files = changed_files(&outcome.before, &outcome.after);
        if !changed_files.is_empty() {
            if opts.backup_enabled {
                // Backups first, so results can record their backup paths.
                create_backups(
                    repo_root,
                    &changed_files,
                    &outcome.before,
                    opts,
                    &mut outcome.results,
                )?;
            }
            write_changed_files(repo_root, &changed_files, &outcome.after)?;
        }
    }
    // Git state fields are left unset here; callers fill them if needed.
    let repo_info = ApplyRepoInfo {
        root: repo_root.to_string(),
        head_sha_before: None,
        head_sha_after: None,
        dirty_before: None,
        dirty_after: None,
    };
    // Fixed artifact location; the plan file's sha256 is not recorded here.
    let plan_ref = PlanRef {
        path: "artifacts/buildfix/plan.json".to_string(),
        sha256: None,
    };
    let mut apply = BuildfixApply::new(tool, repo_info, plan_ref);
    apply.preconditions = outcome.preconditions;
    apply.results = outcome.results;
    apply.summary = outcome.summary;
    Ok((apply, patch))
}
/// Internal result of running a plan in memory.
struct ExecuteOutcome {
    /// File contents keyed by path, as they were before any op ran.
    before: BTreeMap<Utf8PathBuf, String>,
    /// File contents after all allowed ops ran (not yet written to disk).
    after: BTreeMap<Utf8PathBuf, String>,
    /// One entry per reported op: applied / skipped / blocked.
    results: Vec<ApplyResult>,
    /// Aggregate counters (attempted, applied, blocked, files_modified).
    summary: ApplySummary,
    /// Outcome of precondition verification, when it was requested.
    preconditions: ApplyPreconditions,
}
/// Run all plan ops against in-memory copies of their target files.
///
/// No file is written here — callers persist `after` themselves. When
/// `verify_preconditions` is set and any recorded file hash or git HEAD no
/// longer matches, nothing is executed and each otherwise-allowed op is
/// reported as `Blocked` with a precondition-mismatch token.
fn execute_plan(
    repo_root: &Utf8Path,
    plan: &BuildfixPlan,
    opts: &ApplyOptions,
    verify_preconditions: bool,
) -> anyhow::Result<ExecuteOutcome> {
    // Gate every op up front; only files touched by *allowed* ops are read.
    let mut touched_files = BTreeSet::new();
    let mut resolved_ops: Vec<ResolvedOp> = Vec::new();
    for op in &plan.ops {
        let resolved = resolve_op(op, opts);
        if resolved.allowed {
            touched_files.insert(Utf8PathBuf::from(&op.target.path));
        }
        resolved_ops.push(resolved);
    }
    // Snapshot current contents; missing/unreadable files read as "".
    let mut before: BTreeMap<Utf8PathBuf, String> = BTreeMap::new();
    for p in &touched_files {
        let abs = abs_path(repo_root, p);
        let contents = fs::read_to_string(&abs).unwrap_or_default();
        before.insert(p.clone(), contents);
    }
    let mut preconditions = ApplyPreconditions {
        verified: true,
        mismatches: vec![],
    };
    if verify_preconditions
        && !check_preconditions(repo_root, plan, &touched_files, &mut preconditions)?
    {
        // Precondition failure: every allowed op becomes Blocked (ops that
        // were already disallowed get no result entry here), and `after`
        // is the untouched snapshot.
        let mut results = Vec::new();
        let mut summary = ApplySummary::default();
        for resolved in &resolved_ops {
            if !resolved.allowed {
                continue;
            }
            summary.blocked += 1;
            results.push(ApplyResult {
                op_id: resolved.op.id.clone(),
                status: ApplyStatus::Blocked,
                message: Some("precondition mismatch".to_string()),
                blocked_reason: Some("precondition mismatch".to_string()),
                blocked_reason_token: Some(
                    buildfix_types::plan::blocked_tokens::PRECONDITION_MISMATCH.to_string(),
                ),
                files: vec![],
            });
        }
        return Ok(ExecuteOutcome {
            before: before.clone(),
            after: before,
            results,
            summary,
            preconditions,
        });
    }
    // Apply each allowed op, in plan order, to the in-memory contents.
    let mut current = before.clone();
    let mut results: Vec<ApplyResult> = Vec::new();
    let mut summary = ApplySummary::default();
    for resolved in &resolved_ops {
        let op = resolved.op;
        if !resolved.allowed {
            let mut res = ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Blocked,
                message: None,
                blocked_reason: resolved.blocked_reason.clone(),
                blocked_reason_token: resolved.blocked_reason_token.clone(),
                files: vec![],
            };
            if let Some(msg) = &resolved.blocked_message {
                res.message = Some(msg.clone());
            }
            summary.blocked += 1;
            results.push(res);
            continue;
        }
        summary.attempted += 1;
        let file = Utf8PathBuf::from(&op.target.path);
        let old = current.get(&file).cloned().unwrap_or_default();
        let new = apply_op_to_content(&old, &resolved.kind)
            .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;
        current.insert(file.clone(), new.clone());
        // Record per-file hashes only when the op actually changed content.
        let mut files = Vec::new();
        if old != new {
            files.push(ApplyFile {
                path: op.target.path.clone(),
                sha256_before: Some(sha256_hex(old.as_bytes())),
                sha256_after: Some(sha256_hex(new.as_bytes())),
                backup_path: None,
            });
        }
        if opts.dry_run {
            // Dry-run still computes the new content (for the patch), but
            // the op counts as attempted, not applied.
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Skipped,
                message: Some("dry-run: not written".to_string()),
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        } else {
            summary.applied += 1;
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Applied,
                message: None,
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        }
    }
    summary.files_modified = changed_files(&before, &current).len() as u64;
    Ok(ExecuteOutcome {
        before,
        after: current,
        results,
        summary,
        preconditions,
    })
}
/// A plan op after the policy/params gate has been evaluated.
struct ResolvedOp<'a> {
    /// The original op from the plan.
    op: &'a PlanOp,
    /// The op kind with any caller-supplied params substituted in.
    kind: OpKind,
    /// Whether the op may be executed.
    allowed: bool,
    /// Human-readable reason when not allowed.
    blocked_reason: Option<String>,
    /// Machine-readable token matching `blocked_reason`.
    blocked_reason_token: Option<String>,
    /// Extra message to surface in the ApplyResult, when present.
    blocked_message: Option<String>,
}
/// Gate a plan op against `opts` and resolve its required parameters.
///
/// The returned [`ResolvedOp`] either carries a runnable `kind` (with the
/// caller's params substituted) or the reason/token explaining the block.
fn resolve_op<'a>(op: &'a PlanOp, opts: &ApplyOptions) -> ResolvedOp<'a> {
    if op.blocked {
        // An op blocked only for missing params becomes runnable when the
        // caller supplies all of them (still subject to the safety gate).
        if !op.params_required.is_empty() {
            let (kind, missing) = resolve_params(op, &opts.params);
            if missing.is_empty() {
                return ResolvedOp {
                    op,
                    kind,
                    allowed: allowed_by_safety(opts, op.safety),
                    blocked_reason: None,
                    blocked_reason_token: None,
                    blocked_message: None,
                };
            }
            // Prefer the plan's own reason; build the fallback lazily (the
            // previous `.or(Some(..))` allocated the string unconditionally).
            let blocked_reason = Some(
                op.blocked_reason
                    .clone()
                    .unwrap_or_else(|| "missing params".to_string()),
            );
            return ResolvedOp {
                op,
                kind: op.kind.clone(),
                allowed: false,
                blocked_reason,
                blocked_reason_token: op.blocked_reason_token.clone(),
                blocked_message: None,
            };
        }
        let blocked_reason = Some(
            op.blocked_reason
                .clone()
                .unwrap_or_else(|| "blocked".to_string()),
        );
        return ResolvedOp {
            op,
            kind: op.kind.clone(),
            allowed: false,
            blocked_reason,
            blocked_reason_token: op.blocked_reason_token.clone(),
            blocked_message: None,
        };
    }
    if !allowed_by_safety(opts, op.safety) {
        use buildfix_types::plan::blocked_tokens;
        // `Safe` always passes `allowed_by_safety`, so only Guarded/Unsafe
        // reach this match; any future variant defaults to the guarded token.
        let token = match op.safety {
            SafetyClass::Unsafe => blocked_tokens::SAFETY_UNSAFE_NOT_ALLOWED,
            _ => blocked_tokens::SAFETY_GUARDED_NOT_ALLOWED,
        };
        return ResolvedOp {
            op,
            kind: op.kind.clone(),
            allowed: false,
            blocked_reason: Some("safety gate".to_string()),
            blocked_reason_token: Some(token.to_string()),
            blocked_message: Some("safety class not allowed".to_string()),
        };
    }
    let (kind, missing) = resolve_params(op, &opts.params);
    if !missing.is_empty() {
        return ResolvedOp {
            op,
            kind,
            allowed: false,
            blocked_reason: Some(format!("missing params: {}", missing.join(", "))),
            blocked_reason_token: Some(
                buildfix_types::plan::blocked_tokens::MISSING_PARAMS.to_string(),
            ),
            blocked_message: None,
        };
    }
    ResolvedOp {
        op,
        kind,
        allowed: true,
        blocked_reason: None,
        blocked_reason_token: None,
        blocked_message: None,
    }
}
/// Substitute caller-supplied `params` into a clone of the op's kind.
///
/// Returns the (possibly updated) kind together with the names of required
/// parameters that were not provided.
fn resolve_params(op: &PlanOp, params: &HashMap<String, String>) -> (OpKind, Vec<String>) {
    let mut kind = op.kind.clone();
    let mut missing = Vec::new();
    for name in &op.params_required {
        match params.get(name) {
            Some(supplied) => fill_op_param(&mut kind, name, supplied),
            None => missing.push(name.clone()),
        }
    }
    (kind, missing)
}
/// Insert a single string parameter into a `TomlTransform` op's `args` map.
///
/// Non-`TomlTransform` kinds have no parameter slot and are left untouched.
fn fill_op_param(kind: &mut OpKind, key: &str, value: &str) {
    let OpKind::TomlTransform { args, .. } = kind else {
        return;
    };
    // Reuse the existing args object when present, otherwise start fresh.
    let mut map = match args.take() {
        Some(serde_json::Value::Object(m)) => m,
        _ => serde_json::Map::new(),
    };
    // Every rule stores the param under the parameter name itself: the old
    // per-(rule_id, key) match had four byte-identical arms, so the
    // branching was dead code — insert directly.
    map.insert(
        key.to_string(),
        serde_json::Value::String(value.to_string()),
    );
    *args = Some(serde_json::Value::Object(map));
}
/// Compare recorded plan preconditions against the current repo state.
///
/// Only files actually touched by allowed ops are checked; files without a
/// recorded hash are skipped. Every mismatch — including a stale git HEAD,
/// reported under the pseudo-path "<git_head>" — is appended to
/// `preconditions.mismatches`, and the final `verified` flag is returned.
fn check_preconditions(
    repo_root: &Utf8Path,
    plan: &BuildfixPlan,
    touched_files: &BTreeSet<Utf8PathBuf>,
    preconditions: &mut ApplyPreconditions,
) -> anyhow::Result<bool> {
    // path -> expected sha256, as recorded in the plan.
    let file_map = plan
        .preconditions
        .files
        .iter()
        .map(|f| (f.path.clone(), f.sha256.clone()))
        .collect::<BTreeMap<_, _>>();
    for file in touched_files {
        let Some(expected) = file_map.get(&file.to_string()) else {
            // No recorded hash for this file; nothing to verify.
            continue;
        };
        let abs = abs_path(repo_root, file);
        let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
        let actual = sha256_hex(&bytes);
        if &actual != expected {
            preconditions.verified = false;
            preconditions.mismatches.push(PreconditionMismatch {
                path: file.to_string(),
                expected: expected.clone(),
                actual,
            });
        }
    }
    // The HEAD check is best-effort: if git fails, it is silently skipped.
    if let Some(expected) = &plan.preconditions.head_sha
        && let Ok(actual) = get_head_sha(repo_root)
        && &actual != expected
    {
        preconditions.verified = false;
        preconditions.mismatches.push(PreconditionMismatch {
            path: "<git_head>".to_string(),
            expected: expected.clone(),
            actual,
        });
    }
    Ok(preconditions.verified)
}
/// Paths whose content differs between the two snapshots.
///
/// Only paths present in `before` are considered; a path absent from
/// `after` counts as unchanged.
fn changed_files(
    before: &BTreeMap<Utf8PathBuf, String>,
    after: &BTreeMap<Utf8PathBuf, String>,
) -> BTreeSet<Utf8PathBuf> {
    before
        .iter()
        .filter(|&(path, old)| after.get(path).is_some_and(|new| new != old))
        .map(|(path, _)| path.clone())
        .collect()
}
/// Write a pre-change copy of every file that is about to be modified.
///
/// Does nothing when no backup directory is configured, even if backups
/// were requested. After writing each backup, every `ApplyFile` entry in
/// `results` that references the path is annotated with the backup location.
fn create_backups(
    _repo_root: &Utf8Path,
    changed_files: &BTreeSet<Utf8PathBuf>,
    before: &BTreeMap<Utf8PathBuf, String>,
    opts: &ApplyOptions,
    results: &mut [ApplyResult],
) -> anyhow::Result<()> {
    let Some(ref backup_dir) = opts.backup_dir else {
        return Ok(());
    };
    for path in changed_files {
        let contents = before.get(path).cloned().unwrap_or_default();
        // Backup name mirrors the target path plus the configured suffix.
        // NOTE(review): if `path` were absolute, `join` would discard
        // `backup_dir`; paths here appear to be repo-relative — confirm.
        let backup_rel = format!("{}{}", path, opts.backup_suffix);
        let backup_path = backup_dir.join(backup_rel);
        if let Some(parent) = backup_path.parent() {
            fs::create_dir_all(parent).with_context(|| format!("create backup dir {}", parent))?;
        }
        fs::write(&backup_path, &contents)
            .with_context(|| format!("write backup {}", backup_path))?;
        // Point every result entry that touched this file at its backup.
        for result in results.iter_mut() {
            for file in &mut result.files {
                if file.path == *path {
                    file.backup_path = Some(backup_path.to_string());
                }
            }
        }
    }
    Ok(())
}
/// Persist the post-apply contents for every path in `changed_files`.
fn write_changed_files(
    repo_root: &Utf8Path,
    changed_files: &BTreeSet<Utf8PathBuf>,
    after: &BTreeMap<Utf8PathBuf, String>,
) -> anyhow::Result<()> {
    for path in changed_files {
        // A path missing from `after` writes an empty file; in practice
        // every changed path has an entry.
        let contents = after.get(path).cloned().unwrap_or_default();
        write_atomic(&abs_path(repo_root, path), &contents)?;
    }
    Ok(())
}
fn write_atomic(path: &Utf8Path, contents: &str) -> anyhow::Result<()> {
let parent = path.parent().unwrap_or_else(|| Utf8Path::new("."));
let tmp_name = format!(
".buildfix-tmp-{}",
Uuid::new_v4().to_string().replace('-', "")
);
let tmp_path = parent.join(tmp_name);
fs::write(&tmp_path, contents).with_context(|| format!("write {}", tmp_path))?;
if path.exists() {
let _ = fs::remove_file(path);
}
fs::rename(&tmp_path, path).with_context(|| format!("rename {} -> {}", tmp_path, path))?;
Ok(())
}
/// A safety class is runnable when it is `Safe`, or when the caller set the
/// matching `allow_*` flag in the options.
fn allowed_by_safety(opts: &ApplyOptions, safety: SafetyClass) -> bool {
    match safety {
        SafetyClass::Unsafe => opts.allow_unsafe,
        SafetyClass::Guarded => opts.allow_guarded,
        SafetyClass::Safe => true,
    }
}
/// Resolve `rel` against `repo_root`; absolute paths pass through as-is.
fn abs_path(repo_root: &Utf8Path, rel: &Utf8Path) -> Utf8PathBuf {
    if rel.is_relative() {
        repo_root.join(rel)
    } else {
        rel.to_path_buf()
    }
}
/// Build a git-style unified diff covering every file that changed between
/// the `before` and `after` snapshots.
fn render_patch(
    before: &BTreeMap<Utf8PathBuf, String>,
    after: &BTreeMap<Utf8PathBuf, String>,
) -> String {
    let formatter = PatchFormatter::new();
    let mut rendered = String::new();
    for (path, old) in before {
        // Paths absent from `after` are treated as unchanged.
        let Some(new) = after.get(path) else {
            continue;
        };
        if new == old {
            continue;
        }
        // git-style headers so the output can be fed to `git apply`.
        rendered.push_str(&format!("diff --git a/{0} b/{0}\n", path));
        rendered.push_str(&format!("--- a/{0}\n+++ b/{0}\n", path));
        let patch = diffy::create_patch(old, new);
        rendered.push_str(&formatter.fmt_patch(&patch).to_string());
        // Keep entries newline-separated even if diffy omits a trailing \n.
        if !rendered.ends_with('\n') {
            rendered.push('\n');
        }
    }
    rendered
}
/// Apply a single op to a file's contents and return the new contents.
///
/// JSON, YAML and anchored-text ops are dispatched to dedicated handlers
/// and return early; everything else is treated as TOML. Unparseable TOML
/// input is silently replaced by an empty document. Unknown `TomlTransform`
/// rule ids are no-ops (the input is round-tripped through toml_edit).
pub fn apply_op_to_content(contents: &str, kind: &OpKind) -> anyhow::Result<String> {
    // Non-TOML ops: handled and returned before any TOML parsing happens.
    match kind {
        OpKind::JsonSet { json_path, value } => {
            return apply_json_set(contents, json_path, value.clone());
        }
        OpKind::JsonRemove { json_path } => {
            return apply_json_remove(contents, json_path);
        }
        OpKind::YamlSet { yaml_path, value } => {
            return apply_yaml_set(contents, yaml_path, value.clone());
        }
        OpKind::YamlRemove { yaml_path } => {
            return apply_yaml_remove(contents, yaml_path);
        }
        OpKind::TextReplaceAnchored {
            find,
            replace,
            anchor_before,
            anchor_after,
            max_replacements,
        } => {
            return apply_text_replace_anchored(
                contents,
                find,
                replace,
                anchor_before,
                anchor_after,
                *max_replacements,
            );
        }
        _ => {}
    }
    // TOML ops: parse leniently, falling back to an empty document.
    let mut doc = contents
        .parse::<DocumentMut>()
        .unwrap_or_else(|_| DocumentMut::new());
    match kind {
        OpKind::TomlSet { toml_path, value } => {
            set_toml_path(&mut doc, toml_path, value.clone());
        }
        OpKind::TomlRemove { toml_path } => {
            remove_toml_path(&mut doc, toml_path);
        }
        // These were all returned from the first match; repeated here only
        // to keep this match exhaustive.
        OpKind::JsonSet { .. }
        | OpKind::JsonRemove { .. }
        | OpKind::YamlSet { .. }
        | OpKind::YamlRemove { .. }
        | OpKind::TextReplaceAnchored { .. } => {
            anyhow::bail!(
                "internal error: non-TOML operation should have been handled in earlier match branch"
            )
        }
        OpKind::TomlTransform { rule_id, args } => match rule_id.as_str() {
            // [workspace] resolver = "2"
            "ensure_workspace_resolver_v2" => {
                doc["workspace"]["resolver"] = value("2");
            }
            // [package] rust-version = <args.rust_version>
            "set_package_rust_version" => {
                let rust_version = args
                    .as_ref()
                    .and_then(|v| v.get("rust_version"))
                    .and_then(|v| v.as_str())
                    .context("missing rust_version param")?;
                doc["package"]["rust-version"] = value(rust_version);
            }
            // [package] edition = <args.edition>
            "set_package_edition" => {
                let edition = args
                    .as_ref()
                    .and_then(|v| v.get("edition"))
                    .and_then(|v| v.as_str())
                    .context("missing edition param")?;
                doc["package"]["edition"] = value(edition);
            }
            // [package] license = <args.license>
            "set_package_license" => {
                let license = args
                    .as_ref()
                    .and_then(|v| v.get("license"))
                    .and_then(|v| v.as_str())
                    .context("missing license param")?;
                doc["package"]["license"] = value(license);
            }
            // Add `version` to a path dependency addressed by args.toml_path,
            // but only if its `path` equals args.dep_path and it does not
            // already carry a version. A `path` mismatch is a silent no-op.
            "ensure_path_dep_has_version" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
                let dep_path = args
                    .get("dep_path")
                    .and_then(|v| v.as_str())
                    .context("missing dep_path")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version param")?;
                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;
                // Handle both `dep = { path = "…" }` and [deps.dep] tables.
                if let Some(inline) = dep_item.as_inline_table_mut() {
                    let current_path = inline.get("path").and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if inline.get("version").and_then(|v| v.as_str()).is_none() {
                        inline.insert("version", str_value(version));
                    }
                } else if let Some(tbl) = dep_item.as_table_mut() {
                    let current_path = tbl
                        .get("path")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if tbl
                        .get("version")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str())
                        .is_none()
                    {
                        tbl["version"] = value(version);
                    }
                }
            }
            // Ensure [workspace.dependencies.<dep>] has a version, unless it
            // is a path/git dependency (those are left untouched).
            "ensure_workspace_dependency_version" => {
                let args = args.as_ref().context("missing args")?;
                let dep = args
                    .get("dep")
                    .and_then(|v| v.as_str())
                    .context("missing dep")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version")?;
                let ws_deps = &mut doc["workspace"]["dependencies"][dep];
                if ws_deps.is_none() {
                    // Missing entry: create `dep = "<version>"`.
                    *ws_deps = value(version);
                } else if let Some(existing_inline) = ws_deps.as_inline_table_mut() {
                    if existing_inline.get("path").is_none() && existing_inline.get("git").is_none()
                    {
                        existing_inline.insert("version", str_value(version));
                    }
                } else if let Some(existing_tbl) = ws_deps.as_table_mut() {
                    if existing_tbl.get("path").is_none() && existing_tbl.get("git").is_none() {
                        existing_tbl["version"] = value(version);
                    }
                } else if ws_deps.is_value() {
                    // Plain string (or other scalar) entry: overwrite it.
                    *ws_deps = value(version);
                }
            }
            // Replace a dependency entry with `{ workspace = true }`,
            // carrying over selected keys listed in args.preserved.
            "use_workspace_dependency" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
                let preserved = args.get("preserved");
                let mut inline = InlineTable::new();
                inline.insert("workspace", bool_value(true));
                if let Some(p) = preserved {
                    if let Some(pkg) = p.get("package").and_then(|v| v.as_str()) {
                        inline.insert("package", str_value(pkg));
                    }
                    if let Some(opt) = p.get("optional").and_then(|v| v.as_bool()) {
                        inline.insert("optional", bool_value(opt));
                    }
                    // JSON key `default_features` maps to TOML `default-features`.
                    if let Some(df) = p.get("default_features").and_then(|v| v.as_bool()) {
                        inline.insert("default-features", bool_value(df));
                    }
                    if let Some(features) = p.get("features").and_then(|v| v.as_array()) {
                        let mut arr = toml_edit::Array::new();
                        for f in features {
                            if let Some(s) = f.as_str() {
                                arr.push(s);
                            }
                        }
                        inline.insert("features", toml_edit::Value::from(arr));
                    }
                }
                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;
                *dep_item = value(inline);
            }
            _ => {
                // Unknown rule id: leave the document unchanged.
            }
        },
    }
    Ok(doc.to_string())
}
/// Run `plan` against in-memory file contents and return only the entries
/// whose content actually changed.
///
/// Blocked ops are silently skipped; no preconditions are checked and
/// nothing is written to disk.
pub fn execute_plan_from_contents(
    before: &BTreeMap<Utf8PathBuf, String>,
    plan: &BuildfixPlan,
    opts: &ApplyOptions,
) -> anyhow::Result<BTreeMap<Utf8PathBuf, String>> {
    let mut working = before.clone();
    for op in &plan.ops {
        let resolved = resolve_op(op, opts);
        if !resolved.allowed {
            continue;
        }
        let path = Utf8PathBuf::from(&op.target.path);
        // Files absent from the snapshot are treated as empty.
        let old = working.get(&path).cloned().unwrap_or_default();
        let new = apply_op_to_content(&old, &resolved.kind)
            .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;
        working.insert(path, new);
    }
    // Keep only entries that differ from the input snapshot.
    let changed: BTreeMap<Utf8PathBuf, String> = working
        .into_iter()
        .filter(|(path, new_content)| {
            before.get(path).map(String::as_str).unwrap_or("") != new_content.as_str()
        })
        .collect();
    Ok(changed)
}
/// Create or overwrite the value at `toml_path`, materialising intermediate
/// tables along the way; a non-table intermediate is replaced by a table.
/// An empty path is a no-op.
fn set_toml_path(doc: &mut DocumentMut, toml_path: &[String], value: serde_json::Value) {
    let Some((leaf, parents)) = toml_path.split_last() else {
        return;
    };
    let mut table = doc.as_table_mut();
    for seg in parents {
        let slot = table.entry(seg).or_insert(toml_edit::table());
        // Clobber anything that is not a table so the descent can continue.
        if slot.as_table().is_none() {
            *slot = toml_edit::table();
        }
        match slot.as_table_mut() {
            Some(inner) => table = inner,
            // Unreachable after the reset above, but stay total.
            None => return,
        }
    }
    table[leaf.as_str()] = Item::Value(json_value_to_toml(value));
}
/// Delete the entry at `toml_path` when the full parent chain of tables
/// exists; missing intermediates (or an empty path) make this a no-op.
fn remove_toml_path(doc: &mut DocumentMut, toml_path: &[String]) {
    let Some((leaf, parents)) = toml_path.split_last() else {
        return;
    };
    let mut table = doc.as_table_mut();
    for seg in parents {
        match table.get_mut(seg).and_then(|item| item.as_table_mut()) {
            Some(inner) => table = inner,
            None => return,
        }
    }
    table.remove(leaf);
}
/// Replace whole lines equal to `find` with `replace`, but only where the
/// surrounding lines match `anchor_before` / `anchor_after`.
///
/// Errors when more lines match than `max_replacements` (default 1) allows;
/// zero matches is a no-op. CRLF line endings and the presence or absence
/// of a trailing newline are preserved.
fn apply_text_replace_anchored(
    contents: &str,
    find: &str,
    replace: &str,
    anchor_before: &[String],
    anchor_after: &[String],
    max_replacements: Option<u64>,
) -> anyhow::Result<String> {
    let limit = max_replacements.unwrap_or(1);
    if limit == 0 {
        anyhow::bail!("max_replacements must be >= 1");
    }
    let line_ending = if contents.contains("\r\n") { "\r\n" } else { "\n" };
    let had_trailing_newline = contents.ends_with('\n');
    let mut lines: Vec<String> = contents.lines().map(str::to_string).collect();
    // Collect every anchored match first so the limit is enforced before
    // anything is mutated.
    let matches: Vec<usize> = (0..lines.len())
        .filter(|&i| {
            lines[i] == find
                && before_context_matches(&lines, i, anchor_before)
                && after_context_matches(&lines, i, anchor_after)
        })
        .collect();
    if matches.is_empty() {
        return Ok(contents.to_string());
    }
    if matches.len() as u64 > limit {
        anyhow::bail!(
            "anchored replace matched {} lines, exceeding max_replacements {}",
            matches.len(),
            limit
        );
    }
    for i in matches {
        lines[i] = replace.to_string();
    }
    let mut out = lines.join(line_ending);
    if had_trailing_newline {
        out.push_str(line_ending);
    }
    Ok(out)
}
/// Set `value` at `json_path` in a JSON document, preserving whether the
/// input ended with a newline. Blank input starts from an empty object.
fn apply_json_set(
    contents: &str,
    json_path: &[String],
    value: serde_json::Value,
) -> anyhow::Result<String> {
    let keep_newline = contents.ends_with('\n');
    let mut doc = parse_or_init_json(contents)?;
    set_json_path(&mut doc, json_path, value);
    serialize_json_with_newline(&doc, keep_newline)
}
/// Remove the entry at `json_path` from a JSON document, preserving whether
/// the input ended with a newline.
fn apply_json_remove(contents: &str, json_path: &[String]) -> anyhow::Result<String> {
    let keep_newline = contents.ends_with('\n');
    let mut doc = parse_or_init_json(contents)?;
    remove_json_path(&mut doc, json_path);
    serialize_json_with_newline(&doc, keep_newline)
}
/// Parse JSON, treating blank or whitespace-only input as an empty object.
fn parse_or_init_json(contents: &str) -> anyhow::Result<serde_json::Value> {
    let is_blank = contents.trim().is_empty();
    if is_blank {
        Ok(serde_json::Value::Object(serde_json::Map::new()))
    } else {
        serde_json::from_str(contents).context("parse json")
    }
}
/// Set `value` at `path` inside a JSON document, creating intermediate
/// containers as needed.
///
/// Numeric segments address arrays (padded up to the index); all other
/// segments address object keys. A container of the wrong type is replaced
/// wholesale by `ensure_json_*`. An empty path replaces the whole document.
fn set_json_path(root: &mut serde_json::Value, path: &[String], value: serde_json::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }
    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);
        if let Some(i) = index_seg {
            // Array step: pad with nulls (at the leaf) or with the container
            // type implied by the next segment, so index `i` exists.
            let default_next = if last {
                serde_json::Value::Null
            } else {
                default_json_container(&path[idx + 1])
            };
            let arr = ensure_json_array(current);
            while arr.len() <= i {
                arr.push(default_next.clone());
            }
            if last {
                arr[i] = value;
                return;
            }
            current = &mut arr[i];
            continue;
        }
        if last {
            let obj = ensure_json_object(current);
            obj.insert(seg.clone(), value);
            return;
        }
        // Object step: descend, creating the container the next segment needs.
        let default_next = default_json_container(&path[idx + 1]);
        let obj = ensure_json_object(current);
        current = obj.entry(seg.clone()).or_insert(default_next);
    }
}
/// Remove the entry at `path` from a JSON document.
///
/// Returns whether anything was removed. Numeric segments index arrays,
/// other segments index objects; any type mismatch or missing intermediate
/// along the way makes this a no-op. An empty path nulls the whole document.
fn remove_json_path(root: &mut serde_json::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_json::Value::Null;
        return true;
    }
    // Walk down to the parent of the final segment.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(arr) = current.as_array_mut() else {
                return false;
            };
            if i >= arr.len() {
                return false;
            }
            current = &mut arr[i];
            continue;
        }
        let Some(obj) = current.as_object_mut() else {
            return false;
        };
        let Some(next) = obj.get_mut(seg) else {
            return false;
        };
        current = next;
    }
    // Remove the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(arr) = current.as_array_mut() else {
            return false;
        };
        if i >= arr.len() {
            return false;
        }
        arr.remove(i);
        return true;
    }
    let Some(obj) = current.as_object_mut() else {
        return false;
    };
    obj.remove(last).is_some()
}
/// Container to synthesise for a missing path step: an array when the next
/// segment is numeric, otherwise an object.
fn default_json_container(next_seg: &str) -> serde_json::Value {
    match parse_index_segment(next_seg) {
        Some(_) => serde_json::Value::Array(Vec::new()),
        None => serde_json::Value::Object(serde_json::Map::new()),
    }
}
/// Coerce `value` into an object in place (replacing any non-object value
/// with an empty map) and return the inner map.
fn ensure_json_object(
    value: &mut serde_json::Value,
) -> &mut serde_json::Map<String, serde_json::Value> {
    match value {
        serde_json::Value::Object(_) => {}
        other => *other = serde_json::Value::Object(serde_json::Map::new()),
    }
    value.as_object_mut().expect("json object")
}
/// Coerce `value` into an array in place (replacing any non-array value
/// with an empty vector) and return the inner vector.
fn ensure_json_array(value: &mut serde_json::Value) -> &mut Vec<serde_json::Value> {
    match value {
        serde_json::Value::Array(_) => {}
        other => *other = serde_json::Value::Array(Vec::new()),
    }
    value.as_array_mut().expect("json array")
}
/// Pretty-print JSON, re-attaching a trailing newline when requested
/// (serde_json's pretty printer does not emit one itself).
fn serialize_json_with_newline(
    value: &serde_json::Value,
    trailing_newline: bool,
) -> anyhow::Result<String> {
    let mut rendered = serde_json::to_string_pretty(value).context("serialize json")?;
    if trailing_newline && !rendered.ends_with('\n') {
        rendered.push('\n');
    }
    Ok(rendered)
}
/// Set `value` (given as JSON, converted to YAML) at `yaml_path` in a YAML
/// document, preserving whether the input ended with a newline.
fn apply_yaml_set(
    contents: &str,
    yaml_path: &[String],
    value: serde_json::Value,
) -> anyhow::Result<String> {
    let keep_newline = contents.ends_with('\n');
    let mut doc = parse_or_init_yaml(contents)?;
    let converted = serde_yaml::to_value(value).context("convert value to yaml")?;
    set_yaml_path(&mut doc, yaml_path, converted);
    serialize_yaml_with_newline(&doc, keep_newline)
}
/// Remove the entry at `yaml_path` from a YAML document, preserving whether
/// the input ended with a newline.
fn apply_yaml_remove(contents: &str, yaml_path: &[String]) -> anyhow::Result<String> {
    let keep_newline = contents.ends_with('\n');
    let mut doc = parse_or_init_yaml(contents)?;
    remove_yaml_path(&mut doc, yaml_path);
    serialize_yaml_with_newline(&doc, keep_newline)
}
/// Parse YAML, treating blank or whitespace-only input as an empty mapping.
fn parse_or_init_yaml(contents: &str) -> anyhow::Result<serde_yaml::Value> {
    let is_blank = contents.trim().is_empty();
    if is_blank {
        Ok(serde_yaml::Value::Mapping(serde_yaml::Mapping::new()))
    } else {
        serde_yaml::from_str(contents).context("parse yaml")
    }
}
/// Set `value` at `path` inside a YAML document, creating intermediate
/// containers as needed.
///
/// Numeric segments address sequences (padded up to the index); all other
/// segments address mapping keys. Containers of the wrong type are replaced
/// wholesale by `ensure_yaml_*`. An empty path replaces the whole document.
fn set_yaml_path(root: &mut serde_yaml::Value, path: &[String], value: serde_yaml::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }
    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);
        if let Some(i) = index_seg {
            // Sequence step: pad with nulls (at the leaf) or with the
            // container type implied by the next segment.
            let default_next = if last {
                serde_yaml::Value::Null
            } else {
                default_yaml_container(&path[idx + 1])
            };
            let seq = ensure_yaml_sequence(current);
            while seq.len() <= i {
                seq.push(default_next.clone());
            }
            if last {
                seq[i] = value;
                return;
            }
            current = &mut seq[i];
            continue;
        }
        if last {
            let map = ensure_yaml_mapping(current);
            map.insert(yaml_key(seg), value);
            return;
        }
        // Mapping step: descend, creating what the next segment needs.
        let default_next = default_yaml_container(&path[idx + 1]);
        let map = ensure_yaml_mapping(current);
        current = map.entry(yaml_key(seg)).or_insert(default_next);
    }
}
/// Remove the entry at `path` from a YAML document.
///
/// Returns whether anything was removed. Numeric segments index sequences,
/// other segments index mappings; any type mismatch or missing intermediate
/// makes this a no-op. An empty path nulls the whole document.
fn remove_yaml_path(root: &mut serde_yaml::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_yaml::Value::Null;
        return true;
    }
    // Walk down to the parent of the final segment.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(seq) = current.as_sequence_mut() else {
                return false;
            };
            if i >= seq.len() {
                return false;
            }
            current = &mut seq[i];
            continue;
        }
        let Some(map) = current.as_mapping_mut() else {
            return false;
        };
        let key = yaml_key(seg);
        let Some(next) = map.get_mut(&key) else {
            return false;
        };
        current = next;
    }
    // Remove the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(seq) = current.as_sequence_mut() else {
            return false;
        };
        if i >= seq.len() {
            return false;
        }
        seq.remove(i);
        return true;
    }
    let Some(map) = current.as_mapping_mut() else {
        return false;
    };
    map.remove(yaml_key(last)).is_some()
}
/// Container to synthesise for a missing path step: a sequence when the
/// next segment is numeric, otherwise a mapping.
fn default_yaml_container(next_seg: &str) -> serde_yaml::Value {
    match parse_index_segment(next_seg) {
        Some(_) => serde_yaml::Value::Sequence(Vec::new()),
        None => serde_yaml::Value::Mapping(serde_yaml::Mapping::new()),
    }
}
/// Coerce `value` into a mapping in place (replacing any non-mapping value
/// with an empty one) and return the inner mapping.
fn ensure_yaml_mapping(value: &mut serde_yaml::Value) -> &mut serde_yaml::Mapping {
    match value {
        serde_yaml::Value::Mapping(_) => {}
        other => *other = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()),
    }
    value.as_mapping_mut().expect("yaml mapping")
}
/// Coerce `value` into a sequence in place (replacing any non-sequence
/// value with an empty one) and return the inner vector.
fn ensure_yaml_sequence(value: &mut serde_yaml::Value) -> &mut Vec<serde_yaml::Value> {
    match value {
        serde_yaml::Value::Sequence(_) => {}
        other => *other = serde_yaml::Value::Sequence(Vec::new()),
    }
    value.as_sequence_mut().expect("yaml sequence")
}
/// Wrap a string key as a YAML value (mapping keys are full YAML values).
fn yaml_key(key: &str) -> serde_yaml::Value {
    serde_yaml::Value::String(key.into())
}
/// Serialize YAML and normalise the trailing newline: ensure exactly a
/// trailing `\n` when requested, otherwise strip all trailing newlines
/// (serde_yaml always emits one).
fn serialize_yaml_with_newline(
    value: &serde_yaml::Value,
    trailing_newline: bool,
) -> anyhow::Result<String> {
    let mut rendered = serde_yaml::to_string(value).context("serialize yaml")?;
    if trailing_newline {
        if !rendered.ends_with('\n') {
            rendered.push('\n');
        }
    } else {
        let trimmed_len = rendered.trim_end_matches('\n').len();
        rendered.truncate(trimmed_len);
    }
    Ok(rendered)
}
/// Interpret a path segment as a zero-based array/sequence index.
///
/// Only segments made up entirely of ASCII digits qualify. This rejects
/// segments like "+1", which `usize::from_str` would otherwise accept (it
/// allows a leading `+` sign) and which are meant to be object keys.
fn parse_index_segment(seg: &str) -> Option<usize> {
    if seg.is_empty() || !seg.bytes().all(|b| b.is_ascii_digit()) {
        return None;
    }
    seg.parse::<usize>().ok()
}
/// True when the `anchors.len()` lines immediately before `idx` equal
/// `anchors` in order; an empty anchor list always matches.
fn before_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    // Not enough preceding lines to hold the anchor window.
    let Some(start) = idx.checked_sub(anchors.len()) else {
        return false;
    };
    lines[start..idx]
        .iter()
        .zip(anchors)
        .all(|(line, anchor)| line == anchor)
}
/// True when the `anchors.len()` lines immediately after `idx` equal
/// `anchors` in order; an empty anchor list always matches.
fn after_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    // `get` returns None when the window runs past the end of `lines`.
    match lines.get(idx + 1..idx + 1 + anchors.len()) {
        Some(window) => window.iter().zip(anchors).all(|(line, anchor)| line == anchor),
        None => false,
    }
}
fn json_value_to_toml(json: serde_json::Value) -> toml_edit::Value {
match json {
serde_json::Value::String(s) => str_value(&s),
serde_json::Value::Bool(b) => bool_value(b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
toml_edit::Value::from(i)
} else if let Some(f) = n.as_f64() {
toml_edit::Value::from(f)
} else {
toml_edit::Value::from(n.to_string())
}
}
serde_json::Value::Array(arr) => {
let mut out = toml_edit::Array::new();
for v in arr {
match v {
serde_json::Value::String(s) => out.push(s.as_str()),
serde_json::Value::Bool(b) => out.push(b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
out.push(i);
} else if let Some(f) = n.as_f64() {
out.push(f);
}
}
_ => {}
}
}
toml_edit::Value::from(out)
}
_ => toml_edit::Value::from(""),
}
}
fn str_value(s: &str) -> toml_edit::Value {
toml_edit::Value::from(s)
}
fn bool_value(b: bool) -> toml_edit::Value {
toml_edit::Value::from(b)
}
/// Locate the dependency entry addressed by `toml_path`.
///
/// Two shapes are supported (extra trailing segments are ignored):
/// - `[<table>, <dep>]`, e.g. `["dependencies", "serde"]`
/// - `["target", <cfg>, <table>, <dep>]` for target-specific tables.
fn get_dep_item_mut<'a>(doc: &'a mut DocumentMut, toml_path: &[String]) -> Option<&'a mut Item> {
    if toml_path.first().map(String::as_str) == Some("target") {
        // target.<cfg>.<table>.<dep>
        let [_, cfg, table, dep, ..] = toml_path else {
            return None;
        };
        return doc
            .get_mut("target")?
            .as_table_mut()?
            .get_mut(cfg)?
            .as_table_mut()?
            .get_mut(table)?
            .as_table_mut()?
            .get_mut(dep);
    }
    let [table, dep, ..] = toml_path else {
        return None;
    };
    doc.get_mut(table.as_str())?.as_table_mut()?.get_mut(dep)
}
/// Translate an apply outcome into a policy error for gating callers.
///
/// Dry runs never block. Checks, in order: precondition mismatches; then
/// "some ops blocked and nothing applied" (classified as a safety-gate
/// denial when any blocked reason mentions "safety", otherwise a generic
/// policy denial); then any failed ops.
pub fn check_policy_block(apply: &BuildfixApply, was_dry_run: bool) -> Option<PolicyBlockError> {
    if was_dry_run {
        return None;
    }
    if !apply.preconditions.verified {
        return Some(PolicyBlockError::PreconditionMismatch {
            message: "precondition mismatch".to_string(),
        });
    }
    let blocked_count = apply
        .results
        .iter()
        .filter(|r| r.status == ApplyStatus::Blocked)
        .count();
    if blocked_count > 0 && apply.summary.applied == 0 {
        // Classification keys off the human-readable reason string (e.g.
        // "safety gate"), not blocked_reason_token.
        let safety_denied = apply
            .results
            .iter()
            .filter(|r| r.status == ApplyStatus::Blocked)
            .filter_map(|r| r.blocked_reason.as_deref())
            .any(|reason| reason.contains("safety"));
        if safety_denied {
            return Some(PolicyBlockError::SafetyGateDenial {
                message: format!("{} op(s) blocked by safety gate", blocked_count),
            });
        }
        return Some(PolicyBlockError::PolicyDenial {
            message: format!("{} op(s) blocked by policy", blocked_count),
        });
    }
    if apply.summary.failed > 0 {
        return Some(PolicyBlockError::PreconditionMismatch {
            message: format!("{} op(s) failed", apply.summary.failed),
        });
    }
    None
}