1mod error;
9
10pub use error::{EditError, EditResult, PolicyBlockError};
11
12use anyhow::Context;
13use buildfix_hash::sha256_hex;
14use buildfix_types::apply::{
15 ApplyFile, ApplyPreconditions, ApplyRepoInfo, ApplyResult, ApplyStatus, ApplySummary,
16 BuildfixApply, PlanRef, PreconditionMismatch,
17};
18use buildfix_types::ops::{OpKind, SafetyClass};
19use buildfix_types::plan::{BuildfixPlan, FilePrecondition, PlanOp};
20use buildfix_types::receipt::ToolInfo;
21use camino::{Utf8Path, Utf8PathBuf};
22use diffy::PatchFormatter;
23use fs_err as fs;
24use std::collections::{BTreeMap, BTreeSet, HashMap};
25use toml_edit::{DocumentMut, InlineTable, Item, value};
26use uuid::Uuid;
27
/// Knobs controlling how a plan is executed and written back.
#[derive(Debug, Clone, Default)]
pub struct ApplyOptions {
    /// Compute results and diffs but never write files to disk.
    pub dry_run: bool,
    /// Permit ops classified `SafetyClass::Guarded`.
    pub allow_guarded: bool,
    /// Permit ops classified `SafetyClass::Unsafe`.
    pub allow_unsafe: bool,
    /// Copy pre-edit file contents into `backup_dir` before writing.
    pub backup_enabled: bool,
    /// Destination for backups; backups are skipped when `None`.
    pub backup_dir: Option<Utf8PathBuf>,
    /// Suffix appended to each backed-up file's path inside `backup_dir`.
    pub backup_suffix: String,
    /// User-supplied values for ops that declare required params.
    pub params: HashMap<String, String>,
}
41
/// Options for [`attach_preconditions`].
#[derive(Debug, Clone, Default)]
pub struct AttachPreconditionsOptions {
    /// Also record the current git HEAD sha on the plan (best-effort).
    pub include_git_head: bool,
}
48
49pub fn get_head_sha(repo_root: &Utf8Path) -> anyhow::Result<String> {
51 let output = std::process::Command::new("git")
52 .arg("rev-parse")
53 .arg("HEAD")
54 .current_dir(repo_root)
55 .output()
56 .context("failed to run git rev-parse HEAD")?;
57
58 if !output.status.success() {
59 let stderr = String::from_utf8_lossy(&output.stderr);
60 anyhow::bail!("git rev-parse HEAD failed: {}", stderr.trim());
61 }
62
63 Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
64}
65
66pub fn is_working_tree_dirty(repo_root: &Utf8Path) -> anyhow::Result<bool> {
68 let status_output = std::process::Command::new("git")
69 .args(["status", "--porcelain"])
70 .current_dir(repo_root)
71 .output()
72 .context("failed to run git status")?;
73
74 if !status_output.status.success() {
75 let stderr = String::from_utf8_lossy(&status_output.stderr);
76 anyhow::bail!("git status failed: {}", stderr.trim());
77 }
78
79 Ok(!status_output.stdout.is_empty())
80}
81
82pub fn attach_preconditions(
86 repo_root: &Utf8Path,
87 plan: &mut BuildfixPlan,
88 opts: &AttachPreconditionsOptions,
89) -> anyhow::Result<()> {
90 let mut files = BTreeSet::new();
91 for op in &plan.ops {
92 files.insert(op.target.path.clone());
93 }
94
95 let mut pres = Vec::new();
96 for path in files {
97 let abs = abs_path(repo_root, Utf8Path::new(&path));
98 let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
99 let sha = sha256_hex(&bytes);
100 pres.push(FilePrecondition { path, sha256: sha });
101 }
102 plan.preconditions.files = pres;
103
104 if opts.include_git_head
105 && let Ok(sha) = get_head_sha(repo_root)
106 {
107 plan.preconditions.head_sha = Some(sha);
108 }
109
110 if let Ok(dirty) = is_working_tree_dirty(repo_root) {
111 plan.preconditions.dirty = Some(dirty);
112 }
113
114 Ok(())
115}
116
117pub fn preview_patch(
118 repo_root: &Utf8Path,
119 plan: &BuildfixPlan,
120 opts: &ApplyOptions,
121) -> anyhow::Result<String> {
122 let outcome = execute_plan(repo_root, plan, opts, false)?;
123 Ok(render_patch(&outcome.before, &outcome.after))
124}
125
126pub fn apply_plan(
128 repo_root: &Utf8Path,
129 plan: &BuildfixPlan,
130 tool: ToolInfo,
131 opts: &ApplyOptions,
132) -> anyhow::Result<(BuildfixApply, String)> {
133 let mut outcome = execute_plan(repo_root, plan, opts, true)?;
134 let patch = render_patch(&outcome.before, &outcome.after);
135
136 if !opts.dry_run && outcome.preconditions.verified {
137 let changed_files = changed_files(&outcome.before, &outcome.after);
138 if !changed_files.is_empty() {
139 if opts.backup_enabled {
140 create_backups(
141 repo_root,
142 &changed_files,
143 &outcome.before,
144 opts,
145 &mut outcome.results,
146 )?;
147 }
148 write_changed_files(repo_root, &changed_files, &outcome.after)?;
149 }
150 }
151
152 let repo_info = ApplyRepoInfo {
153 root: repo_root.to_string(),
154 head_sha_before: None,
155 head_sha_after: None,
156 dirty_before: None,
157 dirty_after: None,
158 };
159
160 let plan_ref = PlanRef {
161 path: "artifacts/buildfix/plan.json".to_string(),
162 sha256: None,
163 };
164
165 let mut apply = BuildfixApply::new(tool, repo_info, plan_ref);
166 apply.preconditions = outcome.preconditions;
167 apply.results = outcome.results;
168 apply.summary = outcome.summary;
169
170 Ok((apply, patch))
171}
172
/// Intermediate result of running a plan entirely in memory.
struct ExecuteOutcome {
    /// File contents keyed by path before any op ran.
    before: BTreeMap<Utf8PathBuf, String>,
    /// File contents after all allowed ops ran (not yet written to disk).
    after: BTreeMap<Utf8PathBuf, String>,
    /// Per-op outcome records, in plan order.
    results: Vec<ApplyResult>,
    /// Aggregate counters across all ops.
    summary: ApplySummary,
    /// Precondition verification state for the run.
    preconditions: ApplyPreconditions,
}
180
/// Run every op of `plan` against in-memory copies of the touched files.
///
/// Nothing is written to disk here; callers persist `after` themselves.
/// When `verify_preconditions` is set and any precondition fails, every
/// otherwise-allowed op is reported as blocked and `after == before`.
fn execute_plan(
    repo_root: &Utf8Path,
    plan: &BuildfixPlan,
    opts: &ApplyOptions,
    verify_preconditions: bool,
) -> anyhow::Result<ExecuteOutcome> {
    // Resolve policy (blocked state, safety gates, params) for each op and
    // collect the set of files any allowed op will touch.
    let mut touched_files = BTreeSet::new();
    let mut resolved_ops: Vec<ResolvedOp> = Vec::new();

    for op in &plan.ops {
        let resolved = resolve_op(op, opts);
        if resolved.allowed {
            touched_files.insert(Utf8PathBuf::from(&op.target.path));
        }
        resolved_ops.push(resolved);
    }

    // Snapshot current contents; missing/unreadable files default to "".
    let mut before: BTreeMap<Utf8PathBuf, String> = BTreeMap::new();
    for p in &touched_files {
        let abs = abs_path(repo_root, p);
        let contents = fs::read_to_string(&abs).unwrap_or_default();
        before.insert(p.clone(), contents);
    }

    let mut preconditions = ApplyPreconditions {
        verified: true,
        mismatches: vec![],
    };

    if verify_preconditions
        && !check_preconditions(repo_root, plan, &touched_files, &mut preconditions)?
    {
        // Precondition failure: every otherwise-allowed op becomes blocked
        // and no content changes are produced.
        let mut results = Vec::new();
        let mut summary = ApplySummary::default();

        for resolved in &resolved_ops {
            if !resolved.allowed {
                continue;
            }
            summary.blocked += 1;
            results.push(ApplyResult {
                op_id: resolved.op.id.clone(),
                status: ApplyStatus::Blocked,
                message: Some("precondition mismatch".to_string()),
                blocked_reason: Some("precondition mismatch".to_string()),
                blocked_reason_token: Some(
                    buildfix_types::plan::blocked_tokens::PRECONDITION_MISMATCH.to_string(),
                ),
                files: vec![],
            });
        }

        return Ok(ExecuteOutcome {
            before: before.clone(),
            after: before,
            results,
            summary,
            preconditions,
        });
    }

    // Apply ops in plan order, threading contents through `current` so later
    // ops observe earlier ops' edits to the same file.
    let mut current = before.clone();
    let mut results: Vec<ApplyResult> = Vec::new();
    let mut summary = ApplySummary::default();

    for resolved in &resolved_ops {
        let op = resolved.op;

        if !resolved.allowed {
            // Policy-blocked op: record the resolution's reason verbatim.
            let mut res = ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Blocked,
                message: None,
                blocked_reason: resolved.blocked_reason.clone(),
                blocked_reason_token: resolved.blocked_reason_token.clone(),
                files: vec![],
            };
            if let Some(msg) = &resolved.blocked_message {
                res.message = Some(msg.clone());
            }
            summary.blocked += 1;
            results.push(res);
            continue;
        }

        summary.attempted += 1;

        let file = Utf8PathBuf::from(&op.target.path);
        let old = current.get(&file).cloned().unwrap_or_default();

        let new = apply_op_to_content(&old, &resolved.kind)
            .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;

        current.insert(file.clone(), new.clone());

        // Only record a file entry when the op actually changed content.
        let mut files = Vec::new();
        if old != new {
            files.push(ApplyFile {
                path: op.target.path.clone(),
                sha256_before: Some(sha256_hex(old.as_bytes())),
                sha256_after: Some(sha256_hex(new.as_bytes())),
                backup_path: None,
            });
        }

        if opts.dry_run {
            // Dry runs compute the edit but never count it as applied.
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Skipped,
                message: Some("dry-run: not written".to_string()),
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        } else {
            summary.applied += 1;
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Applied,
                message: None,
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        }
    }

    summary.files_modified = changed_files(&before, &current).len() as u64;

    Ok(ExecuteOutcome {
        before,
        after: current,
        results,
        summary,
        preconditions,
    })
}
319
/// A plan op after policy resolution: parameter substitution plus
/// blocked-state and safety gating.
struct ResolvedOp<'a> {
    /// The original op from the plan.
    op: &'a PlanOp,
    /// Op kind with any supplied params filled in.
    kind: OpKind,
    /// Whether the op may be applied.
    allowed: bool,
    /// Human-readable reason when not allowed.
    blocked_reason: Option<String>,
    /// Stable machine token for the block reason.
    blocked_reason_token: Option<String>,
    /// Optional extra message surfaced as the result's `message`.
    blocked_message: Option<String>,
}
328
329fn resolve_op<'a>(op: &'a PlanOp, opts: &ApplyOptions) -> ResolvedOp<'a> {
330 if op.blocked {
331 if !op.params_required.is_empty() {
332 let (kind, missing) = resolve_params(op, &opts.params);
333 if missing.is_empty() {
334 return ResolvedOp {
335 op,
336 kind,
337 allowed: allowed_by_safety(opts, op.safety),
338 blocked_reason: None,
339 blocked_reason_token: None,
340 blocked_message: None,
341 };
342 }
343 let blocked_reason = op
344 .blocked_reason
345 .clone()
346 .or(Some("missing params".to_string()));
347 return ResolvedOp {
348 op,
349 kind: op.kind.clone(),
350 allowed: false,
351 blocked_reason,
352 blocked_reason_token: op.blocked_reason_token.clone(),
353 blocked_message: None,
354 };
355 }
356
357 let blocked_reason = op.blocked_reason.clone().or(Some("blocked".to_string()));
358 return ResolvedOp {
359 op,
360 kind: op.kind.clone(),
361 allowed: false,
362 blocked_reason,
363 blocked_reason_token: op.blocked_reason_token.clone(),
364 blocked_message: None,
365 };
366 }
367
368 if !allowed_by_safety(opts, op.safety) {
369 use buildfix_types::plan::blocked_tokens;
370 let token = match op.safety {
371 SafetyClass::Guarded => blocked_tokens::SAFETY_GUARDED_NOT_ALLOWED,
372 SafetyClass::Unsafe => blocked_tokens::SAFETY_UNSAFE_NOT_ALLOWED,
373 _ => blocked_tokens::SAFETY_GUARDED_NOT_ALLOWED,
374 };
375 return ResolvedOp {
376 op,
377 kind: op.kind.clone(),
378 allowed: false,
379 blocked_reason: Some("safety gate".to_string()),
380 blocked_reason_token: Some(token.to_string()),
381 blocked_message: Some("safety class not allowed".to_string()),
382 };
383 }
384
385 let (kind, missing) = resolve_params(op, &opts.params);
386 if !missing.is_empty() {
387 return ResolvedOp {
388 op,
389 kind,
390 allowed: false,
391 blocked_reason: Some(format!("missing params: {}", missing.join(", "))),
392 blocked_reason_token: Some(
393 buildfix_types::plan::blocked_tokens::MISSING_PARAMS.to_string(),
394 ),
395 blocked_message: None,
396 };
397 }
398
399 ResolvedOp {
400 op,
401 kind,
402 allowed: true,
403 blocked_reason: None,
404 blocked_reason_token: None,
405 blocked_message: None,
406 }
407}
408
409fn resolve_params(op: &PlanOp, params: &HashMap<String, String>) -> (OpKind, Vec<String>) {
410 if op.params_required.is_empty() {
411 return (op.kind.clone(), Vec::new());
412 }
413
414 let mut missing = Vec::new();
415 let mut kind = op.kind.clone();
416
417 for key in &op.params_required {
418 if let Some(value) = params.get(key) {
419 fill_op_param(&mut kind, key, value);
420 } else {
421 missing.push(key.clone());
422 }
423 }
424
425 (kind, missing)
426}
427
428fn fill_op_param(kind: &mut OpKind, key: &str, value: &str) {
429 let OpKind::TomlTransform { rule_id, args } = kind else {
430 return;
431 };
432
433 let mut map = match args.take() {
434 Some(serde_json::Value::Object(m)) => m,
435 _ => serde_json::Map::new(),
436 };
437
438 match (rule_id.as_str(), key) {
439 ("set_package_rust_version", "rust_version") => {
440 map.insert(
441 key.to_string(),
442 serde_json::Value::String(value.to_string()),
443 );
444 }
445 ("set_package_license", "license") => {
446 map.insert(
447 key.to_string(),
448 serde_json::Value::String(value.to_string()),
449 );
450 }
451 ("ensure_path_dep_has_version", "version") => {
452 map.insert(
453 key.to_string(),
454 serde_json::Value::String(value.to_string()),
455 );
456 }
457 _ => {
458 map.insert(
459 key.to_string(),
460 serde_json::Value::String(value.to_string()),
461 );
462 }
463 }
464
465 *args = Some(serde_json::Value::Object(map));
466}
467
468fn check_preconditions(
469 repo_root: &Utf8Path,
470 plan: &BuildfixPlan,
471 touched_files: &BTreeSet<Utf8PathBuf>,
472 preconditions: &mut ApplyPreconditions,
473) -> anyhow::Result<bool> {
474 let file_map = plan
475 .preconditions
476 .files
477 .iter()
478 .map(|f| (f.path.clone(), f.sha256.clone()))
479 .collect::<BTreeMap<_, _>>();
480
481 for file in touched_files {
482 let Some(expected) = file_map.get(&file.to_string()) else {
483 continue;
484 };
485 let abs = abs_path(repo_root, file);
486 let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
487 let actual = sha256_hex(&bytes);
488 if &actual != expected {
489 preconditions.verified = false;
490 preconditions.mismatches.push(PreconditionMismatch {
491 path: file.to_string(),
492 expected: expected.clone(),
493 actual,
494 });
495 }
496 }
497
498 if let Some(expected) = &plan.preconditions.head_sha
499 && let Ok(actual) = get_head_sha(repo_root)
500 && &actual != expected
501 {
502 preconditions.verified = false;
503 preconditions.mismatches.push(PreconditionMismatch {
504 path: "<git_head>".to_string(),
505 expected: expected.clone(),
506 actual,
507 });
508 }
509
510 Ok(preconditions.verified)
511}
512
513fn changed_files(
514 before: &BTreeMap<Utf8PathBuf, String>,
515 after: &BTreeMap<Utf8PathBuf, String>,
516) -> BTreeSet<Utf8PathBuf> {
517 let mut changed = BTreeSet::new();
518 for (path, old) in before {
519 let new = after.get(path).unwrap_or(old);
520 if old != new {
521 changed.insert(path.clone());
522 }
523 }
524 changed
525}
526
527fn create_backups(
528 _repo_root: &Utf8Path,
529 changed_files: &BTreeSet<Utf8PathBuf>,
530 before: &BTreeMap<Utf8PathBuf, String>,
531 opts: &ApplyOptions,
532 results: &mut [ApplyResult],
533) -> anyhow::Result<()> {
534 let Some(ref backup_dir) = opts.backup_dir else {
535 return Ok(());
536 };
537
538 for path in changed_files {
539 let contents = before.get(path).cloned().unwrap_or_default();
540 let backup_rel = format!("{}{}", path, opts.backup_suffix);
541 let backup_path = backup_dir.join(backup_rel);
542
543 if let Some(parent) = backup_path.parent() {
544 fs::create_dir_all(parent).with_context(|| format!("create backup dir {}", parent))?;
545 }
546
547 fs::write(&backup_path, &contents)
548 .with_context(|| format!("write backup {}", backup_path))?;
549
550 for result in results.iter_mut() {
552 for file in &mut result.files {
553 if file.path == *path {
554 file.backup_path = Some(backup_path.to_string());
555 }
556 }
557 }
558 }
559
560 Ok(())
561}
562
563fn write_changed_files(
564 repo_root: &Utf8Path,
565 changed_files: &BTreeSet<Utf8PathBuf>,
566 after: &BTreeMap<Utf8PathBuf, String>,
567) -> anyhow::Result<()> {
568 for path in changed_files {
569 let abs = abs_path(repo_root, path);
570 let new_contents = after.get(path).cloned().unwrap_or_default();
571 write_atomic(&abs, &new_contents)?;
572 }
573 Ok(())
574}
575
576fn write_atomic(path: &Utf8Path, contents: &str) -> anyhow::Result<()> {
577 let parent = path.parent().unwrap_or_else(|| Utf8Path::new("."));
578 let tmp_name = format!(
579 ".buildfix-tmp-{}",
580 Uuid::new_v4().to_string().replace('-', "")
581 );
582 let tmp_path = parent.join(tmp_name);
583 fs::write(&tmp_path, contents).with_context(|| format!("write {}", tmp_path))?;
584 if path.exists() {
585 let _ = fs::remove_file(path);
586 }
587 fs::rename(&tmp_path, path).with_context(|| format!("rename {} -> {}", tmp_path, path))?;
588 Ok(())
589}
590
591fn allowed_by_safety(opts: &ApplyOptions, safety: SafetyClass) -> bool {
592 match safety {
593 SafetyClass::Safe => true,
594 SafetyClass::Guarded => opts.allow_guarded,
595 SafetyClass::Unsafe => opts.allow_unsafe,
596 }
597}
598
599fn abs_path(repo_root: &Utf8Path, rel: &Utf8Path) -> Utf8PathBuf {
600 if rel.is_absolute() {
601 rel.to_path_buf()
602 } else {
603 repo_root.join(rel)
604 }
605}
606
607fn render_patch(
608 before: &BTreeMap<Utf8PathBuf, String>,
609 after: &BTreeMap<Utf8PathBuf, String>,
610) -> String {
611 let mut out = String::new();
612 let formatter = PatchFormatter::new();
613
614 for (path, old) in before {
615 let new = after.get(path).unwrap_or(old);
616 if old == new {
617 continue;
618 }
619
620 out.push_str(&format!("diff --git a/{0} b/{0}\n", path));
621 out.push_str(&format!("--- a/{0}\n+++ b/{0}\n", path));
622
623 let patch = diffy::create_patch(old, new);
624 out.push_str(&formatter.fmt_patch(&patch).to_string());
625 if !out.ends_with('\n') {
626 out.push('\n');
627 }
628 }
629
630 out
631}
632
/// Apply a single op kind to file `contents`, returning the edited text.
///
/// JSON, YAML and anchored-text ops dispatch to dedicated helpers; all
/// remaining kinds are TOML edits performed on a `toml_edit` document so the
/// file's formatting and comments survive. Input that fails to parse as TOML
/// is treated as an empty document.
pub fn apply_op_to_content(contents: &str, kind: &OpKind) -> anyhow::Result<String> {
    // Non-TOML kinds return early so the TOML parse below never runs.
    match kind {
        OpKind::JsonSet { json_path, value } => {
            return apply_json_set(contents, json_path, value.clone());
        }
        OpKind::JsonRemove { json_path } => {
            return apply_json_remove(contents, json_path);
        }
        OpKind::YamlSet { yaml_path, value } => {
            return apply_yaml_set(contents, yaml_path, value.clone());
        }
        OpKind::YamlRemove { yaml_path } => {
            return apply_yaml_remove(contents, yaml_path);
        }
        OpKind::TextReplaceAnchored {
            find,
            replace,
            anchor_before,
            anchor_after,
            max_replacements,
        } => {
            return apply_text_replace_anchored(
                contents,
                find,
                replace,
                anchor_before,
                anchor_after,
                *max_replacements,
            );
        }
        _ => {}
    }

    // Parse leniently: unparseable input becomes an empty TOML document.
    let mut doc = contents
        .parse::<DocumentMut>()
        .unwrap_or_else(|_| DocumentMut::new());

    match kind {
        OpKind::TomlSet { toml_path, value } => {
            set_toml_path(&mut doc, toml_path, value.clone());
        }
        OpKind::TomlRemove { toml_path } => {
            remove_toml_path(&mut doc, toml_path);
        }
        OpKind::JsonSet { .. }
        | OpKind::JsonRemove { .. }
        | OpKind::YamlSet { .. }
        | OpKind::YamlRemove { .. }
        | OpKind::TextReplaceAnchored { .. } => unreachable!("handled above"),
        OpKind::TomlTransform { rule_id, args } => match rule_id.as_str() {
            // Force `[workspace] resolver = "2"`.
            "ensure_workspace_resolver_v2" => {
                doc["workspace"]["resolver"] = value("2");
            }
            // `[package] rust-version` from the required `rust_version` arg.
            "set_package_rust_version" => {
                let rust_version = args
                    .as_ref()
                    .and_then(|v| v.get("rust_version"))
                    .and_then(|v| v.as_str())
                    .context("missing rust_version param")?;
                doc["package"]["rust-version"] = value(rust_version);
            }
            // `[package] edition` from the required `edition` arg.
            "set_package_edition" => {
                let edition = args
                    .as_ref()
                    .and_then(|v| v.get("edition"))
                    .and_then(|v| v.as_str())
                    .context("missing edition param")?;
                doc["package"]["edition"] = value(edition);
            }
            // `[package] license` from the required `license` arg.
            "set_package_license" => {
                let license = args
                    .as_ref()
                    .and_then(|v| v.get("license"))
                    .and_then(|v| v.as_str())
                    .context("missing license param")?;
                doc["package"]["license"] = value(license);
            }
            // Add `version = ...` to a path dependency that lacks one.
            // The edit only happens when the dep's `path` field matches the
            // expected `dep_path`; otherwise the document is returned
            // unchanged. An existing version is never overwritten.
            "ensure_path_dep_has_version" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
                let dep_path = args
                    .get("dep_path")
                    .and_then(|v| v.as_str())
                    .context("missing dep_path")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version param")?;

                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;

                // Dep may be written as an inline table or a full table;
                // both forms are handled the same way.
                if let Some(inline) = dep_item.as_inline_table_mut() {
                    let current_path = inline.get("path").and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if inline.get("version").and_then(|v| v.as_str()).is_none() {
                        inline.insert("version", str_value(version));
                    }
                } else if let Some(tbl) = dep_item.as_table_mut() {
                    let current_path = tbl
                        .get("path")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if tbl
                        .get("version")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str())
                        .is_none()
                    {
                        tbl["version"] = value(version);
                    }
                }
            }
            // Ensure `[workspace.dependencies.<dep>]` carries a version.
            // `path`/`git` dependencies are left alone.
            "ensure_workspace_dependency_version" => {
                let args = args.as_ref().context("missing args")?;
                let dep = args
                    .get("dep")
                    .and_then(|v| v.as_str())
                    .context("missing dep")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version")?;

                let ws_deps = &mut doc["workspace"]["dependencies"][dep];
                if ws_deps.is_none() {
                    // Absent: write the shorthand `dep = "version"` form.
                    *ws_deps = value(version);
                } else if let Some(existing_inline) = ws_deps.as_inline_table_mut() {
                    if existing_inline.get("path").is_none() && existing_inline.get("git").is_none()
                    {
                        existing_inline.insert("version", str_value(version));
                    }
                } else if let Some(existing_tbl) = ws_deps.as_table_mut() {
                    if existing_tbl.get("path").is_none() && existing_tbl.get("git").is_none() {
                        existing_tbl["version"] = value(version);
                    }
                } else if ws_deps.is_value() {
                    // Plain string (or other scalar) version: replace it.
                    *ws_deps = value(version);
                }
            }
            // Rewrite a dependency to `{ workspace = true, ... }`, keeping
            // the fields listed under `preserved` (package, optional,
            // default-features, features).
            "use_workspace_dependency" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();

                let preserved = args.get("preserved");
                let mut inline = InlineTable::new();
                inline.insert("workspace", bool_value(true));
                if let Some(p) = preserved {
                    if let Some(pkg) = p.get("package").and_then(|v| v.as_str()) {
                        inline.insert("package", str_value(pkg));
                    }
                    if let Some(opt) = p.get("optional").and_then(|v| v.as_bool()) {
                        inline.insert("optional", bool_value(opt));
                    }
                    if let Some(df) = p.get("default_features").and_then(|v| v.as_bool()) {
                        inline.insert("default-features", bool_value(df));
                    }
                    if let Some(features) = p.get("features").and_then(|v| v.as_array()) {
                        let mut arr = toml_edit::Array::new();
                        for f in features {
                            if let Some(s) = f.as_str() {
                                arr.push(s);
                            }
                        }
                        inline.insert("features", toml_edit::Value::from(arr));
                    }
                }

                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;
                *dep_item = value(inline);
            }
            // Unknown rule ids are a no-op: the document is returned as-is.
            _ => {
            }
        },
    }

    Ok(doc.to_string())
}
837
838pub fn execute_plan_from_contents(
847 before: &BTreeMap<Utf8PathBuf, String>,
848 plan: &BuildfixPlan,
849 opts: &ApplyOptions,
850) -> anyhow::Result<BTreeMap<Utf8PathBuf, String>> {
851 let mut current = before.clone();
852
853 for op in &plan.ops {
854 let resolved = resolve_op(op, opts);
855 if !resolved.allowed {
856 continue;
857 }
858
859 let file = Utf8PathBuf::from(&op.target.path);
860 let old = current.get(&file).cloned().unwrap_or_default();
861 let new = apply_op_to_content(&old, &resolved.kind)
862 .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;
863 current.insert(file, new);
864 }
865
866 let mut changed = BTreeMap::new();
868 for (path, new_content) in ¤t {
869 let old_content = before.get(path).map(|s| s.as_str()).unwrap_or("");
870 if new_content != old_content {
871 changed.insert(path.clone(), new_content.clone());
872 }
873 }
874
875 Ok(changed)
876}
877
/// Set the item at `toml_path` in `doc` to `value`, creating intermediate
/// tables as needed. An empty path is a no-op.
fn set_toml_path(doc: &mut DocumentMut, toml_path: &[String], value: serde_json::Value) {
    if toml_path.is_empty() {
        return;
    }
    let mut current = doc.as_table_mut();
    for key in &toml_path[..toml_path.len() - 1] {
        let entry = current.entry(key).or_insert(toml_edit::table());
        // Overwrite scalar/array items so the descent always has a table.
        if entry.as_table().is_none() {
            *entry = toml_edit::table();
        }
        let Some(table) = entry.as_table_mut() else {
            return;
        };
        current = table;
    }
    let last = &toml_path[toml_path.len() - 1];
    current[last] = Item::Value(json_value_to_toml(value));
}
896
897fn remove_toml_path(doc: &mut DocumentMut, toml_path: &[String]) {
898 if toml_path.is_empty() {
899 return;
900 }
901 let mut current = doc.as_table_mut();
902 for key in &toml_path[..toml_path.len() - 1] {
903 let Some(tbl) = current.get_mut(key).and_then(|i| i.as_table_mut()) else {
904 return;
905 };
906 current = tbl;
907 }
908 let last = &toml_path[toml_path.len() - 1];
909 current.remove(last);
910}
911
912fn apply_text_replace_anchored(
913 contents: &str,
914 find: &str,
915 replace: &str,
916 anchor_before: &[String],
917 anchor_after: &[String],
918 max_replacements: Option<u64>,
919) -> anyhow::Result<String> {
920 let limit = max_replacements.unwrap_or(1);
921 if limit == 0 {
922 anyhow::bail!("max_replacements must be >= 1");
923 }
924
925 let has_crlf = contents.contains("\r\n");
926 let mut lines: Vec<String> = contents.lines().map(|l| l.to_string()).collect();
927 let trailing_newline = contents.ends_with('\n');
928
929 let mut matches = Vec::new();
930
931 for idx in 0..lines.len() {
932 if lines[idx] != find {
933 continue;
934 }
935
936 if !before_context_matches(&lines, idx, anchor_before) {
937 continue;
938 }
939 if !after_context_matches(&lines, idx, anchor_after) {
940 continue;
941 }
942
943 matches.push(idx);
944 }
945
946 if matches.is_empty() {
947 return Ok(contents.to_string());
948 }
949
950 if matches.len() as u64 > limit {
951 anyhow::bail!(
952 "anchored replace matched {} lines, exceeding max_replacements {}",
953 matches.len(),
954 limit
955 );
956 }
957
958 for idx in matches {
959 lines[idx] = replace.to_string();
960 }
961
962 let line_ending = if has_crlf { "\r\n" } else { "\n" };
963 let mut out = lines.join(line_ending);
964 if trailing_newline {
965 out.push_str(line_ending);
966 }
967
968 Ok(out)
969}
970
971fn apply_json_set(
972 contents: &str,
973 json_path: &[String],
974 value: serde_json::Value,
975) -> anyhow::Result<String> {
976 let trailing_newline = contents.ends_with('\n');
977 let mut root = parse_or_init_json(contents)?;
978 set_json_path(&mut root, json_path, value);
979 serialize_json_with_newline(&root, trailing_newline)
980}
981
982fn apply_json_remove(contents: &str, json_path: &[String]) -> anyhow::Result<String> {
983 let trailing_newline = contents.ends_with('\n');
984 let mut root = parse_or_init_json(contents)?;
985 remove_json_path(&mut root, json_path);
986 serialize_json_with_newline(&root, trailing_newline)
987}
988
989fn parse_or_init_json(contents: &str) -> anyhow::Result<serde_json::Value> {
990 if contents.trim().is_empty() {
991 return Ok(serde_json::Value::Object(serde_json::Map::new()));
992 }
993 serde_json::from_str(contents).context("parse json")
994}
995
/// Set the value at `path` inside a JSON document, creating containers.
///
/// Segments that parse as indices (via `parse_index_segment`) address array
/// elements; arrays are padded up to the target index. Non-matching
/// container types along the way are replaced. An empty path replaces the
/// whole document.
fn set_json_path(root: &mut serde_json::Value, path: &[String], value: serde_json::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }

    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);

        if let Some(i) = index_seg {
            // Padding value: null at the leaf, otherwise a container shaped
            // for the next segment.
            let default_next = if last {
                serde_json::Value::Null
            } else {
                default_json_container(&path[idx + 1])
            };
            let arr = ensure_json_array(current);
            while arr.len() <= i {
                arr.push(default_next.clone());
            }
            if last {
                arr[i] = value;
                return;
            }
            current = &mut arr[i];
            continue;
        }

        if last {
            let obj = ensure_json_object(current);
            obj.insert(seg.clone(), value);
            return;
        }

        // Descend, creating the next container when the key is absent.
        let default_next = default_json_container(&path[idx + 1]);
        let obj = ensure_json_object(current);
        current = obj.entry(seg.clone()).or_insert(default_next);
    }
}
1036
/// Remove the value at `path`; returns whether anything was removed.
///
/// Missing segments or container-type mismatches along the way return
/// `false`. An empty path nulls out the whole document.
fn remove_json_path(root: &mut serde_json::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_json::Value::Null;
        return true;
    }

    // Walk down to the parent of the final segment.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(arr) = current.as_array_mut() else {
                return false;
            };
            if i >= arr.len() {
                return false;
            }
            current = &mut arr[i];
            continue;
        }

        let Some(obj) = current.as_object_mut() else {
            return false;
        };
        let Some(next) = obj.get_mut(seg) else {
            return false;
        };
        current = next;
    }

    // Remove the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(arr) = current.as_array_mut() else {
            return false;
        };
        if i >= arr.len() {
            return false;
        }
        arr.remove(i);
        return true;
    }

    let Some(obj) = current.as_object_mut() else {
        return false;
    };
    obj.remove(last).is_some()
}
1082
1083fn default_json_container(next_seg: &str) -> serde_json::Value {
1084 if parse_index_segment(next_seg).is_some() {
1085 serde_json::Value::Array(Vec::new())
1086 } else {
1087 serde_json::Value::Object(serde_json::Map::new())
1088 }
1089}
1090
1091fn ensure_json_object(
1092 value: &mut serde_json::Value,
1093) -> &mut serde_json::Map<String, serde_json::Value> {
1094 if !value.is_object() {
1095 *value = serde_json::Value::Object(serde_json::Map::new());
1096 }
1097 value.as_object_mut().expect("json object")
1098}
1099
1100fn ensure_json_array(value: &mut serde_json::Value) -> &mut Vec<serde_json::Value> {
1101 if !value.is_array() {
1102 *value = serde_json::Value::Array(Vec::new());
1103 }
1104 value.as_array_mut().expect("json array")
1105}
1106
1107fn serialize_json_with_newline(
1108 value: &serde_json::Value,
1109 trailing_newline: bool,
1110) -> anyhow::Result<String> {
1111 let mut out = serde_json::to_string_pretty(value).context("serialize json")?;
1112 if trailing_newline && !out.ends_with('\n') {
1113 out.push('\n');
1114 }
1115 Ok(out)
1116}
1117
1118fn apply_yaml_set(
1119 contents: &str,
1120 yaml_path: &[String],
1121 value: serde_json::Value,
1122) -> anyhow::Result<String> {
1123 let trailing_newline = contents.ends_with('\n');
1124 let mut root = parse_or_init_yaml(contents)?;
1125 let yaml_value = serde_yaml::to_value(value).context("convert value to yaml")?;
1126 set_yaml_path(&mut root, yaml_path, yaml_value);
1127 serialize_yaml_with_newline(&root, trailing_newline)
1128}
1129
1130fn apply_yaml_remove(contents: &str, yaml_path: &[String]) -> anyhow::Result<String> {
1131 let trailing_newline = contents.ends_with('\n');
1132 let mut root = parse_or_init_yaml(contents)?;
1133 remove_yaml_path(&mut root, yaml_path);
1134 serialize_yaml_with_newline(&root, trailing_newline)
1135}
1136
1137fn parse_or_init_yaml(contents: &str) -> anyhow::Result<serde_yaml::Value> {
1138 if contents.trim().is_empty() {
1139 return Ok(serde_yaml::Value::Mapping(serde_yaml::Mapping::new()));
1140 }
1141 serde_yaml::from_str(contents).context("parse yaml")
1142}
1143
/// Set the value at `path` inside a YAML document, creating containers.
///
/// Mirrors `set_json_path`: index-like segments address sequence elements
/// (padding as needed), other segments address mapping keys, and mismatched
/// container types along the way are replaced. An empty path replaces the
/// whole document.
fn set_yaml_path(root: &mut serde_yaml::Value, path: &[String], value: serde_yaml::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }

    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);

        if let Some(i) = index_seg {
            // Padding value: null at the leaf, otherwise a container shaped
            // for the next segment.
            let default_next = if last {
                serde_yaml::Value::Null
            } else {
                default_yaml_container(&path[idx + 1])
            };
            let seq = ensure_yaml_sequence(current);
            while seq.len() <= i {
                seq.push(default_next.clone());
            }
            if last {
                seq[i] = value;
                return;
            }
            current = &mut seq[i];
            continue;
        }

        if last {
            let map = ensure_yaml_mapping(current);
            map.insert(yaml_key(seg), value);
            return;
        }

        // Descend, creating the next container when the key is absent.
        let default_next = default_yaml_container(&path[idx + 1]);
        let map = ensure_yaml_mapping(current);
        current = map.entry(yaml_key(seg)).or_insert(default_next);
    }
}
1184
/// Remove the value at `path`; returns whether anything was removed.
///
/// Mirrors `remove_json_path`: missing segments or container mismatches
/// return `false`; an empty path nulls out the whole document.
fn remove_yaml_path(root: &mut serde_yaml::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_yaml::Value::Null;
        return true;
    }

    // Walk down to the parent of the final segment.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(seq) = current.as_sequence_mut() else {
                return false;
            };
            if i >= seq.len() {
                return false;
            }
            current = &mut seq[i];
            continue;
        }

        let Some(map) = current.as_mapping_mut() else {
            return false;
        };
        let key = yaml_key(seg);
        let Some(next) = map.get_mut(&key) else {
            return false;
        };
        current = next;
    }

    // Remove the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(seq) = current.as_sequence_mut() else {
            return false;
        };
        if i >= seq.len() {
            return false;
        }
        seq.remove(i);
        return true;
    }

    let Some(map) = current.as_mapping_mut() else {
        return false;
    };
    map.remove(yaml_key(last)).is_some()
}
1231
1232fn default_yaml_container(next_seg: &str) -> serde_yaml::Value {
1233 if parse_index_segment(next_seg).is_some() {
1234 serde_yaml::Value::Sequence(Vec::new())
1235 } else {
1236 serde_yaml::Value::Mapping(serde_yaml::Mapping::new())
1237 }
1238}
1239
1240fn ensure_yaml_mapping(value: &mut serde_yaml::Value) -> &mut serde_yaml::Mapping {
1241 if !value.is_mapping() {
1242 *value = serde_yaml::Value::Mapping(serde_yaml::Mapping::new());
1243 }
1244 value.as_mapping_mut().expect("yaml mapping")
1245}
1246
1247fn ensure_yaml_sequence(value: &mut serde_yaml::Value) -> &mut Vec<serde_yaml::Value> {
1248 if !value.is_sequence() {
1249 *value = serde_yaml::Value::Sequence(Vec::new());
1250 }
1251 value.as_sequence_mut().expect("yaml sequence")
1252}
1253
1254fn yaml_key(key: &str) -> serde_yaml::Value {
1255 serde_yaml::Value::String(key.to_string())
1256}
1257
1258fn serialize_yaml_with_newline(
1259 value: &serde_yaml::Value,
1260 trailing_newline: bool,
1261) -> anyhow::Result<String> {
1262 let mut out = serde_yaml::to_string(value).context("serialize yaml")?;
1263 if trailing_newline {
1264 if !out.ends_with('\n') {
1265 out.push('\n');
1266 }
1267 } else {
1268 while out.ends_with('\n') {
1269 out.pop();
1270 }
1271 }
1272 Ok(out)
1273}
1274
/// Interpret a path segment as a zero-based sequence index, if it parses as
/// an unsigned integer; any other segment is treated as a mapping key.
fn parse_index_segment(seg: &str) -> Option<usize> {
    // `"".parse::<usize>()` is an error, so empty segments naturally map to
    // None without a separate guard.
    seg.parse::<usize>().ok()
}
1281
/// Check that the `anchors.len()` lines immediately *before* `lines[idx]`
/// equal `anchors`, in order. An empty anchor list always matches.
fn before_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    // There must be at least `anchors.len()` lines before `idx`.
    let Some(start) = idx.checked_sub(anchors.len()) else {
        return false;
    };
    lines[start..idx].iter().eq(anchors.iter())
}
1296
/// Check that the `anchors.len()` lines immediately *after* `lines[idx]`
/// equal `anchors`, in order. An empty anchor list always matches.
fn after_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    let start = idx + 1;
    let end = start + anchors.len();
    // The whole anchor window must fit inside `lines`.
    if end > lines.len() {
        return false;
    }
    lines[start..end].iter().eq(anchors.iter())
}
1311
/// Convert a JSON value into a `toml_edit::Value`.
///
/// The conversion is lossy:
/// - numbers prefer `i64`, then `f64`, falling back to their string form
///   (e.g. `u64` values above `i64::MAX`);
/// - array elements that are nested arrays/objects/nulls are silently
///   dropped, as are array numbers representable as neither `i64` nor `f64`;
/// - nulls and objects at the top level become the empty string.
///
/// NOTE(review): confirm callers never pass nested objects/arrays — they
/// are discarded here rather than reported as an error.
fn json_value_to_toml(json: serde_json::Value) -> toml_edit::Value {
    match json {
        serde_json::Value::String(s) => str_value(&s),
        serde_json::Value::Bool(b) => bool_value(b),
        serde_json::Value::Number(n) => {
            // Prefer exact integers; fall back to floats, then to text.
            if let Some(i) = n.as_i64() {
                toml_edit::Value::from(i)
            } else if let Some(f) = n.as_f64() {
                toml_edit::Value::from(f)
            } else {
                toml_edit::Value::from(n.to_string())
            }
        }
        serde_json::Value::Array(arr) => {
            let mut out = toml_edit::Array::new();
            for v in arr {
                match v {
                    serde_json::Value::String(s) => out.push(s.as_str()),
                    serde_json::Value::Bool(b) => out.push(b),
                    serde_json::Value::Number(n) => {
                        if let Some(i) = n.as_i64() {
                            out.push(i);
                        } else if let Some(f) = n.as_f64() {
                            out.push(f);
                        }
                        // Numbers fitting neither i64 nor f64 are dropped.
                    }
                    // Nested containers and nulls inside arrays are dropped.
                    _ => {}
                }
            }
            toml_edit::Value::from(out)
        }
        // Null / Object: no TOML analogue here; emit an empty string.
        _ => toml_edit::Value::from(""),
    }
}
1346
1347fn str_value(s: &str) -> toml_edit::Value {
1348 toml_edit::Value::from(s)
1349}
1350
1351fn bool_value(b: bool) -> toml_edit::Value {
1352 toml_edit::Value::from(b)
1353}
1354
/// Look up the dependency entry addressed by `toml_path` in a parsed
/// Cargo.toml document.
///
/// Two path shapes are understood (extra trailing segments are ignored):
/// - `[<deps-table>, <dep-name>]`, e.g. `["dependencies", "serde"]`
/// - `["target", <cfg>, <deps-table>, <dep-name>]`
///
/// Returns `None` when the path is too short or any table along the way is
/// missing or not a table. NOTE(review): `as_table_mut` is `None` for
/// inline/dotted tables, so intermediate tables written in inline syntax
/// also yield `None` — confirm callers only hit standard `[table]` sections.
fn get_dep_item_mut<'a>(doc: &'a mut DocumentMut, toml_path: &[String]) -> Option<&'a mut Item> {
    if toml_path.len() < 2 {
        return None;
    }

    if toml_path[0] == "target" {
        // target.<cfg>.<deps-table>.<dep-name> needs four segments.
        if toml_path.len() < 4 {
            return None;
        }
        let cfg = &toml_path[1];
        let table_name = &toml_path[2];
        let dep = &toml_path[3];

        let target = doc.get_mut("target")?.as_table_mut()?;
        let cfg_tbl = target.get_mut(cfg)?.as_table_mut()?;
        let deps = cfg_tbl.get_mut(table_name)?.as_table_mut()?;
        return deps.get_mut(dep);
    }

    let table_name = &toml_path[0];
    let dep = &toml_path[1];
    let deps = doc.get_mut(table_name)?.as_table_mut()?;
    deps.get_mut(dep)
}
1379
1380pub fn check_policy_block(apply: &BuildfixApply, was_dry_run: bool) -> Option<PolicyBlockError> {
1382 if was_dry_run {
1383 return None;
1384 }
1385
1386 if !apply.preconditions.verified {
1387 return Some(PolicyBlockError::PreconditionMismatch {
1388 message: "precondition mismatch".to_string(),
1389 });
1390 }
1391
1392 let blocked: Vec<&ApplyResult> = apply
1393 .results
1394 .iter()
1395 .filter(|r| r.status == ApplyStatus::Blocked)
1396 .collect();
1397
1398 if !blocked.is_empty() {
1399 let reasons: Vec<String> = blocked
1400 .iter()
1401 .filter_map(|r| r.blocked_reason.clone())
1402 .collect();
1403
1404 if reasons.iter().any(|r| r.contains("safety")) {
1405 return Some(PolicyBlockError::SafetyGateDenial {
1406 message: format!("{} op(s) blocked by safety gate", blocked.len()),
1407 });
1408 }
1409
1410 return Some(PolicyBlockError::PolicyDenial {
1411 message: format!("{} op(s) blocked by policy", blocked.len()),
1412 });
1413 }
1414
1415 if apply.summary.failed > 0 {
1416 return Some(PolicyBlockError::PreconditionMismatch {
1417 message: format!("{} op(s) failed", apply.summary.failed),
1418 });
1419 }
1420
1421 None
1422}