1mod error;
9
10pub use error::{EditError, EditResult, PolicyBlockError};
11
12use anyhow::Context;
13use buildfix_hash::sha256_hex;
14use buildfix_types::apply::{
15 ApplyFile, ApplyPreconditions, ApplyRepoInfo, ApplyResult, ApplyStatus, ApplySummary,
16 BuildfixApply, PlanRef, PreconditionMismatch,
17};
18use buildfix_types::ops::{OpKind, SafetyClass};
19use buildfix_types::plan::{BuildfixPlan, FilePrecondition, PlanOp};
20use buildfix_types::receipt::ToolInfo;
21use camino::{Utf8Path, Utf8PathBuf};
22use diffy::PatchFormatter;
23use fs_err as fs;
24use std::collections::{BTreeMap, BTreeSet, HashMap};
25use toml_edit::{DocumentMut, InlineTable, Item, value};
26use uuid::Uuid;
27
/// Options controlling how a plan is applied to a repository.
#[derive(Debug, Clone, Default)]
pub struct ApplyOptions {
    /// Compute results and patches without writing any files.
    pub dry_run: bool,
    /// Permit operations classified as `SafetyClass::Guarded`.
    pub allow_guarded: bool,
    /// Permit operations classified as `SafetyClass::Unsafe`.
    pub allow_unsafe: bool,
    /// Copy each modified file into `backup_dir` before overwriting it.
    pub backup_enabled: bool,
    /// Destination for backups; backups are silently skipped when `None`.
    pub backup_dir: Option<Utf8PathBuf>,
    /// Suffix appended to a file's path to form its backup file name.
    pub backup_suffix: String,
    /// User-supplied values for ops that declare `params_required`.
    pub params: HashMap<String, String>,
}
41
/// Options for [`attach_preconditions`].
#[derive(Debug, Clone, Default)]
pub struct AttachPreconditionsOptions {
    /// Also record the current git HEAD sha on the plan (best-effort).
    pub include_git_head: bool,
}
48
49pub fn get_head_sha(repo_root: &Utf8Path) -> anyhow::Result<String> {
51 let output = std::process::Command::new("git")
52 .arg("rev-parse")
53 .arg("HEAD")
54 .current_dir(repo_root)
55 .output()
56 .context("failed to run git rev-parse HEAD")?;
57
58 if !output.status.success() {
59 let stderr = String::from_utf8_lossy(&output.stderr);
60 anyhow::bail!("git rev-parse HEAD failed: {}", stderr.trim());
61 }
62
63 Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
64}
65
66pub fn is_working_tree_dirty(repo_root: &Utf8Path) -> anyhow::Result<bool> {
68 let status_output = std::process::Command::new("git")
69 .args(["status", "--porcelain"])
70 .current_dir(repo_root)
71 .output()
72 .context("failed to run git status")?;
73
74 if !status_output.status.success() {
75 let stderr = String::from_utf8_lossy(&status_output.stderr);
76 anyhow::bail!("git status failed: {}", stderr.trim());
77 }
78
79 Ok(!status_output.stdout.is_empty())
80}
81
82pub fn attach_preconditions(
86 repo_root: &Utf8Path,
87 plan: &mut BuildfixPlan,
88 opts: &AttachPreconditionsOptions,
89) -> anyhow::Result<()> {
90 let mut files = BTreeSet::new();
91 for op in &plan.ops {
92 files.insert(op.target.path.clone());
93 }
94
95 let mut pres = Vec::new();
96 for path in files {
97 let abs = abs_path(repo_root, Utf8Path::new(&path));
98 let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
99 let sha = sha256_hex(&bytes);
100 pres.push(FilePrecondition { path, sha256: sha });
101 }
102 plan.preconditions.files = pres;
103
104 if opts.include_git_head
105 && let Ok(sha) = get_head_sha(repo_root)
106 {
107 plan.preconditions.head_sha = Some(sha);
108 }
109
110 if let Ok(dirty) = is_working_tree_dirty(repo_root) {
111 plan.preconditions.dirty = Some(dirty);
112 }
113
114 Ok(())
115}
116
117pub fn preview_patch(
118 repo_root: &Utf8Path,
119 plan: &BuildfixPlan,
120 opts: &ApplyOptions,
121) -> anyhow::Result<String> {
122 let outcome = execute_plan(repo_root, plan, opts, false)?;
123 Ok(render_patch(&outcome.before, &outcome.after))
124}
125
126pub fn apply_plan(
128 repo_root: &Utf8Path,
129 plan: &BuildfixPlan,
130 tool: ToolInfo,
131 opts: &ApplyOptions,
132) -> anyhow::Result<(BuildfixApply, String)> {
133 let mut outcome = execute_plan(repo_root, plan, opts, true)?;
134 let patch = render_patch(&outcome.before, &outcome.after);
135
136 if !opts.dry_run && outcome.preconditions.verified {
137 let changed_files = changed_files(&outcome.before, &outcome.after);
138 if !changed_files.is_empty() {
139 if opts.backup_enabled {
140 create_backups(
141 repo_root,
142 &changed_files,
143 &outcome.before,
144 opts,
145 &mut outcome.results,
146 )?;
147 }
148 write_changed_files(repo_root, &changed_files, &outcome.after)?;
149 }
150 }
151
152 let repo_info = ApplyRepoInfo {
153 root: repo_root.to_string(),
154 head_sha_before: None,
155 head_sha_after: None,
156 dirty_before: None,
157 dirty_after: None,
158 };
159
160 let plan_ref = PlanRef {
161 path: "artifacts/buildfix/plan.json".to_string(),
162 sha256: None,
163 };
164
165 let mut apply = BuildfixApply::new(tool, repo_info, plan_ref);
166 apply.preconditions = outcome.preconditions;
167 apply.results = outcome.results;
168 apply.summary = outcome.summary;
169
170 Ok((apply, patch))
171}
172
/// Everything produced by a single in-memory plan execution pass.
struct ExecuteOutcome {
    // File contents keyed by path, as read before any op ran.
    before: BTreeMap<Utf8PathBuf, String>,
    // File contents after all allowed ops were applied (in memory only).
    after: BTreeMap<Utf8PathBuf, String>,
    // Per-op results, in plan order.
    results: Vec<ApplyResult>,
    // Aggregate counters (attempted / applied / blocked / files_modified).
    summary: ApplySummary,
    // Precondition verification state, including any hash mismatches.
    preconditions: ApplyPreconditions,
}
180
/// Run every op in `plan` in memory and report what would (or did) change.
///
/// Three passes: (1) gate each op via `resolve_op`; (2) snapshot the
/// current contents of every file an allowed op targets; (3) apply the
/// allowed ops in plan order, threading the evolving contents through
/// `current`. When `verify_preconditions` is set and verification fails,
/// no op is applied — every otherwise-allowed op is reported as Blocked
/// and `after` equals `before`. Nothing here writes to disk; callers
/// persist `after` themselves.
fn execute_plan(
    repo_root: &Utf8Path,
    plan: &BuildfixPlan,
    opts: &ApplyOptions,
    verify_preconditions: bool,
) -> anyhow::Result<ExecuteOutcome> {
    let mut touched_files = BTreeSet::new();
    let mut resolved_ops: Vec<ResolvedOp> = Vec::new();

    // Pass 1: gate each op and record which files the allowed ones touch.
    for op in &plan.ops {
        let resolved = resolve_op(op, opts);
        if resolved.allowed {
            touched_files.insert(Utf8PathBuf::from(&op.target.path));
        }
        resolved_ops.push(resolved);
    }

    // Pass 2: snapshot current contents. A missing/unreadable file reads
    // as empty rather than failing the whole run.
    let mut before: BTreeMap<Utf8PathBuf, String> = BTreeMap::new();
    for p in &touched_files {
        let abs = abs_path(repo_root, p);
        let contents = fs::read_to_string(&abs).unwrap_or_default();
        before.insert(p.clone(), contents);
    }

    let mut preconditions = ApplyPreconditions {
        verified: true,
        mismatches: vec![],
    };

    // Short-circuit on precondition failure: report every allowed op as
    // Blocked with the mismatch token and return the snapshot unchanged.
    if verify_preconditions
        && !check_preconditions(repo_root, plan, &touched_files, &mut preconditions)?
    {
        let mut results = Vec::new();
        let mut summary = ApplySummary::default();

        for resolved in &resolved_ops {
            // Ops that were already blocked keep no result in this path;
            // only ops that would have run are reported as newly blocked.
            if !resolved.allowed {
                continue;
            }
            summary.blocked += 1;
            results.push(ApplyResult {
                op_id: resolved.op.id.clone(),
                status: ApplyStatus::Blocked,
                message: Some("precondition mismatch".to_string()),
                blocked_reason: Some("precondition mismatch".to_string()),
                blocked_reason_token: Some(
                    buildfix_types::plan::blocked_tokens::PRECONDITION_MISMATCH.to_string(),
                ),
                files: vec![],
            });
        }

        return Ok(ExecuteOutcome {
            before: before.clone(),
            after: before,
            results,
            summary,
            preconditions,
        });
    }

    // Pass 3: apply allowed ops in order against the in-memory contents.
    let mut current = before.clone();
    let mut results: Vec<ApplyResult> = Vec::new();
    let mut summary = ApplySummary::default();

    for resolved in &resolved_ops {
        let op = resolved.op;

        // Blocked ops are recorded with the reason computed by resolve_op.
        if !resolved.allowed {
            let mut res = ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Blocked,
                message: None,
                blocked_reason: resolved.blocked_reason.clone(),
                blocked_reason_token: resolved.blocked_reason_token.clone(),
                files: vec![],
            };
            if let Some(msg) = &resolved.blocked_message {
                res.message = Some(msg.clone());
            }
            summary.blocked += 1;
            results.push(res);
            continue;
        }

        summary.attempted += 1;

        let file = Utf8PathBuf::from(&op.target.path);
        let old = current.get(&file).cloned().unwrap_or_default();

        let new = apply_op_to_content(&old, &resolved.kind)
            .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;

        current.insert(file.clone(), new.clone());

        // Only record a file entry when the op actually changed the text.
        let mut files = Vec::new();
        if old != new {
            files.push(ApplyFile {
                path: op.target.path.clone(),
                sha256_before: Some(sha256_hex(old.as_bytes())),
                sha256_after: Some(sha256_hex(new.as_bytes())),
                backup_path: None,
            });
        }

        // Dry-run ops count as attempted but are reported Skipped, and the
        // `applied` counter is not incremented.
        if opts.dry_run {
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Skipped,
                message: Some("dry-run: not written".to_string()),
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        } else {
            summary.applied += 1;
            results.push(ApplyResult {
                op_id: op.id.clone(),
                status: ApplyStatus::Applied,
                message: None,
                blocked_reason: None,
                blocked_reason_token: None,
                files,
            });
        }
    }

    summary.files_modified = changed_files(&before, &current).len() as u64;

    Ok(ExecuteOutcome {
        before,
        after: current,
        results,
        summary,
        preconditions,
    })
}
319
/// A plan op after gate evaluation: whether it may run, with which
/// (possibly parameter-filled) kind, and — when blocked — why.
struct ResolvedOp<'a> {
    // The op exactly as it appears in the plan.
    op: &'a PlanOp,
    // Op kind, possibly with user-supplied params substituted in.
    kind: OpKind,
    // True when the op passed the blocked/safety/params gates.
    allowed: bool,
    // Human-readable reason when not allowed.
    blocked_reason: Option<String>,
    // Machine-readable token mirroring `blocked_reason`.
    blocked_reason_token: Option<String>,
    // Optional extra text copied into the result's `message` field.
    blocked_message: Option<String>,
}
328
329fn resolve_op<'a>(op: &'a PlanOp, opts: &ApplyOptions) -> ResolvedOp<'a> {
330 if op.blocked {
331 if !op.params_required.is_empty() {
332 let (kind, missing) = resolve_params(op, &opts.params);
333 if missing.is_empty() {
334 return ResolvedOp {
335 op,
336 kind,
337 allowed: allowed_by_safety(opts, op.safety),
338 blocked_reason: None,
339 blocked_reason_token: None,
340 blocked_message: None,
341 };
342 }
343 let blocked_reason = op
344 .blocked_reason
345 .clone()
346 .or(Some("missing params".to_string()));
347 return ResolvedOp {
348 op,
349 kind: op.kind.clone(),
350 allowed: false,
351 blocked_reason,
352 blocked_reason_token: op.blocked_reason_token.clone(),
353 blocked_message: None,
354 };
355 }
356
357 let blocked_reason = op.blocked_reason.clone().or(Some("blocked".to_string()));
358 return ResolvedOp {
359 op,
360 kind: op.kind.clone(),
361 allowed: false,
362 blocked_reason,
363 blocked_reason_token: op.blocked_reason_token.clone(),
364 blocked_message: None,
365 };
366 }
367
368 if !allowed_by_safety(opts, op.safety) {
369 use buildfix_types::plan::blocked_tokens;
370 let token = match op.safety {
371 SafetyClass::Guarded => blocked_tokens::SAFETY_GUARDED_NOT_ALLOWED,
372 SafetyClass::Unsafe => blocked_tokens::SAFETY_UNSAFE_NOT_ALLOWED,
373 _ => blocked_tokens::SAFETY_GUARDED_NOT_ALLOWED,
374 };
375 return ResolvedOp {
376 op,
377 kind: op.kind.clone(),
378 allowed: false,
379 blocked_reason: Some("safety gate".to_string()),
380 blocked_reason_token: Some(token.to_string()),
381 blocked_message: Some("safety class not allowed".to_string()),
382 };
383 }
384
385 let (kind, missing) = resolve_params(op, &opts.params);
386 if !missing.is_empty() {
387 return ResolvedOp {
388 op,
389 kind,
390 allowed: false,
391 blocked_reason: Some(format!("missing params: {}", missing.join(", "))),
392 blocked_reason_token: Some(
393 buildfix_types::plan::blocked_tokens::MISSING_PARAMS.to_string(),
394 ),
395 blocked_message: None,
396 };
397 }
398
399 ResolvedOp {
400 op,
401 kind,
402 allowed: true,
403 blocked_reason: None,
404 blocked_reason_token: None,
405 blocked_message: None,
406 }
407}
408
409fn resolve_params(op: &PlanOp, params: &HashMap<String, String>) -> (OpKind, Vec<String>) {
410 if op.params_required.is_empty() {
411 return (op.kind.clone(), Vec::new());
412 }
413
414 let mut missing = Vec::new();
415 let mut kind = op.kind.clone();
416
417 for key in &op.params_required {
418 if let Some(value) = params.get(key) {
419 fill_op_param(&mut kind, key, value);
420 } else {
421 missing.push(key.clone());
422 }
423 }
424
425 (kind, missing)
426}
427
428fn fill_op_param(kind: &mut OpKind, key: &str, value: &str) {
429 let OpKind::TomlTransform { rule_id, args } = kind else {
430 return;
431 };
432
433 let mut map = match args.take() {
434 Some(serde_json::Value::Object(m)) => m,
435 _ => serde_json::Map::new(),
436 };
437
438 match (rule_id.as_str(), key) {
439 ("set_package_rust_version", "rust_version") => {
440 map.insert(
441 key.to_string(),
442 serde_json::Value::String(value.to_string()),
443 );
444 }
445 ("set_package_license", "license") => {
446 map.insert(
447 key.to_string(),
448 serde_json::Value::String(value.to_string()),
449 );
450 }
451 ("ensure_path_dep_has_version", "version") => {
452 map.insert(
453 key.to_string(),
454 serde_json::Value::String(value.to_string()),
455 );
456 }
457 _ => {
458 map.insert(
459 key.to_string(),
460 serde_json::Value::String(value.to_string()),
461 );
462 }
463 }
464
465 *args = Some(serde_json::Value::Object(map));
466}
467
468fn check_preconditions(
469 repo_root: &Utf8Path,
470 plan: &BuildfixPlan,
471 touched_files: &BTreeSet<Utf8PathBuf>,
472 preconditions: &mut ApplyPreconditions,
473) -> anyhow::Result<bool> {
474 let file_map = plan
475 .preconditions
476 .files
477 .iter()
478 .map(|f| (f.path.clone(), f.sha256.clone()))
479 .collect::<BTreeMap<_, _>>();
480
481 for file in touched_files {
482 let Some(expected) = file_map.get(&file.to_string()) else {
483 continue;
484 };
485 let abs = abs_path(repo_root, file);
486 let bytes = fs::read(&abs).with_context(|| format!("read {}", abs))?;
487 let actual = sha256_hex(&bytes);
488 if &actual != expected {
489 preconditions.verified = false;
490 preconditions.mismatches.push(PreconditionMismatch {
491 path: file.to_string(),
492 expected: expected.clone(),
493 actual,
494 });
495 }
496 }
497
498 if let Some(expected) = &plan.preconditions.head_sha
499 && let Ok(actual) = get_head_sha(repo_root)
500 && &actual != expected
501 {
502 preconditions.verified = false;
503 preconditions.mismatches.push(PreconditionMismatch {
504 path: "<git_head>".to_string(),
505 expected: expected.clone(),
506 actual,
507 });
508 }
509
510 Ok(preconditions.verified)
511}
512
513fn changed_files(
514 before: &BTreeMap<Utf8PathBuf, String>,
515 after: &BTreeMap<Utf8PathBuf, String>,
516) -> BTreeSet<Utf8PathBuf> {
517 let mut changed = BTreeSet::new();
518 for (path, old) in before {
519 let new = after.get(path).unwrap_or(old);
520 if old != new {
521 changed.insert(path.clone());
522 }
523 }
524 changed
525}
526
527fn create_backups(
528 _repo_root: &Utf8Path,
529 changed_files: &BTreeSet<Utf8PathBuf>,
530 before: &BTreeMap<Utf8PathBuf, String>,
531 opts: &ApplyOptions,
532 results: &mut [ApplyResult],
533) -> anyhow::Result<()> {
534 let Some(ref backup_dir) = opts.backup_dir else {
535 return Ok(());
536 };
537
538 for path in changed_files {
539 let contents = before.get(path).cloned().unwrap_or_default();
540 let backup_rel = format!("{}{}", path, opts.backup_suffix);
541 let backup_path = backup_dir.join(backup_rel);
542
543 if let Some(parent) = backup_path.parent() {
544 fs::create_dir_all(parent).with_context(|| format!("create backup dir {}", parent))?;
545 }
546
547 fs::write(&backup_path, &contents)
548 .with_context(|| format!("write backup {}", backup_path))?;
549
550 for result in results.iter_mut() {
552 for file in &mut result.files {
553 if file.path == *path {
554 file.backup_path = Some(backup_path.to_string());
555 }
556 }
557 }
558 }
559
560 Ok(())
561}
562
563fn write_changed_files(
564 repo_root: &Utf8Path,
565 changed_files: &BTreeSet<Utf8PathBuf>,
566 after: &BTreeMap<Utf8PathBuf, String>,
567) -> anyhow::Result<()> {
568 for path in changed_files {
569 let abs = abs_path(repo_root, path);
570 let new_contents = after.get(path).cloned().unwrap_or_default();
571 write_atomic(&abs, &new_contents)?;
572 }
573 Ok(())
574}
575
576fn write_atomic(path: &Utf8Path, contents: &str) -> anyhow::Result<()> {
577 let parent = path.parent().unwrap_or_else(|| Utf8Path::new("."));
578 let tmp_name = format!(
579 ".buildfix-tmp-{}",
580 Uuid::new_v4().to_string().replace('-', "")
581 );
582 let tmp_path = parent.join(tmp_name);
583 fs::write(&tmp_path, contents).with_context(|| format!("write {}", tmp_path))?;
584 if path.exists() {
585 let _ = fs::remove_file(path);
586 }
587 fs::rename(&tmp_path, path).with_context(|| format!("rename {} -> {}", tmp_path, path))?;
588 Ok(())
589}
590
/// Decide whether an op of the given safety class may run under `opts`.
fn allowed_by_safety(opts: &ApplyOptions, safety: SafetyClass) -> bool {
    match safety {
        // Safe ops need no opt-in; the riskier classes each require their
        // explicit flag.
        SafetyClass::Safe => true,
        SafetyClass::Guarded => opts.allow_guarded,
        SafetyClass::Unsafe => opts.allow_unsafe,
    }
}
598
599fn abs_path(repo_root: &Utf8Path, rel: &Utf8Path) -> Utf8PathBuf {
600 if rel.is_absolute() {
601 rel.to_path_buf()
602 } else {
603 repo_root.join(rel)
604 }
605}
606
607fn render_patch(
608 before: &BTreeMap<Utf8PathBuf, String>,
609 after: &BTreeMap<Utf8PathBuf, String>,
610) -> String {
611 let mut out = String::new();
612 let formatter = PatchFormatter::new();
613
614 for (path, old) in before {
615 let new = after.get(path).unwrap_or(old);
616 if old == new {
617 continue;
618 }
619
620 out.push_str(&format!("diff --git a/{0} b/{0}\n", path));
621 out.push_str(&format!("--- a/{0}\n+++ b/{0}\n", path));
622
623 let patch = diffy::create_patch(old, new);
624 out.push_str(&formatter.fmt_patch(&patch).to_string());
625 if !out.ends_with('\n') {
626 out.push('\n');
627 }
628 }
629
630 out
631}
632
/// Apply a single op's transformation to `contents`, returning the new text.
///
/// JSON, YAML, and anchored-text ops are dispatched to dedicated helpers
/// and return early. Everything else is treated as TOML: the content is
/// parsed with `toml_edit` so formatting and comments are preserved.
/// NOTE(review): an unparsable TOML document is silently replaced by an
/// empty one — confirm swallowing parse errors here is intended.
pub fn apply_op_to_content(contents: &str, kind: &OpKind) -> anyhow::Result<String> {
    // Non-TOML op kinds return early from this first match.
    match kind {
        OpKind::JsonSet { json_path, value } => {
            return apply_json_set(contents, json_path, value.clone());
        }
        OpKind::JsonRemove { json_path } => {
            return apply_json_remove(contents, json_path);
        }
        OpKind::YamlSet { yaml_path, value } => {
            return apply_yaml_set(contents, yaml_path, value.clone());
        }
        OpKind::YamlRemove { yaml_path } => {
            return apply_yaml_remove(contents, yaml_path);
        }
        OpKind::TextReplaceAnchored {
            find,
            replace,
            anchor_before,
            anchor_after,
            max_replacements,
        } => {
            return apply_text_replace_anchored(
                contents,
                find,
                replace,
                anchor_before,
                anchor_after,
                *max_replacements,
            );
        }
        _ => {}
    }

    // TOML path: parse losslessly; fall back to an empty document on error.
    let mut doc = contents
        .parse::<DocumentMut>()
        .unwrap_or_else(|_| DocumentMut::new());

    match kind {
        OpKind::TomlSet { toml_path, value } => {
            set_toml_path(&mut doc, toml_path, value.clone());
        }
        OpKind::TomlRemove { toml_path } => {
            remove_toml_path(&mut doc, toml_path);
        }
        // These kinds were consumed by the early-return match above; hitting
        // one here is a programming error, not a user error.
        OpKind::JsonSet { .. }
        | OpKind::JsonRemove { .. }
        | OpKind::YamlSet { .. }
        | OpKind::YamlRemove { .. }
        | OpKind::TextReplaceAnchored { .. } => {
            anyhow::bail!(
                "internal error: non-TOML operation should have been handled in earlier match branch"
            )
        }
        // Named TOML rewrite rules, keyed by `rule_id`. Each rule reads its
        // inputs from the op's JSON `args`.
        OpKind::TomlTransform { rule_id, args } => match rule_id.as_str() {
            // Set `[workspace] resolver = "2"` unconditionally.
            "ensure_workspace_resolver_v2" => {
                doc["workspace"]["resolver"] = value("2");
            }
            // Set `[package] rust-version` from the `rust_version` arg.
            "set_package_rust_version" => {
                let rust_version = args
                    .as_ref()
                    .and_then(|v| v.get("rust_version"))
                    .and_then(|v| v.as_str())
                    .context("missing rust_version param")?;
                doc["package"]["rust-version"] = value(rust_version);
            }
            // Set `[package] edition` from the `edition` arg.
            "set_package_edition" => {
                let edition = args
                    .as_ref()
                    .and_then(|v| v.get("edition"))
                    .and_then(|v| v.as_str())
                    .context("missing edition param")?;
                doc["package"]["edition"] = value(edition);
            }
            // Set `[package] license` from the `license` arg.
            "set_package_license" => {
                let license = args
                    .as_ref()
                    .and_then(|v| v.get("license"))
                    .and_then(|v| v.as_str())
                    .context("missing license param")?;
                doc["package"]["license"] = value(license);
            }
            // Add `version = ...` to a path dependency that lacks one.
            // The dep is located via the `toml_path` arg; the edit is only
            // made when its `path` field equals `dep_path` and no `version`
            // is already present — otherwise the doc is returned unchanged.
            "ensure_path_dep_has_version" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
                let dep_path = args
                    .get("dep_path")
                    .and_then(|v| v.as_str())
                    .context("missing dep_path")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version param")?;

                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;

                // The dependency may be an inline table (`foo = { ... }`)
                // or a full `[dependencies.foo]` table; handle both shapes.
                if let Some(inline) = dep_item.as_inline_table_mut() {
                    let current_path = inline.get("path").and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if inline.get("version").and_then(|v| v.as_str()).is_none() {
                        inline.insert("version", str_value(version));
                    }
                } else if let Some(tbl) = dep_item.as_table_mut() {
                    let current_path = tbl
                        .get("path")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str());
                    if current_path != Some(dep_path) {
                        return Ok(doc.to_string());
                    }
                    if tbl
                        .get("version")
                        .and_then(|i| i.as_value())
                        .and_then(|v| v.as_str())
                        .is_none()
                    {
                        tbl["version"] = value(version);
                    }
                }
            }
            // Ensure `[workspace.dependencies.<dep>]` carries a version.
            // `path`/`git` dependencies are left alone; a missing entry is
            // created as a plain version string.
            "ensure_workspace_dependency_version" => {
                let args = args.as_ref().context("missing args")?;
                let dep = args
                    .get("dep")
                    .and_then(|v| v.as_str())
                    .context("missing dep")?;
                let version = args
                    .get("version")
                    .and_then(|v| v.as_str())
                    .context("missing version")?;

                let ws_deps = &mut doc["workspace"]["dependencies"][dep];
                if ws_deps.is_none() {
                    *ws_deps = value(version);
                } else if let Some(existing_inline) = ws_deps.as_inline_table_mut() {
                    if existing_inline.get("path").is_none() && existing_inline.get("git").is_none()
                    {
                        existing_inline.insert("version", str_value(version));
                    }
                } else if let Some(existing_tbl) = ws_deps.as_table_mut() {
                    if existing_tbl.get("path").is_none() && existing_tbl.get("git").is_none() {
                        existing_tbl["version"] = value(version);
                    }
                } else if ws_deps.is_value() {
                    *ws_deps = value(version);
                }
            }
            // Replace a dependency with `{ workspace = true, ... }`, carrying
            // over selected keys (`package`, `optional`, `default_features`,
            // `features`) from the `preserved` arg when present.
            "use_workspace_dependency" => {
                let args = args.as_ref().context("missing args")?;
                let toml_path = args
                    .get("toml_path")
                    .and_then(|v| v.as_array())
                    .context("missing toml_path")?;
                let toml_path: Vec<String> = toml_path
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();

                let preserved = args.get("preserved");
                let mut inline = InlineTable::new();
                inline.insert("workspace", bool_value(true));
                if let Some(p) = preserved {
                    if let Some(pkg) = p.get("package").and_then(|v| v.as_str()) {
                        inline.insert("package", str_value(pkg));
                    }
                    if let Some(opt) = p.get("optional").and_then(|v| v.as_bool()) {
                        inline.insert("optional", bool_value(opt));
                    }
                    if let Some(df) = p.get("default_features").and_then(|v| v.as_bool()) {
                        inline.insert("default-features", bool_value(df));
                    }
                    if let Some(features) = p.get("features").and_then(|v| v.as_array()) {
                        let mut arr = toml_edit::Array::new();
                        for f in features {
                            if let Some(s) = f.as_str() {
                                arr.push(s);
                            }
                        }
                        inline.insert("features", toml_edit::Value::from(arr));
                    }
                }

                let dep_item = get_dep_item_mut(&mut doc, &toml_path)
                    .context("dependency not found at toml_path")?;
                *dep_item = value(inline);
            }
            // Unknown rule ids are a no-op: the document passes through.
            _ => {
            }
        },
    }

    Ok(doc.to_string())
}
841
842pub fn execute_plan_from_contents(
851 before: &BTreeMap<Utf8PathBuf, String>,
852 plan: &BuildfixPlan,
853 opts: &ApplyOptions,
854) -> anyhow::Result<BTreeMap<Utf8PathBuf, String>> {
855 let mut current = before.clone();
856
857 for op in &plan.ops {
858 let resolved = resolve_op(op, opts);
859 if !resolved.allowed {
860 continue;
861 }
862
863 let file = Utf8PathBuf::from(&op.target.path);
864 let old = current.get(&file).cloned().unwrap_or_default();
865 let new = apply_op_to_content(&old, &resolved.kind)
866 .with_context(|| format!("apply op {} to {}", op.id, op.target.path))?;
867 current.insert(file, new);
868 }
869
870 let mut changed = BTreeMap::new();
872 for (path, new_content) in ¤t {
873 let old_content = before.get(path).map(|s| s.as_str()).unwrap_or("");
874 if new_content != old_content {
875 changed.insert(path.clone(), new_content.clone());
876 }
877 }
878
879 Ok(changed)
880}
881
882fn set_toml_path(doc: &mut DocumentMut, toml_path: &[String], value: serde_json::Value) {
883 if toml_path.is_empty() {
884 return;
885 }
886 let mut current = doc.as_table_mut();
887 for key in &toml_path[..toml_path.len() - 1] {
888 let entry = current.entry(key).or_insert(toml_edit::table());
889 if entry.as_table().is_none() {
890 *entry = toml_edit::table();
891 }
892 let Some(table) = entry.as_table_mut() else {
893 return;
894 };
895 current = table;
896 }
897 let last = &toml_path[toml_path.len() - 1];
898 current[last] = Item::Value(json_value_to_toml(value));
899}
900
901fn remove_toml_path(doc: &mut DocumentMut, toml_path: &[String]) {
902 if toml_path.is_empty() {
903 return;
904 }
905 let mut current = doc.as_table_mut();
906 for key in &toml_path[..toml_path.len() - 1] {
907 let Some(tbl) = current.get_mut(key).and_then(|i| i.as_table_mut()) else {
908 return;
909 };
910 current = tbl;
911 }
912 let last = &toml_path[toml_path.len() - 1];
913 current.remove(last);
914}
915
916fn apply_text_replace_anchored(
917 contents: &str,
918 find: &str,
919 replace: &str,
920 anchor_before: &[String],
921 anchor_after: &[String],
922 max_replacements: Option<u64>,
923) -> anyhow::Result<String> {
924 let limit = max_replacements.unwrap_or(1);
925 if limit == 0 {
926 anyhow::bail!("max_replacements must be >= 1");
927 }
928
929 let has_crlf = contents.contains("\r\n");
930 let mut lines: Vec<String> = contents.lines().map(|l| l.to_string()).collect();
931 let trailing_newline = contents.ends_with('\n');
932
933 let mut matches = Vec::new();
934
935 for idx in 0..lines.len() {
936 if lines[idx] != find {
937 continue;
938 }
939
940 if !before_context_matches(&lines, idx, anchor_before) {
941 continue;
942 }
943 if !after_context_matches(&lines, idx, anchor_after) {
944 continue;
945 }
946
947 matches.push(idx);
948 }
949
950 if matches.is_empty() {
951 return Ok(contents.to_string());
952 }
953
954 if matches.len() as u64 > limit {
955 anyhow::bail!(
956 "anchored replace matched {} lines, exceeding max_replacements {}",
957 matches.len(),
958 limit
959 );
960 }
961
962 for idx in matches {
963 lines[idx] = replace.to_string();
964 }
965
966 let line_ending = if has_crlf { "\r\n" } else { "\n" };
967 let mut out = lines.join(line_ending);
968 if trailing_newline {
969 out.push_str(line_ending);
970 }
971
972 Ok(out)
973}
974
975fn apply_json_set(
976 contents: &str,
977 json_path: &[String],
978 value: serde_json::Value,
979) -> anyhow::Result<String> {
980 let trailing_newline = contents.ends_with('\n');
981 let mut root = parse_or_init_json(contents)?;
982 set_json_path(&mut root, json_path, value);
983 serialize_json_with_newline(&root, trailing_newline)
984}
985
986fn apply_json_remove(contents: &str, json_path: &[String]) -> anyhow::Result<String> {
987 let trailing_newline = contents.ends_with('\n');
988 let mut root = parse_or_init_json(contents)?;
989 remove_json_path(&mut root, json_path);
990 serialize_json_with_newline(&root, trailing_newline)
991}
992
993fn parse_or_init_json(contents: &str) -> anyhow::Result<serde_json::Value> {
994 if contents.trim().is_empty() {
995 return Ok(serde_json::Value::Object(serde_json::Map::new()));
996 }
997 serde_json::from_str(contents).context("parse json")
998}
999
/// Set `value` at `path` inside a JSON tree, creating intermediate
/// containers as needed.
///
/// Segments recognized by `parse_index_segment` address arrays — the array
/// is grown with placeholder elements up to the index — while all other
/// segments address object keys. Containers of the wrong type along the
/// way are replaced wholesale by `ensure_json_array`/`ensure_json_object`.
/// An empty path replaces the whole document.
fn set_json_path(root: &mut serde_json::Value, path: &[String], value: serde_json::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }

    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);

        if let Some(i) = index_seg {
            // Padding element for growing the array up to index `i`: null
            // when `seg` is final, otherwise a container shaped for the
            // next segment.
            let default_next = if last {
                serde_json::Value::Null
            } else {
                default_json_container(&path[idx + 1])
            };
            let arr = ensure_json_array(current);
            while arr.len() <= i {
                arr.push(default_next.clone());
            }
            if last {
                arr[i] = value;
                return;
            }
            current = &mut arr[i];
            continue;
        }

        if last {
            let obj = ensure_json_object(current);
            obj.insert(seg.clone(), value);
            return;
        }

        // Descend into (or create) the intermediate object entry.
        let default_next = default_json_container(&path[idx + 1]);
        let obj = ensure_json_object(current);
        current = obj.entry(seg.clone()).or_insert(default_next);
    }
}
1040
/// Remove the node at `path` from a JSON tree.
///
/// Returns true when something was actually removed (callers in this file
/// ignore the return value). Parent segments are walked without creating
/// anything: a missing or wrongly-typed intermediate makes the removal a
/// no-op. An empty path nulls out the whole document.
fn remove_json_path(root: &mut serde_json::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_json::Value::Null;
        return true;
    }

    // Walk to the parent of the node being removed.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(arr) = current.as_array_mut() else {
                return false;
            };
            if i >= arr.len() {
                return false;
            }
            current = &mut arr[i];
            continue;
        }

        let Some(obj) = current.as_object_mut() else {
            return false;
        };
        let Some(next) = obj.get_mut(seg) else {
            return false;
        };
        current = next;
    }

    // Detach the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(arr) = current.as_array_mut() else {
            return false;
        };
        if i >= arr.len() {
            return false;
        }
        arr.remove(i);
        return true;
    }

    let Some(obj) = current.as_object_mut() else {
        return false;
    };
    obj.remove(last).is_some()
}
1086
1087fn default_json_container(next_seg: &str) -> serde_json::Value {
1088 if parse_index_segment(next_seg).is_some() {
1089 serde_json::Value::Array(Vec::new())
1090 } else {
1091 serde_json::Value::Object(serde_json::Map::new())
1092 }
1093}
1094
1095fn ensure_json_object(
1096 value: &mut serde_json::Value,
1097) -> &mut serde_json::Map<String, serde_json::Value> {
1098 if !value.is_object() {
1099 *value = serde_json::Value::Object(serde_json::Map::new());
1100 }
1101 value.as_object_mut().expect("json object")
1102}
1103
1104fn ensure_json_array(value: &mut serde_json::Value) -> &mut Vec<serde_json::Value> {
1105 if !value.is_array() {
1106 *value = serde_json::Value::Array(Vec::new());
1107 }
1108 value.as_array_mut().expect("json array")
1109}
1110
1111fn serialize_json_with_newline(
1112 value: &serde_json::Value,
1113 trailing_newline: bool,
1114) -> anyhow::Result<String> {
1115 let mut out = serde_json::to_string_pretty(value).context("serialize json")?;
1116 if trailing_newline && !out.ends_with('\n') {
1117 out.push('\n');
1118 }
1119 Ok(out)
1120}
1121
1122fn apply_yaml_set(
1123 contents: &str,
1124 yaml_path: &[String],
1125 value: serde_json::Value,
1126) -> anyhow::Result<String> {
1127 let trailing_newline = contents.ends_with('\n');
1128 let mut root = parse_or_init_yaml(contents)?;
1129 let yaml_value = serde_yaml::to_value(value).context("convert value to yaml")?;
1130 set_yaml_path(&mut root, yaml_path, yaml_value);
1131 serialize_yaml_with_newline(&root, trailing_newline)
1132}
1133
1134fn apply_yaml_remove(contents: &str, yaml_path: &[String]) -> anyhow::Result<String> {
1135 let trailing_newline = contents.ends_with('\n');
1136 let mut root = parse_or_init_yaml(contents)?;
1137 remove_yaml_path(&mut root, yaml_path);
1138 serialize_yaml_with_newline(&root, trailing_newline)
1139}
1140
1141fn parse_or_init_yaml(contents: &str) -> anyhow::Result<serde_yaml::Value> {
1142 if contents.trim().is_empty() {
1143 return Ok(serde_yaml::Value::Mapping(serde_yaml::Mapping::new()));
1144 }
1145 serde_yaml::from_str(contents).context("parse yaml")
1146}
1147
/// Set `value` at `path` inside a YAML tree, creating intermediate
/// containers as needed — the YAML twin of `set_json_path`.
///
/// Index segments address sequences (grown with placeholders up to the
/// index); other segments address mapping keys. Wrong-typed containers
/// along the way are replaced by `ensure_yaml_sequence`/
/// `ensure_yaml_mapping`. An empty path replaces the whole document.
fn set_yaml_path(root: &mut serde_yaml::Value, path: &[String], value: serde_yaml::Value) {
    if path.is_empty() {
        *root = value;
        return;
    }

    let mut current = root;
    for (idx, seg) in path.iter().enumerate() {
        let last = idx + 1 == path.len();
        let index_seg = parse_index_segment(seg);

        if let Some(i) = index_seg {
            // Padding element for growing the sequence up to index `i`.
            let default_next = if last {
                serde_yaml::Value::Null
            } else {
                default_yaml_container(&path[idx + 1])
            };
            let seq = ensure_yaml_sequence(current);
            while seq.len() <= i {
                seq.push(default_next.clone());
            }
            if last {
                seq[i] = value;
                return;
            }
            current = &mut seq[i];
            continue;
        }

        if last {
            let map = ensure_yaml_mapping(current);
            map.insert(yaml_key(seg), value);
            return;
        }

        // Descend into (or create) the intermediate mapping entry.
        let default_next = default_yaml_container(&path[idx + 1]);
        let map = ensure_yaml_mapping(current);
        current = map.entry(yaml_key(seg)).or_insert(default_next);
    }
}
1188
/// Remove the node at `path` from a YAML tree — the YAML twin of
/// `remove_json_path`.
///
/// Returns true when something was actually removed (callers in this file
/// ignore the return value). Parent segments are walked without creating
/// anything: a missing or wrongly-typed intermediate makes the removal a
/// no-op. An empty path nulls out the whole document.
fn remove_yaml_path(root: &mut serde_yaml::Value, path: &[String]) -> bool {
    if path.is_empty() {
        *root = serde_yaml::Value::Null;
        return true;
    }

    // Walk to the parent of the node being removed.
    let mut current = root;
    for seg in &path[..path.len() - 1] {
        if let Some(i) = parse_index_segment(seg) {
            let Some(seq) = current.as_sequence_mut() else {
                return false;
            };
            if i >= seq.len() {
                return false;
            }
            current = &mut seq[i];
            continue;
        }

        let Some(map) = current.as_mapping_mut() else {
            return false;
        };
        let key = yaml_key(seg);
        let Some(next) = map.get_mut(&key) else {
            return false;
        };
        current = next;
    }

    // Detach the final segment from its parent container.
    let last = &path[path.len() - 1];
    if let Some(i) = parse_index_segment(last) {
        let Some(seq) = current.as_sequence_mut() else {
            return false;
        };
        if i >= seq.len() {
            return false;
        }
        seq.remove(i);
        return true;
    }

    let Some(map) = current.as_mapping_mut() else {
        return false;
    };
    map.remove(yaml_key(last)).is_some()
}
1235
1236fn default_yaml_container(next_seg: &str) -> serde_yaml::Value {
1237 if parse_index_segment(next_seg).is_some() {
1238 serde_yaml::Value::Sequence(Vec::new())
1239 } else {
1240 serde_yaml::Value::Mapping(serde_yaml::Mapping::new())
1241 }
1242}
1243
1244fn ensure_yaml_mapping(value: &mut serde_yaml::Value) -> &mut serde_yaml::Mapping {
1245 if !value.is_mapping() {
1246 *value = serde_yaml::Value::Mapping(serde_yaml::Mapping::new());
1247 }
1248 value.as_mapping_mut().expect("yaml mapping")
1249}
1250
1251fn ensure_yaml_sequence(value: &mut serde_yaml::Value) -> &mut Vec<serde_yaml::Value> {
1252 if !value.is_sequence() {
1253 *value = serde_yaml::Value::Sequence(Vec::new());
1254 }
1255 value.as_sequence_mut().expect("yaml sequence")
1256}
1257
1258fn yaml_key(key: &str) -> serde_yaml::Value {
1259 serde_yaml::Value::String(key.to_string())
1260}
1261
1262fn serialize_yaml_with_newline(
1263 value: &serde_yaml::Value,
1264 trailing_newline: bool,
1265) -> anyhow::Result<String> {
1266 let mut out = serde_yaml::to_string(value).context("serialize yaml")?;
1267 if trailing_newline {
1268 if !out.ends_with('\n') {
1269 out.push('\n');
1270 }
1271 } else {
1272 while out.ends_with('\n') {
1273 out.pop();
1274 }
1275 }
1276 Ok(out)
1277}
1278
/// Interprets a path segment as a sequence index.
///
/// Returns `Some(index)` only when the segment consists entirely of ASCII
/// digits (e.g. `"0"`, `"12"`). This deliberately rejects signed forms such
/// as `"+3"`, which `usize::from_str` would otherwise accept — a mapping key
/// like `"+3"` must not be mistaken for an index. Empty segments and values
/// that overflow `usize` also yield `None`.
fn parse_index_segment(seg: &str) -> Option<usize> {
    if seg.is_empty() || !seg.bytes().all(|b| b.is_ascii_digit()) {
        return None;
    }
    seg.parse::<usize>().ok()
}
1285
/// Returns true when the `anchors` lines appear verbatim, in order,
/// immediately BEFORE `lines[idx]`. An empty anchor list matches anywhere.
fn before_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    // The anchor window must fit entirely in the lines preceding `idx`.
    let Some(start) = idx.checked_sub(anchors.len()) else {
        return false;
    };
    (0..anchors.len()).all(|offset| lines[start + offset] == anchors[offset])
}
1300
/// Returns true when the `anchors` lines appear verbatim, in order,
/// immediately AFTER `lines[idx]`. An empty anchor list matches anywhere.
fn after_context_matches(lines: &[String], idx: usize, anchors: &[String]) -> bool {
    if anchors.is_empty() {
        return true;
    }
    let start = idx + 1;
    // `get` yields None when the window would run past the end of `lines`.
    match lines.get(start..start + anchors.len()) {
        Some(window) => window.iter().eq(anchors.iter()),
        None => false,
    }
}
1315
1316fn json_value_to_toml(json: serde_json::Value) -> toml_edit::Value {
1317 match json {
1318 serde_json::Value::String(s) => str_value(&s),
1319 serde_json::Value::Bool(b) => bool_value(b),
1320 serde_json::Value::Number(n) => {
1321 if let Some(i) = n.as_i64() {
1322 toml_edit::Value::from(i)
1323 } else if let Some(f) = n.as_f64() {
1324 toml_edit::Value::from(f)
1325 } else {
1326 toml_edit::Value::from(n.to_string())
1327 }
1328 }
1329 serde_json::Value::Array(arr) => {
1330 let mut out = toml_edit::Array::new();
1331 for v in arr {
1332 match v {
1333 serde_json::Value::String(s) => out.push(s.as_str()),
1334 serde_json::Value::Bool(b) => out.push(b),
1335 serde_json::Value::Number(n) => {
1336 if let Some(i) = n.as_i64() {
1337 out.push(i);
1338 } else if let Some(f) = n.as_f64() {
1339 out.push(f);
1340 }
1341 }
1342 _ => {}
1343 }
1344 }
1345 toml_edit::Value::from(out)
1346 }
1347 _ => toml_edit::Value::from(""),
1348 }
1349}
1350
1351fn str_value(s: &str) -> toml_edit::Value {
1352 toml_edit::Value::from(s)
1353}
1354
1355fn bool_value(b: bool) -> toml_edit::Value {
1356 toml_edit::Value::from(b)
1357}
1358
1359fn get_dep_item_mut<'a>(doc: &'a mut DocumentMut, toml_path: &[String]) -> Option<&'a mut Item> {
1360 if toml_path.len() < 2 {
1361 return None;
1362 }
1363
1364 if toml_path[0] == "target" {
1365 if toml_path.len() < 4 {
1366 return None;
1367 }
1368 let cfg = &toml_path[1];
1369 let table_name = &toml_path[2];
1370 let dep = &toml_path[3];
1371
1372 let target = doc.get_mut("target")?.as_table_mut()?;
1373 let cfg_tbl = target.get_mut(cfg)?.as_table_mut()?;
1374 let deps = cfg_tbl.get_mut(table_name)?.as_table_mut()?;
1375 return deps.get_mut(dep);
1376 }
1377
1378 let table_name = &toml_path[0];
1379 let dep = &toml_path[1];
1380 let deps = doc.get_mut(table_name)?.as_table_mut()?;
1381 deps.get_mut(dep)
1382}
1383
1384pub fn check_policy_block(apply: &BuildfixApply, was_dry_run: bool) -> Option<PolicyBlockError> {
1386 if was_dry_run {
1387 return None;
1388 }
1389
1390 if !apply.preconditions.verified {
1391 return Some(PolicyBlockError::PreconditionMismatch {
1392 message: "precondition mismatch".to_string(),
1393 });
1394 }
1395
1396 let blocked: Vec<&ApplyResult> = apply
1397 .results
1398 .iter()
1399 .filter(|r| r.status == ApplyStatus::Blocked)
1400 .collect();
1401
1402 if !blocked.is_empty() && apply.summary.applied == 0 {
1404 let reasons: Vec<String> = blocked
1405 .iter()
1406 .filter_map(|r| r.blocked_reason.clone())
1407 .collect();
1408
1409 if reasons.iter().any(|r| r.contains("safety")) {
1410 return Some(PolicyBlockError::SafetyGateDenial {
1411 message: format!("{} op(s) blocked by safety gate", blocked.len()),
1412 });
1413 }
1414
1415 return Some(PolicyBlockError::PolicyDenial {
1416 message: format!("{} op(s) blocked by policy", blocked.len()),
1417 });
1418 }
1419
1420 if apply.summary.failed > 0 {
1421 return Some(PolicyBlockError::PreconditionMismatch {
1422 message: format!("{} op(s) failed", apply.summary.failed),
1423 });
1424 }
1425
1426 None
1427}