1use std::collections::{BTreeMap, BTreeSet};
2use std::fs;
3use std::path::{Path, PathBuf};
4use std::process::Command;
5
6use harn_vm::flow::{IntentClusterer, ObservedAtom, SqliteFlowStore, TextOp, VcsBackend};
7use serde::ser::SerializeStruct;
8use serde::Serialize;
9use serde_json::json;
10use time::format_description::well_known::Rfc3339;
11use time::{Date, Duration, OffsetDateTime, Time};
12
13use crate::cli::{
14 FlowArchivistCommand, FlowArgs, FlowCommand, FlowReplayAuditArgs, FlowShipCommand,
15};
16
/// Identifiers of the evaluation packs attached to Ship Captain
/// mock-PR watch payloads (emitted under the `eval_packs` key).
const SHIP_CAPTAIN_EVAL_PACKS: [&str; 4] = [
    "slice_quality",
    "false_ship_rate",
    "coverage_fidelity",
    "latency_pr_to_merge",
];
23
24pub(crate) fn run_flow(args: &FlowArgs) -> Result<i32, String> {
25 match &args.command {
26 FlowCommand::ReplayAudit(replay) => run_replay_audit(replay),
27 FlowCommand::Ship(ship) => match &ship.command {
28 FlowShipCommand::Watch(watch) => run_ship_watch(watch),
29 },
30 FlowCommand::Archivist(archivist) => match &archivist.command {
31 FlowArchivistCommand::Scan(scan) => run_archivist_scan(scan),
32 },
33 }
34}
35
36pub(crate) fn run_replay_audit(args: &FlowReplayAuditArgs) -> Result<i32, String> {
37 let since = args.since.as_deref().map(parse_since).transpose()?;
38 if !args.store.is_file() {
39 return Err(format!(
40 "Flow store {} does not exist",
41 args.store.display()
42 ));
43 }
44 let store = SqliteFlowStore::open(&args.store, "replay-audit").map_err(|error| {
45 format!(
46 "failed to open Flow store {}: {error}",
47 args.store.display()
48 )
49 })?;
50
51 let chains = current_predicate_chains(&args.predicate_root, &args.touched_dirs);
52 let diagnostics = discovery_diagnostics(&chains);
53 if has_discovery_error(&diagnostics) {
54 return Err(render_discovery_diagnostics(&diagnostics));
55 }
56 if !args.json {
57 print_discovery_warnings(&diagnostics);
58 }
59
60 let current_predicates = harn_vm::flow::resolve_predicates_for_touched_directories(&chains);
61 let stored = store
62 .shipped_derived_slices_since(since)
63 .map_err(|error| format!("failed to list shipped slices: {error}"))?;
64 let created_at_by_slice = stored
65 .iter()
66 .map(|stored| (stored.slice.id, stored.created_at.clone()))
67 .collect::<std::collections::BTreeMap<_, _>>();
68 let report = harn_vm::flow::replay_audit_report(
69 stored.into_iter().map(|stored| stored.slice),
70 ¤t_predicates,
71 );
72
73 if args.json {
74 let json = serde_json::to_string_pretty(&report)
75 .map_err(|error| format!("failed to encode replay-audit report: {error}"))?;
76 println!("{json}");
77 } else {
78 print_human_report(
79 args.since.as_deref().unwrap_or("beginning"),
80 &report,
81 &created_at_by_slice,
82 );
83 }
84
85 Ok(if args.fail_on_drift && report.has_drift() {
86 1
87 } else {
88 0
89 })
90}
91
/// Borrowed inputs for a single Ship Captain watch pass.
#[derive(Debug, Clone)]
pub struct FlowShipWatchInputs<'a> {
    /// Path to the SQLite Flow store to read atoms from.
    pub store: &'a Path,
    /// Root directory searched for predicate/invariant files.
    pub predicate_root: &'a Path,
    /// Directories treated as "touched"; an empty slice is reported as `"."`.
    pub touched_dirs: &'a [PathBuf],
    /// Persona name echoed back into the emitted payload.
    pub persona: &'a str,
    /// Optional path where the mock-PR receipt JSON is written.
    pub mock_pr_out: Option<&'a Path>,
}
106
/// Build the shadow-mode Ship Captain payload for one watch pass.
///
/// Pipeline: load every atom from the Flow store, cluster atoms into
/// intents, discover and resolve predicates for the touched directories,
/// enforce the predicate ceiling, derive a candidate slice over all atoms,
/// persist a ship receipt, and describe a mock (never remote) PR.
///
/// Returns an `"idle"` payload when the store contains no atoms. When
/// `inputs.mock_pr_out` is set, the full payload is also written to that
/// path. Store, discovery, and formatting failures become error strings.
pub fn ship_watch_payload(inputs: &FlowShipWatchInputs<'_>) -> Result<serde_json::Value, String> {
    let store = open_store(inputs.store)?;
    let atom_refs = store
        .list_atoms()
        .map_err(|error| format!("failed to list Flow atoms: {error}"))?;
    if atom_refs.is_empty() {
        // Nothing observed yet: report idle rather than shipping an empty slice.
        return Ok(json!({
            "status": "idle",
            "reason": "no_atoms",
            "persona": inputs.persona,
            "phase": "phase_0",
            "mode": "shadow",
            "autonomy": "propose_with_approval",
            "receipts_required": true,
        }));
    }

    // Load full atom bodies; any missing atom is a hard error.
    let atoms = atom_refs
        .iter()
        .map(|atom_ref| {
            store
                .get_atom(atom_ref.atom_id)
                .map_err(|error| format!("failed to load atom {}: {error}", atom_ref.atom_id))
        })
        .collect::<Result<Vec<_>, _>>()?;
    // Cluster atoms into intents; sequence numbers passed to the clusterer
    // are 1-based.
    let intents = IntentClusterer::default().cluster(
        atoms
            .iter()
            .enumerate()
            .map(|(index, atom)| ObservedAtom::from_atom(atom, (index + 1) as u64)),
    );
    let intent_payload = intents
        .iter()
        .map(|intent| {
            json!({
                "id": intent.id,
                "goal_description": intent.goal_description,
                "atoms": intent.atoms,
                "confidence": intent.confidence,
                "origin_transcript_span": intent.origin_transcript_span,
            })
        })
        .collect::<Vec<_>>();

    // Predicate discovery: hard errors abort the pass; warnings are carried
    // inside the payload's diagnostics list instead.
    let chains = current_predicate_chains(inputs.predicate_root, inputs.touched_dirs);
    let diagnostics = discovery_diagnostics(&chains);
    if has_discovery_error(&diagnostics) {
        return Err(render_discovery_diagnostics(&diagnostics));
    }
    let bootstrap_payload = bootstrap_policy_payload(inputs.predicate_root);
    let predicates = harn_vm::flow::resolve_predicates_for_touched_directories(&chains);
    let predicate_payload = predicates
        .iter()
        .map(|predicate| {
            json!({
                "qualified_name": predicate.qualified_name,
                "logical_name": predicate.logical_name,
                "hash": predicate.predicate.source_hash,
                "kind": predicate.predicate.kind,
                "relative_dir": predicate.source.relative_dir,
                "retroactive": predicate.predicate.retroactive,
            })
        })
        .collect::<Vec<_>>();
    // Enforce the default predicate ceiling and map the outcome to the
    // validation status label used throughout the payload.
    let ceiling = harn_vm::flow::PredicateCeiling::default();
    let ceiling_outcome = harn_vm::flow::enforce_predicate_ceiling(&predicates, &ceiling);
    let ceiling_payload = serialize_ceiling_outcome(&ceiling_outcome, &ceiling);
    let validation_status = match ceiling_outcome.violation().map(|v| v.level) {
        None => "ok",
        Some(harn_vm::flow::PredicateCeilingLevel::RequireApproval) => "require_approval",
        Some(harn_vm::flow::PredicateCeilingLevel::Block) => "blocked",
    };

    // Derive one candidate slice over every atom and persist the ship receipt.
    let atom_ids: Vec<_> = atom_refs.iter().map(|atom| atom.atom_id).collect();
    let slice = store
        .derive_slice(&atom_ids)
        .map_err(|error| format!("failed to derive candidate slice: {error}"))?;
    let ship_receipt = store
        .ship_slice(&slice)
        .map_err(|error| format!("failed to persist Ship Captain receipt: {error}"))?;
    let created_at = OffsetDateTime::now_utc()
        .format(&Rfc3339)
        .map_err(|error| format!("failed to format receipt timestamp: {error}"))?;
    // Shadow mode: the PR is described locally only; no remote PR is opened.
    let mock_pr = json!({
        "number": 0,
        "state": "open",
        "url": format!("mock://github/pull/{}", slice.id),
        "title": format!("Flow slice {}", slice.id),
        "body": format!(
            "Shadow-mode Ship Captain candidate slice.\n\nAtoms: {}\nIntents: {}\nPredicates discovered: {}\nValidation: {}\n\nNo remote PR was opened.",
            slice.atoms.len(),
            intents.len(),
            predicates.len(),
            validation_status,
        ),
        "requires_approval": true,
        "validation_status": validation_status,
    });
    let payload = json!({
        "status": "mock_pr_opened",
        "persona": inputs.persona,
        "phase": "phase_0",
        "mode": "shadow",
        "autonomy": "propose_with_approval",
        "receipts_required": true,
        "created_at": created_at,
        "slice": {
            "id": slice.id,
            "atoms": slice.atoms,
            "atom_count": slice.atoms.len(),
        },
        "intents": intent_payload,
        "predicate_validation": {
            "predicate_root": inputs.predicate_root,
            // An empty touched-dirs list is reported as the repo root.
            "touched_dirs": if inputs.touched_dirs.is_empty() {
                vec![PathBuf::from(".")]
            } else {
                inputs.touched_dirs.to_vec()
            },
            "status": validation_status,
            "predicates": predicate_payload,
            "ceiling": ceiling_payload,
            "bootstrap_policy": bootstrap_payload,
            "diagnostics": diagnostics.iter().map(|(path, diagnostic)| json!({
                "path": path,
                "severity": discovery_severity_label(diagnostic.severity),
                "message": diagnostic.message,
            })).collect::<Vec<_>>(),
        },
        "ship_receipt": {
            "slice_id": ship_receipt.slice_id,
            "commit": ship_receipt.commit,
            "ref_name": ship_receipt.ref_name,
        },
        "mock_pr": mock_pr,
        "eval_packs": SHIP_CAPTAIN_EVAL_PACKS,
    });

    if let Some(path) = inputs.mock_pr_out {
        write_json(path, &payload)
            .map_err(|error| format!("failed to write mock PR receipt: {error}"))?;
    }
    Ok(payload)
}
260
/// CLI wrapper for the `flow ship watch` subcommand: prints discovery
/// warnings (human mode only), builds the watch payload, and prints a
/// one-line summary plus the payload. Always returns exit code 0 on success.
fn run_ship_watch(args: &crate::cli::FlowShipWatchArgs) -> Result<i32, String> {
    let inputs = FlowShipWatchInputs {
        store: &args.store,
        predicate_root: &args.predicate_root,
        touched_dirs: &args.touched_dirs,
        persona: &args.persona,
        mock_pr_out: args.mock_pr_out.as_deref(),
    };

    if !args.json {
        // Surface discovery warnings before the payload is built so humans
        // see them first; hard discovery errors are reported by
        // ship_watch_payload itself, so they are skipped here.
        let chains = current_predicate_chains(&args.predicate_root, &args.touched_dirs);
        let diagnostics = discovery_diagnostics(&chains);
        if !has_discovery_error(&diagnostics) {
            print_discovery_warnings(&diagnostics);
        }
    }

    let payload = ship_watch_payload(&inputs)?;
    // Summary: idle store, mock PR with a slice id, or a generic receipt.
    let summary = match payload.get("status").and_then(|status| status.as_str()) {
        Some("idle") => "Ship Captain idle: no atoms in the Flow store.".to_string(),
        _ => match payload
            .get("slice")
            .and_then(|slice| slice.get("id"))
            .and_then(|id| id.as_str())
        {
            Some(slice_id) => format!("mock PR opened for candidate slice {slice_id}"),
            None => "Ship Captain receipt emitted.".to_string(),
        },
    };
    print_payload(args.json, &summary, &payload);
    Ok(0)
}
293
294fn serialize_ceiling_outcome(
295 outcome: &harn_vm::flow::PredicateCeilingOutcome,
296 ceiling: &harn_vm::flow::PredicateCeiling,
297) -> serde_json::Value {
298 use harn_vm::flow::{PredicateCeilingLevel, PredicateCeilingOutcome};
299 let mut payload = json!({
300 "count": outcome.count(),
301 "require_approval_threshold": ceiling.require_approval_threshold,
302 "block_threshold": ceiling.block_threshold,
303 });
304 match outcome {
305 PredicateCeilingOutcome::Within { .. } => {
306 payload["status"] = json!("within");
307 }
308 PredicateCeilingOutcome::Exceeded(violation) => {
309 payload["status"] = json!(match violation.level {
310 PredicateCeilingLevel::RequireApproval => "require_approval",
311 PredicateCeilingLevel::Block => "blocked",
312 });
313 payload["threshold"] = json!(violation.threshold);
314 payload["message"] = json!(violation.message());
315 payload["top_contributors"] = json!(violation
316 .top_contributors
317 .iter()
318 .map(|item| json!({
319 "relative_dir": item.relative_dir,
320 "count": item.count,
321 }))
322 .collect::<Vec<_>>());
323 }
324 }
325 payload
326}
327
328fn bootstrap_policy_payload(predicate_root: &Path) -> serde_json::Value {
329 use harn_vm::flow::Approver;
330 let Some(discovered) = harn_vm::flow::discover_bootstrap_policy(predicate_root) else {
331 return json!({
332 "status": "absent",
333 "path": predicate_root.join(harn_vm::flow::META_INVARIANTS_FILE),
334 });
335 };
336 let maintainers = discovered
337 .policy
338 .maintainers
339 .iter()
340 .map(|approver| match approver {
341 Approver::Role { name } => json!({"kind": "role", "id": name}),
342 Approver::Principal { id } => json!({"kind": "principal", "id": id}),
343 })
344 .collect::<Vec<_>>();
345 let diagnostics = discovered
346 .diagnostics
347 .iter()
348 .map(|diagnostic| {
349 json!({
350 "severity": discovery_severity_label(diagnostic.severity),
351 "message": diagnostic.message,
352 })
353 })
354 .collect::<Vec<_>>();
355 json!({
356 "status": "present",
357 "path": discovered.path,
358 "hash": discovered.policy.hash,
359 "maintainers": maintainers,
360 "diagnostics": diagnostics,
361 })
362}
363
364fn discovery_severity_label(severity: harn_vm::flow::DiscoveryDiagnosticSeverity) -> &'static str {
365 match severity {
366 harn_vm::flow::DiscoveryDiagnosticSeverity::Warning => "warning",
367 harn_vm::flow::DiscoveryDiagnosticSeverity::Error => "error",
368 }
369}
370
/// Run the `flow archivist scan` subcommand: inventory the repo, mine
/// convention and motion signals, collect already-discovered predicates,
/// assemble the propose-only proposal set, and shadow-evaluate it against
/// recent Flow atoms.
///
/// The resulting payload is optionally written to `args.out` and always
/// printed; success returns exit code 0.
fn run_archivist_scan(args: &crate::cli::FlowArchivistScanArgs) -> Result<i32, String> {
    // Canonicalize when possible, but fall back to the raw path (e.g. when
    // it does not resolve) rather than failing the whole scan.
    let repo = args
        .repo
        .canonicalize()
        .unwrap_or_else(|_| args.repo.clone());
    // Today's UTC date stamps each proposal's evidence source_date.
    let source_date = OffsetDateTime::now_utc().date().to_string();
    let inventory = inventory_repo(&repo);
    let stack_hints = inventory.stack_hints.clone();
    let manifest = load_archivist_manifest(&repo, args.manifest.as_deref());
    let invariant_files = find_invariant_dirs(&repo);
    // Deduplicate predicates across invariant files by their source hash.
    let mut seen = BTreeSet::new();
    let mut predicates = Vec::new();
    let mut discovery_diagnostics = Vec::new();
    for dir in &invariant_files {
        for file in harn_vm::flow::discover_invariants(&repo, dir) {
            let relative_dir = file.relative_dir.clone();
            for diagnostic in &file.diagnostics {
                discovery_diagnostics.push(json!({
                    "relative_dir": relative_dir,
                    "path": file.path,
                    "severity": format!("{:?}", diagnostic.severity).to_lowercase(),
                    "message": diagnostic.message,
                }));
            }
            for predicate in file.predicates {
                // Skip predicates whose source hash was already recorded.
                if !seen.insert(predicate.source_hash.clone()) {
                    continue;
                }
                predicates.push(json!({
                    "name": predicate.name,
                    "hash": predicate.source_hash,
                    "kind": predicate.kind,
                    "fallback": predicate.fallback,
                    "relative_dir": relative_dir.clone(),
                    "retroactive": predicate.retroactive,
                    "archivist": predicate.archivist.map(|archivist| json!({
                        "evidence": archivist.evidence,
                        "confidence": archivist.confidence,
                        "source_date": archivist.source_date,
                        "coverage_examples": archivist.coverage_examples,
                    })),
                }));
            }
        }
    }
    let convention = mine_convention_signals(&repo);
    let motion = mine_motion_signals(&repo);
    let bootstrap_payload = bootstrap_policy_payload(&repo);
    let proposals = archivist_proposals(
        &repo,
        &inventory,
        &convention,
        &motion,
        // An empty predicate set triggers the bootstrap proposal.
        predicates.is_empty(),
        &source_date,
    );
    let shadow_evaluation = shadow_evaluate(&repo, &args.store, args.shadow_days, &proposals)?;
    let payload = json!({
        "status": "proposal_set",
        "persona": {
            "name": "archivist",
            "mode": "propose_only",
            "autonomy": "propose_only",
            "promotion": "human_review_required",
        },
        "repo": repo,
        "manifest": manifest,
        "inventory": inventory,
        "stack_hints": stack_hints,
        "convention_signals": convention,
        "motion_signals": motion,
        "seed_library": {
            "repository": "https://github.com/burin-labs/harn-canon",
            "strategy": "detected-stack seeds are copied into proposals, then repo-local evidence prunes them before review",
        },
        "existing_predicates": predicates,
        "discovery_diagnostics": discovery_diagnostics,
        "bootstrap_policy": bootstrap_payload,
        "proposals": proposals,
        "shadow_evaluation": shadow_evaluation,
    });

    if let Some(path) = &args.out {
        write_json(path, &payload)
            .map_err(|error| format!("failed to write Archivist proposal set: {error}"))?;
    }
    print_payload(args.json, "Archivist proposal set emitted.", &payload);
    Ok(0)
}
460
/// Summary of well-known stack marker files found at the repository root.
#[derive(Clone, Debug, Default, Serialize)]
struct RepoInventory {
    // Detected stacks (e.g. "rust", "python"); deduplicated, insertion order.
    stack_hints: Vec<&'static str>,
    // Root-level lockfiles found (e.g. "Cargo.lock").
    lockfiles: Vec<String>,
    // Root-level config files found (e.g. "Cargo.toml", ".github/workflows").
    config_files: Vec<String>,
    // Source roots present (e.g. "crates", "src").
    source_roots: Vec<String>,
}
468
/// A single convention signal mined from the repository.
#[derive(Clone, Debug, Serialize)]
struct Signal {
    // Signal category: "lint_config" or "inline_invariant".
    kind: &'static str,
    // Repo-relative path; inline-invariant signals append ":<line>".
    path: String,
    // Short description or comment excerpt (truncated to 180 chars).
    detail: String,
}
475
/// Aggregated recent-commit activity for one motion bucket.
#[derive(Clone, Debug, Serialize)]
struct MotionSignal {
    // Bucket name, e.g. "tests" or "lint_format".
    kind: &'static str,
    // Number of matching commit subjects in the scan window.
    count: usize,
    // Up to five example subjects, each truncated to 140 chars.
    examples: Vec<String>,
}
482
/// One Archivist predicate proposal, built by the `*_proposal` constructors.
///
/// Serialized via a custom `Serialize` impl that omits `match_terms`,
/// which is consumed only by shadow evaluation.
#[derive(Clone, Debug)]
struct ArchivistProposal {
    // Stable proposal identifier, e.g. "rust-unsafe-safety-comment".
    id: &'static str,
    // Short human-readable title.
    title: &'static str,
    // Repo-relative file the predicate would live in (e.g. "invariants.harn").
    path: String,
    // Why this proposal was generated for this repository.
    rationale: String,
    // Name of the generated predicate function.
    predicate_name: &'static str,
    // Substrings matched against inserted atom text during shadow evaluation.
    match_terms: Vec<&'static str>,
    // Supporting evidence references (URLs or commands).
    evidence: Vec<String>,
    // Heuristic confidence score; constructors use values between 0 and 1.
    confidence: f64,
    // Example files/paths the predicate would cover.
    coverage_examples: Vec<String>,
    // Rendered `.harn` predicate source text.
    source: String,
}
496
impl Serialize for ArchivistProposal {
    /// Custom serialization: emits the proposal fields plus fixed
    /// `autonomy`/`promotion` markers, renames `source` to
    /// `predicate_source`, and deliberately omits `match_terms` (an
    /// internal shadow-evaluation input, not part of the output contract).
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // The declared field count (11) must match the serialize_field
        // calls below.
        let mut state = serializer.serialize_struct("ArchivistProposal", 11)?;
        state.serialize_field("id", self.id)?;
        state.serialize_field("title", self.title)?;
        state.serialize_field("path", &self.path)?;
        state.serialize_field("rationale", &self.rationale)?;
        state.serialize_field("predicate_name", self.predicate_name)?;
        state.serialize_field("autonomy", "propose_only")?;
        state.serialize_field("promotion", "human_review_required")?;
        state.serialize_field("evidence", &self.evidence)?;
        state.serialize_field("confidence", &self.confidence)?;
        state.serialize_field("coverage_examples", &self.coverage_examples)?;
        state.serialize_field("predicate_source", &self.source)?;
        state.end()
    }
}
514
515fn inventory_repo(repo: &Path) -> RepoInventory {
516 let mut inventory = RepoInventory::default();
517 let known = [
518 ("Cargo.toml", "rust", "config"),
519 ("Cargo.lock", "rust", "lockfile"),
520 ("rust-toolchain.toml", "rust", "config"),
521 ("rustfmt.toml", "rust", "config"),
522 ("clippy.toml", "rust", "config"),
523 ("package.json", "javascript", "config"),
524 ("package-lock.json", "javascript", "lockfile"),
525 ("pnpm-lock.yaml", "javascript", "lockfile"),
526 ("yarn.lock", "javascript", "lockfile"),
527 ("tsconfig.json", "typescript", "config"),
528 ("pyproject.toml", "python", "config"),
529 ("poetry.lock", "python", "lockfile"),
530 ("uv.lock", "python", "lockfile"),
531 ("go.mod", "go", "config"),
532 ("go.sum", "go", "lockfile"),
533 ("Package.swift", "swift", "config"),
534 ];
535 for (path, stack, kind) in known {
536 if repo.join(path).exists() {
537 push_unique(&mut inventory.stack_hints, stack);
538 match kind {
539 "lockfile" => inventory.lockfiles.push(path.to_string()),
540 _ => inventory.config_files.push(path.to_string()),
541 }
542 }
543 }
544 if repo.join(".github/workflows").is_dir() {
545 inventory.config_files.push(".github/workflows".to_string());
546 }
547 for root in ["crates", "src", "docs/src", "conformance/tests", "examples"] {
548 if repo.join(root).exists() {
549 inventory.source_roots.push(root.to_string());
550 }
551 }
552 inventory
553}
554
/// Append `value` to `values` only when it is not already present,
/// preserving insertion order.
fn push_unique(values: &mut Vec<&'static str>, value: &'static str) {
    let already_present = values.iter().any(|existing| *existing == value);
    if !already_present {
        values.push(value);
    }
}
560
/// Locate and load the persona manifest, preferring an explicitly supplied
/// path; otherwise probe well-known locations under `repo`.
///
/// Result statuses, in order of preference:
/// - `"loaded"` — a manifest containing an `archivist` persona;
/// - `"loaded_without_archivist"` — first parsed manifest lacking one;
/// - `"invalid"` — parse errors (returned immediately for an explicit path);
/// - `"not_found"` — no candidate manifest existed.
fn load_archivist_manifest(repo: &Path, explicit: Option<&Path>) -> serde_json::Value {
    let explicit_manifest = explicit.is_some();
    // An explicit path is used as-is (even if missing); otherwise only
    // existing files from the default search list are considered.
    let candidates = explicit
        .map(|path| vec![path.to_path_buf()])
        .unwrap_or_else(|| {
            [
                repo.join("harn.toml"),
                repo.join("examples/personas/flow.harn.toml"),
                repo.join("examples/personas/harn.toml"),
            ]
            .into_iter()
            .filter(|path| path.is_file())
            .collect()
        });
    // Track the best fallback outcomes seen while scanning candidates.
    let mut loaded_without_archivist = None;
    let mut first_invalid = None;
    for candidate in candidates {
        match crate::package::load_personas_from_manifest_path(&candidate) {
            Ok(catalog) => {
                let archivist = catalog
                    .personas
                    .iter()
                    .find(|persona| persona.name.as_deref() == Some("archivist"));
                // An archivist persona short-circuits the search entirely.
                if let Some(persona) = archivist {
                    return json!({
                        "status": "loaded",
                        "path": catalog.manifest_path,
                        "persona": persona,
                    });
                }
                loaded_without_archivist.get_or_insert_with(|| json!({
                    "status": "loaded_without_archivist",
                    "path": catalog.manifest_path,
                    "personas": catalog.personas.iter().filter_map(|p| p.name.clone()).collect::<Vec<_>>(),
                }));
            }
            Err(errors) => {
                let invalid = json!({
                    "status": "invalid",
                    "path": candidate,
                    "errors": errors.iter().map(ToString::to_string).collect::<Vec<_>>(),
                });
                // For an explicitly supplied manifest, a parse failure is terminal.
                if explicit_manifest {
                    return invalid;
                }
                first_invalid.get_or_insert(invalid);
            }
        }
    }
    if let Some(loaded) = loaded_without_archivist {
        return loaded;
    }
    if let Some(invalid) = first_invalid {
        return invalid;
    }
    json!({
        "status": "not_found",
        "searched": ["harn.toml", "examples/personas/flow.harn.toml", "examples/personas/harn.toml"],
    })
}
621
/// Mine convention signals from the repository: well-known lint/format
/// config files and inline `invariant:` comments inside `.harn`, `.rs`,
/// `.md`, and `.toml` files.
///
/// Walks at most 4,000 files and returns early once 80 signals have been
/// collected, bounding scan cost on large repositories.
fn mine_convention_signals(repo: &Path) -> Vec<Signal> {
    let mut signals = Vec::new();
    for path in walk_repo_files(repo, 4_000) {
        let relative = relative_path(repo, &path);
        let file_name = path
            .file_name()
            .and_then(|name| name.to_str())
            .unwrap_or("");
        if matches!(
            file_name,
            "rustfmt.toml" | "clippy.toml" | "deny.toml" | ".markdownlint.json" | ".prettierrc"
        ) {
            signals.push(Signal {
                kind: "lint_config",
                path: relative.clone(),
                detail: "repo-local style or lint policy".to_string(),
            });
        }
        if relative.ends_with(".harn")
            || relative.ends_with(".rs")
            || relative.ends_with(".md")
            || relative.ends_with(".toml")
        {
            // Unreadable files (binary or non-UTF-8) are silently skipped.
            if let Ok(source) = fs::read_to_string(&path) {
                for (index, line) in source.lines().enumerate() {
                    let trimmed = line.trim_start();
                    // Only lines that look like //, #, or <!-- comments count.
                    let is_comment = trimmed.starts_with("//")
                        || trimmed.starts_with('#')
                        || trimmed.starts_with("<!--");
                    if is_comment {
                        // Case-insensitive search; the byte offset found in the
                        // lowercased copy is valid in `trimmed` because ASCII
                        // lowercasing preserves byte length.
                        if let Some(pos) = trimmed.to_ascii_lowercase().find("invariant:") {
                            signals.push(Signal {
                                kind: "inline_invariant",
                                // Append the 1-based line number for display.
                                path: format!("{relative}:{}", index + 1),
                                // Cap the excerpt at 180 characters.
                                detail: trimmed[pos..].trim().chars().take(180).collect(),
                            });
                        }
                    }
                    // Global cap: stop scanning entirely at 80 signals.
                    if signals.len() >= 80 {
                        return signals;
                    }
                }
            }
        }
    }
    signals
}
669
670fn mine_motion_signals(repo: &Path) -> Vec<MotionSignal> {
671 let output = Command::new("git")
672 .arg("-C")
673 .arg(repo)
674 .args([
675 "log",
676 "--since=90 days ago",
677 "--pretty=%s",
678 "--max-count=200",
679 ])
680 .output();
681 let Ok(output) = output else {
682 return Vec::new();
683 };
684 if !output.status.success() {
685 return Vec::new();
686 }
687 let stdout = String::from_utf8_lossy(&output.stdout);
688 let buckets: [(&str, &[&str]); 4] = [
689 ("tests", &["test", "coverage", "conformance"]),
690 ("lint_format", &["lint", "format", "fmt", "clippy"]),
691 (
692 "flow_predicates",
693 &["flow", "predicate", "invariant", "archivist"],
694 ),
695 ("release_docs", &["release", "docs", "changelog"]),
696 ];
697 let mut counts: BTreeMap<&'static str, Vec<String>> = BTreeMap::new();
698 for subject in stdout.lines() {
699 let lower = subject.to_ascii_lowercase();
700 for (kind, terms) in buckets {
701 if terms.iter().any(|term| lower.contains(term)) {
702 counts
703 .entry(kind)
704 .or_default()
705 .push(subject.chars().take(140).collect());
706 }
707 }
708 }
709 counts
710 .into_iter()
711 .map(|(kind, examples)| MotionSignal {
712 kind,
713 count: examples.len(),
714 examples: examples.into_iter().take(5).collect(),
715 })
716 .collect()
717}
718
719fn archivist_proposals(
720 repo: &Path,
721 inventory: &RepoInventory,
722 convention: &[Signal],
723 motion: &[MotionSignal],
724 no_existing_predicates: bool,
725 source_date: &str,
726) -> Vec<ArchivistProposal> {
727 let mut proposals = Vec::new();
728 if no_existing_predicates {
729 proposals.push(bootstrap_proposal(source_date));
730 }
731 if inventory.stack_hints.contains(&"rust") {
732 proposals.push(rust_unsafe_proposal(repo, source_date));
733 proposals.push(rust_panics_proposal(repo, source_date));
734 }
735 if inventory
736 .config_files
737 .iter()
738 .any(|path| path == ".github/workflows")
739 {
740 proposals.push(github_actions_permissions_proposal(source_date));
741 }
742 if motion
743 .iter()
744 .any(|signal| signal.kind == "tests" && signal.count >= 3)
745 {
746 proposals.push(test_motion_proposal(source_date));
747 }
748 let inline_signals = convention
749 .iter()
750 .filter(|signal| signal.kind == "inline_invariant")
751 .take(5)
752 .collect::<Vec<_>>();
753 if !inline_signals.is_empty() {
754 proposals.push(inline_invariant_proposal(&inline_signals, source_date));
755 }
756 proposals
757}
758
/// Proposal seeded when the repo has no discovered Flow predicates yet:
/// review-only bootstrap metadata for predicate authorship.
fn bootstrap_proposal(source_date: &str) -> ArchivistProposal {
    let evidence = vec![
        "https://slsa.dev/spec/v1.0/provenance".to_string(),
        "https://in-toto.io/attestation-spec/".to_string(),
    ];
    let coverage_examples = vec![
        "invariants.harn".to_string(),
        "meta-invariants.harn".to_string(),
    ];
    proposal(
        "bootstrap-meta-invariants",
        "Seed repo-wide predicate authorship metadata",
        "invariants.harn",
        "The repository has no discovered Flow predicates; seed review-only bootstrap metadata before expanding policy.",
        "predicate_metadata_is_reviewable",
        vec!["@archivist", "@semantic", "@deterministic"],
        evidence,
        0.72,
        coverage_examples,
        source_date,
        "flow_invariant_warn(\"bootstrap predicate metadata should be reviewed by a human maintainer\")",
    )
}
782
/// Rust-specific proposal: warn when new `unsafe` code lacks nearby SAFETY
/// rationale. Coverage examples come from up to five `.rs` files that
/// already contain `unsafe`.
fn rust_unsafe_proposal(repo: &Path, source_date: &str) -> ArchivistProposal {
    let examples = files_containing(repo, "unsafe", &["rs"], 5);
    proposal(
        "rust-unsafe-safety-comment",
        "Require review evidence near new Rust unsafe blocks",
        "invariants.harn",
        "Rust is detected and unsafe blocks are a recurring high-value review boundary; propose a deterministic guard that warns on unsafe additions without nearby safety rationale.",
        "rust_unsafe_requires_safety_comment",
        vec!["unsafe", "SAFETY:"],
        vec![
            "https://doc.rust-lang.org/clippy/lint_configuration.html#undocumented_unsafe_blocks".to_string(),
            "https://rust-lang.github.io/api-guidelines/documentation.html".to_string(),
        ],
        0.82,
        examples,
        source_date,
        "flow_invariant_warn(\"new unsafe code should include nearby SAFETY rationale or explicit reviewer approval\")",
    )
}
802
803fn rust_panics_proposal(repo: &Path, source_date: &str) -> ArchivistProposal {
804 let mut examples = files_containing(repo, "panic!", &["rs"], 5);
805 examples.extend(files_containing(
806 repo,
807 ".unwrap()",
808 &["rs"],
809 5 - examples.len().min(5),
810 ));
811 proposal(
812 "rust-library-panic-surface",
813 "Flag new library panic surfaces without tests or documentation",
814 "invariants.harn",
815 "The Rust API Guidelines call out documented panic conditions; Flow can cheaply warn when atoms add panic-prone surfaces in library crates.",
816 "rust_library_panics_are_documented",
817 vec!["panic!", "unwrap()", "expect("],
818 vec![
819 "https://rust-lang.github.io/api-guidelines/documentation.html#c-failure".to_string(),
820 "https://rust-lang.github.io/rust-clippy/beta/".to_string(),
821 ],
822 0.76,
823 examples,
824 source_date,
825 "flow_invariant_warn(\"new panic-prone Rust paths should include tests or documented panic conditions\")",
826 )
827}
828
/// Proposal emitted when `.github/workflows` exists: warn on workflow edits
/// that do not carry explicit least-privilege permissions.
fn github_actions_permissions_proposal(source_date: &str) -> ArchivistProposal {
    proposal(
        "github-actions-minimal-permissions",
        "Warn on workflow edits without explicit permissions",
        ".github/invariants.harn",
        "GitHub workflow files are present; explicit job/workflow permissions make CI authority reviewable and reduce supply-chain blast radius.",
        "github_actions_permissions_are_explicit",
        vec!["permissions:", "uses:"],
        vec![
            "https://docs.github.com/actions/security-for-github-actions/security-guides/security-hardening-for-github-actions".to_string(),
            "https://docs.github.com/code-security/supply-chain-security/understanding-your-software-supply-chain/about-supply-chain-security".to_string(),
        ],
        0.79,
        vec![".github/workflows".to_string()],
        source_date,
        "flow_invariant_warn(\"workflow edits should keep explicit least-privilege permissions\")",
    )
}
847
/// Proposal emitted when recent commit history shows sustained test motion:
/// warn when Flow atoms lack nearby test/conformance coverage evidence.
fn test_motion_proposal(source_date: &str) -> ArchivistProposal {
    proposal(
        "motion-tests-near-flow-changes",
        "Keep test coverage close to recurring Flow changes",
        "invariants.harn",
        "Recent history repeatedly touches tests/conformance around Flow work; propose a warning when Flow atoms lack nearby test coverage evidence.",
        "flow_changes_keep_tests_nearby",
        vec!["flow", "predicate", "conformance", "test"],
        vec![
            "git log --since='90 days ago' --pretty=%s".to_string(),
            "conformance/tests/".to_string(),
        ],
        0.68,
        vec!["crates/harn-vm/src/flow".to_string(), "conformance/tests".to_string()],
        source_date,
        "flow_invariant_warn(\"Flow predicate/runtime changes should carry focused tests or conformance coverage\")",
    )
}
866
/// Proposal emitted when inline `invariant:` comments were mined: suggest
/// graduating recurring comments into reviewable Flow predicates. The
/// signal paths serve as both evidence and coverage examples.
fn inline_invariant_proposal(signals: &[&Signal], source_date: &str) -> ArchivistProposal {
    let id = "inline-invariant-crystallization";
    let examples = signals
        .iter()
        .map(|signal| signal.path.clone())
        .collect::<Vec<_>>();
    proposal(
        id,
        "Crystallize inline invariant comment into Flow predicate",
        "invariants.harn",
        "Found inline invariant comments; propose turning recurring comments into reviewable predicate metadata.",
        "inline_invariant_comment_is_crystallized",
        vec!["invariant:"],
        examples.clone(),
        0.64,
        examples,
        source_date,
        "flow_invariant_warn(\"inline invariant comments should graduate into reviewable Flow predicates when they recur\")",
    )
}
887
888#[allow(clippy::too_many_arguments)]
889fn proposal(
890 id: &'static str,
891 title: &'static str,
892 path: &str,
893 rationale: &str,
894 predicate_name: &'static str,
895 match_terms: Vec<&'static str>,
896 evidence: Vec<String>,
897 confidence: f64,
898 coverage_examples: Vec<String>,
899 source_date: &str,
900 result_expr: &str,
901) -> ArchivistProposal {
902 let evidence_harn = evidence
903 .iter()
904 .map(|item| format!("{:?}", item))
905 .collect::<Vec<_>>()
906 .join(", ");
907 let coverage_harn = coverage_examples
908 .iter()
909 .map(|item| format!("{:?}", item))
910 .collect::<Vec<_>>()
911 .join(", ");
912 let source = format!(
913 "@invariant\n@deterministic\n@archivist(evidence: [{evidence_harn}], confidence: {confidence:.2}, source_date: {:?}, coverage_examples: [{coverage_harn}])\nfn {predicate_name}(slice) {{\n return {result_expr}\n}}\n",
914 source_date
915 );
916 ArchivistProposal {
917 id,
918 title,
919 path: path.to_string(),
920 rationale: rationale.to_string(),
921 predicate_name,
922 match_terms,
923 evidence,
924 confidence,
925 coverage_examples,
926 source,
927 }
928}
929
/// Shadow-evaluate the proposal set against Flow atoms recorded in the last
/// `shadow_days` days.
///
/// For each proposal, counts recent atoms whose inserted text contains any
/// of its match terms (case-insensitive) and collects likely false
/// positives. When the store file is missing, returns a `"no_flow_store"`
/// payload with zeroed per-proposal results instead of an error.
fn shadow_evaluate(
    repo: &Path,
    store_path: &Path,
    shadow_days: u32,
    proposals: &[ArchivistProposal],
) -> Result<serde_json::Value, String> {
    // Relative store paths are resolved against the repository root.
    let store_path = if store_path.is_absolute() {
        store_path.to_path_buf()
    } else {
        repo.join(store_path)
    };
    if !store_path.is_file() {
        return Ok(json!({
            "status": "no_flow_store",
            "store": store_path,
            "window_days": shadow_days,
            "recent_atoms": 0,
            "proposal_results": empty_shadow_results(proposals),
            "false_positive_candidates": [],
        }));
    }
    let store = SqliteFlowStore::open(&store_path, "archivist-shadow").map_err(|error| {
        format!(
            "failed to open Flow store {}: {error}",
            store_path.display()
        )
    })?;
    let since = OffsetDateTime::now_utc() - Duration::days(i64::from(shadow_days));
    let refs = store
        .list_atoms()
        .map_err(|error| format!("failed to list Flow atoms: {error}"))?;
    // Keep only atoms recorded inside the shadow window.
    let mut recent_atoms = Vec::new();
    for atom_ref in refs {
        let atom = store
            .get_atom(atom_ref.atom_id)
            .map_err(|error| format!("failed to load Flow atom {}: {error}", atom_ref.atom_id))?;
        if atom.provenance.timestamp >= since {
            recent_atoms.push(atom);
        }
    }

    let mut false_positive_candidates = Vec::new();
    let mut results = Vec::new();
    for proposal in proposals {
        let mut matched_atoms = 0usize;
        for atom in &recent_atoms {
            let inserted = inserted_text(atom);
            // Case-insensitive substring match against any proposal term.
            if proposal.match_terms.iter().any(|term| {
                inserted
                    .to_ascii_lowercase()
                    .contains(&term.to_ascii_lowercase())
            }) {
                matched_atoms += 1;
                if likely_false_positive(proposal, &inserted) {
                    false_positive_candidates.push(json!({
                        "proposal_id": proposal.id,
                        "atom": atom.id,
                        "transcript_ref": atom.provenance.transcript_ref,
                        "diff_span": first_insert_span(atom),
                        "reason": "heuristic match may already contain satisfying context",
                    }));
                }
            }
        }
        results.push(json!({
            "proposal_id": proposal.id,
            "recent_atoms": recent_atoms.len(),
            "matching_atoms": matched_atoms,
            "estimated_coverage": if recent_atoms.is_empty() { 0.0 } else { matched_atoms as f64 / recent_atoms.len() as f64 },
        }));
    }
    Ok(json!({
        "status": "evaluated",
        "store": store_path,
        "window_days": shadow_days,
        "recent_atoms": recent_atoms.len(),
        "proposal_results": results,
        "false_positive_candidates": false_positive_candidates,
    }))
}
1010
1011fn empty_shadow_results(proposals: &[ArchivistProposal]) -> Vec<serde_json::Value> {
1012 proposals
1013 .iter()
1014 .map(|proposal| {
1015 json!({
1016 "proposal_id": proposal.id,
1017 "recent_atoms": 0,
1018 "matching_atoms": 0,
1019 "estimated_coverage": 0.0,
1020 })
1021 })
1022 .collect()
1023}
1024
1025fn inserted_text(atom: &harn_vm::flow::Atom) -> String {
1026 atom.ops
1027 .iter()
1028 .filter_map(|op| match op {
1029 TextOp::Insert { content, .. } => Some(content.as_str()),
1030 TextOp::Delete { .. } => None,
1031 })
1032 .collect::<Vec<_>>()
1033 .join("\n")
1034}
1035
1036fn first_insert_span(atom: &harn_vm::flow::Atom) -> serde_json::Value {
1037 atom.ops
1038 .iter()
1039 .find_map(|op| match op {
1040 TextOp::Insert { offset, content } => Some(json!({
1041 "start": offset,
1042 "end": offset.saturating_add(content.len() as u64),
1043 })),
1044 TextOp::Delete { .. } => None,
1045 })
1046 .unwrap_or_else(|| json!({"start": 0, "end": 0}))
1047}
1048
1049fn likely_false_positive(proposal: &ArchivistProposal, inserted: &str) -> bool {
1050 match proposal.id {
1051 "rust-unsafe-safety-comment" => {
1052 inserted.contains("unsafe") && inserted.to_ascii_lowercase().contains("safety")
1053 }
1054 "github-actions-minimal-permissions" => {
1055 inserted.contains("permissions:") && inserted.contains("uses:")
1056 }
1057 _ => false,
1058 }
1059}
1060
1061fn files_containing(repo: &Path, needle: &str, extensions: &[&str], limit: usize) -> Vec<String> {
1062 if limit == 0 {
1063 return Vec::new();
1064 }
1065 let needle = needle.to_ascii_lowercase();
1066 let mut matches = Vec::new();
1067 for path in walk_repo_files(repo, 4_000) {
1068 let Some(ext) = path.extension().and_then(|ext| ext.to_str()) else {
1069 continue;
1070 };
1071 if !extensions.contains(&ext) {
1072 continue;
1073 }
1074 let Ok(source) = fs::read_to_string(&path) else {
1075 continue;
1076 };
1077 if source.to_ascii_lowercase().contains(&needle) {
1078 matches.push(relative_path(repo, &path));
1079 if matches.len() >= limit {
1080 break;
1081 }
1082 }
1083 }
1084 matches
1085}
1086
1087fn walk_repo_files(repo: &Path, limit: usize) -> Vec<PathBuf> {
1088 let mut files = Vec::new();
1089 collect_repo_files(repo, repo, limit, &mut files);
1090 files
1091}
1092
1093fn collect_repo_files(root: &Path, dir: &Path, limit: usize, out: &mut Vec<PathBuf>) {
1094 if out.len() >= limit {
1095 return;
1096 }
1097 let Ok(entries) = fs::read_dir(dir) else {
1098 return;
1099 };
1100 let mut entries: Vec<_> = entries.filter_map(Result::ok).collect();
1101 entries.sort_by_key(|entry| entry.path());
1102 for entry in entries {
1103 if out.len() >= limit {
1104 return;
1105 }
1106 let path = entry.path();
1107 let name = path
1108 .file_name()
1109 .and_then(|name| name.to_str())
1110 .unwrap_or_default();
1111 if path.is_dir() {
1112 if should_skip_scan_dir(name) {
1113 continue;
1114 }
1115 collect_repo_files(root, &path, limit, out);
1116 } else if path.is_file() {
1117 let relative = relative_path(root, &path);
1118 if !relative.ends_with(".lock")
1119 || matches!(name, "Cargo.lock" | "package-lock.json" | "yarn.lock")
1120 {
1121 out.push(path);
1122 }
1123 }
1124 }
1125}
1126
/// True when a directory with this file name should be excluded from repo
/// scans (VCS metadata, build output, vendored deps, tool state).
///
/// NOTE(review): the `"docs/dist"` entry can never match in practice —
/// callers pass a single path component from `Path::file_name`, which
/// cannot contain `/`. Confirm whether `dist` under `docs/` was the intent.
fn should_skip_scan_dir(name: &str) -> bool {
    const SKIPPED: [&str; 8] = [
        ".git",
        "target",
        "node_modules",
        "docs/dist",
        ".harn",
        ".harn-runs",
        ".claude",
        ".burin",
    ];
    SKIPPED.contains(&name)
}
1140
/// Renders `path` relative to `root` using `/` separators on every
/// platform; non-normal components (root dirs, `..`, prefixes) are dropped.
/// When `path` is not under `root`, `path` itself is rendered instead.
fn relative_path(root: &Path, path: &Path) -> String {
    let trimmed = path.strip_prefix(root).unwrap_or(path);
    let mut parts: Vec<String> = Vec::new();
    for component in trimmed.components() {
        if let std::path::Component::Normal(name) = component {
            parts.push(name.to_string_lossy().into_owned());
        }
    }
    parts.join("/")
}
1152
1153fn current_predicate_chains(
1154 root: &Path,
1155 touched_dirs: &[PathBuf],
1156) -> Vec<Vec<harn_vm::flow::DiscoveredInvariantFile>> {
1157 let dirs: Vec<PathBuf> = if touched_dirs.is_empty() {
1158 vec![PathBuf::from(".")]
1159 } else {
1160 touched_dirs.to_vec()
1161 };
1162 dirs.into_iter()
1163 .map(|dir| harn_vm::flow::discover_invariants(root, &dir))
1164 .collect()
1165}
1166
1167fn open_store(path: &Path) -> Result<SqliteFlowStore, String> {
1168 if let Some(parent) = path
1169 .parent()
1170 .filter(|parent| !parent.as_os_str().is_empty())
1171 {
1172 fs::create_dir_all(parent).map_err(|error| error.to_string())?;
1173 }
1174 SqliteFlowStore::open(path, "flow-cli").map_err(|error| error.to_string())
1175}
1176
1177fn find_invariant_dirs(root: &Path) -> Vec<PathBuf> {
1178 let mut dirs = Vec::new();
1179 collect_invariant_dirs(root, root, &mut dirs);
1180 dirs
1181}
1182
/// Depth-first search (in sorted entry order) for directories holding an
/// `invariants.harn` file; each match's parent directory is pushed onto
/// `out`. `.git`, `target`, and `node_modules` subtrees are not descended
/// into. Unreadable directories are skipped silently.
fn collect_invariant_dirs(root: &Path, dir: &Path, out: &mut Vec<PathBuf>) {
    let entries = match fs::read_dir(dir) {
        Ok(entries) => entries,
        Err(_) => return,
    };
    let mut sorted: Vec<_> = entries.filter_map(Result::ok).collect();
    sorted.sort_by_key(|entry| entry.path());
    for entry in sorted {
        let path = entry.path();
        if path.is_dir() {
            let skip = path
                .file_name()
                .and_then(|name| name.to_str())
                .map_or(false, |name| matches!(name, ".git" | "target" | "node_modules"));
            if !skip {
                collect_invariant_dirs(root, &path, out);
            }
        } else if path.file_name().and_then(|name| name.to_str()) == Some("invariants.harn") {
            // `root` is only a fallback; a file always has a parent here.
            out.push(path.parent().unwrap_or(root).to_path_buf());
        }
    }
}
1205
/// Prints the replay-audit result for humans: a one-line summary followed
/// by per-slice drift details — current @retroactive predicates the slice
/// did not pin, and pinned hashes no longer present in the current set.
fn print_human_report(
    since: &str,
    report: &harn_vm::flow::ReplayAuditReport,
    created_at_by_slice: &std::collections::BTreeMap<harn_vm::flow::SliceId, String>,
) {
    println!(
        "Audited {} shipped derived slice(s) since {since}; {} slice(s) have advisory drift.",
        report.audited_slices, report.drifted_slices
    );
    if report.slices.is_empty() {
        return;
    }
    for slice in &report.slices {
        // Creation time is supplied by the caller; absence is not an error.
        let created_at = created_at_by_slice
            .get(&slice.slice_id)
            .map(String::as_str)
            .unwrap_or("unknown");
        println!("slice {} created_at={created_at}", slice.slice_id);
        if !slice.advisory_drift.is_empty() {
            println!("  current @retroactive predicates not pinned:");
            for predicate in &slice.advisory_drift {
                println!("    - {} {}", predicate.name, predicate.hash.as_str());
            }
        }
        if !slice.historical_only_predicates.is_empty() {
            println!("  historical predicate hashes no longer in current set:");
            for hash in &slice.historical_only_predicates {
                println!("    - {}", hash.as_str());
            }
        }
    }
}
1238
1239fn discovery_diagnostics(
1240 chains: &[Vec<harn_vm::flow::DiscoveredInvariantFile>],
1241) -> Vec<(String, &harn_vm::flow::DiscoveryDiagnostic)> {
1242 chains
1243 .iter()
1244 .flat_map(|chain| chain.iter())
1245 .flat_map(|file| {
1246 file.diagnostics
1247 .iter()
1248 .map(move |diagnostic| (file.path.display().to_string(), diagnostic))
1249 })
1250 .collect()
1251}
1252
1253fn has_discovery_error(diagnostics: &[(String, &harn_vm::flow::DiscoveryDiagnostic)]) -> bool {
1254 diagnostics.iter().any(|(_, diagnostic)| {
1255 diagnostic.severity == harn_vm::flow::DiscoveryDiagnosticSeverity::Error
1256 })
1257}
1258
1259fn print_discovery_warnings(diagnostics: &[(String, &harn_vm::flow::DiscoveryDiagnostic)]) {
1260 for (path, diagnostic) in diagnostics.iter().filter(|(_, diagnostic)| {
1261 diagnostic.severity == harn_vm::flow::DiscoveryDiagnosticSeverity::Warning
1262 }) {
1263 eprintln!("warning: {path}: {}", diagnostic.message);
1264 }
1265}
1266
1267fn render_discovery_diagnostics(
1268 diagnostics: &[(String, &harn_vm::flow::DiscoveryDiagnostic)],
1269) -> String {
1270 diagnostics
1271 .iter()
1272 .map(|(path, diagnostic)| format!("{path}: {}", diagnostic.message))
1273 .collect::<Vec<_>>()
1274 .join("\n")
1275}
1276
1277fn write_json(path: &Path, value: &serde_json::Value) -> Result<(), std::io::Error> {
1278 if let Some(parent) = path
1279 .parent()
1280 .filter(|parent| !parent.as_os_str().is_empty())
1281 {
1282 fs::create_dir_all(parent)?;
1283 }
1284 fs::write(path, serde_json::to_vec_pretty(value).unwrap())
1285}
1286
1287fn print_payload(json_output: bool, text: &str, payload: &serde_json::Value) {
1288 if json_output {
1289 println!("{}", serde_json::to_string_pretty(payload).unwrap());
1290 } else {
1291 println!("{text}");
1292 }
1293}
1294
1295fn parse_since(raw: &str) -> Result<OffsetDateTime, String> {
1296 if let Ok(parsed) = OffsetDateTime::parse(raw, &Rfc3339) {
1297 return Ok(parsed);
1298 }
1299 if let Ok(unix) = raw.parse::<i64>() {
1300 let parsed = if raw.len() > 10 {
1301 OffsetDateTime::from_unix_timestamp_nanos(unix as i128 * 1_000_000)
1302 } else {
1303 OffsetDateTime::from_unix_timestamp(unix)
1304 };
1305 return parsed.map_err(|error| format!("invalid --since timestamp '{raw}': {error}"));
1306 }
1307 let date_format = time::format_description::parse("[year]-[month]-[day]")
1308 .map_err(|error| format!("failed to build date parser: {error}"))?;
1309 let date = Date::parse(raw, &date_format).map_err(|_| {
1310 format!("invalid --since date '{raw}'; use RFC3339, unix time, or YYYY-MM-DD")
1311 })?;
1312 Ok(date.with_time(Time::MIDNIGHT).assume_utc())
1313}
1314
#[cfg(test)]
mod tests {
    use super::*;
    use ed25519_dalek::SigningKey;
    use harn_vm::flow::{Atom, Provenance};

    /// All three accepted `--since` formats resolve to the expected UTC
    /// instants: RFC3339, bare unix seconds, and `YYYY-MM-DD` (midnight UTC).
    #[test]
    fn parse_since_accepts_rfc3339_unix_and_date() {
        assert_eq!(
            parse_since("2026-04-26T12:00:00Z")
                .unwrap()
                .unix_timestamp(),
            1_777_204_800
        );
        assert_eq!(
            parse_since("1777205600").unwrap().unix_timestamp(),
            1_777_205_600
        );
        assert_eq!(
            parse_since("2026-04-26").unwrap().unix_timestamp(),
            1_777_161_600
        );
    }

    /// The generated Rust unsafe-safety proposal must be valid `.harn`
    /// source: it parses without diagnostics, exposes the expected
    /// predicate name, and carries archivist provenance metadata.
    #[test]
    fn archivist_rust_proposal_is_parseable_harn_with_provenance() {
        // Minimal fixture repo: a Cargo manifest plus one `unsafe fn`.
        let temp = tempfile::tempdir().unwrap();
        fs::write(
            temp.path().join("Cargo.toml"),
            "[package]\nname = \"demo\"\n",
        )
        .unwrap();
        fs::create_dir_all(temp.path().join("src")).unwrap();
        fs::write(temp.path().join("src/lib.rs"), "pub unsafe fn raw() {}\n").unwrap();

        let inventory = inventory_repo(temp.path());
        let proposals = archivist_proposals(temp.path(), &inventory, &[], &[], true, "2026-04-26");
        let rust = proposals
            .iter()
            .find(|proposal| proposal.id == "rust-unsafe-safety-comment")
            .expect("rust unsafe proposal");

        let parsed = harn_vm::flow::parse_invariants_source(&rust.source);
        assert!(
            parsed.diagnostics.is_empty(),
            "generated source should parse cleanly: {:?}",
            parsed.diagnostics
        );
        assert_eq!(
            parsed.predicates[0].name,
            "rust_unsafe_requires_safety_comment"
        );
        assert!(parsed.predicates[0].archivist.is_some());
    }

    /// Shadow evaluation over a store containing one atom whose insert
    /// already carries a SAFETY comment should flag that atom as a
    /// false-positive candidate and point back at its transcript ref.
    #[test]
    fn shadow_evaluate_reports_false_positive_atom_pointers() {
        let temp = tempfile::tempdir().unwrap();
        fs::write(
            temp.path().join("Cargo.toml"),
            "[package]\nname = \"demo\"\n",
        )
        .unwrap();
        let store_path = temp.path().join(".harn/flow.sqlite");
        fs::create_dir_all(store_path.parent().unwrap()).unwrap();

        // Scoped so the store handle is dropped before shadow_evaluate
        // reopens the same sqlite file.
        {
            let store = SqliteFlowStore::open(&store_path, "test").unwrap();
            // Deterministic fixture keys; not real credentials.
            let principal = SigningKey::from_bytes(&[7; 32]);
            let persona = SigningKey::from_bytes(&[8; 32]);
            let atom = Atom::sign(
                vec![TextOp::Insert {
                    offset: 0,
                    content: "unsafe { /* SAFETY: fixture */ }".to_string(),
                }],
                Vec::new(),
                Provenance::new("user:test", "archivist-test", "run-1", "trace-1", "tx-1"),
                None,
                &principal,
                &persona,
            )
            .unwrap();
            store.emit_atoms(&[atom]).unwrap();
        }

        let proposal = rust_unsafe_proposal(temp.path(), "2026-04-26");
        let report = shadow_evaluate(temp.path(), &store_path, 30, &[proposal]).unwrap();
        assert_eq!(report["status"], "evaluated");
        assert_eq!(report["recent_atoms"], 1);
        assert_eq!(
            report["false_positive_candidates"][0]["transcript_ref"],
            "tx-1"
        );
        assert!(report["false_positive_candidates"][0]["atom"].is_string());
    }
}