1use crate::core::broker::DbBroker;
2use crate::core::error;
3use crate::core::external_action::{self, ExternalCapability};
4use crate::core::state_commit;
5use crate::core::store::Store;
6use crate::core::todo;
7use crate::plugins::federation;
8use clap::{Parser, Subcommand};
9use fancy_regex::Regex;
10use rusqlite::OptionalExtension;
11use serde::{Deserialize, Serialize};
12use sha2::{Digest, Sha256};
13use std::fs::{self, OpenOptions};
14use std::io::Write;
15use std::path::{Path, PathBuf};
16
#[derive(Parser, Debug)]
#[clap(name = "verify", about = "Replay verification proofs and detect drift")]
pub struct VerifyCli {
    // Emit machine-readable JSON instead of human-readable text.
    // (Plain `//` comments on purpose: `///` doc comments on clap-derived
    // fields become CLI help text and would change program output.)
    #[clap(long, global = true)]
    json: bool,
    // List TODOs past their re-verification policy window and exit without
    // replaying any proofs.
    #[clap(long, global = true)]
    stale: bool,
    // Optional subcommand; absent means "verify all eligible TODOs".
    #[clap(subcommand)]
    command: Option<VerifyCommand>,
}
29
#[derive(Subcommand, Debug)]
pub enum VerifyCommand {
    // Verify a single TODO by its id instead of all eligible TODOs.
    // (`//` rather than `///` so clap help output is unchanged.)
    Todo {
        #[clap(value_name = "ID")]
        id: String,
    },
}
38
/// One verification candidate, loaded from `tasks` LEFT JOIN
/// `task_verification` (see `load_targets`).
#[derive(Debug)]
struct VerifyTarget {
    /// Task id (`tasks.id`).
    todo_id: String,
    /// Task lifecycle state; only `"done"` tasks are verifiable.
    status: String,
    /// JSON array of proof-gate names (e.g. `["validate_passes"]`);
    /// `None`/empty when no verification was ever captured.
    proof_plan: Option<String>,
    /// JSON-serialized `VerificationArtifacts` baseline, if captured.
    artifacts: Option<String>,
    /// Timestamp of the last verification run (epoch seconds + `Z`).
    last_verified_at: Option<String>,
    /// Re-verification window in days; SQL defaults it to 90 via COALESCE.
    verification_policy_days: i64,
}
48
/// Baseline captured when a task is completed: proof-gate outputs plus file
/// hashes. Stored as JSON in `task_verification.verification_artifacts`.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct VerificationArtifacts {
    /// Capture timestamp (epoch seconds + `Z`).
    completed_at: String,
    /// One entry per proof gate executed at capture time.
    proof_plan_results: Vec<ProofPlanResult>,
    /// Hashes of files that must remain unchanged for verification to pass.
    file_artifacts: Vec<FileArtifact>,
}
55
/// Outcome of a single proof gate recorded at baseline-capture time.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct ProofPlanResult {
    /// Gate name, e.g. "validate_passes" or "state_commit".
    proof_gate: String,
    /// "pass" or "fail" at capture time.
    status: String,
    /// Command that produced the output (informational only).
    command: String,
    /// `sha256:`-prefixed hash of the normalized gate output (or, for
    /// state_commit, the expected commit root).
    output_hash: String,
}
63
/// Hash snapshot of one file captured at baseline time.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct FileArtifact {
    /// Path as given at capture time (absolute, or relative to repo root).
    path: String,
    /// `sha256:`-prefixed content hash.
    hash: String,
    /// File size in bytes at capture time.
    size: u64,
    /// Modification time (epoch seconds), when the filesystem provided one.
    mtime: Option<u64>,
}
71
/// Aggregate counts for one verification run (serialized into the report
/// and the `verification.run` JSONL event).
#[derive(Debug, Serialize)]
struct VerifySummary {
    /// Number of TODOs examined.
    total: usize,
    /// TODOs whose proofs and artifacts all matched the baseline.
    passed: usize,
    /// TODOs with at least one proof or artifact mismatch.
    failed: usize,
    /// TODOs lacking usable verification metadata.
    unknown: usize,
    /// TODOs past their re-verification policy window.
    stale: usize,
}
80
/// Replay outcome for a single proof gate of one TODO.
#[derive(Debug, Serialize)]
struct ProofCheckResult {
    /// Proof gate name ("validate_passes" or "state_commit").
    gate: String,
    /// "pass" or "fail".
    status: String,
    /// Baseline output hash / commit root, when available.
    expected_output_hash: Option<String>,
    /// Freshly recomputed hash / root, when it could be computed.
    actual_output_hash: Option<String>,
    /// Human-readable explanation (failures always set it; state_commit also
    /// records a confirmation message on pass).
    reason: Option<String>,
}
89
/// Re-hash outcome for a single file artifact of one TODO.
#[derive(Debug, Serialize)]
struct ArtifactCheckResult {
    /// Path as stored in the baseline (absolute, or relative to repo root).
    path: String,
    /// "pass" or "fail".
    status: String,
    /// Baseline sha256 hash.
    expected_hash: Option<String>,
    /// Current on-disk hash, or "<missing>" when the file is gone.
    actual_hash: Option<String>,
    /// Failure explanation; `None` on pass.
    reason: Option<String>,
}
98
/// Full verification outcome for one TODO.
#[derive(Debug, Serialize)]
struct VerifyTodoResult {
    todo_id: String,
    /// Overall status: "pass", "fail", or "unknown".
    status: String,
    /// Per-proof-gate replay results.
    proofs: Vec<ProofCheckResult>,
    /// Per-file artifact re-hash results.
    artifacts: Vec<ArtifactCheckResult>,
    /// Remediation / diagnostic notes accumulated during verification.
    notes: Vec<String>,
}
107
/// Top-level report produced by `run_verify_cli` (pretty-printed as JSON
/// when `--json` is set).
#[derive(Debug, Serialize)]
struct VerifyReport {
    /// Timestamp of this run (epoch seconds + `Z`).
    verified_at: String,
    summary: VerifySummary,
    results: Vec<VerifyTodoResult>,
}
114
/// One entry in the `--stale` listing.
#[derive(Debug, Serialize)]
struct StaleItem {
    todo_id: String,
    /// Last verification timestamp; `None` means never verified.
    last_verified_at: Option<String>,
    /// Policy window (days) the TODO has exceeded.
    verification_policy_days: i64,
}
121
/// Current time as produced by `core::time::now_epoch_z` — an epoch-seconds
/// string with a trailing `Z`, i.e. the format `epoch_secs` parses back.
fn now_iso() -> String {
    crate::core::time::now_epoch_z()
}
125
/// Parse a `now_iso`-style timestamp ("<secs>Z") back into epoch seconds.
/// Any number of trailing `Z` characters is stripped; returns `None` when
/// the remainder is not a valid `i64`.
fn epoch_secs(ts: &str) -> Option<i64> {
    let digits = ts.trim_end_matches('Z');
    match digits.parse::<i64>() {
        Ok(secs) => Some(secs),
        Err(_) => None,
    }
}
129
130fn normalize_validate_output(raw: &str) -> String {
131 let ansi = Regex::new(r"\x1B\[[0-9;]*[A-Za-z]").expect("valid ANSI regex");
132 let elapsed_re = Regex::new(r" elapsed=\S+").expect("valid elapsed regex");
133 let stripped = ansi.replace_all(raw, "");
134 stripped
135 .lines()
136 .map(str::trim)
137 .filter(|line| !line.is_empty())
138 .map(|line| {
139 if line.contains("decapod_validate_user_") || line.contains("decapod_validate_repo_") {
140 "<tmp_validate_path>".to_string()
141 } else {
142 elapsed_re.replace_all(line, "").to_string()
144 }
145 })
146 .collect::<Vec<_>>()
147 .join("\n")
148}
149
150fn normalize_json_value(value: &serde_json::Value) -> serde_json::Value {
151 match value {
152 serde_json::Value::Object(map) => {
153 let mut normalized = serde_json::Map::new();
154 let mut keys: Vec<&String> = map.keys().collect();
155 keys.sort();
156 for key in keys {
157 normalized.insert(key.clone(), normalize_json_value(&map[key]));
158 }
159 serde_json::Value::Object(normalized)
160 }
161 serde_json::Value::Array(items) => {
162 serde_json::Value::Array(items.iter().map(normalize_json_value).collect())
163 }
164 _ => value.clone(),
165 }
166}
167
168fn sha256_hex(input: &[u8]) -> String {
169 let mut hasher = Sha256::new();
170 hasher.update(input);
171 format!("sha256:{:x}", hasher.finalize())
172}
173
174fn hash_file(path: &Path) -> Result<(String, u64, Option<u64>), error::DecapodError> {
175 let bytes = fs::read(path)?;
176 let meta = fs::metadata(path)?;
177 let mtime = meta
178 .modified()
179 .ok()
180 .and_then(|m| m.duration_since(std::time::UNIX_EPOCH).ok())
181 .map(|d| d.as_secs());
182 Ok((sha256_hex(&bytes), meta.len(), mtime))
183}
184
185fn run_validate_and_hash(
186 store_root: &Path,
187 repo_root: &Path,
188) -> Result<(bool, String), error::DecapodError> {
189 let exe = std::env::current_exe()?;
190 let exe_str = exe.to_string_lossy().to_string();
191 let output = external_action::execute(
192 store_root,
193 ExternalCapability::VerificationExec,
194 "verify.validate_passes",
195 &exe_str,
196 &["validate", "--format", "json"],
197 repo_root,
198 )?;
199
200 let stdout = String::from_utf8_lossy(&output.stdout).to_string();
201 if let Ok(value) = serde_json::from_str::<serde_json::Value>(&stdout) {
202 let normalized = normalize_json_value(&value);
203 let canonical = serde_json::to_string(&normalized).unwrap();
204 return Ok((output.status.success(), sha256_hex(canonical.as_bytes())));
205 }
206
207 let mut merged = stdout;
208 if !output.stderr.is_empty() {
209 merged.push('\n');
210 merged.push_str(&String::from_utf8_lossy(&output.stderr));
211 }
212
213 let normalized = normalize_validate_output(&merged);
214 Ok((output.status.success(), sha256_hex(normalized.as_bytes())))
215}
216
217fn verification_events_path(store: &Store) -> PathBuf {
218 store.root.join("verification_events.jsonl")
219}
220
221fn append_jsonl(path: &Path, value: &serde_json::Value) -> Result<(), error::DecapodError> {
222 let mut f = OpenOptions::new().create(true).append(true).open(path)?;
223 writeln!(f, "{}", serde_json::to_string(value).unwrap())?;
224 Ok(())
225}
226
227fn mirror_verification_to_federation(
228 store: &Store,
229 todo_id: &str,
230 title: &str,
231 body: &str,
232 tags: &str,
233) {
234 let source = format!("event:{}", todo_id);
235 let anchor = federation::find_node_by_source(store, &source)
236 .ok()
237 .flatten();
238 if let Ok(node) = federation::add_node(
239 store,
240 title,
241 "decision",
242 "notable",
243 "agent_inferred",
244 body,
245 &source,
246 tags,
247 "repo",
248 None,
249 "decapod",
250 ) && let Some(intent_or_commitment) = anchor
251 {
252 let _ = federation::add_edge(store, &intent_or_commitment, &node.id, "depends_on");
253 }
254 let _ = federation::refresh_derived_files(store);
255}
256
257fn load_targets(
258 store: &Store,
259 single_id: Option<&str>,
260) -> Result<Vec<VerifyTarget>, error::DecapodError> {
261 todo::initialize_todo_db(&store.root)?;
262 let broker = DbBroker::new(&store.root);
263 let db_path = todo::todo_db_path(&store.root);
264
265 broker.with_conn(&db_path, "decapod", None, "verify.targets", |conn| {
266 let mut out = Vec::new();
267 if let Some(id) = single_id {
268 let mut stmt = conn.prepare(
269 "SELECT t.id, t.status, v.proof_plan, v.verification_artifacts, v.last_verified_at, COALESCE(v.verification_policy_days, 90)\n FROM tasks t\n LEFT JOIN task_verification v ON v.todo_id = t.id\n WHERE t.id = ?1",
270 )?;
271 let rows = stmt.query_map(rusqlite::params![id], |row| {
272 Ok(VerifyTarget {
273 todo_id: row.get(0)?,
274 status: row.get(1)?,
275 proof_plan: row.get(2)?,
276 artifacts: row.get(3)?,
277 last_verified_at: row.get(4)?,
278 verification_policy_days: row.get(5)?,
279 })
280 })?;
281
282 for row in rows {
283 out.push(row?);
284 }
285 } else {
286 let mut stmt = conn.prepare(
287 "SELECT t.id, t.status, v.proof_plan, v.verification_artifacts, v.last_verified_at, COALESCE(v.verification_policy_days, 90)\n FROM tasks t\n LEFT JOIN task_verification v ON v.todo_id = t.id\n WHERE t.status = 'done'\n AND v.proof_plan IS NOT NULL\n AND v.proof_plan <> ''\n ORDER BY t.updated_at DESC",
288 )?;
289 let rows = stmt.query_map([], |row| {
290 Ok(VerifyTarget {
291 todo_id: row.get(0)?,
292 status: row.get(1)?,
293 proof_plan: row.get(2)?,
294 artifacts: row.get(3)?,
295 last_verified_at: row.get(4)?,
296 verification_policy_days: row.get(5)?,
297 })
298 })?;
299
300 for row in rows {
301 out.push(row?);
302 }
303 }
304 Ok(out)
305 })
306}
307
/// Upsert the verification outcome for `todo_id` into `task_verification`,
/// append a `task.verify.result` event, and mirror the result into the
/// federation graph.
///
/// The row's existing `proof_plan`, `verification_artifacts`, and policy
/// window are read first and re-written unchanged — only the
/// `last_verified_*` columns and `updated_at` actually change.
fn persist_result(
    store: &Store,
    todo_id: &str,
    status: &str,
    notes: &str,
) -> Result<(), error::DecapodError> {
    let ts = now_iso();
    let broker = DbBroker::new(&store.root);
    let db_path = todo::todo_db_path(&store.root);
    // Read-modify-write inside a single brokered connection so the upsert
    // re-writes the row's plan/artifacts/policy values unchanged.
    let (proof_plan, verification_artifacts, verification_policy_days) =
        broker.with_conn(&db_path, "decapod", None, "verify.persist", |conn| {
            let existing: Option<(String, Option<String>, i64)> = conn
                .query_row(
                    "SELECT proof_plan, verification_artifacts, verification_policy_days
                 FROM task_verification
                 WHERE todo_id = ?1",
                    rusqlite::params![todo_id],
                    |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
                )
                .optional()?;
            // First-time defaults: empty plan, no artifacts, 90-day policy.
            let (proof_plan, verification_artifacts, verification_policy_days) =
                existing.unwrap_or_else(|| ("[]".to_string(), None, 90));

            // ?4 is used for both last_verified_at and updated_at.
            conn.execute(
                "INSERT INTO task_verification(todo_id, proof_plan, verification_artifacts, last_verified_at, last_verified_status, last_verified_notes, verification_policy_days, updated_at)
             VALUES(?1, ?2, ?3, ?4, ?5, ?6, ?7, ?4)
             ON CONFLICT(todo_id) DO UPDATE SET
               proof_plan=excluded.proof_plan,
               verification_artifacts=excluded.verification_artifacts,
               last_verified_at=excluded.last_verified_at,
               last_verified_status=excluded.last_verified_status,
               last_verified_notes=excluded.last_verified_notes,
               verification_policy_days=excluded.verification_policy_days,
               updated_at=excluded.updated_at",
                rusqlite::params![
                    todo_id,
                    proof_plan,
                    verification_artifacts,
                    ts,
                    status,
                    notes,
                    verification_policy_days,
                ],
            )?;
            Ok((proof_plan, verification_artifacts, verification_policy_days))
        })?;

    // Record the outcome as a task event; stored JSON strings are re-parsed
    // so the event payload carries structured values, not nested strings.
    todo::record_task_event(
        &store.root,
        "task.verify.result",
        Some(todo_id),
        serde_json::json!({
            "proof_plan": serde_json::from_str::<serde_json::Value>(&proof_plan).unwrap_or_else(|_| serde_json::json!([])),
            "verification_artifacts": verification_artifacts
                .as_ref()
                .and_then(|s| serde_json::from_str::<serde_json::Value>(s).ok())
                .unwrap_or(serde_json::Value::Null),
            "last_verified_status": status,
            "last_verified_notes": notes,
            "verification_policy_days": verification_policy_days
        }),
    )?;
    // Best-effort federation mirror (errors swallowed inside).
    mirror_verification_to_federation(
        store,
        todo_id,
        &format!("Verification Result: {}", todo_id),
        &format!("Verification status={} notes={}", status, notes),
        "proof,verification,result",
    );
    Ok(())
}
379
380fn is_stale(target: &VerifyTarget, now_secs: i64) -> bool {
381 match target.last_verified_at.as_deref().and_then(epoch_secs) {
382 None => true,
383 Some(last) => now_secs.saturating_sub(last) > target.verification_policy_days * 86_400,
384 }
385}
386
/// Resolve a stored artifact path: absolute paths are used as-is, relative
/// paths are joined onto the repository root.
fn resolve_artifact_path(repo_root: &Path, stored: &str) -> PathBuf {
    let candidate = PathBuf::from(stored);
    if candidate.is_absolute() {
        candidate
    } else {
        repo_root.join(candidate)
    }
}
395
396fn verify_target(
397 target: &VerifyTarget,
398 store_root: &Path,
399 repo_root: &Path,
400) -> Result<VerifyTodoResult, error::DecapodError> {
401 let mut result = VerifyTodoResult {
402 todo_id: target.todo_id.clone(),
403 status: "pass".to_string(),
404 proofs: Vec::new(),
405 artifacts: Vec::new(),
406 notes: Vec::new(),
407 };
408
409 if target.status != "done" {
410 result.status = "unknown".to_string();
411 result
412 .notes
413 .push("TODO is not in done state; only done tasks are verifiable".to_string());
414 return Ok(result);
415 }
416
417 let plan_raw = match target.proof_plan.as_deref() {
418 Some(v) if !v.trim().is_empty() => v,
419 _ => {
420 result.status = "unknown".to_string();
421 result.notes.push(
422 "Missing verification metadata. Remediation: mark task done with `--validated` or capture verification artifacts for this TODO.".to_string(),
423 );
424 return Ok(result);
425 }
426 };
427
428 let proof_plan: Vec<String> = match serde_json::from_str(plan_raw) {
429 Ok(v) => v,
430 Err(_) => {
431 result.status = "unknown".to_string();
432 result.notes.push(
433 "Invalid proof_plan format. Remediation: recapture verification artifacts for this TODO.".to_string(),
434 );
435 return Ok(result);
436 }
437 };
438
439 let supported_proofs = ["validate_passes", "state_commit"];
440 if proof_plan
441 .iter()
442 .any(|p| !supported_proofs.iter().any(|sp| p == *sp))
443 {
444 result.status = "unknown".to_string();
445 result.notes.push(
446 "Unsupported proof_plan. Supported: validate_passes, state_commit. Remediation: set proof_plan to [\"validate_passes\"] or [\"state_commit\"].".to_string(),
447 );
448 return Ok(result);
449 }
450
451 let validate_check_needed = proof_plan.iter().any(|p| p == "validate_passes");
453
454 let artifacts_raw = match target.artifacts.as_deref() {
455 Some(v) if !v.trim().is_empty() => v,
456 _ => {
457 result.status = "unknown".to_string();
458 result.notes.push(
459 "Missing verification_artifacts. Remediation: capture verification artifacts for this TODO.".to_string(),
460 );
461 return Ok(result);
462 }
463 };
464
465 let artifacts: VerificationArtifacts = match serde_json::from_str(artifacts_raw) {
466 Ok(v) => v,
467 Err(_) => {
468 result.status = "unknown".to_string();
469 result.notes.push(
470 "Malformed verification_artifacts JSON. Remediation: recapture verification artifacts for this TODO.".to_string(),
471 );
472 return Ok(result);
473 }
474 };
475
476 let expected_proof = artifacts
477 .proof_plan_results
478 .iter()
479 .find(|p| p.proof_gate == "validate_passes");
480 let expected_hash = expected_proof.map(|p| p.output_hash.clone());
481
482 if expected_hash.is_none() {
483 result.status = "unknown".to_string();
484 result.notes.push(
485 "Missing baseline validate_passes output hash. Remediation: capture verification artifacts for this TODO.".to_string(),
486 );
487 return Ok(result);
488 }
489
490 if validate_check_needed {
492 let (validate_ok, actual_hash) = run_validate_and_hash(store_root, repo_root)?;
493 let expected = expected_hash.unwrap_or_default();
494
495 if !validate_ok {
496 result.status = "fail".to_string();
497 result.proofs.push(ProofCheckResult {
498 gate: "validate_passes".to_string(),
499 status: "fail".to_string(),
500 expected_output_hash: Some(expected),
501 actual_output_hash: Some(actual_hash),
502 reason: Some("decapod validate did not pass".to_string()),
503 });
504 } else if actual_hash != expected {
505 result.status = "fail".to_string();
506 result.proofs.push(ProofCheckResult {
507 gate: "validate_passes".to_string(),
508 status: "fail".to_string(),
509 expected_output_hash: Some(expected),
510 actual_output_hash: Some(actual_hash),
511 reason: Some("validate output hash changed".to_string()),
512 });
513 } else {
514 result.proofs.push(ProofCheckResult {
515 gate: "validate_passes".to_string(),
516 status: "pass".to_string(),
517 expected_output_hash: Some(expected),
518 actual_output_hash: Some(actual_hash),
519 reason: None,
520 });
521 }
522 }
523
524 if proof_plan.iter().any(|p| p == "state_commit") {
526 let state_commit_proof = artifacts
527 .proof_plan_results
528 .iter()
529 .find(|p| p.proof_gate == "state_commit");
530
531 let expected_root = state_commit_proof.map(|p| p.output_hash.clone());
532
533 let expected = match expected_root {
534 Some(exp) => exp,
535 None => {
536 result.status = "fail".to_string();
537 result.notes.push(
538 "Missing baseline state_commit output hash. Remediation: run `decapod state-commit prove --base <sha> --head <sha> --output scope_record.cbor` and capture as proof artifact.".to_string(),
539 );
540 String::new() }
542 };
543
544 if !expected.is_empty() {
546 let scope_record_path = repo_root.join("scope_record.cbor");
551 if !scope_record_path.exists() {
552 result.status = "fail".to_string();
553 result.proofs.push(ProofCheckResult {
554 gate: "state_commit".to_string(),
555 status: "fail".to_string(),
556 expected_output_hash: Some(expected.clone()),
557 actual_output_hash: None,
558 reason: Some("scope_record.cbor not found in repo root. Run `decapod state-commit prove` first.".to_string()),
559 });
560 } else {
561 let head_output = std::process::Command::new("git")
563 .args(["rev-parse", "HEAD"])
564 .current_dir(repo_root)
565 .output();
566
567 let current_head = match head_output {
570 Ok(o) if o.status.success() => {
571 String::from_utf8_lossy(&o.stdout).trim().to_string()
572 }
573 _ => String::new(),
574 };
575
576 let recomputed_root = if !current_head.is_empty() {
577 let base_output = std::process::Command::new("git")
579 .args(["rev-parse", "HEAD~1"])
580 .current_dir(repo_root)
581 .output();
582
583 let base_sha = match base_output {
584 Ok(o) if o.status.success() => {
585 String::from_utf8_lossy(&o.stdout).trim().to_string()
586 }
587 _ => String::new(),
588 };
589
590 if !base_sha.is_empty() {
591 let input = state_commit::StateCommitInput {
592 base_sha,
593 head_sha: current_head.clone(),
594 ignore_policy_hash: "da39a3ee5e6b4b0d3255bfef95601890afd80709"
595 .to_string(),
596 };
597 match state_commit::prove(&input, repo_root) {
598 Ok(result) => Some(result.state_commit_root),
599 Err(_) => None,
600 }
601 } else {
602 None
603 }
604 } else {
605 None
606 };
607
608 match recomputed_root {
609 Some(root) if root == expected => {
610 let scope_bytes = std::fs::read(&scope_record_path).unwrap_or_default();
612 let head_in_record = !current_head.is_empty()
613 && scope_bytes
614 .windows(current_head.len())
615 .any(|w| w == current_head.as_bytes());
616
617 if !head_in_record {
618 result.status = "fail".to_string();
619 result.proofs.push(ProofCheckResult {
620 gate: "state_commit".to_string(),
621 status: "fail".to_string(),
622 expected_output_hash: Some(expected.clone()),
623 actual_output_hash: Some(root.clone()),
624 reason: Some(format!("STATE_COMMIT head_sha mismatch. Current HEAD: {} not in scope_record. Run `decapod state-commit prove` to regenerate.", current_head)),
625 });
626 } else {
627 result.proofs.push(ProofCheckResult {
628 gate: "state_commit".to_string(),
629 status: "pass".to_string(),
630 expected_output_hash: Some(expected.clone()),
631 actual_output_hash: Some(root),
632 reason: Some("STATE_COMMIT verified: root recomputed from git objects matches expected, bound to current HEAD".to_string()),
633 });
634 }
635 }
636 Some(root) => {
637 result.status = "fail".to_string();
638 result.proofs.push(ProofCheckResult {
639 gate: "state_commit".to_string(),
640 status: "fail".to_string(),
641 expected_output_hash: Some(expected.clone()),
642 actual_output_hash: Some(root.clone()),
643 reason: Some(format!("STATE_COMMIT root mismatch. Expected: {}, Recomputed: {}. Files changed since scope recorded. Run `decapod state-commit prove` to regenerate.", expected, root)),
644 });
645 }
646 None => {
647 result.status = "fail".to_string();
648 result.proofs.push(ProofCheckResult {
649 gate: "state_commit".to_string(),
650 status: "fail".to_string(),
651 expected_output_hash: Some(expected.clone()),
652 actual_output_hash: None,
653 reason: Some("Failed to recompute STATE_COMMIT root. Verify decapod binary is available.".to_string()),
654 });
655 }
656 }
657 }
658 }
659 }
660
661 if artifacts.file_artifacts.is_empty() {
662 result.status = "unknown".to_string();
663 result.notes.push(
664 "Missing file_artifacts. Remediation: capture file hash artifacts (for MVP include AGENTS.md)."
665 .to_string(),
666 );
667 return Ok(result);
668 }
669
670 for expected in artifacts.file_artifacts {
671 let disk_path = resolve_artifact_path(repo_root, &expected.path);
672 if !disk_path.exists() {
673 result.status = "fail".to_string();
674 result.artifacts.push(ArtifactCheckResult {
675 path: expected.path,
676 status: "fail".to_string(),
677 expected_hash: Some(expected.hash),
678 actual_hash: Some("<missing>".to_string()),
679 reason: Some("artifact missing".to_string()),
680 });
681 continue;
682 }
683
684 let (actual_hash, _, _) = hash_file(&disk_path)?;
685 if actual_hash != expected.hash {
686 result.status = "fail".to_string();
687 result.artifacts.push(ArtifactCheckResult {
688 path: expected.path,
689 status: "fail".to_string(),
690 expected_hash: Some(expected.hash),
691 actual_hash: Some(actual_hash),
692 reason: Some("hash mismatch".to_string()),
693 });
694 } else {
695 result.artifacts.push(ArtifactCheckResult {
696 path: expected.path,
697 status: "pass".to_string(),
698 expected_hash: Some(expected.hash),
699 actual_hash: Some(actual_hash),
700 reason: None,
701 });
702 }
703 }
704
705 Ok(result)
706}
707
/// Capture a fresh verification baseline for a completed TODO: hash the
/// requested artifact files (defaulting to AGENTS.md), run
/// `decapod validate`, and upsert the `task_verification` row with a
/// 90-day re-verification policy.
///
/// # Errors
/// - `NotFound` when the TODO does not exist or an artifact file is missing;
/// - `ValidationError` when the task is not in the `done` state.
///
/// The baseline is recorded even when validate fails — the stored status
/// and notes say so explicitly.
pub fn capture_baseline_for_todo(
    store: &Store,
    repo_root: &Path,
    todo_id: &str,
    artifact_paths: Vec<String>,
) -> Result<(), error::DecapodError> {
    todo::initialize_todo_db(&store.root)?;

    let broker = DbBroker::new(&store.root);
    let db_path = todo::todo_db_path(&store.root);
    // Look up the task's status; `None` means the task does not exist.
    let status: Option<String> =
        broker.with_conn(&db_path, "decapod", None, "verify.capture.read", |conn| {
            let status = conn
                .query_row(
                    "SELECT status FROM tasks WHERE id = ?1",
                    rusqlite::params![todo_id],
                    |row| row.get(0),
                )
                .optional()?;
            Ok(status)
        })?;

    let Some(task_status) = status else {
        return Err(error::DecapodError::NotFound(format!(
            "TODO not found: {}",
            todo_id
        )));
    };

    if task_status != "done" {
        return Err(error::DecapodError::ValidationError(
            "Task must be in done state before capturing verification artifacts".to_string(),
        ));
    }

    // Default artifact set when none are given: the repo's AGENTS.md.
    let paths = if artifact_paths.is_empty() {
        vec!["AGENTS.md".to_string()]
    } else {
        artifact_paths
    };

    // Hash every requested artifact; any missing file aborts the capture
    // before anything is written.
    let mut file_artifacts = Vec::new();
    for path in paths {
        let disk_path = resolve_artifact_path(repo_root, &path);
        if !disk_path.exists() {
            return Err(error::DecapodError::NotFound(format!(
                "Verification artifact file not found: {}",
                disk_path.display()
            )));
        }
        let (hash, size, mtime) = hash_file(&disk_path)?;
        file_artifacts.push(FileArtifact {
            path,
            hash,
            size,
            mtime,
        });
    }

    // Run validate once; its normalized output hash becomes the baseline
    // for the validate_passes proof gate.
    let (validate_ok, output_hash) = run_validate_and_hash(&store.root, repo_root)?;

    let ts = now_iso();
    let artifacts = VerificationArtifacts {
        completed_at: ts.clone(),
        proof_plan_results: vec![ProofPlanResult {
            proof_gate: "validate_passes".to_string(),
            status: if validate_ok {
                "pass".to_string()
            } else {
                "fail".to_string()
            },
            command: "decapod validate".to_string(),
            output_hash,
        }],
        file_artifacts,
    };

    // Serializing our own derive(Serialize) types cannot fail.
    let artifacts_json = serde_json::to_string(&artifacts).unwrap();
    let proof_plan_json = serde_json::to_string(&vec!["validate_passes"]).unwrap();
    let baseline_status = if validate_ok { "pass" } else { "fail" };
    let baseline_notes = if validate_ok {
        "baseline captured"
    } else {
        "baseline captured while validate was failing"
    };

    // Upsert the baseline row; ?4 doubles as last_verified_at and updated_at.
    broker.with_conn(&db_path, "decapod", None, "verify.capture.write", |conn| {
        conn.execute(
            "INSERT INTO task_verification(todo_id, proof_plan, verification_artifacts, last_verified_at, last_verified_status, last_verified_notes, verification_policy_days, updated_at)\n             VALUES(?1, ?2, ?3, ?4, ?5, ?6, 90, ?4)\n             ON CONFLICT(todo_id) DO UPDATE SET\n               proof_plan=excluded.proof_plan,\n               verification_artifacts=excluded.verification_artifacts,\n               last_verified_at=excluded.last_verified_at,\n               last_verified_status=excluded.last_verified_status,\n               last_verified_notes=excluded.last_verified_notes,\n               verification_policy_days=excluded.verification_policy_days,\n               updated_at=excluded.updated_at",
            rusqlite::params![
                todo_id,
                proof_plan_json,
                artifacts_json,
                ts,
                baseline_status,
                baseline_notes
            ],
        )?;
        Ok(())
    })?;

    // Record the capture as a task event for the audit trail.
    todo::record_task_event(
        &store.root,
        "task.verify.capture",
        Some(todo_id),
        serde_json::json!({
            "proof_plan": ["validate_passes"],
            "verification_artifacts": artifacts,
            "last_verified_status": baseline_status,
            "last_verified_notes": baseline_notes,
            "verification_policy_days": 90
        }),
    )?;
    Ok(())
}
823
/// CLI entry point for `decapod verify`.
///
/// Modes:
/// - `--stale`: list done TODOs past their re-verification window, then
///   exit — no proofs are replayed and nothing is persisted;
/// - default / `verify todo <ID>`: replay verification for all eligible
///   TODOs (or just one), persist each outcome, append JSONL events, print
///   a report (JSON with `--json`), and return `ValidationError` when any
///   TODO failed.
pub fn run_verify_cli(
    store: &Store,
    repo_root: &Path,
    cli: VerifyCli,
) -> Result<(), error::DecapodError> {
    // Single-variant enum, so this closure pattern is irrefutable.
    let single_id = cli
        .command
        .as_ref()
        .map(|VerifyCommand::Todo { id }| id.as_str());

    let targets = load_targets(store, single_id)?;
    if single_id.is_some() && targets.is_empty() {
        return Err(error::DecapodError::NotFound("TODO not found".to_string()));
    }

    // One timestamp for the whole run; reused in every emitted event.
    let now = now_iso();
    let now_secs = epoch_secs(&now).unwrap_or(0);

    if cli.stale {
        // Staleness listing only considers done tasks.
        let stale_items: Vec<StaleItem> = targets
            .iter()
            .filter(|t| t.status == "done" && is_stale(t, now_secs))
            .map(|t| StaleItem {
                todo_id: t.todo_id.clone(),
                last_verified_at: t.last_verified_at.clone(),
                verification_policy_days: t.verification_policy_days,
            })
            .collect();

        if cli.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&serde_json::json!({
                    "checked_at": now,
                    "stale": stale_items
                }))
                .unwrap()
            );
        } else if stale_items.is_empty() {
            println!("No stale TODOs found.");
        } else {
            println!("Stale TODOs:");
            for item in stale_items {
                println!(
                    "- {} (last_verified_at={}, policy_days={})",
                    item.todo_id,
                    item.last_verified_at.unwrap_or_else(|| "never".to_string()),
                    item.verification_policy_days
                );
            }
        }
        return Ok(());
    }

    // One run id ties the per-TODO events and the summary event together.
    let run_id = crate::core::ulid::new_ulid();
    let mut results = Vec::new();

    for target in &targets {
        let result = verify_target(target, &store.root, repo_root)?;
        // Persist before logging so the DB reflects the outcome even if the
        // JSONL append below errors out.
        persist_result(
            store,
            &result.todo_id,
            &result.status,
            &result.notes.join("; "),
        )?;

        append_jsonl(
            &verification_events_path(store),
            &serde_json::json!({
                "event_type": "verification.todo_result",
                "ts": now,
                "run_id": run_id,
                "todo_id": result.todo_id,
                "status": result.status,
                "proofs": result.proofs,
                "artifacts": result.artifacts,
                "notes": result.notes,
            }),
        )?;

        results.push(result);
    }

    let summary = VerifySummary {
        total: results.len(),
        passed: results.iter().filter(|r| r.status == "pass").count(),
        failed: results.iter().filter(|r| r.status == "fail").count(),
        unknown: results.iter().filter(|r| r.status == "unknown").count(),
        // NOTE(review): unlike the --stale listing above, this count does not
        // filter on status == "done", so a single non-done target can be
        // counted stale here — confirm the asymmetry is intended.
        stale: targets.iter().filter(|t| is_stale(t, now_secs)).count(),
    };

    append_jsonl(
        &verification_events_path(store),
        &serde_json::json!({
            "event_type": "verification.run",
            "ts": now,
            "run_id": run_id,
            "summary": {
                "total": summary.total,
                "passed": summary.passed,
                "failed": summary.failed,
                "unknown": summary.unknown,
                "stale": summary.stale,
            }
        }),
    )?;

    let report = VerifyReport {
        verified_at: now,
        summary,
        results,
    };

    if cli.json {
        println!("{}", serde_json::to_string_pretty(&report).unwrap());
    } else {
        // Human-readable report: only failing proofs/artifacts are detailed.
        println!("Verification report at {}", report.verified_at);
        for r in &report.results {
            println!("- {} [{}]", r.todo_id, r.status);
            for p in &r.proofs {
                if p.status == "fail" {
                    println!(
                        "  proof {} failed (expected={}, actual={})",
                        p.gate,
                        p.expected_output_hash.as_deref().unwrap_or("n/a"),
                        p.actual_output_hash.as_deref().unwrap_or("n/a")
                    );
                }
            }
            for a in &r.artifacts {
                if a.status == "fail" {
                    println!(
                        "  artifact {} failed (expected={}, actual={})",
                        a.path,
                        a.expected_hash.as_deref().unwrap_or("n/a"),
                        a.actual_hash.as_deref().unwrap_or("n/a")
                    );
                }
            }
            for n in &r.notes {
                println!("  note: {}", n);
            }
        }
        println!(
            "Summary: total={} passed={} failed={} unknown={} stale={}",
            report.summary.total,
            report.summary.passed,
            report.summary.failed,
            report.summary.unknown,
            report.summary.stale
        );
    }

    // Non-zero exit for any hard failure ("unknown" does not fail the run).
    if report.summary.failed > 0 {
        return Err(error::DecapodError::ValidationError(format!(
            "verification failed for {} TODO(s)",
            report.summary.failed
        )));
    }

    Ok(())
}