Skip to main content

vela_protocol/
frontier_repo.rs

1//! Canonical frontier repository layout helpers.
2//!
3//! This module keeps the user-facing repository shape separate from the
4//! existing `.vela/` object/event storage. The visible files are the clone and
5//! review surface; `.vela/` remains the substrate machinery.
6
7use std::collections::BTreeMap;
8use std::fs;
9use std::path::Path;
10
11use chrono::{SecondsFormat, Utc};
12use serde::{Deserialize, Serialize};
13use serde_json::json;
14use sha2::{Digest, Sha256};
15
16use crate::events;
17use crate::project::{self, Project, ProjectDependency};
18use crate::proposals;
19
/// Layout identifier stamped into init/status/doctor reports.
pub const FRONTIER_REPO_LAYOUT: &str = "vela.frontier_repo.v0.1";
/// Schema tag for the user-editable `frontier.yaml` manifest.
pub const FRONTIER_MANIFEST_SCHEMA: &str = "vela.frontier_manifest.v0.1";
/// Schema tag for the generated `vela.lock` file.
pub const FRONTIER_LOCK_SCHEMA: &str = "vela.frontier_lock.v0.1";
/// Schema tags for the JSON reports emitted by the repo commands below.
pub const FRONTIER_INIT_SCHEMA: &str = "vela.frontier_repo_init.v0.1";
pub const FRONTIER_MATERIALIZE_SCHEMA: &str = "vela.frontier_materialize.v0.1";
pub const FRONTIER_REPO_STATUS_SCHEMA: &str = "vela.frontier_repo_status.v0.1";
pub const FRONTIER_REPO_DOCTOR_SCHEMA: &str = "vela.frontier_repo_doctor.v0.1";
pub const FRONTIER_PROOF_VERIFY_SCHEMA: &str = "vela.frontier_proof_verify.v0.1";
/// Default Carina kernel pin recorded into materialized state metadata.
pub const DEFAULT_CARINA_KERNEL: &str = "carina@0.1.0";
29
/// User-editable manifest persisted as `frontier.yaml` at the repo root.
///
/// Most fields carry `serde(default)` so older manifests keep parsing as
/// the schema grows; empty collections are skipped on serialization to
/// keep the yaml readable.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierManifest {
    /// Expected to be `FRONTIER_MANIFEST_SCHEMA`; not validated on read.
    pub schema: String,
    /// Expected to be `FRONTIER_REPO_LAYOUT`; not validated on read.
    pub layout: String,
    // `default_split_mode` / `default_visibility` are defined elsewhere
    // in this file (outside this view).
    #[serde(default = "default_split_mode")]
    pub mode: String,
    /// Content-addressed frontier id, when already known.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frontier_id: Option<String>,
    pub name: String,
    #[serde(default)]
    pub description: String,
    #[serde(default = "default_visibility")]
    pub visibility: String,
    /// Research question plus include/exclude lists (see `FrontierScope`).
    #[serde(default)]
    pub scope: FrontierScope,
    pub carina: CarinaManifest,
    pub vela: VelaManifest,
    pub paths: FrontierPaths,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub maintainers: Vec<ManifestMaintainer>,
    #[serde(default)]
    pub policies: ManifestPolicies,
    #[serde(default)]
    pub license: ManifestLicense,
    #[serde(default)]
    pub dependencies: ManifestDependencies,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub templates: Vec<String>,
}
59
/// Carina section of the manifest: which kernel this frontier targets
/// (e.g. `DEFAULT_CARINA_KERNEL`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct CarinaManifest {
    pub kernel: String,
}
64
/// Vela section of the manifest: which reducer replays the event log.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct VelaManifest {
    pub reducer: String,
}
69
/// Scope declaration for a frontier: the guiding question plus
/// include/exclude lists. All fields are optional in the yaml.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierScope {
    #[serde(default)]
    pub question: String,
    #[serde(default)]
    pub includes: Vec<String>,
    #[serde(default)]
    pub excludes: Vec<String>,
}
79
/// Locations of the visible repo sections, as recorded in the manifest.
/// All six are required — there are no serde defaults here.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierPaths {
    pub state: String,
    pub sources: String,
    pub artifacts: String,
    pub review: String,
    pub proof: String,
    pub exports: String,
}
89
/// One maintainer entry in the manifest (actor id + role string).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestMaintainer {
    pub id: String,
    pub role: String,
}
95
/// Paths to the review and proof policy files; defaults point into the
/// `review/` and `proof/` sections (see the `Default` impl below).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestPolicies {
    pub review: String,
    pub proof: String,
}
101
102impl Default for ManifestPolicies {
103    fn default() -> Self {
104        Self {
105            review: "review/policy.yaml".to_string(),
106            proof: "proof/policy.yaml".to_string(),
107        }
108    }
109}
110
/// SPDX-style license identifiers for the three content classes
/// (prose content, code, and data). See the `Default` impl below.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestLicense {
    pub content: String,
    pub code: String,
    pub data: String,
}
117
118impl Default for ManifestLicense {
119    fn default() -> Self {
120        Self {
121            content: "CC-BY-4.0".to_string(),
122            code: "Apache-2.0".to_string(),
123            data: "varies".to_string(),
124        }
125    }
126}
127
/// Dependency lists declared in the manifest. The three string lists are
/// the original free-form entries; `frontiers_v2` is the structured form.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestDependencies {
    #[serde(default)]
    pub frontiers: Vec<String>,
    #[serde(default)]
    pub packages: Vec<String>,
    #[serde(default)]
    pub adapters: Vec<String>,
    /// v0.59: structured cross-frontier dependency entries. Pre-v0.59
    /// split-repos persisted `Project.dependencies` only into the
    /// rendered `frontier.json`, which `vela frontier materialize`
    /// would regenerate without them. This field is the durable
    /// source of truth in the yaml manifest and is rehydrated into
    /// `Project.dependencies` on load.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub frontiers_v2: Vec<ProjectDependency>,
}
145
/// Generated `vela.lock`: binds the event log, reducer, kernel, visible
/// state, section digests, and proof into one reproducibility witness.
/// Fields added after v0.1 carry `serde(default)` so older locks parse.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierLock {
    pub schema: String,
    pub generated_at: String,
    pub vela_version: String,
    pub carina_kernel: String,
    pub frontier_id: String,
    #[serde(default)]
    pub canonicalization: LockCanonicalization,
    #[serde(default)]
    pub reducer: LockPackage,
    #[serde(default)]
    pub carina: LockKernel,
    /// Prefixed SHA-256 of the canonical replayed frontier state.
    pub snapshot_hash: String,
    /// Prefixed SHA-256 of the `.vela/events/` log.
    pub event_log_hash: String,
    pub proposal_state_hash: String,
    // Section digests: an empty string means "not recorded" and is
    // treated as non-binding by `layout_issues`/`repo_status`.
    #[serde(default)]
    pub sources_hash: String,
    #[serde(default)]
    pub artifacts_hash: String,
    #[serde(default)]
    pub review_hash: String,
    pub proof_freshness: String,
    #[serde(default)]
    pub proof: LockProof,
    pub paths: LockPaths,
    /// v0.109: pinned cross-frontier dependencies. Each entry
    /// records the dependent frontier's `vfr_id`, the
    /// `pinned_snapshot_hash` declared in the manifest, and the
    /// `locator` (typically an https URL or hub registry pointer)
    /// the resolver was told to use. The lockfile reproduces this
    /// information in one place so a downstream consumer can
    /// verify "this frontier depended on exactly these snapshots
    /// of those dependencies" with no manifest cross-reference.
    /// Empty for frontiers with no cross-frontier dependencies;
    /// preserved across pre-v0.109 locks via #[serde(default)].
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dependencies: Vec<LockedDependency>,
}
185
/// v0.109: per-dependency pin entry inside `vela.lock`. Mirrors the
/// manifest's `ProjectDependency` fields that affect reproducibility
/// (id, snapshot, locator), carries the display `name` purely for
/// human readability, and drops the rest (e.g. the semver-style
/// version) so the lockfile stays close to the minimum
/// content-addressable witness.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockedDependency {
    /// Display name from the manifest. Not part of the
    /// reproducibility witness; kept for human readability.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub name: String,
    /// Source string from the manifest (typically an https URL
    /// or a `vfr_<id>` reference).
    pub source: String,
    /// Content-addressed frontier id of the dependent.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub vfr_id: Option<String>,
    /// Locator the resolver was told to fetch from.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub locator: Option<String>,
    /// SHA-256 of the dependent's canonical snapshot. The strict
    /// pull path verifies the fetched dependency matches this
    /// exact hash before satisfying any cross-frontier link.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub pinned_snapshot_hash: Option<String>,
}
212
/// Names of the canonicalization schemes used when hashing JSON and
/// YAML content (see the `Default` impl below for the current pins).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockCanonicalization {
    pub json: String,
    pub yaml: String,
}
218
219impl Default for LockCanonicalization {
220    fn default() -> Self {
221        Self {
222            json: "vela-canonical-json-v0.1".to_string(),
223            yaml: "vela-yaml-v0.1".to_string(),
224        }
225    }
226}
227
/// Reducer package pin recorded in the lock (name + digest).
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockPackage {
    pub package: String,
    pub digest: String,
}
233
/// Carina kernel pin recorded in the lock (name + digest).
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockKernel {
    pub kernel: String,
    pub digest: String,
}
239
/// Proof section of the lock: paths/digest/freshness of the visible
/// proof files. An empty `digest` means "not recorded" (non-binding).
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockProof {
    pub latest: String,
    pub digest: String,
    pub freshness: String,
    pub events_manifest: String,
    pub replay_trace: String,
}
248
/// Paths to the materialized frontier state and the event log,
/// as recorded in the lock.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockPaths {
    pub frontier: String,
    pub events: String,
}
254
/// Internal result of `write_proof` (defined past this view): what was
/// written and where, so `write_lock` can record it.
#[derive(Debug, Clone)]
struct ProofWrite {
    digest: String,
    freshness: String,
    latest: String,
    events_manifest: String,
    replay_trace: String,
}
263
/// One structural problem found by `layout_issues`: a stable rule id
/// plus a human-readable message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RepoLayoutIssue {
    pub rule_id: String,
    pub message: String,
}
269
/// Options for `initialize`: display name, template identifier, and
/// whether to run `git init` in the new directory.
#[derive(Debug, Clone)]
pub struct InitOptions<'a> {
    pub name: &'a str,
    pub template: &'a str,
    pub initialize_git: bool,
}
276
/// Create a new frontier repository at `path` and return an init report.
///
/// Creates the directory (and parents) if needed, writes the section
/// READMEs, initializes the `.vela/` substrate with an empty project,
/// writes the frontier card and SCOPE file, and optionally runs
/// `git init`. Errors are reported as human-readable strings.
///
/// NOTE(review): the returned "wrote" list includes frontier.yaml,
/// frontier.json, and vela.lock — presumably produced inside
/// `crate::repo::init_repo` / `write_frontier_card`; confirm against
/// those helpers.
pub fn initialize(path: &Path, options: InitOptions<'_>) -> Result<serde_json::Value, String> {
    // Refuse to init on top of a regular file; an existing dir is fine.
    if path.exists() && !path.is_dir() {
        return Err(format!("{} exists and is not a directory", path.display()));
    }
    fs::create_dir_all(path).map_err(|e| {
        format!(
            "Failed to create frontier directory '{}': {e}",
            path.display()
        )
    })?;

    write_section_readmes(path)?;
    let now = Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true);
    let project = empty_project(options.name, "", &now);
    crate::repo::init_repo(path, &project)?;
    write_frontier_card(path, options.name, options.template)?;
    write_scope(path, options.name)?;
    // Only touch git when asked, and never re-init an existing .git.
    if options.initialize_git && !path.join(".git").exists() {
        let status = std::process::Command::new("git")
            .arg("init")
            .arg(path)
            .status()
            .map_err(|e| format!("Failed to run git init: {e}"))?;
        if !status.success() {
            return Err("git init failed".to_string());
        }
    }

    Ok(json!({
        "schema": FRONTIER_INIT_SCHEMA,
        "ok": true,
        "layout": FRONTIER_REPO_LAYOUT,
        "path": path.display().to_string(),
        "name": options.name,
        "template": options.template,
        "wrote": [
            "README.md",
            "SCOPE.md",
            "frontier.yaml",
            "frontier.json",
            "vela.lock"
        ]
    }))
}
321
/// Re-render the visible repo surface from the `.vela/` substrate:
/// loads the project, rewrites section READMEs, frontier.json, the
/// frontier.yaml manifest, proof files, and vela.lock, then returns a
/// report carrying the new hashes.
pub fn materialize(path: &Path) -> Result<serde_json::Value, String> {
    let source = crate::repo::VelaSource::VelaRepo(path.to_path_buf());
    let project = crate::repo::load(&source)?;
    write_section_readmes(path)?;
    // One timestamp shared by all generated files for this pass.
    let generated_at = materialization_generated_at(path, &project);
    write_visible_state(path, &project, &generated_at)?;
    write_manifest(path, &project)?;
    let proof = write_proof(path, &project, &generated_at)?;
    let lock = write_lock(path, &project, &proof, &generated_at)?;
    Ok(json!({
        "schema": FRONTIER_MATERIALIZE_SCHEMA,
        "ok": true,
        "path": path.display().to_string(),
        "wrote_frontier": "frontier.json",
        "wrote_lock": "vela.lock",
        "wrote_proof": "proof/latest.json",
        "wrote_events_manifest": "proof/events.manifest.jsonl",
        "snapshot_hash": lock.snapshot_hash,
        "event_log_hash": lock.event_log_hash,
        "proposal_state_hash": lock.proposal_state_hash,
    }))
}
344
/// Refresh the visible repo files for an already-loaded `project`.
///
/// Unlike `materialize`, an existing user-edited `frontier.yaml` is not
/// overwritten — only its structured dependency list is synced.
pub fn write_visible_repo_files(path: &Path, project: &Project) -> Result<(), String> {
    write_section_readmes(path)?;
    let generated_at = materialization_generated_at(path, project);
    write_visible_state(path, project, &generated_at)?;
    if !path.join("frontier.yaml").is_file() {
        write_manifest(path, project)?;
    } else {
        // v0.59: keep the structured cross-frontier deps in the
        // existing yaml in sync with `Project.dependencies`. We
        // intentionally only touch the `dependencies.frontiers_v2`
        // field; other user-edited fields (scope, maintainers,
        // policies) are preserved.
        sync_manifest_deps(path, &project.project.dependencies)?;
    }
    let proof = write_proof(path, project, &generated_at)?;
    write_lock(path, project, &proof, &generated_at)?;
    Ok(())
}
363
364pub fn read_manifest(path: &Path) -> Result<Option<FrontierManifest>, String> {
365    let manifest_path = path.join("frontier.yaml");
366    if !manifest_path.is_file() {
367        return Ok(None);
368    }
369    let data = fs::read_to_string(&manifest_path)
370        .map_err(|e| format!("Failed to read frontier.yaml: {e}"))?;
371    serde_yaml::from_str(&data).map(Some).map_err(|e| {
372        format!(
373            "Failed to parse frontier manifest '{}': {e}",
374            manifest_path.display()
375        )
376    })
377}
378
379pub fn read_lock(path: &Path) -> Result<Option<FrontierLock>, String> {
380    let lock_path = path.join("vela.lock");
381    if !lock_path.is_file() {
382        return Ok(None);
383    }
384    let data =
385        fs::read_to_string(&lock_path).map_err(|e| format!("Failed to read vela.lock: {e}"))?;
386    serde_yaml::from_str(&data).map(Some).map_err(|e| {
387        format!(
388            "Failed to parse frontier lock '{}': {e}",
389            lock_path.display()
390        )
391    })
392}
393
/// Compare the visible split-repo surface against the replayed `.vela/`
/// state and collect every disagreement as a `RepoLayoutIssue`.
///
/// Returns an empty list when `path` is not a split frontier repo
/// (no `.vela/`, or neither frontier.yaml nor vela.lock present), so
/// substrate-only repos are never flagged.
pub fn layout_issues(path: &Path, project: &Project) -> Vec<RepoLayoutIssue> {
    if !path.is_dir() || !path.join(".vela").is_dir() {
        return Vec::new();
    }
    if !path.join("frontier.yaml").is_file() && !path.join("vela.lock").is_file() {
        return Vec::new();
    }
    let mut issues = Vec::new();
    // A manifest/lock that exists but fails to parse yields both an
    // "invalid_*" issue here and the corresponding "missing_*" issue
    // below (value collapses to None).
    let manifest = match read_manifest(path) {
        Ok(value) => value,
        Err(e) => {
            issues.push(issue("invalid_frontier_manifest", e));
            None
        }
    };
    let lock = match read_lock(path) {
        Ok(value) => value,
        Err(e) => {
            issues.push(issue("invalid_frontier_lock", e));
            None
        }
    };

    if manifest.is_none() {
        issues.push(issue(
            "missing_frontier_manifest",
            "Split frontier repo is missing frontier.yaml.",
        ));
    }
    // Without a lock there is nothing further to compare against.
    let Some(lock) = lock else {
        issues.push(issue(
            "missing_frontier_lock",
            "Split frontier repo is missing generated vela.lock.",
        ));
        return issues;
    };

    // Hash from the frontier-id-stamped project when available, falling
    // back to the raw project. (`project_with_frontier_id` is defined
    // outside this view; presumably fallible — confirm its signature.)
    let locked_project = project_with_frontier_id(project);
    let hash_project = locked_project.as_ref().unwrap_or(project);
    let expected_snapshot = prefixed(events::snapshot_hash(hash_project));
    let expected_event_log = prefixed(events::event_log_hash(&hash_project.events));
    let expected_proposals = proposal_state_hash(&project.proposals);
    let expected_frontier = hash_project.frontier_id();
    let expected_sources = directory_hash(&path.join("sources"));
    let expected_artifacts = directory_hash(&path.join("artifacts"));
    let expected_review = directory_hash(&path.join("review"));
    let expected_proof = directory_hash(&path.join("proof"));
    if lock.snapshot_hash != expected_snapshot {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock snapshot_hash does not match materialized frontier state: lock={}, current={expected_snapshot}",
                lock.snapshot_hash
            ),
        ));
    }
    if lock.event_log_hash != expected_event_log {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock event_log_hash does not match .vela/events: lock={}, current={expected_event_log}",
                lock.event_log_hash
            ),
        ));
    }
    if lock.proposal_state_hash != expected_proposals {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock proposal_state_hash does not match .vela/proposals: lock={}, current={expected_proposals}",
                lock.proposal_state_hash
            ),
        ));
    }
    if lock.frontier_id != expected_frontier {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock frontier_id does not match current frontier: lock={}, current={expected_frontier}",
                lock.frontier_id
            ),
        ));
    }
    // Section digests: empty lock hashes mean "not recorded" (pre-digest
    // locks) and are skipped rather than flagged.
    if !lock.sources_hash.is_empty() && lock.sources_hash != expected_sources {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock sources_hash does not match sources/: lock={}, current={expected_sources}",
                lock.sources_hash
            ),
        ));
    }
    if !lock.artifacts_hash.is_empty() && lock.artifacts_hash != expected_artifacts {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock artifacts_hash does not match artifacts/: lock={}, current={expected_artifacts}",
                lock.artifacts_hash
            ),
        ));
    }
    if !lock.review_hash.is_empty() && lock.review_hash != expected_review {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock review_hash does not match review/: lock={}, current={expected_review}",
                lock.review_hash
            ),
        ));
    }
    if !lock.proof.digest.is_empty() && lock.proof.digest != expected_proof {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock proof digest does not match proof/: lock={}, current={expected_proof}",
                lock.proof.digest
            ),
        ));
    }

    // Finally: frontier.json must exist, parse, and hash to the same
    // snapshot as the replayed state.
    let visible_path = path.join("frontier.json");
    if !visible_path.is_file() {
        issues.push(issue(
            "missing_materialized_frontier",
            "Split frontier repo is missing frontier.json.",
        ));
        return issues;
    }
    match crate::repo::load_project_file(&visible_path) {
        Ok(visible) => {
            let visible_hash = prefixed(events::snapshot_hash(&visible));
            if visible_hash != expected_snapshot {
                issues.push(issue(
                    "frontier_lock_mismatch",
                    format!(
                        "frontier.json does not match .vela materialized state: visible={visible_hash}, current={expected_snapshot}",
                    ),
                ));
            }
        }
        Err(e) => issues.push(issue("invalid_materialized_frontier", e)),
    }

    issues
}
539
/// Alias for `read_manifest`: the manifest acts as the user-override
/// layer over materialized state, hence the separate entry point.
pub fn manifest_overrides(path: &Path) -> Result<Option<FrontierManifest>, String> {
    read_manifest(path)
}
543
/// Build the `vela.frontier_repo_status.v0.1` report: summary counts,
/// freshness flags (lock vs. current section digests), current hashes,
/// and any structural layout issues.
pub fn repo_status(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    let lock = read_lock(path)?;
    let layout_issues = layout_issues(path, &project);
    let structural_issue_count = layout_issues.len();
    // "ok" / "lock_agreement" means zero structural issues.
    let lock_agreement = structural_issue_count == 0;
    // Open = anything not in a terminal status.
    let open_proposals = project
        .proposals
        .iter()
        .filter(|proposal| {
            !matches!(
                proposal.status.as_str(),
                "accepted" | "applied" | "rejected"
            )
        })
        .count();
    let lock = lock.as_ref();
    let sources_hash = directory_hash(&path.join("sources"));
    let artifacts_hash = directory_hash(&path.join("artifacts"));
    let review_hash = directory_hash(&path.join("review"));
    let proof_hash = directory_hash(&path.join("proof"));
    // A section only counts as "changed" when the lock actually recorded
    // a digest for it (empty string = not recorded).
    let source_changed =
        lock.is_some_and(|lock| !lock.sources_hash.is_empty() && lock.sources_hash != sources_hash);
    let artifact_changed = lock.is_some_and(|lock| {
        !lock.artifacts_hash.is_empty() && lock.artifacts_hash != artifacts_hash
    });
    let review_changed =
        lock.is_some_and(|lock| !lock.review_hash.is_empty() && lock.review_hash != review_hash);
    let proof_changed =
        lock.is_some_and(|lock| !lock.proof.digest.is_empty() && lock.proof.digest != proof_hash);
    Ok(json!({
        "schema": FRONTIER_REPO_STATUS_SCHEMA,
        "ok": lock_agreement,
        "path": path.display().to_string(),
        "layout": FRONTIER_REPO_LAYOUT,
        "frontier_id": project.frontier_id(),
        "summary": {
            "accepted_events": project.events.len(),
            "open_proposals": open_proposals,
            "findings": project.findings.len(),
            "sources": project.sources.len(),
            "artifacts": project.artifacts.len(),
        },
        "freshness": {
            "materialized_state": if lock_agreement { "fresh" } else { "stale_or_invalid" },
            "proof": lock.map_or("unknown", |lock| lock.proof_freshness.as_str()),
            "sources_changed": source_changed,
            "artifacts_changed": artifact_changed,
            "review_changed": review_changed,
            "proof_changed": proof_changed,
        },
        "hashes": {
            "snapshot_hash": prefixed(events::snapshot_hash(&project_with_frontier_id(&project)?)),
            "event_log_hash": prefixed(events::event_log_hash(&project.events)),
            "sources_hash": sources_hash,
            "artifacts_hash": artifacts_hash,
            "review_hash": review_hash,
            "proof_hash": proof_hash,
        },
        "lock_agreement": lock_agreement,
        "issues": layout_issues.iter().map(|issue| json!({
            "rule_id": issue.rule_id,
            "message": issue.message,
        })).collect::<Vec<_>>(),
    }))
}
610
/// Build the `vela.frontier_repo_doctor.v0.1` report: layout issues
/// (as errors) plus checks for required files/directories (errors),
/// optional hook directories (warnings), and known artifacts cluttering
/// the repo root (warnings). `ok` is true iff no error-severity issue.
pub fn repo_doctor(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    // Structural lock/state disagreements are always errors.
    let mut issues = layout_issues(path, &project)
        .into_iter()
        .map(|issue| {
            json!({
                "rule_id": issue.rule_id,
                "severity": "error",
                "message": issue.message,
            })
        })
        .collect::<Vec<_>>();

    // Required top-level files.
    for file in [
        "README.md",
        "SCOPE.md",
        "frontier.yaml",
        "frontier.json",
        "vela.lock",
    ] {
        if !path.join(file).is_file() {
            issues.push(json!({
                "rule_id": "missing_repo_file",
                "severity": "error",
                "path": file,
                "message": format!("Frontier repo is missing {file}."),
            }));
        }
    }
    // Required top-level directories (including the substrate).
    for dir in [
        "sources",
        "artifacts",
        "review",
        "proof",
        "exports",
        ".vela",
    ] {
        if !path.join(dir).is_dir() {
            issues.push(json!({
                "rule_id": "missing_repo_directory",
                "severity": "error",
                "path": dir,
                "message": format!("Frontier repo is missing {dir}/."),
            }));
        }
    }
    // Optional hook directories — absence is only a warning.
    for dir in [
        "artifacts/packets",
        "artifacts/runs",
        "artifacts/code",
        "artifacts/notebooks",
        "artifacts/data",
        "artifacts/notes",
        "artifacts/tables",
        "artifacts/figures",
        "artifacts/analyses",
        "artifacts/environments",
        "proof/signatures",
        "proof/attestations",
        "exports/prov",
        "exports/ro-crate",
        "exports/frictionless",
        "exports/mcp",
        "exports/report",
        "exports/registry",
    ] {
        if !path.join(dir).is_dir() {
            issues.push(json!({
                "rule_id": "missing_optional_repo_hook",
                "severity": "warning",
                "path": dir,
                "message": format!("Optional repo hook {dir}/ is not present."),
            }));
        }
    }
    // Known artifact files that belong under a section, not the root.
    for root_artifact in [
        "bbb-core.v0.1.json",
        "bbb-core.v0.2.json",
        "bbb-core.v0.3.json",
        "bbb-core.v0.4.json",
        "review-packet.v1.json",
        "promotion-core.v1.json",
        "review-debt.v1.json",
        "seed-manifest.v1.json",
    ] {
        if path.join(root_artifact).exists() {
            issues.push(json!({
                "rule_id": "root_artifact_clutter",
                "severity": "warning",
                "path": root_artifact,
                "message": format!("{root_artifact} should live under sources/, artifacts/, review/, proof/, or exports/."),
            }));
        }
    }

    Ok(json!({
        "schema": FRONTIER_REPO_DOCTOR_SCHEMA,
        "ok": !issues.iter().any(|issue| issue.get("severity").and_then(|v| v.as_str()) == Some("error")),
        "path": path.display().to_string(),
        "layout": FRONTIER_REPO_LAYOUT,
        "issues": issues,
    }))
}
714
/// Build the `vela.frontier_proof_verify.v0.1` report: layout issues
/// plus checks that `proof/latest.json` exists, parses, and matches
/// the replayed snapshot/event-log hashes, and that the proof/ digest
/// matches the lock. `ok` is true iff no issue was found.
pub fn proof_verify(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    let lock = read_lock(path)?;
    let proof_path = path.join("proof/latest.json");
    let mut issues = layout_issues(path, &project)
        .into_iter()
        .map(|issue| {
            json!({
                "rule_id": issue.rule_id,
                "message": issue.message,
            })
        })
        .collect::<Vec<_>>();
    // Hashes computed from the frontier-id-stamped project.
    let locked = project_with_frontier_id(&project)?;
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    // Echoed back in the report; stays Null when latest.json is absent.
    let mut latest_payload = serde_json::Value::Null;
    if !proof_path.is_file() {
        issues.push(json!({
            "rule_id": "missing_proof_latest",
            "message": "proof/latest.json is missing.",
        }));
    } else {
        // Read/parse failures here abort with Err rather than becoming
        // issues — the file exists but is unreadable.
        let data = fs::read_to_string(&proof_path)
            .map_err(|e| format!("Failed to read proof/latest.json: {e}"))?;
        latest_payload = serde_json::from_str(&data).map_err(|e| {
            format!(
                "Failed to parse proof/latest.json '{}': {e}",
                proof_path.display()
            )
        })?;
        if latest_payload
            .get("frontier_hash")
            .and_then(|value| value.as_str())
            != Some(snapshot_hash.as_str())
        {
            issues.push(json!({
                "rule_id": "proof_snapshot_mismatch",
                "message": "proof/latest.json frontier_hash does not match replayed frontier state.",
            }));
        }
        if latest_payload
            .get("event_log_hash")
            .and_then(|value| value.as_str())
            != Some(event_log_hash.as_str())
        {
            issues.push(json!({
                "rule_id": "proof_event_log_mismatch",
                "message": "proof/latest.json event_log_hash does not match .vela/events/.",
            }));
        }
    }
    let proof_digest = directory_hash(&path.join("proof"));
    if let Some(lock) = &lock {
        // Empty lock digest means "not recorded" and is skipped.
        if !lock.proof.digest.is_empty() && lock.proof.digest != proof_digest {
            issues.push(json!({
                "rule_id": "proof_digest_mismatch",
                "message": format!("proof/ digest does not match vela.lock: lock={}, current={proof_digest}", lock.proof.digest),
            }));
        }
    } else {
        issues.push(json!({
            "rule_id": "missing_frontier_lock",
            "message": "vela.lock is missing.",
        }));
    }

    Ok(json!({
        "schema": FRONTIER_PROOF_VERIFY_SCHEMA,
        "ok": issues.is_empty(),
        "path": path.display().to_string(),
        "frontier_id": locked.frontier_id(),
        "snapshot_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proof_digest": proof_digest,
        "proof": latest_payload,
        "issues": issues,
    }))
}
794
/// Human-readable summary of `proof_verify` for the CLI: frontier
/// identity, proof freshness, counts, hashes, and a short explanation
/// of the authority/proof/lock model.
pub fn proof_explain(path: &Path) -> Result<String, String> {
    let project = crate::repo::load_from_path(path)?;
    let report = proof_verify(path)?;
    // Collapse the structured report to a single fresh/stale verdict.
    let ok = report.get("ok").and_then(|value| value.as_bool()) == Some(true);
    let locked = project_with_frontier_id(&project)?;
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    // Same "open" definition as repo_status: not in a terminal status.
    let open_proposals = project
        .proposals
        .iter()
        .filter(|proposal| {
            !matches!(
                proposal.status.as_str(),
                "accepted" | "applied" | "rejected"
            )
        })
        .count();
    let status = if ok { "fresh" } else { "stale or invalid" };
    Ok(format!(
        "vela proof explain\n\nFrontier: {}\nFrontier id: {}\nProof status: {status}\nAccepted events: {}\nOpen proposals: {open_proposals}\nSnapshot hash: {snapshot_hash}\nEvent log hash: {event_log_hash}\n\nAuthority: `.vela/events/` is replayed into `frontier.json`.\nVisible proof: `proof/latest.json`, `proof/events.manifest.jsonl`, and `proof/replay.trace.jsonl`.\nLockfile: `vela.lock` binds the event log, reducer, Carina kernel, visible state, and proof digest.\n",
        project.project.name,
        locked.frontier_id(),
        project.events.len(),
    ))
}
820
/// Build a pristine `Project` for a freshly initialized frontier:
/// schema/version pins set, metadata from the arguments, every
/// collection empty, and no frontier id yet (assigned later).
fn empty_project(name: &str, description: &str, compiled_at: &str) -> Project {
    Project {
        vela_version: project::VELA_SCHEMA_VERSION.to_string(),
        schema: project::VELA_SCHEMA_URL.to_string(),
        frontier_id: None,
        project: project::ProjectMeta {
            name: name.to_string(),
            description: description.to_string(),
            compiled_at: compiled_at.to_string(),
            compiler: project::VELA_COMPILER_VERSION.to_string(),
            papers_processed: 0,
            errors: 0,
            dependencies: Vec::new(),
        },
        stats: project::ProjectStats::default(),
        findings: Vec::new(),
        sources: Vec::new(),
        evidence_atoms: Vec::new(),
        condition_records: Vec::new(),
        review_events: Vec::new(),
        confidence_updates: Vec::new(),
        events: Vec::new(),
        proposals: Vec::new(),
        proof_state: proposals::ProofState::default(),
        signatures: Vec::new(),
        actors: Vec::new(),
        replications: Vec::new(),
        datasets: Vec::new(),
        code_artifacts: Vec::new(),
        artifacts: Vec::new(),
        predictions: Vec::new(),
        resolutions: Vec::new(),
        peers: Vec::new(),
        negative_results: Vec::new(),
        trajectories: Vec::new(),
    }
}
858
/// Materialize `frontier.json`: the replayed project state (with its
/// `frontier_id` injected) plus `_warning` and `_meta` fields that point
/// readers at the proof surface (`proof/latest.json`, lockfile, traces).
///
/// The JSON body is the authority-derived state; `_warning` tells humans not
/// to hand-edit it, and `_meta` records when/what it was materialized from.
fn write_visible_state(path: &Path, project: &Project, generated_at: &str) -> Result<(), String> {
    let visible = project_with_frontier_id(project)?;
    // Hashes are computed over the normalized (id-injected) state so they
    // match what `write_lock`/`write_proof` record for the same project.
    let snapshot_hash = prefixed(events::snapshot_hash(&visible));
    let event_log_hash = prefixed(events::event_log_hash(&visible.events));
    let mut value = serde_json::to_value(&visible)
        .map_err(|e| format!("Failed to prepare frontier.json: {e}"))?;
    // `_`-prefixed keys are advisory extras layered onto the state object;
    // a non-object top level (not expected) is written through unchanged.
    if let Some(object) = value.as_object_mut() {
        object.insert(
            "_warning".to_string(),
            serde_json::Value::String(
                "Generated by Vela. Do not edit frontier.json directly; use Vela commands to propose, accept, reject, materialize, and prove frontier state."
                    .to_string(),
            ),
        );
        object.insert(
            "_meta".to_string(),
            json!({
                "schema": "vela.frontier_state_meta.v0.1",
                "generated_at": generated_at,
                "materialized_from": ".vela/events/",
                "proof": "proof/latest.json",
                "lockfile": "vela.lock",
                "events_manifest": "proof/events.manifest.jsonl",
                "replay_trace": "proof/replay.trace.jsonl",
                "snapshot_hash": snapshot_hash,
                "event_log_hash": event_log_hash,
                "carina_kernel": DEFAULT_CARINA_KERNEL,
                "vela_reducer": format!("vela@{}", env!("CARGO_PKG_VERSION")),
            }),
        );
    }
    let json = serde_json::to_string_pretty(&value)
        .map_err(|e| format!("Failed to serialize frontier.json: {e}"))?;
    fs::write(path.join("frontier.json"), json)
        .map_err(|e| format!("Failed to write frontier.json: {e}"))
}
895
896/// v0.59: read the existing frontier.yaml, replace its
897/// `dependencies.frontiers_v2` field with the project's live
898/// dependencies, and write it back. Preserves every other field
899/// the user may have customized (scope.question, maintainers,
900/// policies, license). No-op if no manifest exists yet.
901fn sync_manifest_deps(path: &Path, deps: &[ProjectDependency]) -> Result<(), String> {
902    let manifest_path = path.join("frontier.yaml");
903    if !manifest_path.is_file() {
904        return Ok(());
905    }
906    let mut manifest = match read_manifest(path)? {
907        Some(m) => m,
908        None => return Ok(()),
909    };
910    manifest.dependencies.frontiers_v2 = deps.to_vec();
911    let yaml = serde_yaml::to_string(&manifest)
912        .map_err(|e| format!("Failed to serialize frontier.yaml: {e}"))?;
913    fs::write(&manifest_path, yaml).map_err(|e| format!("Failed to write frontier.yaml: {e}"))
914}
915
/// Write `frontier.yaml`, the user-facing manifest, derived from the
/// project's current name, description, and cross-frontier dependencies.
///
/// NOTE(review): this rebuilds the manifest with default scope includes,
/// maintainers, policies, and license — unlike `sync_manifest_deps`, which
/// preserves user edits. Confirm callers only use this on init/reset paths.
fn write_manifest(path: &Path, project: &Project) -> Result<(), String> {
    let manifest = FrontierManifest {
        schema: FRONTIER_MANIFEST_SCHEMA.to_string(),
        layout: FRONTIER_REPO_LAYOUT.to_string(),
        mode: "split".to_string(),
        frontier_id: Some(project.frontier_id()),
        name: project.project.name.clone(),
        description: project.project.description.clone(),
        visibility: "public".to_string(),
        // The scope question defaults to the project description; users can
        // refine it in place and `sync_manifest_deps` will keep their edit.
        scope: FrontierScope {
            question: project.project.description.clone(),
            includes: Vec::new(),
            excludes: Vec::new(),
        },
        carina: CarinaManifest {
            kernel: DEFAULT_CARINA_KERNEL.to_string(),
        },
        vela: VelaManifest {
            reducer: format!("vela@{}", env!("CARGO_PKG_VERSION")),
        },
        // Canonical visible-layout paths; these mirror the directories that
        // `write_section_readmes` seeds.
        paths: FrontierPaths {
            state: "frontier.json".to_string(),
            sources: "sources/".to_string(),
            artifacts: "artifacts/".to_string(),
            review: "review/".to_string(),
            proof: "proof/".to_string(),
            exports: "exports/".to_string(),
        },
        maintainers: Vec::new(),
        policies: ManifestPolicies::default(),
        license: ManifestLicense::default(),
        dependencies: ManifestDependencies {
            frontiers: Vec::new(),
            packages: Vec::new(),
            adapters: Vec::new(),
            frontiers_v2: project.project.dependencies.clone(),
        },
        templates: Vec::new(),
    };
    let yaml = serde_yaml::to_string(&manifest)
        .map_err(|e| format!("Failed to serialize frontier.yaml: {e}"))?;
    fs::write(path.join("frontier.yaml"), yaml)
        .map_err(|e| format!("Failed to write frontier.yaml: {e}"))
}
960
/// Write `vela.lock`, binding the event log, reducer, Carina kernel,
/// visible directory contents, and proof digest into one witness record.
///
/// Hashes cover the normalized (frontier-id-injected) state — snapshot,
/// event log, proposal queue — plus content digests of the `sources/`,
/// `artifacts/`, and `review/` trees. Returns the lock that was written so
/// callers can reuse it without re-reading the file.
fn write_lock(
    path: &Path,
    project: &Project,
    proof: &ProofWrite,
    generated_at: &str,
) -> Result<FrontierLock, String> {
    let locked = project_with_frontier_id(project)?;
    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));
    let lock = FrontierLock {
        schema: FRONTIER_LOCK_SCHEMA.to_string(),
        generated_at: generated_at.to_string(),
        vela_version: env!("CARGO_PKG_VERSION").to_string(),
        carina_kernel: DEFAULT_CARINA_KERNEL.to_string(),
        frontier_id: locked.frontier_id(),
        canonicalization: LockCanonicalization::default(),
        // Reducer/kernel digests are identity hashes of the version strings
        // themselves, not of any package contents.
        reducer: LockPackage {
            package: reducer_package.clone(),
            digest: identity_digest(&reducer_package),
        },
        carina: LockKernel {
            kernel: DEFAULT_CARINA_KERNEL.to_string(),
            digest: identity_digest(DEFAULT_CARINA_KERNEL),
        },
        snapshot_hash: prefixed(events::snapshot_hash(&locked)),
        event_log_hash: prefixed(events::event_log_hash(&locked.events)),
        proposal_state_hash: proposal_state_hash(&locked.proposals),
        sources_hash: directory_hash(&path.join("sources")),
        artifacts_hash: directory_hash(&path.join("artifacts")),
        review_hash: directory_hash(&path.join("review")),
        proof_freshness: proof.freshness.clone(),
        proof: LockProof {
            latest: proof.latest.clone(),
            digest: proof.digest.clone(),
            freshness: proof.freshness.clone(),
            events_manifest: proof.events_manifest.clone(),
            replay_trace: proof.replay_trace.clone(),
        },
        paths: LockPaths {
            frontier: "frontier.json".to_string(),
            events: ".vela/events/".to_string(),
        },
        // v0.109: surface every cross-frontier dependency the
        // project declares, in deterministic source order, so the
        // lockfile alone witnesses what state the parent committed
        // to. Pre-v0.109 these pins lived only in `frontier.yaml`
        // and were absent from the lock; v0.109 mirrors them.
        dependencies: locked
            .project
            .dependencies
            .iter()
            .map(|d| LockedDependency {
                name: d.name.clone(),
                source: d.source.clone(),
                vfr_id: d.vfr_id.clone(),
                locator: d.locator.clone(),
                pinned_snapshot_hash: d.pinned_snapshot_hash.clone(),
            })
            .collect(),
    };
    let yaml =
        serde_yaml::to_string(&lock).map_err(|e| format!("Failed to serialize vela.lock: {e}"))?;
    fs::write(path.join("vela.lock"), yaml)
        .map_err(|e| format!("Failed to write vela.lock: {e}"))?;
    Ok(lock)
}
1026
1027fn materialization_generated_at(path: &Path, project: &Project) -> String {
1028    let now = Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true);
1029    let Ok(Some(lock)) = read_lock(path) else {
1030        return now;
1031    };
1032    if lock.generated_at.trim().is_empty() {
1033        return now;
1034    }
1035    let Ok(locked) = project_with_frontier_id(project) else {
1036        return now;
1037    };
1038    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));
1039    let current = [
1040        (
1041            lock.snapshot_hash.as_str(),
1042            prefixed(events::snapshot_hash(&locked)),
1043        ),
1044        (
1045            lock.event_log_hash.as_str(),
1046            prefixed(events::event_log_hash(&locked.events)),
1047        ),
1048        (
1049            lock.proposal_state_hash.as_str(),
1050            proposal_state_hash(&locked.proposals),
1051        ),
1052        (
1053            lock.sources_hash.as_str(),
1054            directory_hash(&path.join("sources")),
1055        ),
1056        (
1057            lock.artifacts_hash.as_str(),
1058            directory_hash(&path.join("artifacts")),
1059        ),
1060        (
1061            lock.review_hash.as_str(),
1062            directory_hash(&path.join("review")),
1063        ),
1064    ];
1065    let hashes_match = current.iter().all(|(locked, current)| *locked == current);
1066    let versions_match = lock.vela_version == env!("CARGO_PKG_VERSION")
1067        && lock.carina_kernel == DEFAULT_CARINA_KERNEL
1068        && lock.reducer.package == reducer_package
1069        && lock.carina.kernel == DEFAULT_CARINA_KERNEL;
1070    if hashes_match && versions_match && lock.proof_freshness == "fresh" {
1071        lock.generated_at
1072    } else {
1073        now
1074    }
1075}
1076
1077fn project_with_frontier_id(project: &Project) -> Result<Project, String> {
1078    let frontier_id = project.frontier_id();
1079    let mut value = serde_json::to_value(project)
1080        .map_err(|e| format!("Failed to prepare frontier state: {e}"))?;
1081    if let Some(object) = value.as_object_mut() {
1082        object.insert(
1083            "frontier_id".to_string(),
1084            serde_json::Value::String(frontier_id),
1085        );
1086    }
1087    serde_json::from_value(value).map_err(|e| format!("Failed to normalize frontier state: {e}"))
1088}
1089
/// Seed the repository `README.md` ("frontier card") with the frontier
/// name, the template used, and the canonical verification commands.
fn write_frontier_card(path: &Path, name: &str, template: &str) -> Result<(), String> {
    let readme = path.join("README.md");
    let body = format!(
        "# {name}\n\nThis is a Vela frontier repository.\n\n- State entrypoint: `frontier.json`\n- Manifest: `frontier.yaml`\n- Lockfile: `vela.lock`\n- Template: `{template}`\n\nRun:\n\n```bash\nvela check . --strict --json\nvela integrity . --json\nvela proof . --out proof/latest\n```\n"
    );
    fs::write(readme, body).map_err(|e| format!("Failed to write README.md: {e}"))
}
1096
/// Seed `SCOPE.md` with boilerplate describing what the scope file records.
fn write_scope(path: &Path, name: &str) -> Result<(), String> {
    let scope_path = path.join("SCOPE.md");
    let body = format!(
        "# Scope\n\nFrontier: {name}\n\nThis file records boundaries, exclusions, caveats, and review policy for the frontier.\n\nExternal artifacts and agent outputs are source material until reviewed into accepted Vela events.\n"
    );
    fs::write(scope_path, body).map_err(|e| format!("Failed to write SCOPE.md: {e}"))
}
1103
/// Create the visible repository sections and seed each directory with a
/// short `README.md` describing what belongs there.
///
/// Covers the five top-level sections (`sources/`, `artifacts/`, `review/`,
/// `proof/`, `exports/`) plus the nested `artifacts/*`, `proof/*`, and
/// `exports/*` subsections. Existing `README.md` files are never
/// overwritten, so user edits survive re-materialization.
fn write_section_readmes(path: &Path) -> Result<(), String> {
    // Top-level sections. For slash-free dirs the README title is the dir
    // name itself, matching the previous four-loop implementation.
    write_readme_sections(
        path,
        &[
            (
                "artifacts",
                "Packets, runs, code, notebooks, data pointers, tables, and figures.",
            ),
            (
                "exports",
                "Generated hub, RO-Crate, Frictionless, MCP, and report bundles.",
            ),
            ("proof", "Proof packets, traces, and freshness records."),
            (
                "review",
                "Proposal queues, decisions, caveats, rejected records, and audits.",
            ),
            (
                "sources",
                "Source manifests, papers, datasets, registries, and protocols.",
            ),
        ],
    )?;
    // artifacts/* subsections.
    write_readme_sections(
        path,
        &[
            (
                "artifacts/analyses",
                "Analysis outputs and state-transition examples.",
            ),
            (
                "artifacts/code",
                "Analysis, extraction, and validation code.",
            ),
            (
                "artifacts/data",
                "Small data files, data pointers, and external-data metadata.",
            ),
            (
                "artifacts/environments",
                "Execution context pointers: containers, lockfiles, hardware, cloud runtimes, and lab instruments.",
            ),
            ("artifacts/figures", "Generated figures and visual outputs."),
            ("artifacts/notebooks", "Exploratory and report notebooks."),
            (
                "artifacts/notes",
                "Reading notes, decision notes, meeting notes, and scratch context.",
            ),
            ("artifacts/packets", "Import, review, and proof packets."),
            (
                "artifacts/runs",
                "Agent, source-adapter, computational, and lab run records.",
            ),
            (
                "artifacts/tables",
                "Generated tables and tabular review outputs.",
            ),
        ],
    )?;
    // proof/* subsections.
    write_readme_sections(
        path,
        &[
            (
                "proof/attestations",
                "Optional external proof attestations. Vela events remain the state authority.",
            ),
            (
                "proof/signatures",
                "Optional signatures for proof packets, events, lockfiles, or institutional attestations.",
            ),
        ],
    )?;
    // exports/* subsections.
    write_readme_sections(
        path,
        &[
            (
                "exports/frictionless",
                "Generated Frictionless Data Package exports.",
            ),
            ("exports/mcp", "Generated MCP-serving export bundles."),
            ("exports/prov", "Generated W3C PROV exports."),
            (
                "exports/registry",
                "Generated registry and federation bundles.",
            ),
            (
                "exports/report",
                "Generated reviewer, funder, or release reports.",
            ),
            ("exports/ro-crate", "Generated RO-Crate exports."),
        ],
    )
}

/// For each `(dir, description)`, ensure `path/dir` exists and seed it with
/// a `# <last path segment>` README unless one is already present.
fn write_readme_sections(path: &Path, sections: &[(&str, &str)]) -> Result<(), String> {
    for &(dir, description) in sections {
        let section = path.join(dir);
        fs::create_dir_all(&section).map_err(|e| format!("Failed to create {dir}/: {e}"))?;
        let readme = section.join("README.md");
        if !readme.exists() {
            // Title is the final path segment: "artifacts/runs" -> "runs",
            // and a slash-free top-level dir titles as itself.
            let title = dir.rsplit('/').next().unwrap_or(dir);
            fs::write(readme, format!("# {title}\n\n{description}\n"))
                .map_err(|e| format!("Failed to write {dir}/README.md: {e}"))?;
        }
    }
    Ok(())
}
1225
/// Write the full `proof/` surface for the current project state.
///
/// Emits `latest.json` (the proof packet), `events.manifest.jsonl` (one
/// entry per accepted event), `replay.trace.jsonl` (one entry per replay
/// step, with the cumulative event-log hash after each), `freshness.md`,
/// and `hashes.json`. Returns a `ProofWrite` summarizing where the proof
/// lives and a digest over the whole `proof/` directory.
fn write_proof(path: &Path, project: &Project, generated_at: &str) -> Result<ProofWrite, String> {
    let locked = project_with_frontier_id(project)?;
    let proof_dir = path.join("proof");
    fs::create_dir_all(&proof_dir).map_err(|e| format!("Failed to create proof/: {e}"))?;

    // All hashes are computed over the normalized (id-injected) state so
    // they agree with `write_visible_state` and `write_lock`.
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    let proposal_state_hash = proposal_state_hash(&locked.proposals);
    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));

    // proof/latest.json: the top-level proof packet.
    let latest = json!({
        "schema": "vela.frontier_repo_proof.v0.1",
        "frontier_id": locked.frontier_id(),
        "frontier_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proposal_state_hash": proposal_state_hash,
        "reducer": {
            "name": "vela",
            "version": env!("CARGO_PKG_VERSION"),
            "package": reducer_package,
            "digest": identity_digest(&format!("vela@{}", env!("CARGO_PKG_VERSION"))),
        },
        "carina": {
            "kernel": DEFAULT_CARINA_KERNEL,
            "digest": identity_digest(DEFAULT_CARINA_KERNEL),
        },
        "materialized_at": generated_at,
        "freshness": "fresh",
        "event_count": locked.events.len(),
        "paths": {
            "frontier": "frontier.json",
            "lockfile": "vela.lock",
            "events_authority": ".vela/events/",
            "events_manifest": "proof/events.manifest.jsonl",
            "replay_trace": "proof/replay.trace.jsonl"
        },
        "warning": "Do not edit frontier.json directly. Use Vela commands to propose, accept, reject, materialize, and prove frontier state."
    });
    fs::write(
        proof_dir.join("latest.json"),
        serde_json::to_string_pretty(&latest)
            .map_err(|e| format!("Failed to serialize proof/latest.json: {e}"))?,
    )
    .map_err(|e| format!("Failed to write proof/latest.json: {e}"))?;

    // Build the per-event manifest and replay trace in one pass; both are
    // JSONL, one line per event, in accepted-event order (1-indexed).
    let mut manifest_lines = String::new();
    let mut trace_lines = String::new();
    for (idx, event) in locked.events.iter().enumerate() {
        let event_hash = prefixed(event_hash(event));
        let entry = json!({
            "schema": "vela.proof_event_manifest_entry.v0.1",
            "index": idx + 1,
            "id": event.id,
            "kind": event.kind,
            "target": event.target,
            "actor": event.actor,
            "timestamp": event.timestamp,
            "event_hash": event_hash,
            "before_hash": event.before_hash,
            "after_hash": event.after_hash,
            "caveat_count": event.caveats.len(),
        });
        manifest_lines.push_str(
            &serde_json::to_string(&entry)
                .map_err(|e| format!("Failed to serialize event manifest entry: {e}"))?,
        );
        manifest_lines.push('\n');

        // The trace records the cumulative log hash over the prefix up to
        // and including this event, witnessing each replay step.
        let event_log_hash_after = prefixed(events::event_log_hash(&locked.events[..=idx]));
        let trace = json!({
            "schema": "vela.replay_trace_entry.v0.1",
            "step": idx + 1,
            "event": event.id,
            "kind": event.kind,
            "event_hash": event_hash,
            "event_log_hash_after": event_log_hash_after,
            "target_after_hash": event.after_hash,
        });
        trace_lines.push_str(
            &serde_json::to_string(&trace)
                .map_err(|e| format!("Failed to serialize replay trace entry: {e}"))?,
        );
        trace_lines.push('\n');
    }
    fs::write(proof_dir.join("events.manifest.jsonl"), manifest_lines)
        .map_err(|e| format!("Failed to write proof/events.manifest.jsonl: {e}"))?;
    fs::write(proof_dir.join("replay.trace.jsonl"), trace_lines)
        .map_err(|e| format!("Failed to write proof/replay.trace.jsonl: {e}"))?;

    // proof/freshness.md: human-readable freshness summary.
    fs::write(
        proof_dir.join("freshness.md"),
        format!(
            "# Freshness\n\nCurrent proof status: fresh\n\n`frontier.json` was materialized from `.vela/events/` at {generated_at}.\n\nAccepted events: {}\nEvent log hash: `{event_log_hash}`\nSnapshot hash: `{snapshot_hash}`\n\nRun:\n\n```bash\nvela check . --strict --json\nvela integrity . --json\n```\n",
            locked.events.len()
        ),
    )
    .map_err(|e| format!("Failed to write proof/freshness.md: {e}"))?;

    // proof/hashes.json: machine-readable digest index, including the
    // visible directory trees.
    let hashes = json!({
        "schema": "vela.frontier_repo_hashes.v0.1",
        "frontier_id": locked.frontier_id(),
        "snapshot_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proposal_state_hash": proposal_state_hash,
        "sources_hash": directory_hash(&path.join("sources")),
        "artifacts_hash": directory_hash(&path.join("artifacts")),
        "review_hash": directory_hash(&path.join("review")),
    });
    fs::write(
        proof_dir.join("hashes.json"),
        serde_json::to_string_pretty(&hashes)
            .map_err(|e| format!("Failed to serialize proof/hashes.json: {e}"))?,
    )
    .map_err(|e| format!("Failed to write proof/hashes.json: {e}"))?;

    // The returned digest covers everything written above, so it must be
    // computed last.
    Ok(ProofWrite {
        digest: directory_hash(&proof_dir),
        freshness: "fresh".to_string(),
        latest: "proof/latest.json".to_string(),
        events_manifest: "proof/events.manifest.jsonl".to_string(),
        replay_trace: "proof/replay.trace.jsonl".to_string(),
    })
}
1349
1350fn proposal_state_hash(proposals: &[crate::proposals::StateProposal]) -> String {
1351    let bytes = crate::canonical::to_canonical_bytes(proposals).unwrap_or_default();
1352    prefixed(hex::encode(Sha256::digest(bytes)))
1353}
1354
1355fn directory_hash(path: &Path) -> String {
1356    let mut entries = Vec::new();
1357    if path.is_dir() {
1358        collect_file_entries(path, path, &mut entries);
1359    }
1360    entries.sort_by(|a, b| a.0.cmp(&b.0));
1361    let bytes = crate::canonical::to_canonical_bytes(&entries).unwrap_or_default();
1362    prefixed(hex::encode(Sha256::digest(bytes)))
1363}
1364
1365fn collect_file_entries(root: &Path, path: &Path, entries: &mut Vec<(String, String)>) {
1366    let Ok(read_dir) = fs::read_dir(path) else {
1367        return;
1368    };
1369    for entry in read_dir.flatten() {
1370        let entry_path = entry.path();
1371        let Some(name) = entry_path.file_name().and_then(|s| s.to_str()) else {
1372            continue;
1373        };
1374        if name == ".DS_Store" {
1375            continue;
1376        }
1377        if entry_path.is_dir() {
1378            collect_file_entries(root, &entry_path, entries);
1379        } else if entry_path.is_file() {
1380            let rel = entry_path
1381                .strip_prefix(root)
1382                .unwrap_or(&entry_path)
1383                .to_string_lossy()
1384                .replace('\\', "/");
1385            let digest = fs::read(&entry_path)
1386                .map(|bytes| prefixed(hex::encode(Sha256::digest(bytes))))
1387                .unwrap_or_else(|_| "sha256:unreadable".to_string());
1388            entries.push((rel, digest));
1389        }
1390    }
1391}
1392
1393fn event_hash(event: &crate::events::StateEvent) -> String {
1394    let bytes = crate::canonical::to_canonical_bytes(event).unwrap_or_default();
1395    hex::encode(Sha256::digest(bytes))
1396}
1397
1398fn identity_digest(value: &str) -> String {
1399    prefixed(hex::encode(Sha256::digest(value.as_bytes())))
1400}
1401
/// Ensure a hex digest carries the `sha256:` prefix, adding it only when
/// absent so already-prefixed values pass through unchanged.
fn prefixed(hash: String) -> String {
    match hash.strip_prefix("sha256:") {
        Some(_) => hash,
        None => format!("sha256:{hash}"),
    }
}
1409
1410fn issue(rule_id: &str, message: impl Into<String>) -> RepoLayoutIssue {
1411    RepoLayoutIssue {
1412        rule_id: rule_id.to_string(),
1413        message: message.into(),
1414    }
1415}
1416
/// Serde default for `FrontierManifest::mode`.
fn default_split_mode() -> String {
    String::from("split")
}
1420
/// Serde default for `FrontierManifest::visibility`.
fn default_visibility() -> String {
    String::from("public")
}