1use std::collections::BTreeMap;
8use std::fs;
9use std::path::Path;
10
11use chrono::{SecondsFormat, Utc};
12use serde::{Deserialize, Serialize};
13use serde_json::json;
14use sha2::{Digest, Sha256};
15
16use crate::events;
17use crate::project::{self, Project, ProjectDependency};
18use crate::proposals;
19
/// Layout identifier stamped into manifests/reports for split frontier repos.
pub const FRONTIER_REPO_LAYOUT: &str = "vela.frontier_repo.v0.1";
/// Schema id written into `frontier.yaml`.
pub const FRONTIER_MANIFEST_SCHEMA: &str = "vela.frontier_manifest.v0.1";
/// Schema id written into the generated `vela.lock`.
pub const FRONTIER_LOCK_SCHEMA: &str = "vela.frontier_lock.v0.1";
/// Schema id of the JSON report returned by [`initialize`].
pub const FRONTIER_INIT_SCHEMA: &str = "vela.frontier_repo_init.v0.1";
/// Schema id of the JSON report returned by [`materialize`].
pub const FRONTIER_MATERIALIZE_SCHEMA: &str = "vela.frontier_materialize.v0.1";
/// Schema id of the JSON report returned by [`repo_status`].
pub const FRONTIER_REPO_STATUS_SCHEMA: &str = "vela.frontier_repo_status.v0.1";
/// Schema id of the JSON report returned by [`repo_doctor`].
pub const FRONTIER_REPO_DOCTOR_SCHEMA: &str = "vela.frontier_repo_doctor.v0.1";
/// Schema id of the JSON report returned by [`proof_verify`].
pub const FRONTIER_PROOF_VERIFY_SCHEMA: &str = "vela.frontier_proof_verify.v0.1";
/// Carina kernel identifier pinned into manifests and lockfiles by default.
pub const DEFAULT_CARINA_KERNEL: &str = "carina@0.1.0";
29
/// Parsed form of `frontier.yaml`: the human-maintained manifest of a
/// split frontier repo. Most fields are defaulted so older/partial
/// manifests still deserialize.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierManifest {
    // Expected to be FRONTIER_MANIFEST_SCHEMA (see write_manifest).
    pub schema: String,
    // Expected to be FRONTIER_REPO_LAYOUT.
    pub layout: String,
    #[serde(default = "default_split_mode")]
    pub mode: String,
    // Stable identifier of the frontier; optional in hand-written manifests.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frontier_id: Option<String>,
    pub name: String,
    #[serde(default)]
    pub description: String,
    #[serde(default = "default_visibility")]
    pub visibility: String,
    #[serde(default)]
    pub scope: FrontierScope,
    pub carina: CarinaManifest,
    pub vela: VelaManifest,
    // Relative locations of the repo's standard sections.
    pub paths: FrontierPaths,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub maintainers: Vec<ManifestMaintainer>,
    #[serde(default)]
    pub policies: ManifestPolicies,
    #[serde(default)]
    pub license: ManifestLicense,
    #[serde(default)]
    pub dependencies: ManifestDependencies,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub templates: Vec<String>,
}
59
/// Carina section of the manifest: the kernel identifier (e.g. `carina@0.1.0`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct CarinaManifest {
    pub kernel: String,
}
64
/// Vela section of the manifest: the reducer identifier (e.g. `vela@<version>`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct VelaManifest {
    pub reducer: String,
}
69
/// Research-scope statement for a frontier: the driving question plus
/// explicit include/exclude boundaries. All fields default to empty.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierScope {
    #[serde(default)]
    pub question: String,
    #[serde(default)]
    pub includes: Vec<String>,
    #[serde(default)]
    pub excludes: Vec<String>,
}
79
/// Relative paths to the standard sections of a split frontier repo
/// (see `write_manifest` for the values written by default).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierPaths {
    pub state: String,
    pub sources: String,
    pub artifacts: String,
    pub review: String,
    pub proof: String,
    pub exports: String,
}
89
/// A maintainer entry in the manifest: an identifier plus a free-form role.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestMaintainer {
    pub id: String,
    pub role: String,
}
95
/// Paths to the repo's review and proof policy files.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestPolicies {
    pub review: String,
    pub proof: String,
}
101
102impl Default for ManifestPolicies {
103 fn default() -> Self {
104 Self {
105 review: "review/policy.yaml".to_string(),
106 proof: "proof/policy.yaml".to_string(),
107 }
108 }
109}
110
/// License identifiers for the repo's content, code, and data sections.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestLicense {
    pub content: String,
    pub code: String,
    pub data: String,
}
117
118impl Default for ManifestLicense {
119 fn default() -> Self {
120 Self {
121 content: "CC-BY-4.0".to_string(),
122 code: "Apache-2.0".to_string(),
123 data: "varies".to_string(),
124 }
125 }
126}
127
/// Declared dependencies of a frontier. The legacy string lists are kept
/// alongside the structured `frontiers_v2` entries for compatibility.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ManifestDependencies {
    #[serde(default)]
    pub frontiers: Vec<String>,
    #[serde(default)]
    pub packages: Vec<String>,
    #[serde(default)]
    pub adapters: Vec<String>,
    // Structured dependency records; synced from the project by
    // sync_manifest_deps / write_manifest.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub frontiers_v2: Vec<ProjectDependency>,
}
145
/// Parsed form of `vela.lock`: the generated lockfile binding the event
/// log, reducer, Carina kernel, visible state, and proof digests together.
/// Fields marked `#[serde(default)]` tolerate older lockfiles.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FrontierLock {
    pub schema: String,
    pub generated_at: String,
    pub vela_version: String,
    pub carina_kernel: String,
    pub frontier_id: String,
    #[serde(default)]
    pub canonicalization: LockCanonicalization,
    #[serde(default)]
    pub reducer: LockPackage,
    #[serde(default)]
    pub carina: LockKernel,
    // Hashes of the replayed project state and its inputs.
    pub snapshot_hash: String,
    pub event_log_hash: String,
    pub proposal_state_hash: String,
    // Directory digests of sources/, artifacts/, and review/.
    #[serde(default)]
    pub sources_hash: String,
    #[serde(default)]
    pub artifacts_hash: String,
    #[serde(default)]
    pub review_hash: String,
    pub proof_freshness: String,
    #[serde(default)]
    pub proof: LockProof,
    pub paths: LockPaths,
}
173
/// Canonicalization algorithm identifiers recorded in the lockfile.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockCanonicalization {
    pub json: String,
    pub yaml: String,
}
179
180impl Default for LockCanonicalization {
181 fn default() -> Self {
182 Self {
183 json: "vela-canonical-json-v0.1".to_string(),
184 yaml: "vela-yaml-v0.1".to_string(),
185 }
186 }
187}
188
/// Reducer package pin: package identifier plus digest.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockPackage {
    pub package: String,
    pub digest: String,
}
194
/// Carina kernel pin: kernel identifier plus digest.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockKernel {
    pub kernel: String,
    pub digest: String,
}
200
/// Proof section of the lockfile: locations of the latest proof packet,
/// its digest, freshness, and the events-manifest/replay-trace files.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockProof {
    pub latest: String,
    pub digest: String,
    pub freshness: String,
    pub events_manifest: String,
    pub replay_trace: String,
}
209
/// Paths recorded in the lockfile: the visible state file and the event log.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct LockPaths {
    pub frontier: String,
    pub events: String,
}
215
/// Result of write_proof: the values write_lock needs to record in
/// the lockfile's proof section. Internal only.
#[derive(Debug, Clone)]
struct ProofWrite {
    digest: String,
    freshness: String,
    latest: String,
    events_manifest: String,
    replay_trace: String,
}
224
/// One structural problem detected by [`layout_issues`]: a machine-readable
/// rule id plus a human-readable message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RepoLayoutIssue {
    pub rule_id: String,
    pub message: String,
}
230
/// Options for [`initialize`]: frontier name, template identifier, and
/// whether to run `git init` in the new repo.
#[derive(Debug, Clone)]
pub struct InitOptions<'a> {
    pub name: &'a str,
    pub template: &'a str,
    pub initialize_git: bool,
}
237
/// Create a new split frontier repository at `path`.
///
/// Creates the directory tree, section READMEs, an empty project state
/// (via `crate::repo::init_repo`), the top-level `README.md` and
/// `SCOPE.md`, and — when requested and not already present — a git repo.
///
/// Returns a JSON report tagged with [`FRONTIER_INIT_SCHEMA`] listing the
/// files written, or an error string on any I/O or git failure.
pub fn initialize(path: &Path, options: InitOptions<'_>) -> Result<serde_json::Value, String> {
    // Refuse to proceed if the target exists but is not a directory.
    if path.exists() && !path.is_dir() {
        return Err(format!("{} exists and is not a directory", path.display()));
    }
    fs::create_dir_all(path).map_err(|e| {
        format!(
            "Failed to create frontier directory '{}': {e}",
            path.display()
        )
    })?;

    write_section_readmes(path)?;
    let now = Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true);
    // Empty project state: name from options, empty description.
    let project = empty_project(options.name, "", &now);
    crate::repo::init_repo(path, &project)?;
    write_frontier_card(path, options.name, options.template)?;
    write_scope(path, options.name)?;
    // Only run `git init` when the caller asked for it and no .git exists.
    if options.initialize_git && !path.join(".git").exists() {
        let status = std::process::Command::new("git")
            .arg("init")
            .arg(path)
            .status()
            .map_err(|e| format!("Failed to run git init: {e}"))?;
        if !status.success() {
            return Err("git init failed".to_string());
        }
    }

    Ok(json!({
        "schema": FRONTIER_INIT_SCHEMA,
        "ok": true,
        "layout": FRONTIER_REPO_LAYOUT,
        "path": path.display().to_string(),
        "name": options.name,
        "template": options.template,
        "wrote": [
            "README.md",
            "SCOPE.md",
            "frontier.yaml",
            "frontier.json",
            "vela.lock"
        ]
    }))
}
282
/// Replay the repo's event log and rewrite all generated files:
/// `frontier.json`, `frontier.yaml`, the proof packet, and `vela.lock`.
///
/// Returns a JSON report tagged with [`FRONTIER_MATERIALIZE_SCHEMA`]
/// containing the resulting lock hashes.
pub fn materialize(path: &Path) -> Result<serde_json::Value, String> {
    let source = crate::repo::VelaSource::VelaRepo(path.to_path_buf());
    let project = crate::repo::load(&source)?;
    write_section_readmes(path)?;
    // Reuses the previous lock's timestamp when nothing changed (see
    // materialization_generated_at), keeping rewrites deterministic.
    let generated_at = materialization_generated_at(path, &project);
    write_visible_state(path, &project, &generated_at)?;
    write_manifest(path, &project)?;
    let proof = write_proof(path, &project, &generated_at)?;
    let lock = write_lock(path, &project, &proof, &generated_at)?;
    Ok(json!({
        "schema": FRONTIER_MATERIALIZE_SCHEMA,
        "ok": true,
        "path": path.display().to_string(),
        "wrote_frontier": "frontier.json",
        "wrote_lock": "vela.lock",
        "wrote_proof": "proof/latest.json",
        "wrote_events_manifest": "proof/events.manifest.jsonl",
        "snapshot_hash": lock.snapshot_hash,
        "event_log_hash": lock.event_log_hash,
        "proposal_state_hash": lock.proposal_state_hash,
    }))
}
305
/// Rewrite the generated files for an already-loaded `project`.
///
/// Unlike [`materialize`], an existing hand-edited `frontier.yaml` is
/// preserved: only its dependency list is synced with the project.
pub fn write_visible_repo_files(path: &Path, project: &Project) -> Result<(), String> {
    write_section_readmes(path)?;
    let generated_at = materialization_generated_at(path, project);
    write_visible_state(path, project, &generated_at)?;
    if !path.join("frontier.yaml").is_file() {
        // No manifest yet: generate one from the project.
        write_manifest(path, project)?;
    } else {
        // Manifest exists: keep the human-maintained fields, refresh deps.
        sync_manifest_deps(path, &project.project.dependencies)?;
    }
    let proof = write_proof(path, project, &generated_at)?;
    write_lock(path, project, &proof, &generated_at)?;
    Ok(())
}
324
325pub fn read_manifest(path: &Path) -> Result<Option<FrontierManifest>, String> {
326 let manifest_path = path.join("frontier.yaml");
327 if !manifest_path.is_file() {
328 return Ok(None);
329 }
330 let data = fs::read_to_string(&manifest_path)
331 .map_err(|e| format!("Failed to read frontier.yaml: {e}"))?;
332 serde_yaml::from_str(&data).map(Some).map_err(|e| {
333 format!(
334 "Failed to parse frontier manifest '{}': {e}",
335 manifest_path.display()
336 )
337 })
338}
339
340pub fn read_lock(path: &Path) -> Result<Option<FrontierLock>, String> {
341 let lock_path = path.join("vela.lock");
342 if !lock_path.is_file() {
343 return Ok(None);
344 }
345 let data =
346 fs::read_to_string(&lock_path).map_err(|e| format!("Failed to read vela.lock: {e}"))?;
347 serde_yaml::from_str(&data).map(Some).map_err(|e| {
348 format!(
349 "Failed to parse frontier lock '{}': {e}",
350 lock_path.display()
351 )
352 })
353}
354
/// Detect structural problems in a split frontier repo at `path`.
///
/// Returns an empty list when the directory does not look like a split
/// repo at all (no `.vela/`, or neither `frontier.yaml` nor `vela.lock`).
/// Otherwise compares the lockfile's recorded hashes against the current
/// event log, proposals, section directories, and `frontier.json`.
pub fn layout_issues(path: &Path, project: &Project) -> Vec<RepoLayoutIssue> {
    // Not a vela repo: nothing to validate.
    if !path.is_dir() || !path.join(".vela").is_dir() {
        return Vec::new();
    }
    // Plain (non-split) repo: the split-layout rules don't apply.
    if !path.join("frontier.yaml").is_file() && !path.join("vela.lock").is_file() {
        return Vec::new();
    }
    let mut issues = Vec::new();
    let manifest = match read_manifest(path) {
        Ok(value) => value,
        Err(e) => {
            issues.push(issue("invalid_frontier_manifest", e));
            None
        }
    };
    let lock = match read_lock(path) {
        Ok(value) => value,
        Err(e) => {
            issues.push(issue("invalid_frontier_lock", e));
            None
        }
    };

    if manifest.is_none() {
        issues.push(issue(
            "missing_frontier_manifest",
            "Split frontier repo is missing frontier.yaml.",
        ));
    }
    // Without a lock there is nothing further to compare against.
    let Some(lock) = lock else {
        issues.push(issue(
            "missing_frontier_lock",
            "Split frontier repo is missing generated vela.lock.",
        ));
        return issues;
    };

    // Hash against the frontier_id-normalized project when that
    // normalization succeeds; fall back to the raw project otherwise.
    let locked_project = project_with_frontier_id(project);
    let hash_project = locked_project.as_ref().unwrap_or(project);
    let expected_snapshot = prefixed(events::snapshot_hash(hash_project));
    let expected_event_log = prefixed(events::event_log_hash(&hash_project.events));
    let expected_proposals = proposal_state_hash(&project.proposals);
    let expected_frontier = hash_project.frontier_id();
    let expected_sources = directory_hash(&path.join("sources"));
    let expected_artifacts = directory_hash(&path.join("artifacts"));
    let expected_review = directory_hash(&path.join("review"));
    let expected_proof = directory_hash(&path.join("proof"));
    if lock.snapshot_hash != expected_snapshot {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock snapshot_hash does not match materialized frontier state: lock={}, current={expected_snapshot}",
                lock.snapshot_hash
            ),
        ));
    }
    if lock.event_log_hash != expected_event_log {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock event_log_hash does not match .vela/events: lock={}, current={expected_event_log}",
                lock.event_log_hash
            ),
        ));
    }
    if lock.proposal_state_hash != expected_proposals {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock proposal_state_hash does not match .vela/proposals: lock={}, current={expected_proposals}",
                lock.proposal_state_hash
            ),
        ));
    }
    if lock.frontier_id != expected_frontier {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock frontier_id does not match current frontier: lock={}, current={expected_frontier}",
                lock.frontier_id
            ),
        ));
    }
    // Directory-hash fields are optional in older locks: only compare
    // when the lock actually recorded a value.
    if !lock.sources_hash.is_empty() && lock.sources_hash != expected_sources {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock sources_hash does not match sources/: lock={}, current={expected_sources}",
                lock.sources_hash
            ),
        ));
    }
    if !lock.artifacts_hash.is_empty() && lock.artifacts_hash != expected_artifacts {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock artifacts_hash does not match artifacts/: lock={}, current={expected_artifacts}",
                lock.artifacts_hash
            ),
        ));
    }
    if !lock.review_hash.is_empty() && lock.review_hash != expected_review {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock review_hash does not match review/: lock={}, current={expected_review}",
                lock.review_hash
            ),
        ));
    }
    if !lock.proof.digest.is_empty() && lock.proof.digest != expected_proof {
        issues.push(issue(
            "frontier_lock_mismatch",
            format!(
                "vela.lock proof digest does not match proof/: lock={}, current={expected_proof}",
                lock.proof.digest
            ),
        ));
    }

    // Finally, the visible frontier.json must round-trip to the same
    // snapshot hash as the replayed state.
    let visible_path = path.join("frontier.json");
    if !visible_path.is_file() {
        issues.push(issue(
            "missing_materialized_frontier",
            "Split frontier repo is missing frontier.json.",
        ));
        return issues;
    }
    match crate::repo::load_project_file(&visible_path) {
        Ok(visible) => {
            let visible_hash = prefixed(events::snapshot_hash(&visible));
            if visible_hash != expected_snapshot {
                issues.push(issue(
                    "frontier_lock_mismatch",
                    format!(
                        "frontier.json does not match .vela materialized state: visible={visible_hash}, current={expected_snapshot}",
                    ),
                ));
            }
        }
        Err(e) => issues.push(issue("invalid_materialized_frontier", e)),
    }

    issues
}
500
/// Manifest values that may override replayed project state.
///
/// Currently this just delegates to [`read_manifest`]; it exists as a
/// separate entry point so call sites state their intent.
pub fn manifest_overrides(path: &Path) -> Result<Option<FrontierManifest>, String> {
    read_manifest(path)
}
504
/// Build the status report ([`FRONTIER_REPO_STATUS_SCHEMA`]) for the repo
/// at `path`: counts, freshness flags, current hashes, and any layout
/// issues. `ok` / `lock_agreement` is true iff no layout issue was found.
pub fn repo_status(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    let lock = read_lock(path)?;
    let layout_issues = layout_issues(path, &project);
    let structural_issue_count = layout_issues.len();
    let lock_agreement = structural_issue_count == 0;
    // Proposals still in flight: anything not accepted/applied/rejected.
    let open_proposals = project
        .proposals
        .iter()
        .filter(|proposal| {
            !matches!(
                proposal.status.as_str(),
                "accepted" | "applied" | "rejected"
            )
        })
        .count();
    let lock = lock.as_ref();
    let sources_hash = directory_hash(&path.join("sources"));
    let artifacts_hash = directory_hash(&path.join("artifacts"));
    let review_hash = directory_hash(&path.join("review"));
    let proof_hash = directory_hash(&path.join("proof"));
    // A section counts as "changed" only when the lock recorded a hash
    // for it (empty hashes come from older locks) and it differs now.
    let source_changed =
        lock.is_some_and(|lock| !lock.sources_hash.is_empty() && lock.sources_hash != sources_hash);
    let artifact_changed = lock.is_some_and(|lock| {
        !lock.artifacts_hash.is_empty() && lock.artifacts_hash != artifacts_hash
    });
    let review_changed =
        lock.is_some_and(|lock| !lock.review_hash.is_empty() && lock.review_hash != review_hash);
    let proof_changed =
        lock.is_some_and(|lock| !lock.proof.digest.is_empty() && lock.proof.digest != proof_hash);
    Ok(json!({
        "schema": FRONTIER_REPO_STATUS_SCHEMA,
        "ok": lock_agreement,
        "path": path.display().to_string(),
        "layout": FRONTIER_REPO_LAYOUT,
        "frontier_id": project.frontier_id(),
        "summary": {
            "accepted_events": project.events.len(),
            "open_proposals": open_proposals,
            "findings": project.findings.len(),
            "sources": project.sources.len(),
            "artifacts": project.artifacts.len(),
        },
        "freshness": {
            "materialized_state": if lock_agreement { "fresh" } else { "stale_or_invalid" },
            "proof": lock.map_or("unknown", |lock| lock.proof_freshness.as_str()),
            "sources_changed": source_changed,
            "artifacts_changed": artifact_changed,
            "review_changed": review_changed,
            "proof_changed": proof_changed,
        },
        "hashes": {
            "snapshot_hash": prefixed(events::snapshot_hash(&project_with_frontier_id(&project)?)),
            "event_log_hash": prefixed(events::event_log_hash(&project.events)),
            "sources_hash": sources_hash,
            "artifacts_hash": artifacts_hash,
            "review_hash": review_hash,
            "proof_hash": proof_hash,
        },
        "lock_agreement": lock_agreement,
        "issues": layout_issues.iter().map(|issue| json!({
            "rule_id": issue.rule_id,
            "message": issue.message,
        })).collect::<Vec<_>>(),
    }))
}
571
/// Run the full repo health check ([`FRONTIER_REPO_DOCTOR_SCHEMA`]).
///
/// Collects layout issues (as errors), then checks for required files,
/// required directories, optional hook directories (warnings), and
/// well-known artifact files cluttering the repo root (warnings).
/// `ok` is true iff no error-severity issue was found.
pub fn repo_doctor(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    // Structural lock/state mismatches are always errors.
    let mut issues = layout_issues(path, &project)
        .into_iter()
        .map(|issue| {
            json!({
                "rule_id": issue.rule_id,
                "severity": "error",
                "message": issue.message,
            })
        })
        .collect::<Vec<_>>();

    // Required top-level files.
    for file in [
        "README.md",
        "SCOPE.md",
        "frontier.yaml",
        "frontier.json",
        "vela.lock",
    ] {
        if !path.join(file).is_file() {
            issues.push(json!({
                "rule_id": "missing_repo_file",
                "severity": "error",
                "path": file,
                "message": format!("Frontier repo is missing {file}."),
            }));
        }
    }
    // Required section directories.
    for dir in [
        "sources",
        "artifacts",
        "review",
        "proof",
        "exports",
        ".vela",
    ] {
        if !path.join(dir).is_dir() {
            issues.push(json!({
                "rule_id": "missing_repo_directory",
                "severity": "error",
                "path": dir,
                "message": format!("Frontier repo is missing {dir}/."),
            }));
        }
    }
    // Optional hook directories: absence is only a warning.
    for dir in [
        "artifacts/packets",
        "artifacts/runs",
        "artifacts/code",
        "artifacts/notebooks",
        "artifacts/data",
        "artifacts/notes",
        "artifacts/tables",
        "artifacts/figures",
        "artifacts/analyses",
        "artifacts/environments",
        "proof/signatures",
        "proof/attestations",
        "exports/prov",
        "exports/ro-crate",
        "exports/frictionless",
        "exports/mcp",
        "exports/report",
        "exports/registry",
    ] {
        if !path.join(dir).is_dir() {
            issues.push(json!({
                "rule_id": "missing_optional_repo_hook",
                "severity": "warning",
                "path": dir,
                "message": format!("Optional repo hook {dir}/ is not present."),
            }));
        }
    }
    // Known artifact files that belong under a section, not the root.
    for root_artifact in [
        "bbb-core.v0.1.json",
        "bbb-core.v0.2.json",
        "bbb-core.v0.3.json",
        "bbb-core.v0.4.json",
        "review-packet.v1.json",
        "promotion-core.v1.json",
        "review-debt.v1.json",
        "seed-manifest.v1.json",
    ] {
        if path.join(root_artifact).exists() {
            issues.push(json!({
                "rule_id": "root_artifact_clutter",
                "severity": "warning",
                "path": root_artifact,
                "message": format!("{root_artifact} should live under sources/, artifacts/, review/, proof/, or exports/."),
            }));
        }
    }

    Ok(json!({
        "schema": FRONTIER_REPO_DOCTOR_SCHEMA,
        "ok": !issues.iter().any(|issue| issue.get("severity").and_then(|v| v.as_str()) == Some("error")),
        "path": path.display().to_string(),
        "layout": FRONTIER_REPO_LAYOUT,
        "issues": issues,
    }))
}
675
/// Verify the proof packet against the replayed state and the lockfile.
///
/// Checks that `proof/latest.json` exists and that its `frontier_hash` /
/// `event_log_hash` fields match the replayed project, and that the
/// `proof/` directory digest matches `vela.lock`. Layout issues are
/// included in the report. `ok` is true iff no issue was found.
pub fn proof_verify(path: &Path) -> Result<serde_json::Value, String> {
    let project = crate::repo::load_from_path(path)?;
    let lock = read_lock(path)?;
    let proof_path = path.join("proof/latest.json");
    let mut issues = layout_issues(path, &project)
        .into_iter()
        .map(|issue| {
            json!({
                "rule_id": issue.rule_id,
                "message": issue.message,
            })
        })
        .collect::<Vec<_>>();
    let locked = project_with_frontier_id(&project)?;
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    // Null when proof/latest.json is missing; the raw payload otherwise.
    let mut latest_payload = serde_json::Value::Null;
    if !proof_path.is_file() {
        issues.push(json!({
            "rule_id": "missing_proof_latest",
            "message": "proof/latest.json is missing.",
        }));
    } else {
        let data = fs::read_to_string(&proof_path)
            .map_err(|e| format!("Failed to read proof/latest.json: {e}"))?;
        latest_payload = serde_json::from_str(&data).map_err(|e| {
            format!(
                "Failed to parse proof/latest.json '{}': {e}",
                proof_path.display()
            )
        })?;
        if latest_payload
            .get("frontier_hash")
            .and_then(|value| value.as_str())
            != Some(snapshot_hash.as_str())
        {
            issues.push(json!({
                "rule_id": "proof_snapshot_mismatch",
                "message": "proof/latest.json frontier_hash does not match replayed frontier state.",
            }));
        }
        if latest_payload
            .get("event_log_hash")
            .and_then(|value| value.as_str())
            != Some(event_log_hash.as_str())
        {
            issues.push(json!({
                "rule_id": "proof_event_log_mismatch",
                "message": "proof/latest.json event_log_hash does not match .vela/events/.",
            }));
        }
    }
    let proof_digest = directory_hash(&path.join("proof"));
    if let Some(lock) = &lock {
        // Empty digest means an older lock without a proof record.
        if !lock.proof.digest.is_empty() && lock.proof.digest != proof_digest {
            issues.push(json!({
                "rule_id": "proof_digest_mismatch",
                "message": format!("proof/ digest does not match vela.lock: lock={}, current={proof_digest}", lock.proof.digest),
            }));
        }
    } else {
        issues.push(json!({
            "rule_id": "missing_frontier_lock",
            "message": "vela.lock is missing.",
        }));
    }

    Ok(json!({
        "schema": FRONTIER_PROOF_VERIFY_SCHEMA,
        "ok": issues.is_empty(),
        "path": path.display().to_string(),
        "frontier_id": locked.frontier_id(),
        "snapshot_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proof_digest": proof_digest,
        "proof": latest_payload,
        "issues": issues,
    }))
}
755
/// Render a human-readable summary of the proof status for `path`,
/// built on top of [`proof_verify`]'s report.
pub fn proof_explain(path: &Path) -> Result<String, String> {
    let project = crate::repo::load_from_path(path)?;
    let report = proof_verify(path)?;
    let ok = report.get("ok").and_then(|value| value.as_bool()) == Some(true);
    let locked = project_with_frontier_id(&project)?;
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    // Same "open" definition as repo_status: not accepted/applied/rejected.
    let open_proposals = project
        .proposals
        .iter()
        .filter(|proposal| {
            !matches!(
                proposal.status.as_str(),
                "accepted" | "applied" | "rejected"
            )
        })
        .count();
    let status = if ok { "fresh" } else { "stale or invalid" };
    Ok(format!(
        "vela proof explain\n\nFrontier: {}\nFrontier id: {}\nProof status: {status}\nAccepted events: {}\nOpen proposals: {open_proposals}\nSnapshot hash: {snapshot_hash}\nEvent log hash: {event_log_hash}\n\nAuthority: `.vela/events/` is replayed into `frontier.json`.\nVisible proof: `proof/latest.json`, `proof/events.manifest.jsonl`, and `proof/replay.trace.jsonl`.\nLockfile: `vela.lock` binds the event log, reducer, Carina kernel, visible state, and proof digest.\n",
        project.project.name,
        locked.frontier_id(),
        project.events.len(),
    ))
}
781
/// Build a fresh, empty [`Project`] with the given name, description,
/// and compile timestamp. Every collection starts empty; versions and
/// schema come from the `project` module constants.
fn empty_project(name: &str, description: &str, compiled_at: &str) -> Project {
    Project {
        vela_version: project::VELA_SCHEMA_VERSION.to_string(),
        schema: project::VELA_SCHEMA_URL.to_string(),
        frontier_id: None,
        project: project::ProjectMeta {
            name: name.to_string(),
            description: description.to_string(),
            compiled_at: compiled_at.to_string(),
            compiler: project::VELA_COMPILER_VERSION.to_string(),
            papers_processed: 0,
            errors: 0,
            dependencies: Vec::new(),
        },
        stats: project::ProjectStats::default(),
        findings: Vec::new(),
        sources: Vec::new(),
        evidence_atoms: Vec::new(),
        condition_records: Vec::new(),
        review_events: Vec::new(),
        confidence_updates: Vec::new(),
        events: Vec::new(),
        proposals: Vec::new(),
        proof_state: proposals::ProofState::default(),
        signatures: Vec::new(),
        actors: Vec::new(),
        replications: Vec::new(),
        datasets: Vec::new(),
        code_artifacts: Vec::new(),
        artifacts: Vec::new(),
        predictions: Vec::new(),
        resolutions: Vec::new(),
        peers: Vec::new(),
        negative_results: Vec::new(),
        trajectories: Vec::new(),
    }
}
819
/// Write `frontier.json`: the project state (normalized with its
/// frontier_id) plus injected `_warning` and `_meta` members describing
/// how the file was generated and which hashes it corresponds to.
fn write_visible_state(path: &Path, project: &Project, generated_at: &str) -> Result<(), String> {
    let visible = project_with_frontier_id(project)?;
    let snapshot_hash = prefixed(events::snapshot_hash(&visible));
    let event_log_hash = prefixed(events::event_log_hash(&visible.events));
    let mut value = serde_json::to_value(&visible)
        .map_err(|e| format!("Failed to prepare frontier.json: {e}"))?;
    // Project serializes to an object; the metadata keys are added there.
    if let Some(object) = value.as_object_mut() {
        object.insert(
            "_warning".to_string(),
            serde_json::Value::String(
                "Generated by Vela. Do not edit frontier.json directly; use Vela commands to propose, accept, reject, materialize, and prove frontier state."
                    .to_string(),
            ),
        );
        object.insert(
            "_meta".to_string(),
            json!({
                "schema": "vela.frontier_state_meta.v0.1",
                "generated_at": generated_at,
                "materialized_from": ".vela/events/",
                "proof": "proof/latest.json",
                "lockfile": "vela.lock",
                "events_manifest": "proof/events.manifest.jsonl",
                "replay_trace": "proof/replay.trace.jsonl",
                "snapshot_hash": snapshot_hash,
                "event_log_hash": event_log_hash,
                "carina_kernel": DEFAULT_CARINA_KERNEL,
                "vela_reducer": format!("vela@{}", env!("CARGO_PKG_VERSION")),
            }),
        );
    }
    let json = serde_json::to_string_pretty(&value)
        .map_err(|e| format!("Failed to serialize frontier.json: {e}"))?;
    fs::write(path.join("frontier.json"), json)
        .map_err(|e| format!("Failed to write frontier.json: {e}"))
}
856
857fn sync_manifest_deps(path: &Path, deps: &[ProjectDependency]) -> Result<(), String> {
863 let manifest_path = path.join("frontier.yaml");
864 if !manifest_path.is_file() {
865 return Ok(());
866 }
867 let mut manifest = match read_manifest(path)? {
868 Some(m) => m,
869 None => return Ok(()),
870 };
871 manifest.dependencies.frontiers_v2 = deps.to_vec();
872 let yaml = serde_yaml::to_string(&manifest)
873 .map_err(|e| format!("Failed to serialize frontier.yaml: {e}"))?;
874 fs::write(&manifest_path, yaml).map_err(|e| format!("Failed to write frontier.yaml: {e}"))
875}
876
/// Generate a fresh `frontier.yaml` from the project: default policies,
/// licenses, visibility ("public"), standard section paths, and the
/// project's dependencies. Overwrites any existing manifest.
fn write_manifest(path: &Path, project: &Project) -> Result<(), String> {
    let manifest = FrontierManifest {
        schema: FRONTIER_MANIFEST_SCHEMA.to_string(),
        layout: FRONTIER_REPO_LAYOUT.to_string(),
        mode: "split".to_string(),
        frontier_id: Some(project.frontier_id()),
        name: project.project.name.clone(),
        description: project.project.description.clone(),
        visibility: "public".to_string(),
        scope: FrontierScope {
            // The project description doubles as the scope question here.
            question: project.project.description.clone(),
            includes: Vec::new(),
            excludes: Vec::new(),
        },
        carina: CarinaManifest {
            kernel: DEFAULT_CARINA_KERNEL.to_string(),
        },
        vela: VelaManifest {
            reducer: format!("vela@{}", env!("CARGO_PKG_VERSION")),
        },
        paths: FrontierPaths {
            state: "frontier.json".to_string(),
            sources: "sources/".to_string(),
            artifacts: "artifacts/".to_string(),
            review: "review/".to_string(),
            proof: "proof/".to_string(),
            exports: "exports/".to_string(),
        },
        maintainers: Vec::new(),
        policies: ManifestPolicies::default(),
        license: ManifestLicense::default(),
        dependencies: ManifestDependencies {
            frontiers: Vec::new(),
            packages: Vec::new(),
            adapters: Vec::new(),
            frontiers_v2: project.project.dependencies.clone(),
        },
        templates: Vec::new(),
    };
    let yaml = serde_yaml::to_string(&manifest)
        .map_err(|e| format!("Failed to serialize frontier.yaml: {e}"))?;
    fs::write(path.join("frontier.yaml"), yaml)
        .map_err(|e| format!("Failed to write frontier.yaml: {e}"))
}
921
/// Write `vela.lock` for the project: pins reducer and kernel versions,
/// records the snapshot/event-log/proposal hashes, directory digests for
/// sources/artifacts/review, and the proof record produced by write_proof.
/// Returns the lock that was written.
fn write_lock(
    path: &Path,
    project: &Project,
    proof: &ProofWrite,
    generated_at: &str,
) -> Result<FrontierLock, String> {
    let locked = project_with_frontier_id(project)?;
    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));
    let lock = FrontierLock {
        schema: FRONTIER_LOCK_SCHEMA.to_string(),
        generated_at: generated_at.to_string(),
        vela_version: env!("CARGO_PKG_VERSION").to_string(),
        carina_kernel: DEFAULT_CARINA_KERNEL.to_string(),
        frontier_id: locked.frontier_id(),
        canonicalization: LockCanonicalization::default(),
        reducer: LockPackage {
            package: reducer_package.clone(),
            digest: identity_digest(&reducer_package),
        },
        carina: LockKernel {
            kernel: DEFAULT_CARINA_KERNEL.to_string(),
            digest: identity_digest(DEFAULT_CARINA_KERNEL),
        },
        snapshot_hash: prefixed(events::snapshot_hash(&locked)),
        event_log_hash: prefixed(events::event_log_hash(&locked.events)),
        proposal_state_hash: proposal_state_hash(&locked.proposals),
        sources_hash: directory_hash(&path.join("sources")),
        artifacts_hash: directory_hash(&path.join("artifacts")),
        review_hash: directory_hash(&path.join("review")),
        proof_freshness: proof.freshness.clone(),
        proof: LockProof {
            latest: proof.latest.clone(),
            digest: proof.digest.clone(),
            freshness: proof.freshness.clone(),
            events_manifest: proof.events_manifest.clone(),
            replay_trace: proof.replay_trace.clone(),
        },
        paths: LockPaths {
            frontier: "frontier.json".to_string(),
            events: ".vela/events/".to_string(),
        },
    };
    let yaml =
        serde_yaml::to_string(&lock).map_err(|e| format!("Failed to serialize vela.lock: {e}"))?;
    fs::write(path.join("vela.lock"), yaml)
        .map_err(|e| format!("Failed to write vela.lock: {e}"))?;
    Ok(lock)
}
970
/// Choose the `generated_at` timestamp for a (re)materialization.
///
/// If the existing lock's hashes and versions all still match the
/// current state (and the proof is fresh), the previous timestamp is
/// reused so rewriting generated files stays byte-stable; otherwise the
/// current UTC time is used.
fn materialization_generated_at(path: &Path, project: &Project) -> String {
    let now = Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true);
    // No readable lock, or a blank timestamp: fall through to "now".
    let Ok(Some(lock)) = read_lock(path) else {
        return now;
    };
    if lock.generated_at.trim().is_empty() {
        return now;
    }
    let Ok(locked) = project_with_frontier_id(project) else {
        return now;
    };
    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));
    // Pairs of (value recorded in the lock, freshly computed value).
    let current = [
        (
            lock.snapshot_hash.as_str(),
            prefixed(events::snapshot_hash(&locked)),
        ),
        (
            lock.event_log_hash.as_str(),
            prefixed(events::event_log_hash(&locked.events)),
        ),
        (
            lock.proposal_state_hash.as_str(),
            proposal_state_hash(&locked.proposals),
        ),
        (
            lock.sources_hash.as_str(),
            directory_hash(&path.join("sources")),
        ),
        (
            lock.artifacts_hash.as_str(),
            directory_hash(&path.join("artifacts")),
        ),
        (
            lock.review_hash.as_str(),
            directory_hash(&path.join("review")),
        ),
    ];
    let hashes_match = current.iter().all(|(locked, current)| *locked == current);
    let versions_match = lock.vela_version == env!("CARGO_PKG_VERSION")
        && lock.carina_kernel == DEFAULT_CARINA_KERNEL
        && lock.reducer.package == reducer_package
        && lock.carina.kernel == DEFAULT_CARINA_KERNEL;
    if hashes_match && versions_match && lock.proof_freshness == "fresh" {
        // Nothing changed: keep the old timestamp for reproducible output.
        lock.generated_at
    } else {
        now
    }
}
1020
1021fn project_with_frontier_id(project: &Project) -> Result<Project, String> {
1022 let frontier_id = project.frontier_id();
1023 let mut value = serde_json::to_value(project)
1024 .map_err(|e| format!("Failed to prepare frontier state: {e}"))?;
1025 if let Some(object) = value.as_object_mut() {
1026 object.insert(
1027 "frontier_id".to_string(),
1028 serde_json::Value::String(frontier_id),
1029 );
1030 }
1031 serde_json::from_value(value).map_err(|e| format!("Failed to normalize frontier state: {e}"))
1032}
1033
/// Writes the repository-level `README.md` ("frontier card") describing the
/// layout entrypoints and the template the repository was created from.
fn write_frontier_card(path: &Path, name: &str, template: &str) -> Result<(), String> {
    let readme_path = path.join("README.md");
    let body = format!(
        "# {name}\n\nThis is a Vela frontier repository.\n\n- State entrypoint: `frontier.json`\n- Manifest: `frontier.yaml`\n- Lockfile: `vela.lock`\n- Template: `{template}`\n\nRun:\n\n```bash\nvela check . --strict --json\nvela integrity . --json\nvela proof . --out proof/latest\n```\n"
    );
    fs::write(readme_path, body).map_err(|e| format!("Failed to write README.md: {e}"))
}
1040
/// Writes the `SCOPE.md` stub that records the frontier's boundaries and
/// review policy.
fn write_scope(path: &Path, name: &str) -> Result<(), String> {
    let scope_path = path.join("SCOPE.md");
    let body = format!(
        "# Scope\n\nFrontier: {name}\n\nThis file records boundaries, exclusions, caveats, and review policy for the frontier.\n\nExternal artifacts and agent outputs are source material until reviewed into accepted Vela events.\n"
    );
    fs::write(scope_path, body).map_err(|e| format!("Failed to write SCOPE.md: {e}"))
}
1047
/// Creates the standard frontier section directories (top-level, artifact,
/// proof, and export subsections) and seeds each with a titled `README.md`
/// describing its purpose. Existing READMEs are never overwritten.
///
/// Bug fixed: the four `fs::create_dir_all` calls read `§ion` — an
/// encoding-garbled `&section` (`&sect` HTML entity collapsed to `§`) that
/// does not compile. The four copy-pasted loops are also collapsed into one
/// helper; for slash-free top-level dirs `rsplit('/').next()` yields the dir
/// name itself, so the shared title logic is behavior-identical.
fn write_section_readmes(path: &Path) -> Result<(), String> {
    let top_sections = BTreeMap::from([
        (
            "sources",
            "Source manifests, papers, datasets, registries, and protocols.",
        ),
        (
            "artifacts",
            "Packets, runs, code, notebooks, data pointers, tables, and figures.",
        ),
        (
            "review",
            "Proposal queues, decisions, caveats, rejected records, and audits.",
        ),
        ("proof", "Proof packets, traces, and freshness records."),
        (
            "exports",
            "Generated hub, RO-Crate, Frictionless, MCP, and report bundles.",
        ),
    ]);
    let artifact_sections = BTreeMap::from([
        ("artifacts/packets", "Import, review, and proof packets."),
        (
            "artifacts/runs",
            "Agent, source-adapter, computational, and lab run records.",
        ),
        (
            "artifacts/code",
            "Analysis, extraction, and validation code.",
        ),
        ("artifacts/notebooks", "Exploratory and report notebooks."),
        (
            "artifacts/data",
            "Small data files, data pointers, and external-data metadata.",
        ),
        (
            "artifacts/notes",
            "Reading notes, decision notes, meeting notes, and scratch context.",
        ),
        (
            "artifacts/tables",
            "Generated tables and tabular review outputs.",
        ),
        ("artifacts/figures", "Generated figures and visual outputs."),
        (
            "artifacts/analyses",
            "Analysis outputs and state-transition examples.",
        ),
        (
            "artifacts/environments",
            "Execution context pointers: containers, lockfiles, hardware, cloud runtimes, and lab instruments.",
        ),
    ]);
    let proof_sections = BTreeMap::from([
        (
            "proof/signatures",
            "Optional signatures for proof packets, events, lockfiles, or institutional attestations.",
        ),
        (
            "proof/attestations",
            "Optional external proof attestations. Vela events remain the state authority.",
        ),
    ]);
    let export_sections = BTreeMap::from([
        ("exports/prov", "Generated W3C PROV exports."),
        ("exports/ro-crate", "Generated RO-Crate exports."),
        (
            "exports/frictionless",
            "Generated Frictionless Data Package exports.",
        ),
        ("exports/mcp", "Generated MCP-serving export bundles."),
        (
            "exports/report",
            "Generated reviewer, funder, or release reports.",
        ),
        (
            "exports/registry",
            "Generated registry and federation bundles.",
        ),
    ]);
    // Same creation order as before: top-level dirs first, then the
    // artifact, proof, and export subsections (BTreeMap keeps each group
    // sorted by path).
    for sections in [
        top_sections,
        artifact_sections,
        proof_sections,
        export_sections,
    ] {
        write_readme_group(path, &sections)?;
    }
    Ok(())
}

/// Creates each directory in `sections` under `path` and, when no README is
/// present yet, writes one titled with the final path component of the dir.
fn write_readme_group(path: &Path, sections: &BTreeMap<&str, &str>) -> Result<(), String> {
    for (&dir, &description) in sections {
        let section = path.join(dir);
        fs::create_dir_all(&section).map_err(|e| format!("Failed to create {dir}/: {e}"))?;
        let readme = section.join("README.md");
        if !readme.exists() {
            let title = dir.rsplit('/').next().unwrap_or(dir);
            fs::write(readme, format!("# {title}\n\n{description}\n"))
                .map_err(|e| format!("Failed to write {dir}/README.md: {e}"))?;
        }
    }
    Ok(())
}
1169
/// Writes the full proof bundle for a frontier repository under `proof/`:
/// `latest.json` (summary packet), `events.manifest.jsonl` (one hashed entry
/// per accepted event), `replay.trace.jsonl` (running event-log hash after
/// each step), `freshness.md` (human-readable status), and `hashes.json`
/// (snapshot/event/proposal/directory digests). Returns a `ProofWrite`
/// describing the written bundle; the digest covers the whole `proof/` dir.
///
/// `generated_at` is the materialization timestamp recorded in the packet.
fn write_proof(path: &Path, project: &Project, generated_at: &str) -> Result<ProofWrite, String> {
    // Normalize the project so its serialized form carries frontier_id.
    let locked = project_with_frontier_id(project)?;
    let proof_dir = path.join("proof");
    fs::create_dir_all(&proof_dir).map_err(|e| format!("Failed to create proof/: {e}"))?;

    // Core digests shared by latest.json, freshness.md, and hashes.json.
    let snapshot_hash = prefixed(events::snapshot_hash(&locked));
    let event_log_hash = prefixed(events::event_log_hash(&locked.events));
    let proposal_state_hash = proposal_state_hash(&locked.proposals);
    let reducer_package = format!("vela@{}", env!("CARGO_PKG_VERSION"));

    let latest = json!({
        "schema": "vela.frontier_repo_proof.v0.1",
        "frontier_id": locked.frontier_id(),
        "frontier_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proposal_state_hash": proposal_state_hash,
        "reducer": {
            "name": "vela",
            "version": env!("CARGO_PKG_VERSION"),
            "package": reducer_package,
            "digest": identity_digest(&format!("vela@{}", env!("CARGO_PKG_VERSION"))),
        },
        "carina": {
            "kernel": DEFAULT_CARINA_KERNEL,
            "digest": identity_digest(DEFAULT_CARINA_KERNEL),
        },
        "materialized_at": generated_at,
        "freshness": "fresh",
        "event_count": locked.events.len(),
        "paths": {
            "frontier": "frontier.json",
            "lockfile": "vela.lock",
            "events_authority": ".vela/events/",
            "events_manifest": "proof/events.manifest.jsonl",
            "replay_trace": "proof/replay.trace.jsonl"
        },
        "warning": "Do not edit frontier.json directly. Use Vela commands to propose, accept, reject, materialize, and prove frontier state."
    });
    fs::write(
        proof_dir.join("latest.json"),
        serde_json::to_string_pretty(&latest)
            .map_err(|e| format!("Failed to serialize proof/latest.json: {e}"))?,
    )
    .map_err(|e| format!("Failed to write proof/latest.json: {e}"))?;

    // Build both JSONL files in one pass over the accepted events:
    // the manifest describes each event, the trace records the running
    // event-log hash after replaying events 1..=idx.
    let mut manifest_lines = String::new();
    let mut trace_lines = String::new();
    for (idx, event) in locked.events.iter().enumerate() {
        let event_hash = prefixed(event_hash(event));
        let entry = json!({
            "schema": "vela.proof_event_manifest_entry.v0.1",
            "index": idx + 1,
            "id": event.id,
            "kind": event.kind,
            "target": event.target,
            "actor": event.actor,
            "timestamp": event.timestamp,
            "event_hash": event_hash,
            "before_hash": event.before_hash,
            "after_hash": event.after_hash,
            "caveat_count": event.caveats.len(),
        });
        manifest_lines.push_str(
            &serde_json::to_string(&entry)
                .map_err(|e| format!("Failed to serialize event manifest entry: {e}"))?,
        );
        manifest_lines.push('\n');

        // Hash of the log prefix up to and including this event.
        let event_log_hash_after = prefixed(events::event_log_hash(&locked.events[..=idx]));
        let trace = json!({
            "schema": "vela.replay_trace_entry.v0.1",
            "step": idx + 1,
            "event": event.id,
            "kind": event.kind,
            "event_hash": event_hash,
            "event_log_hash_after": event_log_hash_after,
            "target_after_hash": event.after_hash,
        });
        trace_lines.push_str(
            &serde_json::to_string(&trace)
                .map_err(|e| format!("Failed to serialize replay trace entry: {e}"))?,
        );
        trace_lines.push('\n');
    }
    fs::write(proof_dir.join("events.manifest.jsonl"), manifest_lines)
        .map_err(|e| format!("Failed to write proof/events.manifest.jsonl: {e}"))?;
    fs::write(proof_dir.join("replay.trace.jsonl"), trace_lines)
        .map_err(|e| format!("Failed to write proof/replay.trace.jsonl: {e}"))?;

    fs::write(
        proof_dir.join("freshness.md"),
        format!(
            "# Freshness\n\nCurrent proof status: fresh\n\n`frontier.json` was materialized from `.vela/events/` at {generated_at}.\n\nAccepted events: {}\nEvent log hash: `{event_log_hash}`\nSnapshot hash: `{snapshot_hash}`\n\nRun:\n\n```bash\nvela check . --strict --json\nvela integrity . --json\n```\n",
            locked.events.len()
        ),
    )
    .map_err(|e| format!("Failed to write proof/freshness.md: {e}"))?;

    // Directory digests cover sources/, artifacts/, review/ — the same set
    // compared by materialization_generated_at against vela.lock.
    let hashes = json!({
        "schema": "vela.frontier_repo_hashes.v0.1",
        "frontier_id": locked.frontier_id(),
        "snapshot_hash": snapshot_hash,
        "event_log_hash": event_log_hash,
        "proposal_state_hash": proposal_state_hash,
        "sources_hash": directory_hash(&path.join("sources")),
        "artifacts_hash": directory_hash(&path.join("artifacts")),
        "review_hash": directory_hash(&path.join("review")),
    });
    fs::write(
        proof_dir.join("hashes.json"),
        serde_json::to_string_pretty(&hashes)
            .map_err(|e| format!("Failed to serialize proof/hashes.json: {e}"))?,
    )
    .map_err(|e| format!("Failed to write proof/hashes.json: {e}"))?;

    // Digest is computed after all files are written, so it covers the
    // complete bundle.
    Ok(ProofWrite {
        digest: directory_hash(&proof_dir),
        freshness: "fresh".to_string(),
        latest: "proof/latest.json".to_string(),
        events_manifest: "proof/events.manifest.jsonl".to_string(),
        replay_trace: "proof/replay.trace.jsonl".to_string(),
    })
}
1293
1294fn proposal_state_hash(proposals: &[crate::proposals::StateProposal]) -> String {
1295 let bytes = crate::canonical::to_canonical_bytes(proposals).unwrap_or_default();
1296 prefixed(hex::encode(Sha256::digest(bytes)))
1297}
1298
1299fn directory_hash(path: &Path) -> String {
1300 let mut entries = Vec::new();
1301 if path.is_dir() {
1302 collect_file_entries(path, path, &mut entries);
1303 }
1304 entries.sort_by(|a, b| a.0.cmp(&b.0));
1305 let bytes = crate::canonical::to_canonical_bytes(&entries).unwrap_or_default();
1306 prefixed(hex::encode(Sha256::digest(bytes)))
1307}
1308
1309fn collect_file_entries(root: &Path, path: &Path, entries: &mut Vec<(String, String)>) {
1310 let Ok(read_dir) = fs::read_dir(path) else {
1311 return;
1312 };
1313 for entry in read_dir.flatten() {
1314 let entry_path = entry.path();
1315 let Some(name) = entry_path.file_name().and_then(|s| s.to_str()) else {
1316 continue;
1317 };
1318 if name == ".DS_Store" {
1319 continue;
1320 }
1321 if entry_path.is_dir() {
1322 collect_file_entries(root, &entry_path, entries);
1323 } else if entry_path.is_file() {
1324 let rel = entry_path
1325 .strip_prefix(root)
1326 .unwrap_or(&entry_path)
1327 .to_string_lossy()
1328 .replace('\\', "/");
1329 let digest = fs::read(&entry_path)
1330 .map(|bytes| prefixed(hex::encode(Sha256::digest(bytes))))
1331 .unwrap_or_else(|_| "sha256:unreadable".to_string());
1332 entries.push((rel, digest));
1333 }
1334 }
1335}
1336
1337fn event_hash(event: &crate::events::StateEvent) -> String {
1338 let bytes = crate::canonical::to_canonical_bytes(event).unwrap_or_default();
1339 hex::encode(Sha256::digest(bytes))
1340}
1341
1342fn identity_digest(value: &str) -> String {
1343 prefixed(hex::encode(Sha256::digest(value.as_bytes())))
1344}
1345
/// Normalizes a hex digest to the "sha256:"-prefixed form; already-prefixed
/// input is returned unchanged (idempotent).
fn prefixed(hash: String) -> String {
    match hash.strip_prefix("sha256:") {
        Some(_) => hash,
        None => format!("sha256:{hash}"),
    }
}
1353
1354fn issue(rule_id: &str, message: impl Into<String>) -> RepoLayoutIssue {
1355 RepoLayoutIssue {
1356 rule_id: rule_id.to_string(),
1357 message: message.into(),
1358 }
1359}
1360
/// Serde default for `FrontierManifest::mode`.
fn default_split_mode() -> String {
    String::from("split")
}
1364
/// Serde default for `FrontierManifest::visibility`.
fn default_visibility() -> String {
    String::from("public")
}