use std::collections::HashMap;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use crate::bundle::{ConfidenceUpdate, FindingBundle, Link, ReviewEvent};
use crate::events::StateEvent;
use crate::project::{self, Project};
use crate::proposals::{ProofState, StateProposal};
/// Where a Vela project can be loaded from / saved to, as classified by
/// [`detect`].
#[derive(Debug, Clone, PartialEq)]
pub enum VelaSource {
    /// A single JSON file holding the whole project.
    ProjectFile(PathBuf),
    /// A directory containing a `.vela/` state directory.
    VelaRepo(PathBuf),
    /// A compiled frontier-packet directory (its `manifest.json` has
    /// `packet_format == "vela.frontier-packet"`); read-only for `save`.
    PacketDir(PathBuf),
}
/// Minimal view of a packet's `manifest.json`: only the fields needed to
/// validate the packet format and pick up provenance metadata. Unknown
/// fields in the manifest are ignored.
#[derive(Debug, Deserialize)]
struct PacketManifestHeader {
    /// Must equal `"vela.frontier-packet"` for the packet to be accepted.
    packet_format: String,
    /// Optional block describing the project that compiled the packet.
    #[serde(default)]
    source: Option<PacketSourceHeader>,
}
/// Provenance metadata embedded in a packet manifest's `source` block.
/// Every field defaults to empty so partially-filled manifests still parse;
/// empty strings are treated as "not provided" by `load_packet_dir`.
#[derive(Debug, Default, Deserialize)]
struct PacketSourceHeader {
    /// Name of the originating project (preferred over the overview's name).
    #[serde(default)]
    project_name: String,
    #[serde(default)]
    description: String,
    /// Timestamp string recorded at compile time; copied verbatim.
    #[serde(default)]
    compiled_at: String,
    /// Compiler identifier/version that produced the packet.
    #[serde(default)]
    compiler: String,
    #[serde(default)]
    vela_version: String,
    #[serde(default)]
    schema: String,
}
/// Minimal view of a packet's optional `overview.json`. Used as a fallback
/// source for name/description/compiled_at when the manifest's `source`
/// block leaves them empty, and as the source of `papers_processed`.
#[derive(Debug, Default, Deserialize)]
struct PacketOverviewHeader {
    #[serde(default)]
    project_name: String,
    #[serde(default)]
    description: String,
    #[serde(default)]
    compiled_at: String,
    /// Count forwarded into `project::assemble`.
    #[serde(default)]
    papers_processed: usize,
}
/// Classify `path` as one of the supported project sources.
///
/// Precedence: an existing file is always a [`VelaSource::ProjectFile`];
/// an existing directory is probed first as a frontier packet, then for a
/// `.vela/` repo; finally a `.json` extension (even on a non-existent
/// path, e.g. a file about to be created) falls back to a project file.
/// Everything else is an error message telling the user what to do.
pub fn detect(path: &Path) -> Result<VelaSource, String> {
    let has_json_ext = path.extension().is_some_and(|ext| ext == "json");
    if path.is_file() {
        return Ok(VelaSource::ProjectFile(path.to_path_buf()));
    }
    if !path.is_dir() {
        // Path does not exist (or is neither file nor directory).
        if has_json_ext {
            return Ok(VelaSource::ProjectFile(path.to_path_buf()));
        }
        return Err(format!(
            "Path '{}' does not exist. Provide a .json file, frontier packet, or a directory with .vela/",
            path.display()
        ));
    }
    // Existing directory: packet beats repo beats json-named fallback.
    if is_packet_dir(path) {
        return Ok(VelaSource::PacketDir(path.to_path_buf()));
    }
    if path.join(".vela").is_dir() {
        return Ok(VelaSource::VelaRepo(path.to_path_buf()));
    }
    if has_json_ext {
        // NOTE(review): this classifies a *directory* named `*.json` as a
        // project file, which will fail later when read as a file — confirm
        // whether this branch is intentional.
        return Ok(VelaSource::ProjectFile(path.to_path_buf()));
    }
    Err(format!(
        "Directory '{}' is not a Vela repository or frontier packet. Run `vela init`, `vela import`, or `vela migrate` first.",
        path.display()
    ))
}
/// Root of `.vela/config.toml`: a single `[project]` table.
#[derive(Debug, Serialize, Deserialize)]
struct RepoConfig {
    project: RepoProjectMeta,
}
/// Project metadata persisted in `.vela/config.toml`'s `[project]` table.
#[derive(Debug, Serialize, Deserialize)]
struct RepoProjectMeta {
    name: String,
    /// Stable frontier identifier; omitted from TOML when not set.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    frontier_id: Option<String>,
    /// Compile timestamp; empty string means "unknown" and is skipped on write.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    compiled_at: String,
    #[serde(default)]
    description: String,
    /// Defaults to the current compiler version when absent from the file.
    #[serde(default = "default_compiler")]
    compiler: String,
    #[serde(default)]
    papers_processed: usize,
}
/// Serde default for `RepoProjectMeta::compiler`: the current compiler
/// version constant.
fn default_compiler() -> String {
    crate::project::VELA_COMPILER_VERSION.to_string()
}
/// Flattened link record as stored in `.vela/links/manifest.json`.
/// Unlike [`Link`], it carries the `source` finding id explicitly so all
/// links can live in one file; `load_vela_repo` re-attaches them to their
/// source findings.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ManifestLink {
    /// Id of the finding the link hangs off.
    source: String,
    /// Id of the finding the link points to.
    target: String,
    #[serde(rename = "type")]
    link_type: String,
    #[serde(default)]
    note: String,
    /// Who inferred the link; defaults to "compiler".
    #[serde(default = "default_inferred_by")]
    inferred_by: String,
    #[serde(default)]
    created_at: String,
}
/// Serde default for `ManifestLink::inferred_by`.
fn default_inferred_by() -> String {
    String::from("compiler")
}
/// Load a full [`Project`] from any detected source kind.
pub fn load(source: &VelaSource) -> Result<Project, String> {
    match source {
        VelaSource::ProjectFile(path) => load_project_file(path),
        VelaSource::VelaRepo(dir) => load_vela_repo(dir),
        VelaSource::PacketDir(dir) => load_packet_dir(dir),
    }
}
/// Read and deserialize a single-file JSON project.
pub(crate) fn load_project_file(path: &Path) -> Result<Project, String> {
    let raw = std::fs::read_to_string(path)
        .map_err(|e| format!("Failed to read project file '{}': {e}", path.display()))?;
    let project = serde_json::from_str(&raw)
        .map_err(|e| format!("Failed to parse project JSON '{}': {e}", path.display()))?;
    Ok(project)
}
fn load_packet_dir(dir: &Path) -> Result<Project, String> {
let manifest_path = dir.join("manifest.json");
let manifest_data = std::fs::read_to_string(&manifest_path).map_err(|e| {
format!(
"Failed to read packet manifest '{}': {e}",
manifest_path.display()
)
})?;
let manifest: PacketManifestHeader = serde_json::from_str(&manifest_data).map_err(|e| {
format!(
"Failed to parse packet manifest '{}': {e}",
manifest_path.display()
)
})?;
if manifest.packet_format != "vela.frontier-packet" {
return Err(format!(
"Unsupported packet format '{}' in {}",
manifest.packet_format,
manifest_path.display()
));
}
let findings_path = dir.join("findings/full.json");
let findings_data = std::fs::read_to_string(&findings_path).map_err(|e| {
format!(
"Failed to read packet findings '{}': {e}",
findings_path.display()
)
})?;
let findings: Vec<FindingBundle> = serde_json::from_str(&findings_data).map_err(|e| {
format!(
"Failed to parse packet findings '{}': {e}",
findings_path.display()
)
})?;
let reviews_path = dir.join("reviews/review-events.json");
let review_events: Vec<ReviewEvent> = if reviews_path.is_file() {
let reviews_data = std::fs::read_to_string(&reviews_path).map_err(|e| {
format!(
"Failed to read packet reviews '{}': {e}",
reviews_path.display()
)
})?;
serde_json::from_str(&reviews_data).map_err(|e| {
format!(
"Failed to parse packet reviews '{}': {e}",
reviews_path.display()
)
})?
} else {
Vec::new()
};
let confidence_updates_path = dir.join("reviews/confidence-updates.json");
let confidence_updates: Vec<ConfidenceUpdate> = if confidence_updates_path.is_file() {
let updates_data = std::fs::read_to_string(&confidence_updates_path).map_err(|e| {
format!(
"Failed to read packet confidence updates '{}': {e}",
confidence_updates_path.display()
)
})?;
serde_json::from_str(&updates_data).map_err(|e| {
format!(
"Failed to parse packet confidence updates '{}': {e}",
confidence_updates_path.display()
)
})?
} else {
Vec::new()
};
let events_path = dir.join("events/events.json");
let events: Vec<StateEvent> = if events_path.is_file() {
let events_data = std::fs::read_to_string(&events_path).map_err(|e| {
format!(
"Failed to read packet events '{}': {e}",
events_path.display()
)
})?;
serde_json::from_str(&events_data).map_err(|e| {
format!(
"Failed to parse packet events '{}': {e}",
events_path.display()
)
})?
} else {
Vec::new()
};
let proposals_path = dir.join("proposals/proposals.json");
let proposals: Vec<StateProposal> = if proposals_path.is_file() {
let proposals_data = std::fs::read_to_string(&proposals_path).map_err(|e| {
format!(
"Failed to read packet proposals '{}': {e}",
proposals_path.display()
)
})?;
serde_json::from_str(&proposals_data).map_err(|e| {
format!(
"Failed to parse packet proposals '{}': {e}",
proposals_path.display()
)
})?
} else {
Vec::new()
};
let overview_path = dir.join("overview.json");
let overview: PacketOverviewHeader = if overview_path.is_file() {
let overview_data = std::fs::read_to_string(&overview_path).map_err(|e| {
format!(
"Failed to read packet overview '{}': {e}",
overview_path.display()
)
})?;
serde_json::from_str(&overview_data).map_err(|e| {
format!(
"Failed to parse packet overview '{}': {e}",
overview_path.display()
)
})?
} else {
PacketOverviewHeader::default()
};
let source = manifest.source.unwrap_or_default();
let name = first_non_empty([
source.project_name.as_str(),
overview.project_name.as_str(),
dir.file_name()
.and_then(|name| name.to_str())
.unwrap_or("packet"),
]);
let description = first_non_empty([
source.description.as_str(),
overview.description.as_str(),
"",
]);
let compiled_at = first_non_empty([
source.compiled_at.as_str(),
overview.compiled_at.as_str(),
"",
]);
let mut project = project::assemble(name, findings, overview.papers_processed, 0, description);
if !compiled_at.is_empty() {
project.project.compiled_at = compiled_at.to_string();
}
if !source.compiler.is_empty() {
project.project.compiler = source.compiler;
}
if !source.vela_version.is_empty() {
project.vela_version = source.vela_version;
}
if !source.schema.is_empty() {
project.schema = source.schema;
}
project.review_events = review_events;
project.confidence_updates = confidence_updates;
project.events = events;
project.proposals = proposals;
project::recompute_stats(&mut project);
Ok(project)
}
fn load_vela_repo(dir: &Path) -> Result<Project, String> {
let vela_dir = dir.join(".vela");
let config_path = vela_dir.join("config.toml");
let config: RepoConfig = if config_path.exists() {
let toml_str = std::fs::read_to_string(&config_path)
.map_err(|e| format!("Failed to read config.toml: {e}"))?;
toml::from_str(&toml_str).map_err(|e| format!("Failed to parse config.toml: {e}"))?
} else {
RepoConfig {
project: RepoProjectMeta {
name: dir
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
frontier_id: None,
compiled_at: String::new(),
description: String::new(),
compiler: default_compiler(),
papers_processed: 0,
},
}
};
let findings_dir = dir.join(".vela/findings");
let mut findings: Vec<FindingBundle> = Vec::new();
if findings_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&findings_dir)
.map_err(|e| format!("Failed to read findings/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let finding: FindingBundle = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
findings.push(finding);
}
}
let links_dir = dir.join(".vela/links");
let manifest_path = links_dir.join("manifest.json");
if manifest_path.exists() {
let data = std::fs::read_to_string(&manifest_path)
.map_err(|e| format!("Failed to read links/manifest.json: {e}"))?;
let manifest_links: Vec<ManifestLink> = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse links/manifest.json: {e}"))?;
let mut links_by_source: HashMap<String, Vec<Link>> = HashMap::new();
for ml in manifest_links {
links_by_source
.entry(ml.source.clone())
.or_default()
.push(Link {
target: ml.target,
link_type: ml.link_type,
note: ml.note,
inferred_by: ml.inferred_by,
created_at: ml.created_at,
mechanism: None,
});
}
for finding in &mut findings {
if let Some(links) = links_by_source.remove(&finding.id) {
finding.links = links;
}
}
}
let reviews_dir = dir.join(".vela/reviews");
let mut review_events: Vec<ReviewEvent> = Vec::new();
if reviews_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&reviews_dir)
.map_err(|e| format!("Failed to read reviews/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let event: ReviewEvent = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
review_events.push(event);
}
}
let confidence_updates_dir = dir.join(".vela/confidence-updates");
let mut confidence_updates: Vec<ConfidenceUpdate> = Vec::new();
if confidence_updates_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&confidence_updates_dir)
.map_err(|e| format!("Failed to read confidence-updates/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let update: ConfidenceUpdate = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
confidence_updates.push(update);
}
}
let events_dir = dir.join(".vela/events");
let proposals_dir = dir.join(".vela/proposals");
let proof_state_path = vela_dir.join("proof-state.json");
let mut events: Vec<StateEvent> = Vec::new();
if events_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&events_dir)
.map_err(|e| format!("Failed to read events/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let event: StateEvent = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
events.push(event);
}
}
let mut proposals: Vec<StateProposal> = Vec::new();
if proposals_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&proposals_dir)
.map_err(|e| format!("Failed to read proposals/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let proposal: StateProposal = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
proposals.push(proposal);
}
}
let proof_state = if proof_state_path.is_file() {
let data = std::fs::read_to_string(&proof_state_path)
.map_err(|e| format!("Failed to read {}: {e}", proof_state_path.display()))?;
serde_json::from_str::<ProofState>(&data)
.map_err(|e| format!("Failed to parse {}: {e}", proof_state_path.display()))?
} else {
ProofState::default()
};
let replications_dir = dir.join(".vela/replications");
let mut replications: Vec<crate::bundle::Replication> = Vec::new();
if replications_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&replications_dir)
.map_err(|e| format!("Failed to read replications/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let replication: crate::bundle::Replication = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
replications.push(replication);
}
}
let datasets_dir = dir.join(".vela/datasets");
let mut datasets: Vec<crate::bundle::Dataset> = Vec::new();
if datasets_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&datasets_dir)
.map_err(|e| format!("Failed to read datasets/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let dataset: crate::bundle::Dataset = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
datasets.push(dataset);
}
}
let code_artifacts_dir = dir.join(".vela/code-artifacts");
let mut code_artifacts: Vec<crate::bundle::CodeArtifact> = Vec::new();
if code_artifacts_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&code_artifacts_dir)
.map_err(|e| format!("Failed to read code-artifacts/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let artifact: crate::bundle::CodeArtifact = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
code_artifacts.push(artifact);
}
}
let artifacts_dir = dir.join(".vela/artifacts");
let mut artifacts: Vec<crate::bundle::Artifact> = Vec::new();
if artifacts_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&artifacts_dir)
.map_err(|e| format!("Failed to read artifacts/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let artifact: crate::bundle::Artifact = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
artifacts.push(artifact);
}
}
let predictions_dir = dir.join(".vela/predictions");
let mut predictions: Vec<crate::bundle::Prediction> = Vec::new();
if predictions_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&predictions_dir)
.map_err(|e| format!("Failed to read predictions/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let prediction: crate::bundle::Prediction = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
predictions.push(prediction);
}
}
let resolutions_dir = dir.join(".vela/resolutions");
let mut resolutions: Vec<crate::bundle::Resolution> = Vec::new();
if resolutions_dir.is_dir() {
let mut entries: Vec<PathBuf> = std::fs::read_dir(&resolutions_dir)
.map_err(|e| format!("Failed to read resolutions/: {e}"))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().is_some_and(|ext| ext == "json"))
.collect();
entries.sort();
for path in entries {
let data = std::fs::read_to_string(&path)
.map_err(|e| format!("Failed to read {}: {e}", path.display()))?;
let resolution: crate::bundle::Resolution = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", path.display()))?;
resolutions.push(resolution);
}
}
let peers_path = dir.join(".vela/peers.json");
let peers: Vec<crate::federation::PeerHub> = if peers_path.is_file() {
let data = std::fs::read_to_string(&peers_path)
.map_err(|e| format!("Failed to read {}: {e}", peers_path.display()))?;
serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", peers_path.display()))?
} else {
Vec::new()
};
let actors_path = dir.join(".vela/actors.json");
let actors: Vec<crate::sign::ActorRecord> = if actors_path.is_file() {
let data = std::fs::read_to_string(&actors_path)
.map_err(|e| format!("Failed to read {}: {e}", actors_path.display()))?;
serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", actors_path.display()))?
} else {
Vec::new()
};
let signatures_path = dir.join(".vela/signatures.json");
let signatures: Vec<crate::sign::SignedEnvelope> = if signatures_path.is_file() {
let data = std::fs::read_to_string(&signatures_path)
.map_err(|e| format!("Failed to read {}: {e}", signatures_path.display()))?;
serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", signatures_path.display()))?
} else {
Vec::new()
};
let manifest = crate::frontier_repo::manifest_overrides(dir)?;
let manifest_name = manifest
.as_ref()
.map(|m| m.name.as_str())
.unwrap_or(config.project.name.as_str());
let manifest_description = manifest
.as_ref()
.map(|m| m.description.as_str())
.unwrap_or(config.project.description.as_str());
let manifest_deps: Vec<project::ProjectDependency> = manifest
.as_ref()
.map(|m| m.dependencies.frontiers_v2.clone())
.unwrap_or_default();
let mut c = project::assemble(
manifest_name,
findings,
config.project.papers_processed,
0,
manifest_description,
);
if !config.project.compiled_at.is_empty() {
c.project.compiled_at = config.project.compiled_at;
}
c.project.compiler = config.project.compiler;
if !manifest_deps.is_empty() {
c.project.dependencies = manifest_deps;
}
let configured_frontier_id = manifest
.and_then(|m| m.frontier_id)
.or(config.project.frontier_id);
c.review_events = review_events;
c.confidence_updates = confidence_updates;
c.events = events;
c.frontier_id = configured_frontier_id.or_else(|| project::frontier_id_from_genesis(&c.events));
c.proposals = proposals;
c.proof_state = proof_state;
c.actors = actors;
c.signatures = signatures;
c.replications = replications;
c.datasets = datasets;
c.code_artifacts = code_artifacts;
c.artifacts = artifacts;
c.predictions = predictions;
c.resolutions = resolutions;
c.peers = peers;
materialize_trajectories_and_nulls_from_events(&mut c);
materialize_evidence_atom_locators_from_events(&mut c);
project::recompute_stats(&mut c);
Ok(c)
}
fn materialize_evidence_atom_locators_from_events(p: &mut Project) {
for ev in &p.events {
if ev.kind != "evidence_atom.locator_repaired" {
continue;
}
if ev.target.r#type != "evidence_atom" {
continue;
}
let atom_id = ev.target.id.as_str();
let locator = match ev.payload.get("locator").and_then(|v| v.as_str()) {
Some(value) if !value.is_empty() => value.to_string(),
_ => continue,
};
if let Some(atom) = p.evidence_atoms.iter_mut().find(|a| a.id == atom_id)
&& atom.locator.is_none()
{
atom.locator = Some(locator);
atom.caveats.retain(|c| c != "missing evidence locator");
}
}
}
/// Rebuild trajectories and negative results by replaying the event log in
/// order. Creation events insert full records from their payloads; step /
/// retraction events mutate records created earlier in the log (and are
/// silently ignored if the record was never created or the payload doesn't
/// deserialize). The project fields are only overwritten when the replay
/// produced at least one record, so existing data survives an empty log.
fn materialize_trajectories_and_nulls_from_events(p: &mut Project) {
    use crate::bundle::{NegativeResult, Trajectory, TrajectoryStep};
    let mut trajectories: std::collections::HashMap<String, Trajectory> =
        std::collections::HashMap::new();
    let mut nulls: std::collections::HashMap<String, NegativeResult> =
        std::collections::HashMap::new();
    for ev in &p.events {
        match ev.kind.as_str() {
            "trajectory.created" => {
                // Whole trajectory is embedded in the payload.
                if let Some(traj_value) = ev.payload.get("trajectory")
                    && let Ok(traj) = serde_json::from_value::<Trajectory>(traj_value.clone())
                {
                    trajectories.insert(traj.id.clone(), traj);
                }
            }
            "trajectory.step_appended" => {
                // Step is appended to an already-created trajectory only.
                let traj_id = ev.target.id.clone();
                if let Some(step_value) = ev.payload.get("step")
                    && let Ok(step) = serde_json::from_value::<TrajectoryStep>(step_value.clone())
                    && let Some(traj) = trajectories.get_mut(&traj_id)
                {
                    traj.steps.push(step);
                }
            }
            "trajectory.retracted" => {
                if let Some(traj) = trajectories.get_mut(&ev.target.id) {
                    traj.retracted = true;
                }
            }
            "negative_result.asserted" => {
                if let Some(nr_value) = ev.payload.get("negative_result")
                    && let Ok(nr) = serde_json::from_value::<NegativeResult>(nr_value.clone())
                {
                    nulls.insert(nr.id.clone(), nr);
                }
            }
            "negative_result.retracted" => {
                if let Some(nr) = nulls.get_mut(&ev.target.id) {
                    nr.retracted = true;
                }
            }
            _ => {}
        }
    }
    // Sort by id for stable output, and only replace when non-empty.
    if !trajectories.is_empty() {
        let mut traj_vec: Vec<Trajectory> = trajectories.into_values().collect();
        traj_vec.sort_by(|a, b| a.id.cmp(&b.id));
        p.trajectories = traj_vec;
    }
    if !nulls.is_empty() {
        let mut nr_vec: Vec<NegativeResult> = nulls.into_values().collect();
        nr_vec.sort_by(|a, b| a.id.cmp(&b.id));
        p.negative_results = nr_vec;
    }
}
/// Persist `project` to the given source. Packet directories are a
/// read-only export format, so saving into one is rejected.
pub fn save(source: &VelaSource, project: &Project) -> Result<(), String> {
    match source {
        VelaSource::ProjectFile(path) => save_project_file(path, project),
        VelaSource::VelaRepo(dir) => save_vela_repo(dir, project),
        VelaSource::PacketDir(dir) => Err(format!(
            "Cannot save directly into packet directory '{}'. Export a new packet instead.",
            dir.display()
        )),
    }
}
/// Serialize the whole project as pretty-printed JSON into a single file.
fn save_project_file(path: &Path, project: &Project) -> Result<(), String> {
    let payload = serde_json::to_string_pretty(project)
        .map_err(|e| format!("Failed to serialize project: {e}"))?;
    std::fs::write(path, payload)
        .map_err(|e| format!("Failed to write project file '{}': {e}", path.display()))?;
    Ok(())
}
fn save_vela_repo(dir: &Path, project: &Project) -> Result<(), String> {
let vela_dir = dir.join(".vela");
let findings_dir = vela_dir.join("findings");
let events_dir = vela_dir.join("events");
let proposals_dir = vela_dir.join("proposals");
let replications_dir = vela_dir.join("replications");
let datasets_dir = vela_dir.join("datasets");
let code_artifacts_dir = vela_dir.join("code-artifacts");
let artifacts_dir = vela_dir.join("artifacts");
let predictions_dir = vela_dir.join("predictions");
let resolutions_dir = vela_dir.join("resolutions");
for d in [
&vela_dir,
&findings_dir,
&events_dir,
&proposals_dir,
&replications_dir,
&datasets_dir,
&code_artifacts_dir,
&artifacts_dir,
&predictions_dir,
&resolutions_dir,
] {
std::fs::create_dir_all(d)
.map_err(|e| format!("Failed to create directory {}: {e}", d.display()))?;
}
let config = RepoConfig {
project: RepoProjectMeta {
name: project.project.name.clone(),
frontier_id: Some(project.frontier_id()),
compiled_at: project.project.compiled_at.clone(),
description: project.project.description.clone(),
compiler: project.project.compiler.clone(),
papers_processed: project.project.papers_processed,
},
};
let toml_str = toml::to_string_pretty(&config)
.map_err(|e| format!("Failed to serialize config.toml: {e}"))?;
std::fs::write(vela_dir.join("config.toml"), toml_str)
.map_err(|e| format!("Failed to write config.toml: {e}"))?;
for finding in &project.findings {
let json = serde_json::to_string_pretty(finding)
.map_err(|e| format!("Failed to serialize finding {}: {e}", finding.id))?;
let filename = format!("{}.json", finding.id);
std::fs::write(findings_dir.join(&filename), json)
.map_err(|e| format!("Failed to write {}: {e}", filename))?;
}
for event in &project.events {
let json = serde_json::to_string_pretty(event)
.map_err(|e| format!("Failed to serialize state event {}: {e}", event.id))?;
let filename = format!("{}.json", event.id);
std::fs::write(events_dir.join(&filename), json)
.map_err(|e| format!("Failed to write event {}: {e}", filename))?;
}
for proposal in &project.proposals {
let json = serde_json::to_string_pretty(proposal)
.map_err(|e| format!("Failed to serialize proposal {}: {e}", proposal.id))?;
let filename = format!("{}.json", proposal.id);
std::fs::write(proposals_dir.join(&filename), json)
.map_err(|e| format!("Failed to write proposal {}: {e}", filename))?;
}
let proof_state_json = serde_json::to_string_pretty(&project.proof_state)
.map_err(|e| format!("Failed to serialize proof state: {e}"))?;
std::fs::write(vela_dir.join("proof-state.json"), proof_state_json)
.map_err(|e| format!("Failed to write proof-state.json: {e}"))?;
for replication in &project.replications {
let json = serde_json::to_string_pretty(replication)
.map_err(|e| format!("Failed to serialize replication {}: {e}", replication.id))?;
let filename = format!("{}.json", replication.id);
std::fs::write(replications_dir.join(&filename), json)
.map_err(|e| format!("Failed to write replication {}: {e}", filename))?;
}
for dataset in &project.datasets {
let json = serde_json::to_string_pretty(dataset)
.map_err(|e| format!("Failed to serialize dataset {}: {e}", dataset.id))?;
let filename = format!("{}.json", dataset.id);
std::fs::write(datasets_dir.join(&filename), json)
.map_err(|e| format!("Failed to write dataset {}: {e}", filename))?;
}
for artifact in &project.code_artifacts {
let json = serde_json::to_string_pretty(artifact)
.map_err(|e| format!("Failed to serialize code artifact {}: {e}", artifact.id))?;
let filename = format!("{}.json", artifact.id);
std::fs::write(code_artifacts_dir.join(&filename), json)
.map_err(|e| format!("Failed to write code artifact {}: {e}", filename))?;
}
for artifact in &project.artifacts {
let json = serde_json::to_string_pretty(artifact)
.map_err(|e| format!("Failed to serialize artifact {}: {e}", artifact.id))?;
let filename = format!("{}.json", artifact.id);
std::fs::write(artifacts_dir.join(&filename), json)
.map_err(|e| format!("Failed to write artifact {}: {e}", filename))?;
}
for prediction in &project.predictions {
let json = serde_json::to_string_pretty(prediction)
.map_err(|e| format!("Failed to serialize prediction {}: {e}", prediction.id))?;
let filename = format!("{}.json", prediction.id);
std::fs::write(predictions_dir.join(&filename), json)
.map_err(|e| format!("Failed to write prediction {}: {e}", filename))?;
}
for resolution in &project.resolutions {
let json = serde_json::to_string_pretty(resolution)
.map_err(|e| format!("Failed to serialize resolution {}: {e}", resolution.id))?;
let filename = format!("{}.json", resolution.id);
std::fs::write(resolutions_dir.join(&filename), json)
.map_err(|e| format!("Failed to write resolution {}: {e}", filename))?;
}
let peers_path = vela_dir.join("peers.json");
if project.peers.is_empty() {
if peers_path.is_file() {
std::fs::remove_file(&peers_path)
.map_err(|e| format!("Failed to remove stale peers.json: {e}"))?;
}
} else {
let json = serde_json::to_string_pretty(&project.peers)
.map_err(|e| format!("Failed to serialize peers: {e}"))?;
std::fs::write(&peers_path, json)
.map_err(|e| format!("Failed to write peers.json: {e}"))?;
}
let actors_path = vela_dir.join("actors.json");
let json = serde_json::to_string_pretty(&project.actors)
.map_err(|e| format!("Failed to serialize actors: {e}"))?;
std::fs::write(&actors_path, json).map_err(|e| format!("Failed to write actors.json: {e}"))?;
let signatures_path = vela_dir.join("signatures.json");
if project.signatures.is_empty() {
if signatures_path.is_file() {
std::fs::remove_file(&signatures_path)
.map_err(|e| format!("Failed to remove stale signatures.json: {e}"))?;
}
} else {
let json = serde_json::to_string_pretty(&project.signatures)
.map_err(|e| format!("Failed to serialize signatures: {e}"))?;
std::fs::write(&signatures_path, json)
.map_err(|e| format!("Failed to write signatures.json: {e}"))?;
}
crate::frontier_repo::write_visible_repo_files(dir, project)?;
Ok(())
}
/// Convenience wrapper: detect the source kind at `path`, then load it.
pub fn load_from_path(path: &Path) -> Result<Project, String> {
    detect(path).and_then(|source| load(&source))
}
/// A directory is a frontier packet when it holds a readable, parseable
/// `manifest.json` whose `packet_format` is "vela.frontier-packet".
/// Any read or parse failure simply means "not a packet".
fn is_packet_dir(path: &Path) -> bool {
    let manifest_path = path.join("manifest.json");
    if !manifest_path.is_file() {
        return false;
    }
    std::fs::read_to_string(&manifest_path)
        .ok()
        .and_then(|data| serde_json::from_str::<PacketManifestHeader>(&data).ok())
        .is_some_and(|manifest| manifest.packet_format == "vela.frontier-packet")
}
/// Return the first non-empty string in `values`, or "" when all are empty
/// (or the iterator yields nothing).
fn first_non_empty<'a>(values: impl IntoIterator<Item = &'a str>) -> &'a str {
    for value in values {
        if !value.is_empty() {
            return value;
        }
    }
    ""
}
/// Convenience wrapper: detect the source kind at `path`, then save into it.
pub fn save_to_path(path: &Path, project: &Project) -> Result<(), String> {
    detect(path).and_then(|source| save(&source, project))
}
/// Create the `.vela/` state directory under `dir` and write the full
/// project into it.
pub fn init_repo(dir: &Path, project: &Project) -> Result<(), String> {
    std::fs::create_dir_all(dir.join(".vela"))
        .map_err(|e| format!("Failed to create .vela/: {e}"))?;
    save_vela_repo(dir, project)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bundle::*;
use crate::project;
use tempfile::TempDir;
    // Build a minimal FindingBundle fixture: one protein entity, an
    // experimental-evidence stub, empty conditions, and all flags cleared.
    // `score` feeds the raw confidence; everything else is fixed.
    fn make_finding(id: &str, score: f64, assertion_type: &str) -> FindingBundle {
        FindingBundle {
            id: id.into(),
            version: 1,
            previous_version: None,
            assertion: Assertion {
                text: format!("Finding {id}"),
                assertion_type: assertion_type.into(),
                entities: vec![Entity {
                    name: "TestEntity".into(),
                    entity_type: "protein".into(),
                    identifiers: serde_json::Map::new(),
                    canonical_id: None,
                    candidates: vec![],
                    aliases: vec![],
                    resolution_provenance: None,
                    resolution_confidence: 1.0,
                    resolution_method: None,
                    species_context: None,
                    needs_review: false,
                }],
                relation: None,
                direction: None,
                causal_claim: None,
                causal_evidence_grade: None,
            },
            evidence: Evidence {
                evidence_type: "experimental".into(),
                model_system: String::new(),
                species: None,
                method: String::new(),
                sample_size: None,
                effect_size: None,
                p_value: None,
                replicated: false,
                replication_count: None,
                evidence_spans: vec![],
            },
            conditions: Conditions {
                text: String::new(),
                species_verified: vec![],
                species_unverified: vec![],
                in_vitro: false,
                in_vivo: false,
                human_data: false,
                clinical_trial: false,
                concentration_range: None,
                duration: None,
                age_group: None,
                cell_type: None,
            },
            // `score` is the only caller-controlled confidence input.
            confidence: Confidence::raw(score, "seeded prior", 0.85),
            provenance: Provenance {
                source_type: "published_paper".into(),
                doi: None,
                pmid: None,
                pmc: None,
                openalex_id: None,
                url: None,
                title: "Test".into(),
                authors: vec![],
                year: Some(2024),
                journal: None,
                license: None,
                publisher: None,
                funders: vec![],
                extraction: Extraction::default(),
                review: None,
                citation_count: None,
            },
            flags: Flags {
                gap: false,
                negative_space: false,
                contested: false,
                retracted: false,
                declining: false,
                gravity_well: false,
                review_state: None,
                superseded: false,
                signature_threshold: None,
                jointly_accepted: false,
            },
            links: vec![],
            annotations: vec![],
            attachments: vec![],
            created: String::new(),
            updated: None,
            access_tier: crate::access_tier::AccessTier::Public,
        }
    }
    // Assemble a Project fixture around the given findings (the 10 and 0
    // match assemble's papers_processed/offset-style arguments as used in
    // the loaders above).
    fn make_project(name: &str, findings: Vec<FindingBundle>) -> Project {
        project::assemble(name, findings, 10, 0, "Test project")
    }
#[test]
fn detect_json_file() {
let tmp = TempDir::new().unwrap();
let json_path = tmp.path().join("test.json");
std::fs::write(&json_path, "{}").unwrap();
let source = detect(&json_path).unwrap();
assert_eq!(source, VelaSource::ProjectFile(json_path));
}
#[test]
fn detect_vela_repo() {
    // A directory containing a `.vela/` subdirectory is a Vela repository.
    let tmp_dir = TempDir::new().unwrap();
    let repo = tmp_dir.path().join("my-repo");
    std::fs::create_dir_all(repo.join(".vela")).unwrap();
    assert_eq!(detect(&repo).unwrap(), VelaSource::VelaRepo(repo));
}
#[test]
fn detect_dir_without_vela_errors() {
    // A directory with neither `.vela/` nor a packet layout is rejected,
    // and the error text points the user at the relevant commands.
    let tmp_dir = TempDir::new().unwrap();
    let plain = tmp_dir.path().join("plain-dir");
    std::fs::create_dir_all(&plain).unwrap();
    let message = detect(&plain).unwrap_err();
    assert!(message.contains("frontier packet"));
    assert!(message.contains("vela init"));
}
#[test]
fn detect_nonexistent_json_path() {
    // A missing path ending in .json is still treated as a project-file
    // target. Build the path inside a fresh TempDir so it is guaranteed
    // not to exist — the previous hardcoded "/tmp/..." path was
    // non-portable (no /tmp on Windows) and could collide with a stray
    // entry left by another process, flipping the result.
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("nonexistent_test_vela.json");
    assert!(!path.exists());
    let source = detect(&path).unwrap();
    assert_eq!(source, VelaSource::ProjectFile(path));
}
#[test]
fn detect_nonexistent_non_json_errors() {
    // A missing path with no .json extension must fail detection. Built
    // inside a fresh TempDir rather than hardcoding "/tmp/..." — the old
    // fixed path was non-portable and a pre-existing directory at it
    // would have made the test pass/fail for the wrong reason.
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("nonexistent_test_vela_dir");
    assert!(!path.exists());
    assert!(detect(&path).is_err());
}
#[test]
fn roundtrip_project_file() {
    // Saving a project to a single JSON file and loading it back must
    // preserve findings, project name, and embedded links.
    let tmp = TempDir::new().unwrap();
    let file = tmp.path().join("test.json");
    let mut first = make_finding("vf_001", 0.8, "mechanism");
    first.add_link("vf_002", "extends", "shared entity");
    let second = make_finding("vf_002", 0.6, "therapeutic");
    let project = make_project("roundtrip-test", vec![first, second]);
    let source = VelaSource::ProjectFile(file);
    save(&source, &project).unwrap();
    let reloaded = load(&source).unwrap();
    assert_eq!(reloaded.project.name, "roundtrip-test");
    assert_eq!(reloaded.findings.len(), 2);
    let links = &reloaded.findings[0].links;
    assert_eq!(links.len(), 1);
    assert_eq!(links[0].target, "vf_002");
}
#[test]
fn roundtrip_vela_repo() {
    // init_repo must lay out the expected .vela/ structure (no legacy
    // links manifest, no reviews dir), and load must reconstruct the
    // project including per-finding embedded links.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("test-repo");
    let mut fa = make_finding("vf_aaa", 0.9, "mechanism");
    fa.add_link("vf_bbb", "contradicts", "opposite direction");
    fa.add_link("vf_ccc", "supports", "same pathway");
    let fb = make_finding("vf_bbb", 0.7, "therapeutic");
    let fc = make_finding("vf_ccc", 0.5, "biomarker");
    let project = make_project("repo-test", vec![fa, fb, fc]);
    init_repo(&repo, &project).unwrap();
    // On-disk layout checks.
    assert!(repo.join(".vela").is_dir());
    assert!(repo.join(".vela/config.toml").exists());
    assert!(repo.join(".vela/findings").is_dir());
    for id in ["vf_aaa", "vf_bbb", "vf_ccc"] {
        assert!(repo.join(format!(".vela/findings/{id}.json")).exists());
    }
    assert!(repo.join(".vela/events").is_dir());
    assert!(repo.join(".vela/proposals").is_dir());
    assert!(repo.join(".vela/proof-state.json").exists());
    assert!(!repo.join(".vela/links/manifest.json").exists());
    assert!(!repo.join(".vela/reviews").exists());
    // Reload and verify content.
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(reloaded.findings.len(), 3);
    assert_eq!(reloaded.project.name, "repo-test");
    assert_eq!(reloaded.project.description, "Test project");
    let aaa = reloaded.findings.iter().find(|f| f.id == "vf_aaa").unwrap();
    assert_eq!(aaa.links.len(), 2);
    let bbb = reloaded.findings.iter().find(|f| f.id == "vf_bbb").unwrap();
    assert!(bbb.links.is_empty());
}
#[test]
fn embedded_links_roundtrip() {
    // Links live inside each finding file — no separate links manifest is
    // written — and they survive a full repo round trip.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("link-test");
    let mut x1 = make_finding("vf_x1", 0.8, "mechanism");
    x1.add_link("vf_x2", "extends", "entity overlap");
    x1.add_link_with_source("vf_x3", "supports", "pathway link", "llm");
    let mut x2 = make_finding("vf_x2", 0.7, "mechanism");
    x2.add_link("vf_x1", "contradicts", "opposite");
    let x3 = make_finding("vf_x3", 0.6, "therapeutic");
    let project = make_project("link-test", vec![x1, x2, x3]);
    init_repo(&repo, &project).unwrap();
    assert!(!repo.join(".vela/links/manifest.json").exists());
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    let first = reloaded.findings.iter().find(|f| f.id == "vf_x1").unwrap();
    assert_eq!(first.links.len(), 2);
    let second = reloaded.findings.iter().find(|f| f.id == "vf_x2").unwrap();
    assert_eq!(second.links.len(), 1);
    assert_eq!(second.links[0].link_type, "contradicts");
}
#[test]
fn config_toml_parsing() {
    // A fully-populated [project] table parses; optional fields that are
    // absent (frontier_id, compiled_at) fall back to their defaults.
    let raw = r#"
[project]
name = "alzheimers-tau"
description = "Tau pathology in Alzheimer's disease"
compiler = "vela/0.2.0"
papers_processed = 700
"#;
    let parsed: RepoConfig = toml::from_str(raw).unwrap();
    let project = &parsed.project;
    assert_eq!(project.name, "alzheimers-tau");
    assert_eq!(project.description, "Tau pathology in Alzheimer's disease");
    assert_eq!(project.compiler, "vela/0.2.0");
    assert_eq!(project.papers_processed, 700);
    assert_eq!(project.frontier_id, None);
    assert_eq!(project.compiled_at, "");
}
#[test]
fn config_toml_minimal() {
    // Only `name` is required; every other [project] field defaults.
    let raw = r#"
[project]
name = "minimal"
"#;
    let parsed: RepoConfig = toml::from_str(raw).unwrap();
    assert_eq!(parsed.project.name, "minimal");
    assert_eq!(parsed.project.description, "");
    assert_eq!(parsed.project.papers_processed, 0);
}
#[test]
fn vela_repo_persists_frontier_id_and_actors() {
    // Actors and signatures get their own files under .vela/, survive a
    // reload, and the derived frontier id stays stable across loads.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("actor-repo");
    let mut project = make_project(
        "actor-test",
        vec![make_finding("vf_actor", 0.8, "mechanism")],
    );
    let frontier_id = project.frontier_id();
    project.actors.push(crate::sign::ActorRecord {
        id: "reviewer:test".into(),
        public_key: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".into(),
        algorithm: "ed25519".into(),
        created_at: "2026-01-01T00:00:00Z".into(),
        tier: None,
        orcid: None,
        access_clearance: None,
    });
    project.signatures.push(crate::sign::SignedEnvelope {
        finding_id: "vf_actor".into(),
        signature: "00".repeat(64),
        public_key: "aa".repeat(32),
        signed_at: "2026-01-01T00:00:00Z".into(),
        algorithm: "ed25519".into(),
    });
    init_repo(&repo, &project).unwrap();
    assert!(repo.join(".vela/actors.json").exists());
    assert!(repo.join(".vela/signatures.json").exists());
    // Load twice to check the id and payloads are stable, not one-shot.
    let first = load(&VelaSource::VelaRepo(repo.clone())).unwrap();
    let second = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(first.frontier_id(), frontier_id);
    assert_eq!(second.frontier_id(), frontier_id);
    assert_eq!(first.actors, project.actors);
    assert_eq!(first.signatures.len(), 1);
    assert_eq!(second.signatures.len(), 1);
    assert_eq!(second.signatures[0].finding_id, "vf_actor");
}
#[test]
fn empty_project_roundtrip() {
    // A project with zero findings still initializes and reloads cleanly,
    // with zeroed stats.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("empty-repo");
    init_repo(&repo, &make_project("empty", vec![])).unwrap();
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(reloaded.project.name, "empty");
    assert_eq!(reloaded.findings.len(), 0);
    assert_eq!(reloaded.stats.findings, 0);
    assert_eq!(reloaded.stats.links, 0);
}
#[test]
fn artifacts_roundtrip_from_vela_repo() {
    // An artifact attached to the project must be persisted by init_repo
    // (under .vela/artifacts) and come back intact from load.
    let tmp = TempDir::new().unwrap();
    let dir = tmp.path().join("artifact-repo");
    let mut original = make_project("artifact-test", vec![]);
    // Positional args appear to be: kind, title, content hash (sha256
    // hex), size, mime type, storage kind, blob path, url, license,
    // linked finding ids, provenance, metadata, access tier —
    // TODO confirm against Artifact::new's signature.
    let artifact = Artifact::new(
        "protocol",
        "trial protocol",
        "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
        Some(17),
        Some("application/json".into()),
        "local_blob",
        Some(".vela/artifact-blobs/sha256/bbbb".into()),
        Some("https://example.test/protocol".into()),
        Some("CC0-1.0".into()),
        vec!["vf_target".into()],
        Provenance {
            source_type: "clinical_trial".into(),
            doi: None,
            pmid: None,
            pmc: None,
            openalex_id: None,
            url: Some("https://example.test/protocol".into()),
            title: "trial protocol".into(),
            authors: vec![],
            year: Some(2026),
            journal: None,
            license: Some("CC0-1.0".into()),
            publisher: None,
            funders: vec![],
            extraction: Extraction::default(),
            review: None,
            citation_count: None,
        },
        std::collections::BTreeMap::new(),
        crate::access_tier::AccessTier::Public,
    )
    .unwrap();
    // Artifact::new assigns the id; capture it for the post-load check.
    let id = artifact.id.clone();
    original.artifacts.push(artifact);
    init_repo(&dir, &original).unwrap();
    let loaded = load(&VelaSource::VelaRepo(dir.clone())).unwrap();
    assert_eq!(loaded.artifacts.len(), 1);
    assert_eq!(loaded.artifacts[0].id, id);
    assert!(dir.join(".vela/artifacts").is_dir());
}
#[test]
fn large_finding_count() {
    // 100 findings with distinct ids and scores all round-trip through a
    // repo, and the loaded stats agree with the count.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("large-repo");
    let mut findings = Vec::with_capacity(100);
    for i in 0..100 {
        let score = 0.5 + (i as f64) * 0.004;
        findings.push(make_finding(&format!("vf_{i:04}"), score, "mechanism"));
    }
    let project = make_project("large", findings);
    assert_eq!(project.findings.len(), 100);
    init_repo(&repo, &project).unwrap();
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(reloaded.findings.len(), 100);
    assert_eq!(reloaded.stats.findings, 100);
}
#[test]
fn legacy_review_events_load() {
    // init_repo no longer writes a .vela/reviews directory, but a review
    // artifact placed there by hand (legacy layout) is still picked up
    // by load.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("review-repo");
    let mut project =
        make_project("review-test", vec![make_finding("vf_r1", 0.8, "mechanism")]);
    project.review_events.push(ReviewEvent {
        id: "rev_001".into(),
        workspace: None,
        finding_id: "vf_r1".into(),
        reviewer: "0000-0001-2345-6789".into(),
        reviewed_at: "2024-01-01T00:00:00Z".into(),
        scope: None,
        status: None,
        action: ReviewAction::Approved,
        reason: "Looks correct".into(),
        evidence_considered: vec![],
        state_change: None,
    });
    init_repo(&repo, &project).unwrap();
    assert!(!repo.join(".vela/reviews").exists());
    // Simulate the legacy layout by writing the review file manually.
    let reviews_dir = repo.join(".vela/reviews");
    std::fs::create_dir_all(&reviews_dir).unwrap();
    let payload = serde_json::to_string_pretty(&project.review_events[0]).unwrap();
    std::fs::write(reviews_dir.join("rev_001.json"), payload).unwrap();
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(reloaded.review_events.len(), 1);
    assert_eq!(reloaded.review_events[0].id, "rev_001");
    assert_eq!(reloaded.review_events[0].finding_id, "vf_r1");
}
#[test]
fn load_vela_repo_accepts_bbb_review_artifact() {
    // A repo assembled by hand from just a config and a fixture review
    // file still loads, and the BBB correction fixture deserializes with
    // a Qualified action and an "accepted" status.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("bbb-review-repo");
    std::fs::create_dir_all(repo.join(".vela/reviews")).unwrap();
    let config =
        "[project]\nname = \"bbb-review-repo\"\ndescription = \"\"\ncompiler = \"vela/test\"\npapers_processed = 0\n";
    std::fs::write(repo.join(".vela/config.toml"), config).unwrap();
    let fixture =
        include_str!("../embedded/tests/fixtures/legacy/rev_001_bbb_correction.json");
    std::fs::write(repo.join(".vela/reviews/rev_001_bbb_correction.json"), fixture)
        .unwrap();
    let reloaded = load(&VelaSource::VelaRepo(repo)).unwrap();
    assert_eq!(reloaded.review_events.len(), 1);
    let event = &reloaded.review_events[0];
    assert!(matches!(event.action, ReviewAction::Qualified { .. }));
    assert_eq!(event.status.as_deref(), Some("accepted"));
}
#[test]
fn load_from_path_json() {
    // load_from_path dispatches a .json file to the project-file loader.
    let tmp = TempDir::new().unwrap();
    let file = tmp.path().join("convenience.json");
    let project =
        make_project("convenience", vec![make_finding("vf_c1", 0.8, "mechanism")]);
    let payload = serde_json::to_string_pretty(&project).unwrap();
    std::fs::write(&file, payload).unwrap();
    let reloaded = load_from_path(&file).unwrap();
    assert_eq!(reloaded.findings.len(), 1);
    assert_eq!(reloaded.project.name, "convenience");
}
#[test]
fn load_from_path_repo() {
    // load_from_path dispatches a repo directory to the .vela/ loader.
    let tmp = TempDir::new().unwrap();
    let repo = tmp.path().join("conv-repo");
    let project =
        make_project("conv-repo", vec![make_finding("vf_cr1", 0.8, "mechanism")]);
    init_repo(&repo, &project).unwrap();
    let reloaded = load_from_path(&repo).unwrap();
    assert_eq!(reloaded.findings.len(), 1);
    assert_eq!(reloaded.project.name, "conv-repo");
}
#[test]
fn load_from_path_packet_dir() {
    // An exported frontier packet directory is detected and loaded,
    // including its review events and their count in the stats.
    let tmp = TempDir::new().unwrap();
    let packet = tmp.path().join("packet-frontier");
    let mut project = make_project(
        "packet-frontier",
        vec![make_finding("vf_pkt1", 0.81, "mechanism")],
    );
    project.review_events.push(ReviewEvent {
        id: "rev_pkt1".into(),
        workspace: Some("bbb".into()),
        finding_id: "vf_pkt1".into(),
        reviewer: "reviewer:test".into(),
        reviewed_at: "2026-01-01T00:00:00Z".into(),
        scope: Some("external".into()),
        status: Some("accepted".into()),
        action: ReviewAction::Approved,
        reason: "Imported from another lab".into(),
        evidence_considered: vec![],
        state_change: None,
    });
    project.stats.review_event_count = project.review_events.len();
    crate::export::export_packet(&project, &packet).unwrap();
    let reloaded = load_from_path(&packet).unwrap();
    assert_eq!(reloaded.project.name, "packet-frontier");
    assert_eq!(reloaded.findings.len(), 1);
    assert_eq!(reloaded.review_events.len(), 1);
    assert_eq!(reloaded.stats.review_event_count, 1);
}
#[test]
fn full_format_roundtrip() {
    // JSON file -> load -> repo -> load: project identity and embedded
    // links are preserved across both storage formats.
    let tmp = TempDir::new().unwrap();
    let mut first = make_finding("vf_rt1", 0.85, "mechanism");
    first.add_link("vf_rt2", "extends", "shared protein");
    let project = make_project(
        "full-roundtrip",
        vec![first, make_finding("vf_rt2", 0.72, "therapeutic")],
    );
    let json_source = VelaSource::ProjectFile(tmp.path().join("original.json"));
    save(&json_source, &project).unwrap();
    let from_json = load(&json_source).unwrap();
    let repo_dir = tmp.path().join("repo");
    init_repo(&repo_dir, &from_json).unwrap();
    let from_repo = load(&VelaSource::VelaRepo(repo_dir)).unwrap();
    assert_eq!(from_repo.project.name, from_json.project.name);
    assert_eq!(from_repo.findings.len(), from_json.findings.len());
    let rt1 = from_repo
        .findings
        .iter()
        .find(|f| f.id == "vf_rt1")
        .unwrap();
    assert_eq!(rt1.links.len(), 1);
    assert_eq!(rt1.links[0].target, "vf_rt2");
    assert_eq!(rt1.links[0].link_type, "extends");
}
}