use super::derivation::{
collect_input_hashes, derivation_closure_hash, validate_derivation, Derivation,
DerivationResult,
};
use super::sandbox_exec::{plan_sandbox_build, simulate_sandbox_build, SandboxPlan};
use super::sandbox_run;
use crate::core::types::Machine;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
#[derive(Debug, Clone, PartialEq)]
/// One numbered step in a derivation build plan, for display/audit purposes.
pub struct DerivationStep {
    /// 1-based position of this step in the plan (steps 4+ are the build phase).
    pub step: u8,
    /// Human-readable description of what this step does.
    pub description: String,
    /// True when the step will not run (e.g. build phase substituted by a store hit).
    pub skipped: bool,
}
#[derive(Debug, Clone, PartialEq)]
/// Full plan for realising a single derivation, produced by `plan_derivation`.
pub struct DerivationPlan {
    /// Ordered steps 1..=10; build steps are marked `skipped` on a store hit.
    pub steps: Vec<DerivationStep>,
    /// Content hash of the derivation closure (inputs + script + config).
    pub closure_hash: String,
    /// True when `closure_hash` was already present in the local store.
    pub store_hit: bool,
    /// Sandbox build plan; `None` exactly when `store_hit` is true.
    pub sandbox_plan: Option<SandboxPlan>,
    /// Input name -> resolved store content path (`<store>/<hash>/content`).
    pub input_paths: BTreeMap<String, PathBuf>,
}
/// Build the step-by-step plan for realising `derivation`.
///
/// Validates the derivation, resolves its inputs against `resolved_resources`,
/// computes the closure hash, and checks `local_store_entries` for a hit.
/// On a hit the build steps (4-10) are recorded as skipped and no sandbox plan
/// is produced; on a miss a `SandboxPlan` is attached and steps 4-10 are live.
///
/// Returns `Err` if validation fails or an input cannot be resolved.
pub fn plan_derivation(
    derivation: &Derivation,
    resolved_resources: &BTreeMap<String, String>,
    local_store_entries: &[String],
    store_dir: &Path,
) -> Result<DerivationPlan, String> {
    // Reject malformed derivations before doing any work.
    let validation_errors = validate_derivation(derivation);
    if !validation_errors.is_empty() {
        return Err(format!(
            "derivation validation: {}",
            validation_errors.join("; ")
        ));
    }
    let input_hashes = collect_input_hashes(derivation, resolved_resources)?;
    // Map each input name to its store content path: <store>/<bare-hash>/content.
    let input_paths: BTreeMap<String, PathBuf> = input_hashes
        .iter()
        .map(|(name, hash)| {
            let bare = hash.strip_prefix("blake3:").unwrap_or(hash);
            (name.clone(), store_dir.join(bare).join("content"))
        })
        .collect();
    let closure_hash = derivation_closure_hash(derivation, &input_hashes);
    let store_hit = local_store_entries.contains(&closure_hash);
    // Steps 1-3 are common to both the hit and miss paths.
    let mut steps = vec![
        DerivationStep {
            step: 1,
            description: format!("Resolve {} input(s)", input_hashes.len()),
            skipped: false,
        },
        DerivationStep {
            step: 2,
            description: format!(
                "Compute closure hash: {}",
                &closure_hash[..32.min(closure_hash.len())]
            ),
            skipped: false,
        },
        DerivationStep {
            step: 3,
            description: if store_hit {
                "Store HIT — skip build (substitute)".to_string()
            } else {
                "Store MISS — build required".to_string()
            },
            skipped: false,
        },
    ];
    if store_hit {
        // Build phase is substituted from the store; record steps 4-10 as skipped.
        let skipped_descriptions = [
            "Create pepita namespace",
            "Bind inputs read-only",
            "Execute bashrs script",
            "Compute output hash",
            "Atomic move to store",
            "Write meta.yaml",
            "Destroy namespace",
        ];
        steps.extend(
            skipped_descriptions
                .iter()
                .enumerate()
                .map(|(i, desc)| DerivationStep {
                    step: (i + 4) as u8,
                    description: desc.to_string(),
                    skipped: true,
                }),
        );
        return Ok(DerivationPlan {
            steps,
            closure_hash,
            store_hit: true,
            sandbox_plan: None,
            input_paths,
        });
    }
    // Cache miss: a real sandbox build is needed.
    let sandbox_config = derivation
        .sandbox
        .clone()
        .unwrap_or_else(default_sandbox_config);
    let sandbox_plan = plan_sandbox_build(
        &sandbox_config,
        &closure_hash,
        &input_paths,
        &derivation.script,
        store_dir,
    );
    let live_descriptions = [
        format!("Create pepita namespace ({})", sandbox_plan.namespace_id),
        format!("Bind {} input(s) read-only", input_paths.len()),
        "Execute bashrs-purified build script".to_string(),
        "Compute BLAKE3 hash of $out directory".to_string(),
        "Atomic move output to store".to_string(),
        "Write meta.yaml with closure + provenance".to_string(),
        "Destroy namespace and clean up".to_string(),
    ];
    steps.extend(
        live_descriptions
            .into_iter()
            .enumerate()
            .map(|(i, description)| DerivationStep {
                step: (i + 4) as u8,
                description,
                skipped: false,
            }),
    );
    Ok(DerivationPlan {
        steps,
        closure_hash,
        store_hit: false,
        sandbox_plan: Some(sandbox_plan),
        input_paths,
    })
}
/// Simulate realising `derivation` without touching the filesystem.
///
/// On a store hit the cached entry is substituted directly (store hash equals
/// the closure hash); on a miss a sandbox build is simulated via
/// `simulate_sandbox_build`. Returns `Err` if an input cannot be resolved.
pub fn simulate_derivation(
    derivation: &Derivation,
    resolved_resources: &BTreeMap<String, String>,
    local_store_entries: &[String],
    store_dir: &Path,
) -> Result<DerivationResult, String> {
    let input_hashes = collect_input_hashes(derivation, resolved_resources)?;
    let closure_hash = derivation_closure_hash(derivation, &input_hashes);
    // Store hit: substitute the existing entry, no build simulation needed.
    if local_store_entries.contains(&closure_hash) {
        let hash_bare = closure_hash
            .strip_prefix("blake3:")
            .unwrap_or(&closure_hash);
        let store_path = format!("{}/{hash_bare}/content", store_dir.display());
        return Ok(DerivationResult {
            store_hash: closure_hash.clone(),
            store_path,
            input_closure: input_hashes.values().cloned().collect(),
            closure_hash,
            derivation_depth: 1,
        });
    }
    // Miss: resolve input content paths and simulate the sandbox build.
    let input_paths: BTreeMap<String, PathBuf> = input_hashes
        .iter()
        .map(|(name, hash)| {
            let bare = hash.strip_prefix("blake3:").unwrap_or(hash);
            (name.clone(), store_dir.join(bare).join("content"))
        })
        .collect();
    let sandbox_config = derivation
        .sandbox
        .clone()
        .unwrap_or_else(default_sandbox_config);
    let simulated = simulate_sandbox_build(
        &sandbox_config,
        &closure_hash,
        &input_paths,
        &derivation.script,
        store_dir,
    );
    Ok(DerivationResult {
        store_hash: simulated.output_hash.clone(),
        store_path: simulated.store_path,
        input_closure: input_hashes.values().cloned().collect(),
        closure_hash,
        derivation_depth: 1,
    })
}
/// Simulate every derivation in `topo_order`, threading each result's store
/// hash into the resolved-resource map so downstream derivations can reference
/// upstream outputs by name.
///
/// Returns `Err` if a name in `topo_order` is missing from `derivations` or a
/// simulation fails.
pub fn execute_derivation_dag(
    derivations: &BTreeMap<String, Derivation>,
    topo_order: &[String],
    initial_resources: &BTreeMap<String, String>,
    local_store_entries: &[String],
    store_dir: &Path,
) -> Result<BTreeMap<String, DerivationResult>, String> {
    let mut resolved = initial_resources.clone();
    let mut results = BTreeMap::new();
    for name in topo_order {
        let Some(derivation) = derivations.get(name) else {
            return Err(format!("derivation '{name}' not found in DAG"));
        };
        let result = simulate_derivation(derivation, &resolved, local_store_entries, store_dir)?;
        // Make this output addressable by later derivations in the DAG.
        resolved.insert(name.clone(), result.store_hash.clone());
        results.insert(name.clone(), result);
    }
    Ok(results)
}
/// Realise every derivation in `topo_order`, either for real or as a dry run.
///
/// With `dry_run` set this delegates entirely to `execute_derivation_dag`
/// (pure simulation). Otherwise each derivation is planned; store hits are
/// substituted via simulation while misses are built live in a sandbox on the
/// local machine. Results feed forward through the resolved-resource map.
pub fn execute_derivation_dag_live(
    derivations: &BTreeMap<String, Derivation>,
    topo_order: &[String],
    initial_resources: &BTreeMap<String, String>,
    local_store_entries: &[String],
    store_dir: &Path,
    dry_run: bool,
) -> Result<BTreeMap<String, DerivationResult>, String> {
    if dry_run {
        return execute_derivation_dag(
            derivations,
            topo_order,
            initial_resources,
            local_store_entries,
            store_dir,
        );
    }
    let machine = local_machine();
    let mut resolved = initial_resources.clone();
    let mut results = BTreeMap::new();
    for name in topo_order {
        let Some(derivation) = derivations.get(name) else {
            return Err(format!("derivation '{name}' not found in DAG"));
        };
        let plan = plan_derivation(derivation, &resolved, local_store_entries, store_dir)?;
        // Hits substitute from the store; only misses trigger a live build.
        let result = if plan.store_hit {
            simulate_derivation(derivation, &resolved, local_store_entries, store_dir)?
        } else {
            execute_derivation_live(derivation, &plan, &machine, store_dir)?
        };
        resolved.insert(name.clone(), result.store_hash.clone());
        results.insert(name.clone(), result);
    }
    Ok(results)
}
/// Execute a cache-miss derivation for real using the plan's sandbox plan.
///
/// Fails if `plan` carries no sandbox plan (which only happens on a store hit,
/// where this function must not be called) or if the sandboxed build fails.
fn execute_derivation_live(
    derivation: &Derivation,
    plan: &DerivationPlan,
    machine: &Machine,
    store_dir: &Path,
) -> Result<DerivationResult, String> {
    let Some(sandbox_plan) = plan.sandbox_plan.as_ref() else {
        return Err("no sandbox plan for cache miss".to_string());
    };
    // 600s build timeout; mirrors the default sandbox config.
    let exec_result = sandbox_run::execute_sandbox_plan(
        sandbox_plan,
        &derivation.script,
        machine,
        store_dir,
        Some(600),
    )?;
    let input_closure: Vec<String> = plan
        .input_paths
        .values()
        .map(|path| path.display().to_string())
        .collect();
    Ok(DerivationResult {
        store_hash: exec_result.output_hash.clone(),
        store_path: exec_result.store_path,
        input_closure,
        closure_hash: plan.closure_hash.clone(),
        derivation_depth: 1,
    })
}
/// Synthesise a `Machine` record describing the local host, used for
/// in-process (non-remote) sandbox builds.
fn local_machine() -> Machine {
    Machine {
        hostname: String::from("localhost"),
        addr: String::from("127.0.0.1"),
        user: String::from("root"),
        // Build-host architecture as reported by the compiler target.
        arch: std::env::consts::ARCH.to_string(),
        ssh_key: None,
        roles: Vec::new(),
        transport: None,
        container: None,
        pepita: None,
        cost: 0,
        allowed_operators: Vec::new(),
    }
}
/// Fallback sandbox configuration for derivations that specify none:
/// minimal isolation level, 2 GiB memory, 4 CPUs, 600s timeout, no extra
/// bind mounts or environment variables.
fn default_sandbox_config() -> super::sandbox::SandboxConfig {
    use super::sandbox::{SandboxConfig, SandboxLevel};
    SandboxConfig {
        level: SandboxLevel::Minimal,
        memory_mb: 2048,
        cpus: 4.0,
        timeout: 600,
        bind_mounts: Vec::new(),
        env: Vec::new(),
    }
}
pub fn is_store_hit(plan: &DerivationPlan) -> bool {
plan.store_hit
}
/// Count how many plan steps are marked as skipped (7 on a store hit, 0 on a miss).
pub fn skipped_steps(plan: &DerivationPlan) -> usize {
    plan.steps
        .iter()
        .fold(0, |count, step| if step.skipped { count + 1 } else { count })
}