use std::collections::{HashMap, HashSet};
use anyhow::{Context, Result, bail};
use rayon::prelude::*;
use tracing::error;
use bob::Interrupted;
use bob::build::{self, Build};
use bob::config::Config;
use bob::db::Database;
use bob::sandbox::SandboxScope;
use bob::scan::{ScanResult, ScanSummary};
/// Check which buildable packages already have an up-to-date binary
/// package under `${PACKAGES}/All` and persist the outcome to `db`.
///
/// Packages are visited in dependency order ("waves"): a package is only
/// checked after all of its buildable dependencies have been processed.
/// When a package needs a rebuild, every transitive dependent is marked
/// for rebuild as well (recorded later as a `DependencyRefresh` reason)
/// without being checked on disk. Up-to-date packages are stored as a
/// `BuildResult` with `PackageState::UpToDate`; everything else gets a
/// build-reason string.
///
/// Returns the number of packages found to be up to date.
pub fn check_up_to_date(
    config: &Config,
    db: &Database,
    scan_result: &ScanSummary,
) -> Result<usize> {
    // The cached pkgsrc environment tells us where binary packages live.
    // Without it nothing can be checked, so report 0 rather than fail.
    let pkgsrc_env = match db.load_pkgsrc_env() {
        Ok(env) => env,
        Err(_) => {
            tracing::warn!("PkgsrcEnv not cached, skipping up-to-date check");
            return Ok(0);
        }
    };
    let packages_dir = pkgsrc_env.packages.join("All");
    let pkgsrc_dir = config.pkgsrc();
    let buildable: Vec<_> = scan_result.buildable().collect();
    let mut up_to_date_count = 0usize;
    // Reasons from a previous run are stale; recompute from scratch.
    db.clear_build_reasons()?;
    print!("Calculating package build status...");
    std::io::Write::flush(&mut std::io::stdout())?;
    let start = std::time::Instant::now();
    // Dedicated pool so the parallel checks honour the configured scan
    // thread count instead of rayon's global default.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(config.scan_threads())
        .build()
        .context("Failed to build thread pool for up-to-date check")?;
    let buildable_names: HashSet<&str> = buildable.iter().map(|p| p.pkgname().pkgname()).collect();
    let pkg_by_name: HashMap<&str, &bob::scan::ResolvedPackage> = buildable
        .iter()
        .map(|&p| (p.pkgname().pkgname(), p))
        .collect();
    // Dependency edges restricted to the buildable set: dependencies
    // outside it cannot participate in the rebuild cascade here.
    let forward_deps: HashMap<&str, Vec<&str>> = buildable
        .iter()
        .map(|p| {
            let deps: Vec<&str> = p
                .depends()
                .iter()
                .map(|d| d.pkgname())
                .filter(|d| buildable_names.contains(d))
                .collect();
            (p.pkgname().pkgname(), deps)
        })
        .collect();
    // Invert the edges so a rebuild decision can be pushed to dependents.
    let mut reverse_deps: HashMap<&str, Vec<&str>> = HashMap::new();
    for (pkg, deps) in &forward_deps {
        for dep in deps {
            reverse_deps.entry(*dep).or_default().push(*pkg);
        }
    }
    let mut remaining: HashSet<&str> = buildable_names.clone();
    let mut needs_rebuild: HashSet<&str> = HashSet::new();
    // dependent -> the dependency whose rebuild forced it; drained at the
    // end to emit DependencyRefresh reasons.
    let mut propagated_from: HashMap<&str, &str> = HashMap::new();
    let mut checked_results: Vec<(
        &bob::scan::ResolvedPackage,
        anyhow::Result<Option<bob::BuildReason>>,
    )> = Vec::new();
    // Fast pre-pass: a missing binary package always means a rebuild, so
    // skip the full up-to-date check for those.
    // NOTE(review): packages marked here are excluded from the wave check
    // below, and their status is NOT propagated to dependents the way
    // wave results are — presumably pkg_up_to_date notices the missing
    // dependency file itself when checking a dependent; confirm.
    for &pkgname in &buildable_names {
        let pkgfile = packages_dir.join(format!("{}.tgz", pkgname));
        if !pkgfile.exists() {
            needs_rebuild.insert(pkgname);
            db.store_build_reason(pkgname, &bob::BuildReason::PackageNotFound.to_string())?;
        }
    }
    // Process in waves: each wave contains the packages whose buildable
    // dependencies have all been processed in earlier waves.
    while !remaining.is_empty() {
        let ready: Vec<&str> = remaining
            .iter()
            .filter(|pkg| {
                forward_deps[*pkg]
                    .iter()
                    .all(|dep| !remaining.contains(dep))
            })
            .copied()
            .collect();
        // No progress means a dependency cycle among the remaining
        // packages; stop rather than loop forever.
        if ready.is_empty() {
            break;
        }
        // Packages already condemned (missing file or propagation) need
        // no on-disk check — their fate is decided.
        let to_check: Vec<&str> = ready
            .iter()
            .filter(|pkg| !needs_rebuild.contains(*pkg))
            .copied()
            .collect();
        // Check the whole wave in parallel on the dedicated pool.
        let wave_results: Vec<_> = pool.install(|| {
            to_check
                .par_iter()
                .map(|&pkgname| {
                    let pkg = pkg_by_name[pkgname];
                    let depends: Vec<&str> = pkg.depends().iter().map(|d| d.pkgname()).collect();
                    let result = bob::pkg_up_to_date(pkgname, &depends, &packages_dir, pkgsrc_dir);
                    (pkg, result)
                })
                .collect()
        });
        for (pkg, result) in wave_results {
            let pkgname = pkg.pkgname().pkgname();
            // A rebuild reason — or a check error, treated as "rebuild to
            // be safe" — cascades to every transitive dependent.
            if matches!(&result, Ok(Some(_)) | Err(_)) {
                needs_rebuild.insert(pkgname);
                let mut worklist = vec![pkgname];
                while let Some(dep) = worklist.pop() {
                    if let Some(dependents) = reverse_deps.get(dep) {
                        for &dependent in dependents {
                            // insert() returns false when already marked,
                            // which also keeps the first propagation source.
                            if needs_rebuild.insert(dependent) {
                                propagated_from.insert(dependent, dep);
                                worklist.push(dependent);
                            }
                        }
                    }
                }
            }
            checked_results.push((pkg, result));
        }
        for pkg in ready {
            remaining.remove(pkg);
        }
    }
    // Persist the per-package outcomes collected above.
    for (pkg, result) in checked_results {
        let pkgname = pkg.pkgname().pkgname();
        match result {
            Ok(None) => {
                // Up to date: record a synthetic successful build result.
                let build_result = bob::BuildResult {
                    pkgname: pkg.pkgname().clone(),
                    pkgpath: Some(pkg.pkgpath.clone()),
                    state: bob::PackageState::UpToDate,
                    log_dir: None,
                    build_stats: bob::PkgBuildStats::default(),
                };
                db.store_build_by_name(&build_result)?;
                up_to_date_count += 1;
            }
            Ok(Some(reason)) => {
                db.store_build_reason(pkgname, &reason.to_string())?;
            }
            Err(e) => {
                // The check failing is not fatal for the whole run; keep
                // the error visible at debug level and record a reason.
                tracing::debug!(
                    pkgname,
                    error = %e,
                    "Error checking up-to-date status"
                );
                db.store_build_reason(pkgname, &format!("check failed: {}", e))?;
            }
        }
    }
    // Packages that were never checked directly get the dependency that
    // dragged them in recorded as their rebuild reason.
    for (pkgname, dep) in propagated_from {
        let reason = bob::BuildReason::DependencyRefresh(dep.to_string());
        db.store_build_reason(pkgname, &reason.to_string())?;
    }
    println!(" done ({:.1}s)", start.elapsed().as_secs_f32());
    Ok(up_to_date_count)
}
/// Run a full build over the buildable packages from `scan_result`.
///
/// Bails out when nothing is buildable, surfaces an interrupt from
/// `state` as an `Interrupted` error, and folds the skipped and
/// scan-failed packages into the returned summary alongside the actual
/// build results so callers see every scanned package accounted for.
pub fn run_build_with(
    config: &Config,
    db: &Database,
    state: &bob::RunState,
    scan_result: ScanSummary,
    scope: SandboxScope,
) -> Result<build::BuildSummary> {
    if scan_result.count_buildable() == 0 {
        bail!("No packages to build");
    }

    // Keyed by package name, preserving scan order.
    let to_build: indexmap::IndexMap<_, _> = scan_result
        .buildable()
        .map(|pkg| (pkg.pkgname().clone(), pkg.clone()))
        .collect();

    let pkgsrc_env = db
        .load_pkgsrc_env()
        .context("PkgsrcEnv not cached - try 'bob clean' first")?;

    let mut build = Build::new(config, pkgsrc_env, scope, to_build);
    build.load_cached_from_db(db)?;

    tracing::debug!("Calling build.start()");
    let started_at = std::time::Instant::now();
    let mut summary = build.start(state, db)?;
    tracing::debug!(
        elapsed_ms = started_at.elapsed().as_millis(),
        "build.start() returned"
    );

    if state.interrupted() {
        return Err(Interrupted.into());
    }

    // Merge the non-built scan outcomes into the summary.
    for entry in &scan_result.packages {
        match entry {
            ScanResult::Buildable(_) => {}
            ScanResult::ScanFail { pkgpath, error } => {
                summary.scanfail.push((pkgpath.clone(), error.clone()));
            }
            ScanResult::Skipped {
                pkgpath,
                // Bind under a distinct name: `state` would shadow the
                // `&RunState` parameter.
                state: pkg_state,
                index,
                ..
            } => {
                let Some(pkgname) = index.as_ref().map(|i| &i.pkgname) else {
                    error!(%pkgpath, "Skipped package missing PKGNAME");
                    continue;
                };
                summary.results.push(build::BuildResult {
                    pkgname: pkgname.clone(),
                    pkgpath: Some(pkgpath.clone()),
                    state: pkg_state.clone(),
                    log_dir: None,
                    build_stats: build::PkgBuildStats::default(),
                });
            }
        }
    }

    Ok(summary)
}