use std::collections::{HashMap, HashSet};
use anyhow::{Context, Result, bail};
use rayon::prelude::*;
use tracing::error;
use bob::Interrupted;
use bob::build::{self, Build};
use bob::config::Config;
use bob::db::Database;
use bob::sandbox::SandboxScope;
use bob::scan::{ScanResult, ScanSummary};
/// Determine which buildable packages from the scan already have an
/// up-to-date binary package, recording a per-package build reason in the
/// database for everything that must be rebuilt.
///
/// Packages are verified in dependency order ("waves"): a package is only
/// checked once none of its buildable dependencies is still unresolved.
/// When a checked package needs a rebuild (or its check errors), that status
/// is propagated transitively to all dependents via reverse dependency
/// edges, so dependents are marked without running their own check.
///
/// Returns the number of packages found to be up to date. If no pkgsrc
/// environment is cached, the check is skipped entirely and `Ok(0)` is
/// returned.
///
/// # Errors
///
/// Propagates database errors and thread-pool construction failure.
pub fn check_up_to_date(
    config: &Config,
    db: &Database,
    scan_result: &ScanSummary,
) -> Result<usize> {
    // Without the cached pkgsrc environment we cannot locate the binary
    // package directory; treat this as "nothing verified" rather than fail.
    let pkgsrc_env = match db.load_pkgsrc_env() {
        Ok(env) => env,
        Err(_) => {
            tracing::warn!("PkgsrcEnv not cached, skipping up-to-date check");
            return Ok(0);
        }
    };
    let packages_dir = pkgsrc_env.packages.join("All");
    let pkgsrc_dir = config.pkgsrc();
    let buildable: Vec<_> = scan_result.buildable().collect();
    let mut up_to_date_count = 0usize;
    // Reasons from a previous run are stale; start from a clean slate.
    db.clear_build_reasons()?;
    bob::print_status("Calculating package build status");
    let start = std::time::Instant::now();
    // Dedicated pool so the parallel check width follows the scan-thread
    // setting rather than rayon's global default.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(config.scan_threads())
        .build()
        .context("Failed to build thread pool for up-to-date check")?;
    // Name-based lookup structures over the buildable set.
    let buildable_names: HashSet<&str> = buildable.iter().map(|p| p.pkgname().pkgname()).collect();
    let pkg_by_name: HashMap<&str, &bob::scan::ResolvedPackage> = buildable
        .iter()
        .map(|&p| (p.pkgname().pkgname(), p))
        .collect();
    // Forward dependency edges, restricted to packages we can actually build
    // (dependencies outside the buildable set never gate a wave).
    let forward_deps: HashMap<&str, Vec<&str>> = buildable
        .iter()
        .map(|p| {
            let deps: Vec<&str> = p
                .depends()
                .iter()
                .map(|d| d.pkgname())
                .filter(|d| buildable_names.contains(d))
                .collect();
            (p.pkgname().pkgname(), deps)
        })
        .collect();
    // Invert the edges so a rebuild can be propagated to dependents.
    let mut reverse_deps: HashMap<&str, Vec<&str>> = HashMap::new();
    for (pkg, deps) in &forward_deps {
        for dep in deps {
            reverse_deps.entry(*dep).or_default().push(*pkg);
        }
    }
    // Packages not yet scheduled into a wave.
    let mut remaining: HashSet<&str> = buildable_names.clone();
    let mut needs_rebuild: HashSet<&str> = HashSet::new();
    // dependent -> the dependency that first caused its (propagated) rebuild.
    let mut propagated_from: HashMap<&str, &str> = HashMap::new();
    let mut checked_results: Vec<(
        &bob::scan::ResolvedPackage,
        anyhow::Result<Option<bob::BuildReason>>,
    )> = Vec::new();
    // A package with no binary archive obviously needs a rebuild; mark it up
    // front so it is never submitted to the (more expensive) freshness check.
    // NOTE(review): this pre-marking does not push a DependencyRefresh onto
    // dependents here — presumably pkg_up_to_date flags a dependent whose
    // dependency archive is missing; verify against that helper.
    for &pkgname in &buildable_names {
        let pkgfile = packages_dir.join(format!("{}.tgz", pkgname));
        if !pkgfile.exists() {
            needs_rebuild.insert(pkgname);
            db.store_build_reason(pkgname, &bob::BuildReason::PackageNotFound.to_string())?;
        }
    }
    // Process packages in dependency-ordered waves.
    while !remaining.is_empty() {
        // Ready = every buildable dependency has already left `remaining`.
        let ready: Vec<&str> = remaining
            .iter()
            .filter(|pkg| {
                forward_deps[*pkg]
                    .iter()
                    .all(|dep| !remaining.contains(dep))
            })
            .copied()
            .collect();
        if ready.is_empty() {
            // No progress possible (dependency cycle among the remaining
            // packages); stop rather than spin forever.
            // NOTE(review): leftover packages silently keep whatever status
            // they already have — confirm this is the intended cycle policy.
            break;
        }
        // Packages already marked for rebuild skip the freshness check.
        let to_check: Vec<&str> = ready
            .iter()
            .filter(|pkg| !needs_rebuild.contains(*pkg))
            .copied()
            .collect();
        // Check the whole wave in parallel on the dedicated pool.
        let wave_results: Vec<_> = pool.install(|| {
            to_check
                .par_iter()
                .map(|&pkgname| {
                    let pkg = pkg_by_name[pkgname];
                    // Full dependency list here, not just the buildable subset.
                    let depends: Vec<&str> = pkg.depends().iter().map(|d| d.pkgname()).collect();
                    let result = bob::pkg_up_to_date(pkgname, &depends, &packages_dir, pkgsrc_dir);
                    (pkg, result)
                })
                .collect()
        });
        for (pkg, result) in wave_results {
            let pkgname = pkg.pkgname().pkgname();
            // A concrete rebuild reason OR a failed check both force a
            // rebuild of this package and, transitively, of its dependents.
            if matches!(&result, Ok(Some(_)) | Err(_)) {
                needs_rebuild.insert(pkgname);
                // Depth-first worklist over reverse deps; `insert` returning
                // false stops revisiting already-marked packages, so each
                // dependent records only the first dependency that hit it.
                let mut worklist = vec![pkgname];
                while let Some(dep) = worklist.pop() {
                    if let Some(dependents) = reverse_deps.get(dep) {
                        for &dependent in dependents {
                            if needs_rebuild.insert(dependent) {
                                propagated_from.insert(dependent, dep);
                                worklist.push(dependent);
                            }
                        }
                    }
                }
            }
            checked_results.push((pkg, result));
        }
        // Only now does the wave leave `remaining`, unlocking the next wave.
        for pkg in ready {
            remaining.remove(pkg);
        }
    }
    // Persist the outcome of every freshness check that actually ran.
    for (pkg, result) in checked_results {
        let pkgname = pkg.pkgname().pkgname();
        match result {
            Ok(None) => {
                // Up to date. Record a synthetic UpToDate build result unless
                // a successful build is already on record for this name.
                if db.is_successful(pkgname)? {
                    up_to_date_count += 1;
                    continue;
                }
                let build_result = bob::BuildResult {
                    pkgname: pkg.pkgname().clone(),
                    pkgpath: Some(pkg.pkgpath.clone()),
                    state: bob::PackageState::UpToDate,
                    log_dir: None,
                    build_stats: bob::PkgBuildStats::default(),
                };
                db.store_build_by_name(&build_result)?;
                up_to_date_count += 1;
            }
            Ok(Some(reason)) => {
                db.store_build_reason(pkgname, &reason.to_string())?;
            }
            Err(e) => {
                // The error itself is only logged at debug level; the stored
                // reason carries a short summary for the user.
                tracing::debug!(
                    pkgname,
                    error = format!("{e:#}"),
                    "Error checking up-to-date status"
                );
                db.store_build_reason(pkgname, &format!("check failed: {}", e))?;
            }
        }
    }
    // Packages marked only through propagation get a DependencyRefresh reason
    // naming the dependency that triggered them.
    for (pkgname, dep) in propagated_from {
        let reason = bob::BuildReason::DependencyRefresh(dep.to_string());
        db.store_build_reason(pkgname, &reason.to_string())?;
    }
    bob::print_elapsed("Calculating package build status", start.elapsed());
    Ok(up_to_date_count)
}
pub fn run_build_with(
config: &Config,
db: &Database,
state: &bob::RunState,
scan_result: ScanSummary,
scope: SandboxScope,
) -> Result<build::BuildSummary> {
if scan_result.count_buildable() == 0 {
bail!("No packages to build");
}
let mut skipped_results: Vec<build::BuildResult> = Vec::new();
let mut scanfail_results: Vec<(pkgsrc::PkgPath, String)> = Vec::new();
for pkg in scan_result.packages {
match pkg {
ScanResult::Buildable(_) => {}
ScanResult::Skipped {
pkgpath,
state,
index,
..
} => {
let Some(pkgname) = index.as_ref().map(|i| &i.pkgname) else {
error!(%pkgpath, "Skipped package missing PKGNAME");
continue;
};
skipped_results.push(build::BuildResult {
pkgname: pkgname.clone(),
pkgpath: Some(pkgpath),
state,
log_dir: None,
build_stats: build::PkgBuildStats::default(),
});
}
ScanResult::ScanFail { pkgpath, error } => {
scanfail_results.push((pkgpath, error));
}
}
}
let pkgsrc_env = db
.load_pkgsrc_env()
.context("PkgsrcEnv not cached - try 'bob clean' first")?;
let buildable = db.load_buildable_packages()?;
let mut build = Build::new(config, pkgsrc_env, scope, buildable);
build.load_cached_from_db(db)?;
tracing::debug!("Calling build.start()");
let build_start_time = std::time::Instant::now();
let mut summary = build.start(state, db)?;
let build_elapsed = build_start_time.elapsed();
tracing::debug!(
elapsed_ms = build_elapsed.as_millis(),
"build.start() returned"
);
db.add_build_duration(build_elapsed)?;
if state.interrupted() {
return Err(Interrupted.into());
}
let build_id = db.build_id().ok();
if let Some(bid) = &build_id {
if let Some(rev) = db.load_vcs_info().ok().and_then(|v| v.revision_full) {
if let Err(e) = db.store_build_revision(bid, &rev) {
tracing::warn!(error = format!("{e:#}"), "Failed to save build revision");
}
}
}
let now = bob::epoch_secs()?;
for result in &skipped_results {
if let Some(mut input) = result.history_input() {
input.build_id = build_id.clone();
if input.timestamp == 0 {
input.timestamp = now;
}
if let Err(e) = db.record_history(&input) {
tracing::warn!(error = format!("{e:#}"), "Failed to save skipped history");
}
}
}
summary.results.extend(skipped_results);
summary.scanfail.extend(scanfail_results);
Ok(summary)
}