use anyhow::{Context, Result};
use bob::PackageStateKind::*;
use bob::{
Build, Config, Database, PackageState, RunState, Sandbox, Scan, ScanSummary, config::PkgsrcEnv,
sandbox::SandboxScope,
};
use std::collections::HashMap;
use std::fs;
use std::os::unix::fs::PermissionsExt;
use std::path::PathBuf;
use std::process::Command;
use tempfile::TempDir;
/// Declarative description of one mock pkgsrc package.  Each definition is
/// turned into a generated Makefile (pbulk-index output plus build targets)
/// by `TestHarness::write_pkg_makefile`.
struct PkgDef<'a> {
// PKGPATH relative to the pkgsrc root, e.g. "test/base".
pkgpath: &'a str,
// Full PKGNAME including version, e.g. "base-1.0".
pkgname: &'a str,
// ALL_DEPENDS value; empty string means no dependencies.
all_depends: &'a str,
// Non-empty: package is skipped at scan time (PKG_SKIP_REASON).
skip_reason: &'a str,
// Non-empty: package is failed at scan time (PKG_FAIL_REASON).
fail_reason: &'a str,
// "yes" marks a bootstrap package (BOOTSTRAP_PKG).
bootstrap_pkg: &'a str,
// USERGROUP_PHASE value, if any.
usergroup_phase: &'a str,
// MULTI_VERSION value for multi-version packages, if any.
multi_version: &'a str,
// When Some((target, msg)): the generated Makefile makes `target`
// echo `msg` to stderr and exit 1, simulating a build-phase failure.
fail_target: Option<(&'a str, &'a str)>,
}
impl<'a> PkgDef<'a> {
    /// Start a definition for the package at `pkgpath` named `pkgname`;
    /// every optional attribute begins empty/unset.
    fn new(pkgpath: &'a str, pkgname: &'a str) -> Self {
        Self {
            pkgpath,
            pkgname,
            all_depends: "",
            skip_reason: "",
            fail_reason: "",
            bootstrap_pkg: "",
            usergroup_phase: "",
            multi_version: "",
            fail_target: None,
        }
    }
    /// Set ALL_DEPENDS for the generated pbulk-index output.
    fn depends(self, d: &'a str) -> Self {
        Self {
            all_depends: d,
            ..self
        }
    }
    /// Mark the package as skipped with the given PKG_SKIP_REASON.
    fn skip(self, s: &'a str) -> Self {
        Self {
            skip_reason: s,
            ..self
        }
    }
    /// Mark the package as failed with the given PKG_FAIL_REASON.
    fn fail(self, f: &'a str) -> Self {
        Self {
            fail_reason: f,
            ..self
        }
    }
    /// Flag the package as a bootstrap package (BOOTSTRAP_PKG=yes).
    fn bootstrap(self) -> Self {
        Self {
            bootstrap_pkg: "yes",
            ..self
        }
    }
    /// Set USERGROUP_PHASE to the named phase.
    fn usergroup(self, u: &'a str) -> Self {
        Self {
            usergroup_phase: u,
            ..self
        }
    }
    /// Set MULTI_VERSION, marking a multi-version package variant.
    fn multi(self, m: &'a str) -> Self {
        Self {
            multi_version: m,
            ..self
        }
    }
    /// Make the given build target fail, printing `msg` to stderr.
    fn fail_at(self, target: &'a str, msg: &'a str) -> Self {
        Self {
            fail_target: Some((target, msg)),
            ..self
        }
    }
}
/// Locate a usable make(1) binary, preferring pkgsrc bmake installations
/// over the system make.  Returns `None` when no candidate exists, which
/// the harness treats as "skip these integration tests on this host".
fn find_make() -> Option<PathBuf> {
    ["/opt/pkg/bin/bmake", "/usr/pkg/bin/bmake", "/usr/bin/make"]
        .iter()
        .map(PathBuf::from)
        .find(|candidate| candidate.exists())
}
/// Shared fixture for the integration tests: a temporary directory holding
/// a mock pkgsrc tree, a bob config file, and mock pkg tools.
struct TestHarness {
// Held only to keep the temp directory alive; the tree is deleted on drop.
_tmpdir: TempDir,
// Root of the temporary tree (the TempDir's path).
root: PathBuf,
// Path to the bmake/make executable found on this host.
make: PathBuf,
}
impl TestHarness {
/// Build a harness: locate bmake, create a temp root, and populate it with
/// the mock pkgsrc tree plus a bob config.  Bails (rather than panicking)
/// when no make binary is available so tests can be skipped on such hosts.
fn new() -> Result<Self> {
let make = match find_make() {
Some(m) => m,
None => anyhow::bail!("bmake not found, skipping integration test"),
};
let tmpdir = TempDir::new().context("Failed to create temp dir")?;
let root = tmpdir.path().to_path_buf();
// _tmpdir must outlive the harness: dropping it removes the whole tree.
let harness = Self {
_tmpdir: tmpdir,
root,
make,
};
harness.create_tree()?;
Ok(harness)
}
// --- Path helpers for well-known locations under the temp root. ---
/// Root of the mock pkgsrc tree.
fn pkgsrc(&self) -> PathBuf {
self.root.join("pkgsrc")
}
/// bob's database directory (configured via config.lua).
fn dbdir(&self) -> PathBuf {
self.root.join("db")
}
/// Per-package build log directory, under the database directory.
fn logdir(&self) -> PathBuf {
self.dbdir().join("logs")
}
/// Location of the generated bob config file.
fn config_path(&self) -> PathBuf {
self.root.join("config.lua")
}
/// PACKAGES directory where built binary packages land.
fn packages_dir(&self) -> PathBuf {
self.root.join("packages")
}
/// Create the directory layout and generate every mock file: category and
/// package Makefiles, the pkg_install stub, mock pkg tools, and config.lua.
fn create_tree(&self) -> Result<()> {
let dirs = [
"pkgsrc/test/base",
"pkgsrc/test/mid",
"pkgsrc/test/also-base",
"pkgsrc/test/top",
"pkgsrc/test/multi",
"pkgsrc/test/dual",
"pkgsrc/test/skip-me",
"pkgsrc/test/dep-skip",
"pkgsrc/test/fail-me",
"pkgsrc/test/dep-fail",
"pkgsrc/test/bad-dep",
"pkgsrc/test/build-fail",
"pkgsrc/test/dep-bfail",
"pkgsrc/test/fail-checksum",
"pkgsrc/test/fail-at-build",
"pkgsrc/test/fail-install",
"pkgsrc/test/fail-package",
"pkgsrc/test/chain-a",
"pkgsrc/test/chain-b",
"pkgsrc/test/chain-c",
"pkgsrc/test/chain-d",
"pkgsrc/pkgtools/pkg_install",
"packages/All",
"pkgtools",
"pkg_dbdir",
"pkg_refcount_dbdir",
"prefix",
"logs",
];
for d in &dirs {
fs::create_dir_all(self.root.join(d))?;
}
self.write_pkgsrc_root_makefile()?;
self.write_category_makefile()?;
self.write_pkg_install_makefile()?;
self.write_mock_pkg_tools()?;
self.write_package_makefiles()?;
self.write_config_lua()?;
Ok(())
}
/// Top-level Makefile: advertises a single "test" category via the
/// show-subdir-var target that the scanner invokes.
fn write_pkgsrc_root_makefile(&self) -> Result<()> {
let content = "\
show-subdir-var:
\t@echo \"test\"
";
fs::write(self.pkgsrc().join("Makefile"), content)?;
Ok(())
}
/// Category Makefile: lists every mock package directory in "test".
fn write_category_makefile(&self) -> Result<()> {
let content = "\
show-subdir-var:
\t@echo \"base mid also-base top multi dual skip-me dep-skip fail-me dep-fail bad-dep build-fail dep-bfail fail-checksum fail-at-build fail-install fail-package chain-a chain-b chain-c chain-d\"
";
fs::write(self.pkgsrc().join("test/Makefile"), content)?;
Ok(())
}
/// pkgtools/pkg_install stub: answers the show-vars queries bob uses to
/// discover PACKAGES, PKG_DBDIR, PKG_REFCOUNT_DBDIR, PKG_TOOLS_BIN and
/// PREFIX, pointing them all into the temp root.
fn write_pkg_install_makefile(&self) -> Result<()> {
let content = format!(
"\
show-vars:
\t@for v in ${{VARNAMES}}; do \\\n\
\t  case \"$$v\" in \\\n\
\t  PACKAGES) echo \"{packages}\" ;; \\\n\
\t  PKG_DBDIR) echo \"{pkg_dbdir}\" ;; \\\n\
\t  PKG_REFCOUNT_DBDIR) echo \"{pkg_refcount_dbdir}\" ;; \\\n\
\t  PKG_TOOLS_BIN) echo \"{pkgtools}\" ;; \\\n\
\t  PREFIX) echo \"{prefix}\" ;; \\\n\
\t  *) echo ;; \\\n\
\t  esac; \\\n\
\tdone
",
packages = self.packages_dir().display(),
pkg_dbdir = self.root.join("pkg_dbdir").display(),
pkg_refcount_dbdir = self.root.join("pkg_refcount_dbdir").display(),
pkgtools = self.root.join("pkgtools").display(),
prefix = self.root.join("prefix").display(),
);
fs::write(self.pkgsrc().join("pkgtools/pkg_install/Makefile"), content)?;
Ok(())
}
/// Install no-op pkg_add/pkg_delete shell scripts so builds can "install"
/// and "deinstall" packages without a real pkg_install.
fn write_mock_pkg_tools(&self) -> Result<()> {
let script = "#!/bin/sh\nexit 0\n";
for tool in &["pkg_add", "pkg_delete"] {
let path = self.root.join("pkgtools").join(tool);
fs::write(&path, script)?;
// Scripts must be executable for the build to invoke them.
fs::set_permissions(&path, fs::Permissions::from_mode(0o755))?;
}
Ok(())
}
/// Generate a Makefile for every mock package.  The definitions cover the
/// scenarios the tests assert on: a dependency chain (base/mid/top), scan
/// skips and fails plus their dependents, an unresolvable dependency, a
/// failure at each build phase, and a four-deep cascading failure chain.
fn write_package_makefiles(&self) -> Result<()> {
let packages = [
PkgDef::new("test/base", "base-1.0").bootstrap(),
PkgDef::new("test/mid", "mid-1.0")
.depends("base>=1.0:test/base")
.usergroup("configure"),
PkgDef::new("test/also-base", "also-base-1.0").depends("base>=1.0:test/base"),
PkgDef::new("test/top", "top-1.0").depends(
"mid-[0-9]*:test/mid \
also-base-[0-9]*:test/also-base",
),
PkgDef::new("test/multi", "py313-multi-1.0").multi("PYTHON_VERSION_REQD=313"),
PkgDef::new("test/skip-me", "skip-me-1.0").skip("not supported"),
PkgDef::new("test/dep-skip", "dep-skip-1.0").depends("skip-me-[0-9]*:test/skip-me"),
PkgDef::new("test/fail-me", "fail-me-1.0").fail("known broken"),
PkgDef::new("test/dep-fail", "dep-fail-1.0").depends("fail-me-[0-9]*:test/fail-me"),
PkgDef::new("test/bad-dep", "bad-dep-1.0")
.depends("nonexistent-[0-9]*:test/nonexistent"),
PkgDef::new("test/build-fail", "build-fail-1.0")
.fail_at("configure", "configure failed"),
PkgDef::new("test/dep-bfail", "dep-bfail-1.0")
.depends("build-fail-[0-9]*:test/build-fail"),
PkgDef::new("test/fail-checksum", "fail-checksum-1.0")
.fail_at("checksum", "checksum mismatch"),
PkgDef::new("test/fail-at-build", "fail-at-build-1.0")
.fail_at("all", "compilation error"),
PkgDef::new("test/fail-install", "fail-install-1.0")
.fail_at("stage-install", "install failed: permission denied"),
PkgDef::new("test/fail-package", "fail-package-1.0")
.fail_at("stage-package-create", "pkg_create: error writing archive"),
PkgDef::new("test/chain-d", "chain-d-1.0")
.fail_at("configure", "chain-d configure failed"),
PkgDef::new("test/chain-c", "chain-c-1.0").depends("chain-d-[0-9]*:test/chain-d"),
PkgDef::new("test/chain-b", "chain-b-1.0").depends("chain-c-[0-9]*:test/chain-c"),
PkgDef::new("test/chain-a", "chain-a-1.0").depends("chain-b-[0-9]*:test/chain-b"),
];
for pkg in &packages {
self.write_pkg_makefile(pkg)?;
}
// test/dual is hand-written: it emits two pbulk-index records.
self.write_dual_variant_makefile()?;
Ok(())
}
/// Hand-written Makefile for test/dual: a multi-version package whose
/// pbulk-index prints two records (py27 and py314) and whose package/
/// show-var targets pick the variant via PYTHON_VERSION_REQD.
fn write_dual_variant_makefile(&self) -> Result<()> {
let content = "\
PKGNAME=py314-dual-1.0
pbulk-index:
\t@printf 'PKGNAME=py27-dual-1.0\\nALL_DEPENDS=\\nPKG_SKIP_REASON=\\nPKG_FAIL_REASON=\\nNO_BIN_ON_FTP=\\nRESTRICTED=\\nCATEGORIES=test\\nMAINTAINER=test@example.com\\nUSE_DESTDIR=yes\\nBOOTSTRAP_PKG=\\nUSERGROUP_PHASE=\\nSCAN_DEPENDS=\\nMULTI_VERSION=PYTHON_VERSION_REQD=27\\n'
\t@printf 'PKGNAME=py314-dual-1.0\\nALL_DEPENDS=\\nPKG_SKIP_REASON=\\nPKG_FAIL_REASON=\\nNO_BIN_ON_FTP=\\nRESTRICTED=\\nCATEGORIES=test\\nMAINTAINER=test@example.com\\nUSE_DESTDIR=yes\\nBOOTSTRAP_PKG=\\nUSERGROUP_PHASE=\\nSCAN_DEPENDS=\\nMULTI_VERSION=PYTHON_VERSION_REQD=314\\n'
clean checksum configure all stage-install create-usergroup:
\t@true
stage-package-create:
\t@mkdir -p ${.CURDIR}/pkg
\t@case \"${PYTHON_VERSION_REQD}\" in \\\n\
\t27) pkgname=py27-dual-1.0 ;; \\\n\
\t*) pkgname=py314-dual-1.0 ;; \\\n\
\tesac; \\\n\
\td=$$(mktemp -d) && \\\n\
\tprintf '@name %s\\n' \"$$pkgname\" > \"$$d/+CONTENTS\" && \\\n\
\tprintf 'Test package\\n' > \"$$d/+COMMENT\" && \\\n\
\tprintf 'Test package description\\n' > \"$$d/+DESC\" && \\\n\
\tprintf '0\\n' > \"$$d/+SIZE_PKG\" && \\\n\
\tprintf 'BUILD_DATE=%s\\nCATEGORIES=test\\nMACHINE_ARCH=x86_64\\nOPSYS=Test\\nOS_VERSION=1.0\\nPKGPATH=test/dual\\nPKGTOOLS_VERSION=20210710\\n' \\\n\
\t\"$$(date '+%Y-%m-%d %H:%M:%S %z')\" > \"$$d/+BUILD_INFO\" && \\\n\
\t(cd \"$$d\" && COPYFILE_DISABLE=1 tar czf \"${.CURDIR}/pkg/$$pkgname.tgz\" \\\n\
\t+CONTENTS +COMMENT +DESC +SIZE_PKG +BUILD_INFO) && \\\n\
\trm -rf \"$$d\"
show-var:
\t@case \"${VARNAME}:${PYTHON_VERSION_REQD}\" in \\\n\
\tSTAGE_PKGFILE:27) echo \"${.CURDIR}/pkg/py27-dual-1.0.tgz\" ;; \\\n\
\tSTAGE_PKGFILE:*) echo \"${.CURDIR}/pkg/py314-dual-1.0.tgz\" ;; \\\n\
\tesac
show-vars:
\t@for v in ${VARNAMES}; do \\\n\
\t  case \"$$v\" in \\\n\
\t  WRKDIR) echo \"${.CURDIR}/work\" ;; \\\n\
\t  *) echo ;; \\\n\
\t  esac; \\\n\
\tdone
";
fs::write(self.pkgsrc().join("test/dual/Makefile"), content)?;
Ok(())
}
/// Generate one package Makefile from a PkgDef: the pbulk-index record,
/// and — for buildable packages — the phase targets, where either every
/// phase succeeds and stage-package-create produces a minimal .tgz, or
/// the configured fail_target exits 1 with its message.
fn write_pkg_makefile(&self, pkg: &PkgDef) -> Result<()> {
// Fields printed by the pbulk-index target, one VAR=VALUE per line.
let mut fields = vec![
format!("PKGNAME={}\\n", pkg.pkgname),
format!("PKG_LOCATION={}\\n", pkg.pkgpath),
format!("ALL_DEPENDS={}\\n", pkg.all_depends),
format!("PKG_SKIP_REASON={}\\n", pkg.skip_reason),
format!("PKG_FAIL_REASON={}\\n", pkg.fail_reason),
"NO_BIN_ON_FTP=\\n".to_string(),
"RESTRICTED=\\n".to_string(),
"CATEGORIES=test\\n".to_string(),
"MAINTAINER=test@example.com\\n".to_string(),
"USE_DESTDIR=yes\\n".to_string(),
format!("BOOTSTRAP_PKG={}\\n", pkg.bootstrap_pkg),
format!("USERGROUP_PHASE={}\\n", pkg.usergroup_phase),
"SCAN_DEPENDS=\\n".to_string(),
];
if !pkg.multi_version.is_empty() {
fields.push(format!("MULTI_VERSION={}\\n", pkg.multi_version));
}
let index_body = fields.join("");
// Skipped/failed packages never get build targets — the scanner filters
// them out before any build is attempted.
let buildable = pkg.skip_reason.is_empty() && pkg.fail_reason.is_empty();
let build_targets = if buildable {
let all_targets = [
"clean",
"checksum",
"configure",
"all",
"stage-install",
"stage-package-create",
"create-usergroup",
];
let mut sections = Vec::new();
match pkg.fail_target {
Some((fail_tgt, msg)) => {
// Every phase except the failing one succeeds.
let pass: Vec<&str> = all_targets
.iter()
.filter(|t| **t != fail_tgt)
.copied()
.collect();
if !pass.is_empty() {
sections.push(format!("\n{targets}:\n\t@true", targets = pass.join(" "),));
}
sections.push(format!(
"\n{fail_tgt}:\n\
\t@echo '{msg}' >&2; exit 1",
));
}
None => {
// All phases succeed; stage-package-create builds a minimal
// but well-formed binary package tarball.
let pass: Vec<&str> = all_targets
.iter()
.filter(|t| **t != "stage-package-create")
.copied()
.collect();
sections.push(format!("\n{targets}:\n\t@true", targets = pass.join(" "),));
sections.push(format!(
"\nstage-package-create:\n\
\t@mkdir -p ${{.CURDIR}}/pkg\n\
\t@d=$$(mktemp -d) && \\\n\
\tprintf '@name {pkgname}\\n' > \"$$d/+CONTENTS\" && \\\n\
\tprintf 'Test package\\n' > \"$$d/+COMMENT\" && \\\n\
\tprintf 'Test package description\\n' > \"$$d/+DESC\" && \\\n\
\tprintf '0\\n' > \"$$d/+SIZE_PKG\" && \\\n\
\tprintf 'BUILD_DATE=%s\\nCATEGORIES=test\\nMACHINE_ARCH=x86_64\\nOPSYS=Test\\nOS_VERSION=1.0\\nPKGPATH={pkgpath}\\nPKGTOOLS_VERSION=20210710\\n' \
\"$$(date '+%Y-%m-%d %H:%M:%S %z')\" > \"$$d/+BUILD_INFO\" && \\\n\
\t(cd \"$$d\" && COPYFILE_DISABLE=1 tar czf \"${{.CURDIR}}/pkg/{pkgname}.tgz\" \
+CONTENTS +COMMENT +DESC +SIZE_PKG +BUILD_INFO) && \\\n\
\trm -rf \"$$d\"",
pkgname = pkg.pkgname,
pkgpath = pkg.pkgpath,
));
}
}
// show-var / show-vars answer bob's post-build queries for the
// staged package file and the work directory.
sections.push(format!(
"\nshow-var:\n\
\t@case \"${{VARNAME}}\" in \
STAGE_PKGFILE) echo \
\"${{.CURDIR}}/pkg/{pkgname}.tgz\" ;; esac",
pkgname = pkg.pkgname,
));
sections.push(
"\nshow-vars:\n\
\t@for v in ${VARNAMES}; do \\\n\
\t  case \"$$v\" in \\\n\
\t  WRKDIR) echo \"${.CURDIR}/work\" ;; \\\n\
\t  *) echo ;; \\\n\
\t  esac; \\\n\
\tdone"
.to_string(),
);
sections.join("\n")
} else {
String::new()
};
let content = format!(
"PKGNAME={pkgname}\n\
\n\
pbulk-index:\n\
\t@printf '{index_body}'\n\
{build_targets}\n",
pkgname = pkg.pkgname,
);
let pkgdir = self.pkgsrc().join(pkg.pkgpath);
fs::write(pkgdir.join("Makefile"), content)?;
Ok(())
}
/// Write the default config.lua: small thread counts, no TUI, and paths
/// pointing at this harness's temp tree and make binary.
fn write_config_lua(&self) -> Result<()> {
let content = format!(
"\
options = {{
    build_threads = 2,
    dbdir = \"{dbdir}\",
    scan_threads = 2,
    tui = false,
}}
pkgsrc = {{
    basedir = \"{pkgsrc}\",
    make = \"{make}\",
}}
",
dbdir = self.dbdir().display(),
pkgsrc = self.pkgsrc().display(),
make = self.make.display(),
);
fs::write(self.config_path(), content)?;
Ok(())
}
/// Load the harness config file through bob's normal config loader.
fn load_config(&self) -> Result<Config> {
let path = self.config_path();
Config::load(Some(&path))
}
/// Open (creating if necessary) the bob database under dbdir().
fn open_db(&self) -> Result<Database> {
Database::open(&self.dbdir())
}
/// Fresh run state; tests share clones of one state when they need a
/// common shutdown signal.
fn run_state(&self) -> RunState {
RunState::new()
}
/// PkgsrcEnv pointing every path at the temp tree, mirroring what the
/// pkg_install stub Makefile reports.
fn pkgsrc_env(&self) -> PkgsrcEnv {
PkgsrcEnv {
packages: self.packages_dir(),
pkgtools: self.root.join("pkgtools"),
prefix: self.root.join("prefix"),
pkg_dbdir: self.root.join("pkg_dbdir"),
pkg_refcount_dbdir: self.root.join("pkg_refcount_dbdir"),
metadata: std::collections::HashMap::new(),
cachevars: std::collections::HashMap::new(),
}
}
/// Run a complete scan of the tree and resolve it (non-strict), returning
/// the summary.  Convenience wrapper used by several tests.
fn run_scan(&self) -> Result<ScanSummary> {
let config = self.load_config()?;
let db = self.open_db()?;
let sandbox = Sandbox::new(&config);
let mut scope = SandboxScope::new(sandbox, self.run_state());
let mut scan = Scan::new(&config);
scan.init_from_db(&db)?;
scan.start(&db, &mut scope)?;
scan.resolve_with_report(&db, false)
}
}
/// Scan the whole mock tree and verify both the aggregate counts and the
/// specific scan state of each deliberately-broken package.
#[test]
fn test_full_tree_scan() -> Result<()> {
let h = TestHarness::new()?;
let result = h.run_scan()?;
let c = result.counts();
// 22 packages total: 17 buildable (the dual package contributes two
// variants), plus 5 skipped for the various reasons checked below.
assert_eq!(
c.buildable,
17,
"expected 17 buildable, got {} (total packages: {})",
c.buildable,
result.packages.len()
);
assert_eq!(c.states[PreSkipped], 1, "expected 1 pkg_skip");
assert_eq!(c.states[PreFailed], 1, "expected 1 pkg_fail");
assert_eq!(
c.states[IndirectPreSkipped], 1,
"expected 1 indirect_preskip"
);
assert_eq!(
c.states[IndirectPreFailed], 1,
"expected 1 indirect_prefail"
);
assert_eq!(c.states[Unresolved], 1, "expected 1 unresolved");
assert_eq!(result.packages.len(), 22, "expected 22 total packages");
// Spot-check each broken package's exact state.
for pkg in &result.packages {
match pkg.pkgpath().as_path().to_string_lossy().as_ref() {
// PKG_SKIP_REASON set directly.
"test/skip-me" => {
if let bob::ScanResult::Skipped { state, .. } = pkg {
assert!(
matches!(state, PackageState::PreSkipped(_)),
"skip-me should be PreSkipped, got {:?}",
state
);
} else {
panic!("skip-me should be Skipped");
}
}
// Depends on skip-me, so skipped transitively.
"test/dep-skip" => {
if let bob::ScanResult::Skipped { state, .. } = pkg {
assert!(
matches!(state, PackageState::IndirectPreSkipped(_)),
"dep-skip should be IndirectPreSkipped, got {:?}",
state
);
} else {
panic!("dep-skip should be Skipped");
}
}
// PKG_FAIL_REASON set directly.
"test/fail-me" => {
if let bob::ScanResult::Skipped { state, .. } = pkg {
assert!(
matches!(state, PackageState::PreFailed(_)),
"fail-me should be PreFailed, got {:?}",
state
);
} else {
panic!("fail-me should be Skipped");
}
}
// Depends on fail-me, so failed transitively.
"test/dep-fail" => {
if let bob::ScanResult::Skipped { state, .. } = pkg {
assert!(
matches!(state, PackageState::IndirectPreFailed(_)),
"dep-fail should be IndirectPreFailed, got {:?}",
state
);
} else {
panic!("dep-fail should be Skipped");
}
}
// Dependency on test/nonexistent cannot be resolved.
"test/bad-dep" => {
if let bob::ScanResult::Skipped { state, .. } = pkg {
assert!(
matches!(state, PackageState::Unresolved(_)),
"bad-dep should be Unresolved, got {:?}",
state
);
} else {
panic!("bad-dep should be Skipped");
}
}
_ => {}
}
}
Ok(())
}
/// Scan only test/top and verify that exactly its transitive dependency
/// closure (top, mid, also-base, base) is scanned, all buildable.
#[test]
fn test_limited_scan() -> Result<()> {
    let harness = TestHarness::new()?;
    let config = harness.load_config()?;
    let db = harness.open_db()?;
    let mut scope = SandboxScope::new(Sandbox::new(&config), harness.run_state());
    let mut scan = Scan::new(&config);
    // Restrict the scan to the single top-level package.
    scan.add(&pkgsrc::PkgPath::new("test/top")?);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    let result = scan.resolve_with_report(&db, false)?;
    assert_eq!(
        result.packages.len(),
        4,
        "expected 4 packages from limited scan of test/top, got {}",
        result.packages.len()
    );
    let counts = result.counts();
    assert_eq!(counts.buildable, 4, "all 4 packages should be buildable");
    assert_eq!(counts.states[PreSkipped], 0);
    assert_eq!(counts.states[PreFailed], 0);
    assert_eq!(counts.states[Unresolved], 0);
    // The closure must contain exactly the expected pkgpaths.
    for expected in ["test/top", "test/mid", "test/also-base", "test/base"] {
        assert!(
            result
                .packages
                .iter()
                .any(|p| p.pkgpath().as_path().to_string_lossy() == expected)
        );
    }
    Ok(())
}
/// Scan and build the entire tree, then verify per-package build outcomes,
/// the presence/absence of resulting .tgz files, and failure log dirs.
#[test]
fn test_full_build() -> Result<()> {
let h = TestHarness::new()?;
let config = h.load_config()?;
let db = h.open_db()?;
let state = h.run_state();
let sandbox = Sandbox::new(&config);
let mut scan_scope = SandboxScope::new(sandbox, state.clone());
let mut scan = Scan::new(&config);
scan.init_from_db(&db)?;
scan.start(&db, &mut scan_scope)?;
let scan_result = scan.resolve_with_report(&db, false)?;
// Feed every buildable package from the scan into the build.
let scanpkgs = scan_result
.buildable()
.map(|p| (p.pkgname().clone(), p.clone()))
.collect();
let pkgsrc_env = h.pkgsrc_env();
let build_sandbox = Sandbox::new(&config);
let build_scope = SandboxScope::new(build_sandbox, state.clone());
let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
let build_result = build.start(&state, &db)?;
let bc = build_result.counts();
// 7 successes: base, mid, also-base, top, multi, and both dual variants.
assert_eq!(
bc.states[Success], 7,
"expected 7 successful builds, got {}",
bc.states[Success]
);
// 6 direct failures: one per fail_at package plus chain-d.
assert_eq!(
bc.states[Failed], 6,
"expected 6 failed builds, got {}",
bc.states[Failed]
);
// 4 indirect: dep-bfail plus chain-a/b/c behind chain-d.
let indirect_failed = bc.states[IndirectFailed];
assert_eq!(
indirect_failed, 4,
"expected 4 indirect-failed skips, got {}",
indirect_failed
);
// Per-package outcome checks.
for r in &build_result.results {
let name = r.pkgname.pkgname();
match name {
"base-1.0" | "mid-1.0" | "also-base-1.0" | "top-1.0" | "py313-multi-1.0" => {
assert!(
matches!(r.state, PackageState::Success),
"{} should be Success, got {:?}",
name,
r.state
);
}
"build-fail-1.0" | "fail-checksum-1.0" | "fail-at-build-1.0" | "fail-install-1.0"
| "fail-package-1.0" | "chain-d-1.0" => {
assert!(
matches!(r.state, PackageState::Failed(_)),
"{} should be Failed, got {:?}",
name,
r.state
);
}
"dep-bfail-1.0" | "chain-c-1.0" | "chain-b-1.0" | "chain-a-1.0" => {
assert!(
matches!(r.state, PackageState::IndirectFailed(_)),
"{} should be Skipped(IndirectFailed), got {:?}",
name,
r.state
);
}
_ => {}
}
}
// Successful builds must have produced package files under packages/All…
let packages_all = h.packages_dir().join("All");
for name in &[
"base-1.0",
"mid-1.0",
"also-base-1.0",
"top-1.0",
"py313-multi-1.0",
] {
let tgz = packages_all.join(format!("{}.tgz", name));
assert!(tgz.exists(), "Package file {} should exist", tgz.display());
}
// …and failed builds must not have.
for name in &[
"build-fail-1.0",
"fail-checksum-1.0",
"fail-at-build-1.0",
"fail-install-1.0",
"fail-package-1.0",
"chain-d-1.0",
] {
let tgz = packages_all.join(format!("{}.tgz", name));
assert!(
!tgz.exists(),
"Failed package {} should not exist",
tgz.display()
);
}
// Failed builds keep their log directories for diagnosis.
for name in &["build-fail-1.0", "fail-checksum-1.0", "fail-at-build-1.0"] {
let fail_logdir = h.logdir().join(name);
assert!(
fail_logdir.exists(),
"Log dir for {} should exist at {}",
name,
fail_logdir.display()
);
}
Ok(())
}
/// After a full scan, a second Scan initialized from the same database
/// should find the scanned pkgpaths cached and resolve to the same result
/// without rescanning.
#[test]
fn test_scan_database_caching() -> Result<()> {
    let harness = TestHarness::new()?;
    let first = harness.run_scan()?;
    assert_eq!(first.packages.len(), 22);
    let config = harness.load_config()?;
    let db = harness.open_db()?;
    let mut rescan = Scan::new(&config);
    // 21 pkgpaths are cached (test/dual yields two packages from one path).
    let (cached_count, _pending) = rescan.init_from_db(&db)?;
    assert_eq!(
        cached_count, 21,
        "second scan should find 21 cached package paths, got {}",
        cached_count
    );
    rescan.set_full_scan_complete();
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, harness.run_state());
    rescan.start(&db, &mut scope)?;
    let second = rescan.resolve_with_report(&db, false)?;
    assert_eq!(
        second.packages.len(),
        22,
        "cached resolve should produce same result"
    );
    Ok(())
}
/// Build only the bootstrap package (test/base) and verify no deinstall log
/// is left behind — successful builds remove their log directory entirely.
#[test]
fn test_build_bootstrap_skips_deinstall() -> Result<()> {
let h = TestHarness::new()?;
let config = h.load_config()?;
let db = h.open_db()?;
let state = h.run_state();
let sandbox = Sandbox::new(&config);
let mut scan_scope = SandboxScope::new(sandbox, state.clone());
let mut scan = Scan::new(&config);
scan.init_from_db(&db)?;
scan.start(&db, &mut scan_scope)?;
let scan_result = scan.resolve_with_report(&db, false)?;
// Restrict the build to just test/base.
let scanpkgs = scan_result
.buildable()
.filter(|p| p.pkgpath.as_path().to_string_lossy() == "test/base")
.map(|p| (p.pkgname().clone(), p.clone()))
.collect();
let pkgsrc_env = h.pkgsrc_env();
let build_sandbox = Sandbox::new(&config);
let build_scope = SandboxScope::new(build_sandbox, state.clone());
let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
let build_result = build.start(&state, &db)?;
assert_eq!(build_result.counts().states[Success], 1);
let base_logdir = h.logdir().join("base-1.0");
let deinstall_log = base_logdir.join("deinstall.log");
assert!(
!deinstall_log.exists(),
"Bootstrap package should not have deinstall.log (log dir removed on success)"
);
Ok(())
}
/// Interrupt a scan mid-way via the run state's shutdown signal, then verify
/// a second scan resumes from the database and still produces the full,
/// correct result.
#[test]
fn test_scan_resume() -> Result<()> {
    let h = TestHarness::new()?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    // Use the harness helper, consistent with every other test in this file
    // (RunState is already imported, so the bob:: path was redundant anyway).
    let state = h.run_state();
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.init_from_db(&db)?;
    // Signal shutdown shortly after the scan starts so only part of the
    // tree gets recorded.
    let state_clone = state.clone();
    let _trigger = std::thread::spawn(move || {
        std::thread::sleep(std::time::Duration::from_millis(200));
        state_clone.shutdown();
    });
    // The interrupted scan may legitimately return an error; ignore it —
    // what matters is what was persisted before the shutdown.
    let _ = scan.start(&db, &mut scope);
    let scanned = db.get_scanned_pkgpaths()?;
    // Second scan: the cached count must match what the first run persisted.
    let state2 = h.run_state();
    let sandbox2 = Sandbox::new(&config);
    let mut scope2 = SandboxScope::new(sandbox2, state2);
    let mut scan2 = Scan::new(&config);
    let (cached, _) = scan2.init_from_db(&db)?;
    assert_eq!(
        cached,
        scanned.len(),
        "cached count should match scanned pkgpaths"
    );
    scan2.start(&db, &mut scope2)?;
    let result = scan2.resolve_with_report(&db, false)?;
    assert!(
        result.packages.len() >= 22,
        "resumed scan should produce at least 22 packages, got {}",
        result.packages.len()
    );
    let c = result.counts();
    assert_eq!(c.buildable, 17, "expected 17 buildable after resume");
    Ok(())
}
/// Shared helper: full scan of the tree followed by a build of every
/// buildable package.  Returns the database handle plus both summaries so
/// callers can assert on scan results, build results, or db contents.
fn run_scan_and_build(h: &TestHarness) -> Result<(Database, ScanSummary, bob::BuildSummary)> {
let config = h.load_config()?;
let db = h.open_db()?;
let state = h.run_state();
let sandbox = Sandbox::new(&config);
let mut scan_scope = SandboxScope::new(sandbox, state.clone());
let mut scan = Scan::new(&config);
scan.init_from_db(&db)?;
scan.start(&db, &mut scan_scope)?;
let scan_result = scan.resolve_with_report(&db, false)?;
// Build everything the scan considers buildable.
let scanpkgs = scan_result
.buildable()
.map(|p| (p.pkgname().clone(), p.clone()))
.collect();
let pkgsrc_env = h.pkgsrc_env();
let build_sandbox = Sandbox::new(&config);
let build_scope = SandboxScope::new(build_sandbox, state.clone());
let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
let build_result = build.start(&state, &db)?;
Ok((db, scan_result, build_result))
}
/// After a complete build, a second Build loaded from the database should
/// find every result cached and produce no new work.
#[test]
fn test_cached_build_resume() -> Result<()> {
let h = TestHarness::new()?;
let (_, scan_result, _) = run_scan_and_build(&h)?;
let config = h.load_config()?;
let db = h.open_db()?;
let state = h.run_state();
// Same buildable set as the first build.
let scanpkgs = scan_result
.buildable()
.map(|p| (p.pkgname().clone(), p.clone()))
.collect();
let pkgsrc_env = h.pkgsrc_env();
let build_sandbox = Sandbox::new(&config);
let build_scope = SandboxScope::new(build_sandbox, state.clone());
let mut build2 = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
// All 17 buildable packages should already have a recorded outcome.
let cached = build2.load_cached_from_db(&db)?;
assert_eq!(cached, 17, "expected 17 cached results, got {}", cached);
let result2 = build2.start(&state, &db)?;
assert!(
result2.results.is_empty(),
"second build should produce no new results (all cached), got {}",
result2.results.len()
);
Ok(())
}
/// Exercise the database's build-result API: lookup by name, listing,
/// failed-package query, and the three deletion paths (by name, by
/// pkgpath, clear-all).
#[test]
fn test_build_results_in_db() -> Result<()> {
let h = TestHarness::new()?;
let (db, _, _) = run_scan_and_build(&h)?;
// A successful package has a Success result recorded.
let base = db
.get_package_by_name("base-1.0")?
.expect("base-1.0 should exist");
let base_result = db
.get_build_result(base.id)?
.expect("base-1.0 should have a build result");
assert!(
matches!(base_result.state, PackageState::Success),
"base-1.0 should be Success, got {:?}",
base_result.state
);
// A failed package has a Failed result recorded.
let bf = db
.get_package_by_name("build-fail-1.0")?
.expect("build-fail-1.0 should exist");
let bf_result = db
.get_build_result(bf.id)?
.expect("build-fail-1.0 should have a build result");
assert!(
matches!(bf_result.state, PackageState::Failed(_)),
"build-fail-1.0 should be Failed, got {:?}",
bf_result.state
);
let all = db.get_all_build_results()?;
assert!(
all.len() >= 14,
"expected at least 14 build results, got {}",
all.len()
);
let failed = db.get_failed_packages()?;
assert!(
failed.contains(&"build-fail-1.0".to_string()),
"failed list should contain build-fail-1.0"
);
// Delete-by-name is true once, then false (already gone).
assert!(db.delete_build_by_name("base-1.0")?);
assert!(!db.delete_build_by_name("base-1.0")?);
let del_count = db.delete_build_by_pkgpath("test/build-fail")?;
assert_eq!(del_count, 1, "expected 1 deleted, got {}", del_count);
// clear_builds removes whatever remains.
let cleared = db.clear_builds()?;
assert!(cleared >= 1, "expected at least 1 cleared, got {}", cleared);
let remaining = db.get_all_build_results()?;
assert!(remaining.is_empty(), "expected 0 after clear_builds");
Ok(())
}
/// With strict resolution enabled, the unresolved dependency in the tree
/// (test/bad-dep) must turn the scan report into an error that names the
/// strict_scan option.
#[test]
fn test_strict_scan() -> Result<()> {
    let harness = TestHarness::new()?;
    let config = harness.load_config()?;
    let db = harness.open_db()?;
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, harness.run_state());
    let mut scan = Scan::new(&config);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    // `true` requests strict resolution.
    let outcome = scan.resolve_with_report(&db, true);
    assert!(
        outcome.is_err(),
        "strict scan should fail with unresolved deps"
    );
    let err_msg = outcome.expect_err("expected error").to_string();
    assert!(
        err_msg.contains("strict_scan"),
        "error should mention strict_scan, got: {}",
        err_msg
    );
    Ok(())
}
/// Verify the database's full-scan-complete flag round-trips and that a
/// stored PkgsrcEnv loads back identically, with duplicate stores rejected.
#[test]
fn test_pkgsrc_env_persistence() -> Result<()> {
    let harness = TestHarness::new()?;
    let db = harness.open_db()?;
    // Flag starts clear, can be set, and can be cleared again.
    assert!(!db.full_scan_complete());
    db.set_full_scan_complete()?;
    assert!(db.full_scan_complete());
    db.clear_full_scan_complete()?;
    assert!(!db.full_scan_complete());
    // Store the env and check every path field survives the round trip.
    let env = harness.pkgsrc_env();
    db.store_pkgsrc_env(&env)?;
    let loaded = db.load_pkgsrc_env()?;
    for (got, want) in [
        (&loaded.packages, &env.packages),
        (&loaded.pkgtools, &env.pkgtools),
        (&loaded.prefix, &env.prefix),
        (&loaded.pkg_dbdir, &env.pkg_dbdir),
        (&loaded.pkg_refcount_dbdir, &env.pkg_refcount_dbdir),
    ] {
        assert_eq!(got, want);
    }
    // A second store of the same env must be rejected.
    assert!(
        db.store_pkgsrc_env(&env).is_err(),
        "duplicate store_pkgsrc_env should fail"
    );
    Ok(())
}
/// Write a config with every optional section populated (options, pkgsrc
/// extras, sandboxes.environment) and verify each accessor reflects it.
#[test]
fn test_config_options_and_environment() -> Result<()> {
let h = TestHarness::new()?;
// Overwrite the harness default config with a fully-populated one.
let content = format!(
"\
options = {{
    build_threads = 4,
    dbdir = \"{dbdir}\",
    scan_threads = 3,
    strict_scan = true,
}}
pkgsrc = {{
    basedir = \"{pkgsrc}\",
    make = \"{make}\",
    cachevars = {{ \"NATIVE_OPSYS\" }},
    save_wrkdir_patterns = {{ \"**/config.log\" }},
}}
sandboxes = {{
    basedir = \"{dbdir}/sandboxes\",
    environment = {{
        build = {{
            clear = true,
            inherit = {{ \"TERM\", \"HOME\" }},
            vars = {{ PATH = \"/sbin:/bin\", LC_ALL = \"C\" }},
        }},
        dev = {{
            clear = true,
            inherit = {{ \"TERM\", \"HOME\" }},
            vars = {{ BINPKG_SITES = \"$bob_packages\" }},
        }},
    }},
}}
",
dbdir = h.dbdir().display(),
pkgsrc = h.pkgsrc().display(),
make = h.make.display(),
);
fs::write(h.config_path(), content)?;
let config = h.load_config()?;
// options.*
assert_eq!(config.build_threads(), 4);
assert_eq!(config.scan_threads(), 3);
assert!(config.strict_scan());
// pkgsrc.*
assert_eq!(config.cachevars(), &["NATIVE_OPSYS"]);
assert_eq!(config.save_wrkdir_patterns(), &["**/config.log"]);
// sandboxes.environment.build
let env = config
.environment()
.expect("environment section should exist");
let build = env.build.as_ref().expect("environment.build should exist");
assert!(build.clear);
assert_eq!(build.inherit, vec!["TERM", "HOME"]);
let mut expected_build_vars = HashMap::new();
expected_build_vars.insert("PATH".to_string(), "/sbin:/bin".to_string());
expected_build_vars.insert("LC_ALL".to_string(), "C".to_string());
assert_eq!(build.vars, expected_build_vars);
// sandboxes.environment.dev
let dev = env.dev.as_ref().expect("environment.dev should exist");
assert!(dev.clear);
assert_eq!(dev.inherit, vec!["TERM", "HOME"]);
let mut expected_dev_vars = HashMap::new();
expected_dev_vars.insert("BINPKG_SITES".to_string(), "$bob_packages".to_string());
assert_eq!(dev.vars, expected_dev_vars);
Ok(())
}
/// Config::validate must reject a nonexistent make binary and a dbdir whose
/// parent directory does not exist, with errors naming the offending key.
#[test]
fn test_config_validation() -> Result<()> {
let h = TestHarness::new()?;
// Case 1: bad make path.
let content = format!(
"\
pkgsrc = {{
    basedir = \"{pkgsrc}\",
    make = \"/nonexistent/bmake\",
}}
",
pkgsrc = h.pkgsrc().display(),
);
fs::write(h.config_path(), &content)?;
let config = h.load_config()?;
let result = config.validate();
assert!(result.is_err(), "validate should fail with bad make path");
let errors = result.expect_err("expected validation errors");
assert!(
errors.iter().any(|e| e.contains("make")),
"errors should mention make: {:?}",
errors
);
// Case 2: dbdir under a nonexistent parent.
let content = format!(
"\
options = {{
    dbdir = \"/nonexistent/parent/db\",
}}
pkgsrc = {{
    basedir = \"{pkgsrc}\",
    make = \"{make}\",
}}
",
pkgsrc = h.pkgsrc().display(),
make = h.make.display(),
);
fs::write(h.config_path(), &content)?;
let config = h.load_config()?;
let result = config.validate();
assert!(result.is_err(), "validate should fail with bad dbdir");
let errors = result.expect_err("expected validation errors");
assert!(
errors.iter().any(|e| e.contains("dbdir")),
"errors should mention dbdir: {:?}",
errors
);
Ok(())
}
/// Add a package whose pbulk-index target itself exits non-zero and verify
/// the scanner records it as a ScanFail without disturbing other counts.
#[test]
fn test_scan_failure_handling() -> Result<()> {
let h = TestHarness::new()?;
// Create test/scan-fail with a pbulk-index that always errors out.
let scan_fail_dir = h.pkgsrc().join("test/scan-fail");
fs::create_dir_all(&scan_fail_dir)?;
let makefile = "\
PKGNAME=scan-fail-1.0
pbulk-index:
\t@echo 'fatal error' >&2; exit 1
";
fs::write(scan_fail_dir.join("Makefile"), makefile)?;
// Rewrite the category Makefile so the new directory is listed.
let cat_content = "\
show-subdir-var:
\t@echo \"base mid also-base top multi dual skip-me dep-skip fail-me dep-fail bad-dep build-fail dep-bfail fail-checksum fail-at-build fail-install fail-package chain-a chain-b chain-c chain-d scan-fail\"
";
fs::write(h.pkgsrc().join("test/Makefile"), cat_content)?;
let result = h.run_scan()?;
let c = result.counts();
assert_eq!(c.scanfail, 1, "expected 1 scanfail, got {}", c.scanfail);
assert_eq!(c.buildable, 17, "buildable count should remain 17");
let scan_fail_found = result.packages.iter().any(|p| {
matches!(p, bob::ScanResult::ScanFail { .. })
&& p.pkgpath().as_path().to_string_lossy() == "test/scan-fail"
});
assert!(scan_fail_found, "scan-fail should appear as ScanFail");
Ok(())
}
/// Each package configured to fail at a distinct phase (checksum, build,
/// install, package) must end up in the Failed state.
#[test]
fn test_build_failure_at_each_phase() -> Result<()> {
    let harness = TestHarness::new()?;
    let (_, _, build_result) = run_scan_and_build(&harness)?;
    // Index build outcomes by package name for easy lookup.
    let outcomes: HashMap<&str, &PackageState> = build_result
        .results
        .iter()
        .map(|r| (r.pkgname.pkgname(), &r.state))
        .collect();
    for name in &[
        "fail-checksum-1.0",
        "fail-at-build-1.0",
        "fail-install-1.0",
        "fail-package-1.0",
    ] {
        let outcome = outcomes
            .get(name)
            .unwrap_or_else(|| panic!("{} should have a build result", name));
        assert!(
            matches!(outcome, PackageState::Failed(_)),
            "{} should be Failed, got {:?}",
            name,
            outcome
        );
    }
    Ok(())
}
/// Verify log-file lifecycle: successful builds leave no log dir, while a
/// build that fails at phase N leaves logs for phases up to and including
/// N, but nothing beyond it.
#[test]
fn test_build_logs() -> Result<()> {
let h = TestHarness::new()?;
run_scan_and_build(&h)?;
// Success: log dir removed entirely.
for name in &["base-1.0", "mid-1.0", "top-1.0"] {
let logdir = h.logdir().join(name);
assert!(
!logdir.exists(),
"Successful build {} should not have log dir at {}",
name,
logdir.display()
);
}
// Failed at configure: logs stop after configure.
let bf_log = h.logdir().join("build-fail-1.0");
assert!(bf_log.exists(), "build-fail log dir should exist");
assert!(
bf_log.join("pre-clean.log").exists(),
"build-fail should have pre-clean.log"
);
assert!(
bf_log.join("configure.log").exists(),
"build-fail should have configure.log"
);
assert!(
!bf_log.join("build.log").exists(),
"build-fail should not have build.log (configure failed first)"
);
// Failed at checksum: logs stop after checksum.
let fc_log = h.logdir().join("fail-checksum-1.0");
assert!(fc_log.exists(), "fail-checksum log dir should exist");
assert!(
fc_log.join("pre-clean.log").exists(),
"fail-checksum should have pre-clean.log"
);
assert!(
fc_log.join("checksum.log").exists(),
"fail-checksum should have checksum.log"
);
assert!(
!fc_log.join("configure.log").exists(),
"fail-checksum should not have configure.log"
);
// Failed at build: logs stop after build.
let fab_log = h.logdir().join("fail-at-build-1.0");
assert!(fab_log.exists(), "fail-at-build log dir should exist");
assert!(
fab_log.join("configure.log").exists(),
"fail-at-build should have configure.log"
);
assert!(
fab_log.join("build.log").exists(),
"fail-at-build should have build.log"
);
assert!(
!fab_log.join("install.log").exists(),
"fail-at-build should not have install.log"
);
// Failed at install: install.log present.
let fi_log = h.logdir().join("fail-install-1.0");
assert!(fi_log.exists(), "fail-install log dir should exist");
assert!(
fi_log.join("build.log").exists(),
"fail-install should have build.log"
);
assert!(
fi_log.join("install.log").exists(),
"fail-install should have install.log"
);
// Failed at package: package.log present.
let fp_log = h.logdir().join("fail-package-1.0");
assert!(fp_log.exists(), "fail-package log dir should exist");
assert!(
fp_log.join("install.log").exists(),
"fail-package should have install.log"
);
assert!(
fp_log.join("package.log").exists(),
"fail-package should have package.log"
);
Ok(())
}
#[test]
fn test_cascading_failure_chain() -> Result<()> {
    let h = TestHarness::new()?;
    let (_, _, build_result) = run_scan_and_build(&h)?;
    // Index final states by package name for easy lookup.
    let outcomes: HashMap<&str, &PackageState> = build_result
        .results
        .iter()
        .map(|r| (r.pkgname.pkgname(), &r.state))
        .collect();
    // chain-d is the root failure in the a -> b -> c -> d dependency chain.
    let chain_d = outcomes.get("chain-d-1.0").expect("chain-d should exist");
    assert!(
        matches!(chain_d, PackageState::Failed(_)),
        "chain-d should be Failed, got {:?}",
        chain_d
    );
    // Every package depending (transitively) on chain-d is IndirectFailed,
    // with the root cause named in its reason string.
    for name in &["chain-c-1.0", "chain-b-1.0", "chain-a-1.0"] {
        let outcome = outcomes
            .get(name)
            .unwrap_or_else(|| panic!("{} should exist", name));
        match outcome {
            PackageState::IndirectFailed(msg) => {
                assert!(
                    msg.contains("chain-d"),
                    "{} IndirectFailed reason should mention chain-d, got: {}",
                    name,
                    msg
                );
            }
            other => panic!("{} should be IndirectFailed, got {:?}", name, other),
        }
    }
    Ok(())
}
#[test]
fn test_build_order() -> Result<()> {
    let h = TestHarness::new()?;
    let (_, _, build_result) = run_scan_and_build(&h)?;
    // Map each package name to its completion index in the result list.
    let positions: HashMap<&str, usize> = build_result
        .results
        .iter()
        .enumerate()
        .map(|(i, r)| (r.pkgname.pkgname(), i))
        .collect();
    // Assert dependency ordering: `earlier` must complete before `later`,
    // but only when both packages actually produced results.
    // Each pair carries (pkgname, short label used in the message).
    let expect_before = |earlier: (&str, &str), later: (&str, &str)| {
        if let (Some(&e), Some(&l)) = (positions.get(earlier.0), positions.get(later.0)) {
            assert!(
                e < l,
                "{} ({}) should complete before {} ({})",
                earlier.1,
                e,
                later.1,
                l
            );
        }
    };
    expect_before(("base-1.0", "base"), ("mid-1.0", "mid"));
    expect_before(("base-1.0", "base"), ("also-base-1.0", "also-base"));
    expect_before(("mid-1.0", "mid"), ("top-1.0", "top"));
    expect_before(("also-base-1.0", "also-base"), ("top-1.0", "top"));
    Ok(())
}
#[test]
fn test_limited_build() -> Result<()> {
    let h = TestHarness::new()?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    // Scan restricted to a single package: test/top plus its dependencies.
    let sandbox = Sandbox::new(&config);
    let mut scan_scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    let top = pkgsrc::PkgPath::new("test/top")?;
    scan.add(&top);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scan_scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    // top + mid + base + also-base = 4 packages, all expected buildable.
    assert_eq!(scan_result.packages.len(), 4);
    let c = scan_result.counts();
    assert_eq!(c.buildable, 4);
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let build_sandbox = Sandbox::new(&config);
    let build_scope = SandboxScope::new(build_sandbox, state.clone());
    let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
    let build_result = build.start(&state, &db)?;
    let bc = build_result.counts();
    assert_eq!(
        bc.states[Success], 4,
        "expected 4 successful builds, got {}",
        bc.states[Success]
    );
    assert_eq!(
        bc.states[Failed], 0,
        "expected 0 failures, got {}",
        bc.states[Failed]
    );
    // Exactly the dependency closure of test/top was built — nothing else.
    let built: Vec<&str> = build_result
        .results
        .iter()
        .map(|r| r.pkgname.pkgname())
        .collect();
    assert!(built.contains(&"base-1.0"));
    assert!(built.contains(&"mid-1.0"));
    assert!(built.contains(&"also-base-1.0"));
    assert!(built.contains(&"top-1.0"));
    assert_eq!(
        build_result.results.len(),
        4,
        "only 4 packages should have build results"
    );
    Ok(())
}
#[test]
fn test_pkg_up_to_date_not_found() -> Result<()> {
    let h = TestHarness::new()?;
    // Query up-to-date status for a package that was never built; the
    // repository has no matching .tgz, so the answer is PackageNotFound.
    let repo = h.packages_dir().join("All");
    let result = bob::pkg_up_to_date("nonexistent-1.0", &[], &repo, &h.pkgsrc())?;
    assert!(
        matches!(result, Some(bob::BuildReason::PackageNotFound)),
        "nonexistent package should be PackageNotFound, got {:?}",
        result
    );
    Ok(())
}
#[test]
fn test_build_resume_no_new_work() -> Result<()> {
    let h = TestHarness::new()?;
    // First pass: full scan + build populates the database.
    let (_, scan_result, first_build) = run_scan_and_build(&h)?;
    assert_eq!(first_build.counts().states[Success], 7);
    // Second pass: a fresh Build over the same resolved package set.
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let env = h.pkgsrc_env();
    let sandbox = Sandbox::new(&config);
    let scope = SandboxScope::new(sandbox, state.clone());
    let mut resumed = Build::new(&config, env, scope, scanpkgs);
    // Every prior outcome should be served from the database cache...
    let cached = resumed.load_cached_from_db(&db)?;
    assert_eq!(cached, 17, "all 17 buildable should be cached");
    // ...leaving the resumed run with nothing to do.
    let result = resumed.start(&state, &db)?;
    assert!(
        result.results.is_empty(),
        "second build should produce no new results, got {}",
        result.results.len()
    );
    Ok(())
}
#[test]
fn test_single_threaded_build() -> Result<()> {
    let h = TestHarness::new()?;
    // Overwrite the harness config with build_threads = 1 and
    // scan_threads = 1 so the whole pipeline runs serially.
    // NOTE: the literal is written to disk verbatim, so its content lines
    // stay at column 0.
    let content = format!(
        "\
options = {{
build_threads = 1,
dbdir = \"{dbdir}\",
scan_threads = 1,
tui = false,
}}
pkgsrc = {{
basedir = \"{pkgsrc}\",
make = \"{make}\",
}}
",
        dbdir = h.dbdir().display(),
        pkgsrc = h.pkgsrc().display(),
        make = h.make.display(),
    );
    fs::write(h.config_path(), content)?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    // Full-tree scan under the single-threaded config.
    let sandbox = Sandbox::new(&config);
    let mut scan_scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scan_scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let build_sandbox = Sandbox::new(&config);
    let build_scope = SandboxScope::new(build_sandbox, state.clone());
    let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
    let build_result = build.start(&state, &db)?;
    // Same expected totals as the default multi-threaded run: thread count
    // must not change build outcomes.
    let bc = build_result.counts();
    assert_eq!(bc.states[Success], 7, "expected 7 successful builds");
    assert_eq!(bc.states[Failed], 6, "expected 6 failed builds");
    assert_eq!(bc.states[IndirectFailed], 4, "expected 4 indirect-failed");
    Ok(())
}
#[test]
fn test_rebuild_after_clear() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, first) = run_scan_and_build(&h)?;
    assert_eq!(first.counts().states[Success], 7);
    // Wipe every recorded build outcome from the database.
    let cleared = db.clear_builds()?;
    assert!(cleared > 0, "should clear some builds");
    // Set up a second build over the same resolved package set.
    let config = h.load_config()?;
    let state = h.run_state();
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let env = h.pkgsrc_env();
    let sandbox = Sandbox::new(&config);
    let scope = SandboxScope::new(sandbox, state.clone());
    let mut rebuild = Build::new(&config, env, scope, scanpkgs);
    // Nothing should come back from the cache after the clear...
    let cached = rebuild.load_cached_from_db(&db)?;
    assert_eq!(cached, 0, "no cached results after clear");
    // ...so everything is rebuilt, with the same outcome as before.
    let rebuild_result = rebuild.start(&state, &db)?;
    assert_eq!(
        rebuild_result.counts().states[Success],
        7,
        "rebuild should succeed for same 7 packages"
    );
    Ok(())
}
#[test]
fn test_selective_rebuild_after_failure() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, first) = run_scan_and_build(&h)?;
    // The deliberately broken package must be recorded as failed.
    let failed = db.get_failed_packages()?;
    assert!(
        failed.contains(&"build-fail-1.0".to_string()),
        "build-fail should be in failed list"
    );
    // Drop the failed package and its dependent from the build records,
    // then rewrite the Makefile without the failure injection.
    assert!(db.delete_build_by_name("build-fail-1.0")?);
    assert!(db.delete_build_by_name("dep-bfail-1.0")?);
    h.write_pkg_makefile(&PkgDef::new("test/build-fail", "build-fail-1.0"))?;
    let config = h.load_config()?;
    let state = h.run_state();
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let env = h.pkgsrc_env();
    let sandbox = Sandbox::new(&config);
    let scope = SandboxScope::new(sandbox, state.clone());
    let mut rebuild = Build::new(&config, env, scope, scanpkgs);
    // Only the two deleted records are missing from the cache.
    let cached = rebuild.load_cached_from_db(&db)?;
    assert!(
        cached < first.results.len(),
        "should have fewer cached than total: cached={}, total={}",
        cached,
        first.results.len()
    );
    // The fixed package must be rebuilt and now succeed.
    let result = rebuild.start(&state, &db)?;
    let bf_result = result
        .results
        .iter()
        .find(|r| r.pkgname.pkgname() == "build-fail-1.0");
    assert!(
        bf_result.is_some(),
        "build-fail should have a new build result"
    );
    assert!(
        matches!(bf_result.map(|r| &r.state), Some(PackageState::Success)),
        "fixed build-fail should succeed, got {:?}",
        bf_result.map(|r| &r.state)
    );
    Ok(())
}
#[test]
fn test_multi_version_package() -> Result<()> {
    let h = TestHarness::new()?;
    let (_, _, build_result) = run_scan_and_build(&h)?;
    // The versioned variant must appear in the results and succeed.
    let built = build_result
        .results
        .iter()
        .find(|r| r.pkgname.pkgname() == "py313-multi-1.0");
    assert!(built.is_some(), "py313-multi should have a build result");
    assert!(
        matches!(built.map(|r| &r.state), Some(PackageState::Success)),
        "py313-multi should succeed"
    );
    // And its binary package must have landed in the repository.
    let pkgfile = h.packages_dir().join("All").join("py313-multi-1.0.tgz");
    assert!(pkgfile.exists(), "py313-multi package should exist");
    Ok(())
}
#[test]
fn test_multi_version_multiple_records_build_all_variants() -> Result<()> {
    let h = TestHarness::new()?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    // Full scan of the tree so test/dual resolves to every version variant.
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    // Both variants of test/dual must be present in the buildable set.
    let dual_pkgs: Vec<_> = scan_result
        .buildable()
        .filter(|p| p.pkgpath.as_path().to_string_lossy() == "test/dual")
        .map(|p| p.pkgname().pkgname().to_string())
        .collect();
    assert_eq!(
        dual_pkgs,
        vec!["py27-dual-1.0".to_string(), "py314-dual-1.0".to_string()],
        "expected both dual variants in resolved buildable set"
    );
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let build_sandbox = Sandbox::new(&config);
    let build_scope = SandboxScope::new(build_sandbox, state.clone());
    let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
    let build_result = build.start(&state, &db)?;
    // Each variant must build successfully and produce its own package file.
    for pkgname in ["py27-dual-1.0", "py314-dual-1.0"] {
        let built = build_result
            .results
            .iter()
            .find(|r| r.pkgname.pkgname() == pkgname);
        assert!(built.is_some(), "{pkgname} should be in build results");
        assert!(
            matches!(built.map(|r| &r.state), Some(PackageState::Success)),
            "{pkgname} should build successfully"
        );
        assert!(
            h.packages_dir()
                .join("All")
                .join(format!("{pkgname}.tgz"))
                .exists(),
            "{pkgname} package file should exist"
        );
    }
    // No dual variant may be left in the DB with neither a build outcome
    // nor a build reason (a blank "pending" row).
    let all_status = db.get_all_package_status(true)?;
    let lingering_pending: Vec<_> = all_status
        .into_iter()
        .filter(|p| {
            p.pkgpath == "test/dual" && p.build_outcome.is_none() && p.build_reason.is_none()
        })
        .map(|p| p.pkgname)
        .collect();
    assert!(
        lingering_pending.is_empty(),
        "dual variants should not remain blank pending: {:?}",
        lingering_pending
    );
    Ok(())
}
#[test]
fn test_cli_scan_marks_multi_version_variants_up_to_date() -> Result<()> {
    let h = TestHarness::new()?;
    // Build everything once, then discard the database so the CLI scan
    // starts fresh against an already-populated package repository.
    run_scan_and_build(&h)?;
    let db_path = h.dbdir().join("bob.db");
    if db_path.exists() {
        fs::remove_file(&db_path)?;
    }
    // Run the real bob binary: `bob -c <config> scan`, then `... status`.
    let bob = env!("CARGO_BIN_EXE_bob");
    let scan = Command::new(bob)
        .arg("-c")
        .arg(h.config_path())
        .arg("scan")
        .output()?;
    assert!(
        scan.status.success(),
        "bob scan failed:\nstdout:\n{}\nstderr:\n{}",
        String::from_utf8_lossy(&scan.stdout),
        String::from_utf8_lossy(&scan.stderr)
    );
    let status = Command::new(bob)
        .arg("-c")
        .arg(h.config_path())
        .arg("status")
        .output()?;
    assert!(
        status.status.success(),
        "bob status failed:\nstdout:\n{}\nstderr:\n{}",
        String::from_utf8_lossy(&status.stdout),
        String::from_utf8_lossy(&status.stderr)
    );
    // Both dual variants must be recorded as UpToDate: their package files
    // already exist from the earlier build.
    let db = h.open_db()?;
    for pkgname in ["py27-dual-1.0", "py314-dual-1.0"] {
        let pkg = db
            .get_package_by_name(pkgname)?
            .unwrap_or_else(|| panic!("{pkgname} missing from database"));
        let result = db
            .get_build_result(pkg.id)?
            .unwrap_or_else(|| panic!("{pkgname} missing build result"));
        assert!(
            matches!(result.state, PackageState::UpToDate),
            "{pkgname} should be up-to-date after cli scan, got {:?}",
            result.state
        );
    }
    // Up-to-date packages must not show up as pending work in `status`.
    let status_stdout = String::from_utf8_lossy(&status.stdout);
    assert!(
        !status_stdout.contains("py27-dual-1.0"),
        "py27-dual should not remain visible in status output:\n{status_stdout}"
    );
    assert!(
        !status_stdout.contains("py314-dual-1.0"),
        "py314-dual should not remain visible in status output:\n{status_stdout}"
    );
    Ok(())
}
#[test]
fn test_unselected_packages_are_hidden_from_status_queries() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, _, _) = run_scan_and_build(&h)?;
    // Store a scanned package that was never part of the selected build
    // set. (The scan-index literal is parsed at runtime; its content lines
    // stay at column 0.)
    let extra: pkgsrc::ScanIndex = "\
PKGNAME=extra-1.0
PKG_LOCATION=test/extra
ALL_DEPENDS=
PKG_SKIP_REASON=
PKG_FAIL_REASON=
NO_BIN_ON_FTP=
RESTRICTED=
CATEGORIES=test
MAINTAINER=test@example.com
USE_DESTDIR=yes
BOOTSTRAP_PKG=
USERGROUP_PHASE=
SCAN_DEPENDS=
"
    .parse()?;
    db.store_package("test/extra", &extra)?;
    // The unselected package must be invisible to every status-style query.
    let status_rows = db.get_all_package_status(true)?;
    assert!(
        !status_rows.iter().any(|p| p.pkgname == "extra-1.0"),
        "unselected package should not appear in status rows"
    );
    let buildable = db.get_buildable_packages()?;
    assert!(
        !buildable.iter().any(|p| p.pkgname == "extra-1.0"),
        "unselected package should not appear in buildable rows"
    );
    let loaded = db.load_resolved_packages()?;
    assert!(
        !loaded.iter().any(|p| p.pkgname().pkgname() == "extra-1.0"),
        "unselected package should not appear in resolved rebuild set"
    );
    Ok(())
}
#[test]
fn test_usergroup_phase_package() -> Result<()> {
    let h = TestHarness::new()?;
    let (_, scan_result, _) = run_scan_and_build(&h)?;
    // Locate the test/mid package in the resolved scan output.
    let found = scan_result
        .buildable()
        .find(|p| p.pkgpath.as_path().to_string_lossy() == "test/mid");
    assert!(found.is_some(), "mid should be in scan results");
    let mid = found.expect("mid exists");
    // The USERGROUP_PHASE value from the Makefile must survive the scan.
    assert_eq!(
        mid.usergroup_phase(),
        Some("configure"),
        "mid should have USERGROUP_PHASE=configure"
    );
    Ok(())
}
#[test]
fn test_load_resolved_packages_matches_scan() -> Result<()> {
    let h = TestHarness::new()?;
    let scan_result = h.run_scan()?;
    let db = h.open_db()?;
    // Normalize a package to (name, sorted dependency names) so the
    // comparison is insensitive to dependency ordering.
    fn depset(name: &str, deps: &[pkgsrc::PkgName]) -> (String, Vec<String>) {
        let mut names: Vec<String> = deps.iter().map(|d| d.pkgname().to_string()).collect();
        names.sort();
        (name.to_string(), names)
    }
    let expected: HashMap<String, Vec<String>> = scan_result
        .buildable()
        .map(|p| depset(p.pkgname().pkgname(), p.depends()))
        .collect();
    // Reloading resolved packages from the DB must reproduce the scan set.
    let loaded = db.load_resolved_packages()?;
    let actual: HashMap<String, Vec<String>> = loaded
        .iter()
        .map(|p| depset(p.pkgname().pkgname(), p.depends()))
        .collect();
    assert_eq!(expected, actual);
    Ok(())
}
#[test]
fn test_build_durations() -> Result<()> {
    let h = TestHarness::new()?;
    let (_, _, build_result) = run_scan_and_build(&h)?;
    for result in &build_result.results {
        let name = result.pkgname.pkgname();
        let duration = result.build_stats.duration;
        match &result.state {
            // Anything that actually ran must have taken measurable time.
            PackageState::Success | PackageState::Failed(_) => {
                assert!(
                    duration.as_nanos() > 0,
                    "{} should have non-zero duration",
                    name
                );
            }
            // Indirect failures never run, so no time may be attributed.
            PackageState::IndirectFailed(_) => {
                assert_eq!(
                    duration,
                    std::time::Duration::ZERO,
                    "{} indirect failure should have zero duration",
                    name
                );
            }
            _ => {}
        }
    }
    Ok(())
}
// Modification time of the generated pkg_summary.gz, or None when the file
// is missing (or its mtime cannot be read).
fn pkg_summary_gz_mtime(h: &TestHarness) -> Option<std::time::SystemTime> {
    let meta = fs::metadata(h.packages_dir().join("All").join("pkg_summary.gz")).ok()?;
    meta.modified().ok()
}
// Decompress the generated pkg_summary.gz into a String.
fn read_pkg_summary_gz(h: &TestHarness) -> Result<String> {
    use flate2::read::GzDecoder;
    use std::io::Read;
    let path = h.packages_dir().join("All").join("pkg_summary.gz");
    let file =
        fs::File::open(&path).with_context(|| format!("Failed to open {}", path.display()))?;
    let mut content = String::new();
    GzDecoder::new(file).read_to_string(&mut content)?;
    Ok(content)
}
// Count the entries in a pkg_summary text: entries are blank-line
// separated blocks, and a block counts as an entry when any of its lines
// starts with "PKGNAME=".
fn count_pkg_summary_entries(content: &str) -> usize {
    content
        .split("\n\n")
        .filter(|block| block.lines().any(|line| line.starts_with("PKGNAME=")))
        .count()
}
// Regenerate pkg_summary when the successful package set changed since
// `prior`, or when any build succeeded this run (the same names may now
// carry new package files/metadata). Returns whether generation happened.
fn maybe_generate(db: &Database, prior: &[String], summary: &bob::BuildSummary) -> Result<bool> {
    let current = db.get_successful_packages()?;
    let set_changed = prior != current;
    let new_success = summary.counts().states[Success] > 0;
    if !set_changed && !new_success {
        return Ok(false);
    }
    bob::generate_pkg_summary(db, 2)?;
    Ok(true)
}
// Run a follow-up build over an already-scanned package set, seeding the
// Build with cached outcomes from the database first.
fn run_cached_build(
    h: &TestHarness,
    db: &Database,
    scan_result: &ScanSummary,
) -> Result<bob::BuildSummary> {
    let config = h.load_config()?;
    let state = h.run_state();
    let pkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let scope = SandboxScope::new(Sandbox::new(&config), state.clone());
    let mut build = Build::new(&config, h.pkgsrc_env(), scope, pkgs);
    build.load_cached_from_db(db)?;
    build.start(&state, db)
}
#[test]
fn test_pkg_summary_generated_after_first_build() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, _, summary) = run_scan_and_build(&h)?;
    // With no prior successful set, a build with successes must generate.
    let prior: Vec<String> = vec![];
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(generated, "should generate after first build");
    let repo = h.packages_dir();
    assert!(repo.join("All/pkg_summary.gz").exists());
    assert!(repo.join("All/pkg_summary.zst").exists());
    let content = read_pkg_summary_gz(&h)?;
    assert!(!content.is_empty(), "pkg_summary.gz should have content");
    // The summary must have exactly one entry per successful package, and
    // every successful package must have its own PKGNAME line.
    let successful = db.get_successful_packages()?;
    assert_eq!(
        count_pkg_summary_entries(&content),
        successful.len(),
        "pkg_summary should have one entry per successful package"
    );
    for pkgname in &successful {
        let needle = format!("PKGNAME={}", pkgname);
        assert!(
            content.contains(&needle),
            "pkg_summary should contain entry for {}",
            pkgname
        );
    }
    Ok(())
}
#[test]
fn test_pkg_summary_skipped_when_up_to_date() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("pkg_summary.gz should exist");
    // Sleep so a regeneration would be observable via a newer mtime.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let prior = db.get_successful_packages()?;
    // A fully-cached rerun yields no new successes and no set change...
    let summary = run_cached_build(&h, &db, &scan_result)?;
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(!generated, "should not regenerate when all up-to-date");
    // ...so the summary file must be untouched.
    let mtime_after = pkg_summary_gz_mtime(&h).expect("should exist");
    assert_eq!(
        mtime_after, mtime_before,
        "pkg_summary.gz mtime should be unchanged"
    );
    Ok(())
}
#[test]
fn test_pkg_summary_skipped_when_rebuild_fails_again() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("pkg_summary.gz should exist");
    // Sleep so a regeneration would show up as a newer mtime.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let prior = db.get_successful_packages()?;
    // Retry the known-broken package; it fails again, so neither the
    // successful set nor the success count changes.
    db.delete_build_by_name("build-fail-1.0")?;
    let summary = run_cached_build(&h, &db, &scan_result)?;
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(
        !generated,
        "should not regenerate when failed rebuild fails again"
    );
    let mtime_after = pkg_summary_gz_mtime(&h).expect("should exist");
    assert_eq!(mtime_after, mtime_before);
    Ok(())
}
#[test]
fn test_pkg_summary_regenerated_when_rebuild_succeeds() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("pkg_summary.gz should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    // Fix the broken package by rewriting its Makefile without the
    // injected failure, then drop it and its dependent from the records.
    h.write_pkg_makefile(&PkgDef::new("test/build-fail", "build-fail-1.0"))?;
    let prior = db.get_successful_packages()?;
    assert!(
        !prior.contains(&"build-fail-1.0".to_string()),
        "build-fail should not be in successful set before fix"
    );
    db.delete_build_by_name("build-fail-1.0")?;
    db.delete_build_by_name("dep-bfail-1.0")?;
    let summary = run_cached_build(&h, &db, &scan_result)?;
    // The successful set grew, so the summary must be regenerated.
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(
        generated,
        "should regenerate when failed package now succeeds"
    );
    let current = db.get_successful_packages()?;
    assert!(current.contains(&"build-fail-1.0".to_string()));
    let mtime_after = pkg_summary_gz_mtime(&h).expect("should exist");
    assert!(
        mtime_after > mtime_before,
        "pkg_summary.gz should have newer mtime than before rebuild"
    );
    let content = read_pkg_summary_gz(&h)?;
    assert!(
        content.contains("PKGNAME=build-fail-1.0"),
        "pkg_summary should now include the fixed package"
    );
    let entry_count = count_pkg_summary_entries(&content);
    assert_eq!(
        entry_count,
        current.len(),
        "pkg_summary should have one entry per successful package"
    );
    // Regeneration must not lag behind packages that were not rebuilt.
    let unchanged_tgz = h.packages_dir().join("All").join("base-1.0.tgz");
    if unchanged_tgz.exists() {
        let tgz_mtime = fs::metadata(&unchanged_tgz)?.modified()?;
        assert!(
            mtime_after >= tgz_mtime,
            "pkg_summary.gz should be at least as recent as unchanged .tgz files"
        );
    }
    Ok(())
}
#[test]
fn test_pkg_summary_regenerated_when_successful_pkg_fails() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("pkg_summary.gz should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    // Break a previously-successful package: top now fails at configure.
    h.write_pkg_makefile(
        &PkgDef::new("test/top", "top-1.0")
            .depends("mid-[0-9]*:test/mid also-base-[0-9]*:test/also-base")
            .fail_at("configure", "broken"),
    )?;
    let prior = db.get_successful_packages()?;
    assert!(prior.contains(&"top-1.0".to_string()));
    db.delete_build_by_name("top-1.0")?;
    let summary = run_cached_build(&h, &db, &scan_result)?;
    let current = db.get_successful_packages()?;
    assert!(
        !current.contains(&"top-1.0".to_string()),
        "top-1.0 should no longer be in successful set"
    );
    // The successful set shrank, so the summary must be regenerated,
    // dropping the now-failed package's entry.
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(
        generated,
        "should regenerate when successful package now fails"
    );
    assert!(
        pkg_summary_gz_mtime(&h).expect("should exist") > mtime_before,
        "pkg_summary.gz should have newer mtime"
    );
    let content = read_pkg_summary_gz(&h)?;
    assert!(
        !content.contains("PKGNAME=top-1.0"),
        "pkg_summary should no longer include the now-failed package"
    );
    assert_eq!(
        count_pkg_summary_entries(&content),
        current.len(),
        "pkg_summary entry count should match successful package count"
    );
    Ok(())
}
#[test]
fn test_pkg_summary_regenerated_on_same_name_rebuild() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("pkg_summary.gz should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    let prior = db.get_successful_packages()?;
    assert!(prior.contains(&"top-1.0".to_string()));
    // Force a rebuild of top-1.0 without changing its name or version.
    db.delete_build_by_name("top-1.0")?;
    let summary = run_cached_build(&h, &db, &scan_result)?;
    let top = summary
        .results
        .iter()
        .find(|r| r.pkgname.pkgname() == "top-1.0");
    assert!(
        matches!(top.map(|r| &r.state), Some(PackageState::Success)),
        "top-1.0 should have been rebuilt successfully"
    );
    // The name set is unchanged, but a fresh success occurred — exactly the
    // "new successes this run" case that maybe_generate checks for.
    let current = db.get_successful_packages()?;
    assert_eq!(prior, current, "same set of successful package names");
    assert!(
        summary.counts().states[Success] > 0,
        "but a new build succeeded (metadata may differ)"
    );
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(
        generated,
        "should regenerate when package rebuilt with same name"
    );
    assert!(
        pkg_summary_gz_mtime(&h).expect("should exist") > mtime_before,
        "pkg_summary.gz should have newer mtime"
    );
    // The regenerated summary must be no older than the rebuilt package.
    let rebuilt_tgz = h.packages_dir().join("All").join("top-1.0.tgz");
    if rebuilt_tgz.exists() {
        let tgz_mtime = fs::metadata(&rebuilt_tgz)?.modified()?;
        let summary_mtime = pkg_summary_gz_mtime(&h).expect("should exist");
        assert!(
            summary_mtime >= tgz_mtime,
            "pkg_summary.gz should be at least as recent as rebuilt .tgz"
        );
    }
    Ok(())
}
#[test]
fn test_pkg_summary_skipped_when_all_fail() -> Result<()> {
    let h = TestHarness::new()?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    // Limit the scan to a package that is known to fail its build.
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.add(&pkgsrc::PkgPath::new("test/build-fail")?);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let build_sandbox = Sandbox::new(&config);
    let build_scope = SandboxScope::new(build_sandbox, state.clone());
    let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
    let summary = build.start(&state, &db)?;
    assert_eq!(summary.counts().states[Success], 0);
    assert!(summary.counts().states[Failed] > 0);
    // No successes at all: the summary must not be generated.
    let prior: Vec<String> = vec![];
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(!generated, "should not generate when all packages fail");
    assert!(
        pkg_summary_gz_mtime(&h).is_none(),
        "pkg_summary.gz should not exist"
    );
    Ok(())
}
#[test]
fn test_pkg_summary_regenerated_when_new_packages_added() -> Result<()> {
    let h = TestHarness::new()?;
    let config = h.load_config()?;
    let db = h.open_db()?;
    let state = h.run_state();
    // First round: scan and build only test/base.
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.add(&pkgsrc::PkgPath::new("test/base")?);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let build_sandbox = Sandbox::new(&config);
    let build_scope = SandboxScope::new(build_sandbox, state.clone());
    let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
    let first_summary = build.start(&state, &db)?;
    assert_eq!(first_summary.counts().states[Success], 1);
    let prior_first: Vec<String> = vec![];
    let generated = maybe_generate(&db, &prior_first, &first_summary)?;
    assert!(generated);
    let mtime_first = pkg_summary_gz_mtime(&h).expect("should exist after first build");
    // Sleep so a later regeneration is observable via a newer mtime.
    std::thread::sleep(std::time::Duration::from_millis(50));
    // Second round: widen the selection to test/top (pulls in mid and
    // also-base as dependencies).
    let state2 = h.run_state();
    let sandbox2 = Sandbox::new(&config);
    let mut scope2 = SandboxScope::new(sandbox2, state2.clone());
    let mut scan2 = Scan::new(&config);
    scan2.add(&pkgsrc::PkgPath::new("test/top")?);
    scan2.init_from_db(&db)?;
    scan2.start(&db, &mut scope2)?;
    let scan_result2 = scan2.resolve_with_report(&db, false)?;
    let prior = db.get_successful_packages()?;
    assert_eq!(
        prior,
        vec!["base-1.0"],
        "only base should be successful so far"
    );
    let scanpkgs2 = scan_result2
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env2 = h.pkgsrc_env();
    let build_sandbox2 = Sandbox::new(&config);
    let build_scope2 = SandboxScope::new(build_sandbox2, state2.clone());
    let mut build2 = Build::new(&config, pkgsrc_env2, build_scope2, scanpkgs2);
    build2.load_cached_from_db(&db)?;
    let second_summary = build2.start(&state2, &db)?;
    assert!(
        second_summary.counts().states[Success] >= 3,
        "mid, also-base, top should succeed (base cached)"
    );
    // The successful set grew, so the summary must be regenerated.
    let generated = maybe_generate(&db, &prior, &second_summary)?;
    assert!(generated, "should regenerate when new packages are added");
    assert!(
        pkg_summary_gz_mtime(&h).expect("should exist") > mtime_first,
        "pkg_summary.gz should be newer after adding packages"
    );
    let current = db.get_successful_packages()?;
    assert!(
        current.len() > prior.len(),
        "successful set should have grown"
    );
    Ok(())
}
#[test]
fn test_pkg_summary_skipped_when_new_packages_fail() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, _, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    let prior = db.get_successful_packages()?;
    // Clear every known-bad package so they are all retried (and fail again).
    db.delete_build_by_name("build-fail-1.0")?;
    db.delete_build_by_name("fail-checksum-1.0")?;
    db.delete_build_by_name("fail-at-build-1.0")?;
    db.delete_build_by_name("fail-install-1.0")?;
    db.delete_build_by_name("fail-package-1.0")?;
    let config = h.load_config()?;
    let state = h.run_state();
    let sandbox = Sandbox::new(&config);
    let mut scope = SandboxScope::new(sandbox, state.clone());
    let mut scan = Scan::new(&config);
    scan.init_from_db(&db)?;
    scan.start(&db, &mut scope)?;
    let scan_result = scan.resolve_with_report(&db, false)?;
    let summary = run_cached_build(&h, &db, &scan_result)?;
    // Only failures happened: successful set unchanged, no new successes,
    // so the summary file must be left alone.
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(
        !generated,
        "should not regenerate when only new failures occur"
    );
    assert_eq!(
        pkg_summary_gz_mtime(&h).expect("should exist"),
        mtime_before,
    );
    Ok(())
}
#[test]
fn test_pkg_summary_skipped_when_interrupted() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    let prior = db.get_successful_packages()?;
    db.delete_build_by_name("top-1.0")?;
    let config = h.load_config()?;
    let state = h.run_state();
    // Request shutdown before the build even starts, simulating ^C.
    state.shutdown();
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let sandbox = Sandbox::new(&config);
    let scope = SandboxScope::new(sandbox, state.clone());
    let mut build = Build::new(&config, pkgsrc_env, scope, scanpkgs);
    build.load_cached_from_db(&db)?;
    let summary = build.start(&state, &db)?;
    assert!(state.interrupted(), "state should be interrupted");
    // An interrupted run must leave the summary untouched even though the
    // DB state changed (top-1.0 was cleared but never rebuilt).
    assert_eq!(
        pkg_summary_gz_mtime(&h).expect("should exist"),
        mtime_before,
        "pkg_summary.gz mtime should be unchanged after interrupt"
    );
    let current = db.get_successful_packages()?;
    assert!(
        prior != current,
        "DB state should differ (top-1.0 cleared but not rebuilt)"
    );
    // Sanity check: the regeneration condition itself would have fired had
    // the run not been interrupted.
    assert!(
        prior != current || summary.counts().states[Success] > 0,
        "condition should be true (would regenerate if not interrupted)"
    );
    Ok(())
}
#[test]
fn test_pkg_summary_recovered_after_interrupt() -> Result<()> {
    let h = TestHarness::new()?;
    let (db, scan_result, _) = run_scan_and_build(&h)?;
    bob::generate_pkg_summary(&db, 2)?;
    let mtime_before = pkg_summary_gz_mtime(&h).expect("should exist");
    std::thread::sleep(std::time::Duration::from_millis(50));
    db.delete_build_by_name("top-1.0")?;
    let config = h.load_config()?;
    let state = h.run_state();
    // Interrupt before the first rebuild starts, simulating ^C.
    state.shutdown();
    let scanpkgs = scan_result
        .buildable()
        .map(|p| (p.pkgname().clone(), p.clone()))
        .collect();
    let pkgsrc_env = h.pkgsrc_env();
    let sandbox = Sandbox::new(&config);
    let scope = SandboxScope::new(sandbox, state.clone());
    let mut build = Build::new(&config, pkgsrc_env, scope, scanpkgs);
    build.load_cached_from_db(&db)?;
    let _interrupted_summary = build.start(&state, &db)?;
    assert_eq!(
        pkg_summary_gz_mtime(&h).expect("should exist"),
        mtime_before,
        "pkg_summary should be unchanged after interrupted build"
    );
    // Recovery: a fresh cached build rebuilds top-1.0 and the summary must
    // then be regenerated.
    let prior = db.get_successful_packages()?;
    assert!(!prior.contains(&"top-1.0".to_string()));
    let summary = run_cached_build(&h, &db, &scan_result)?;
    let generated = maybe_generate(&db, &prior, &summary)?;
    assert!(generated, "should regenerate on recovery after interrupt");
    assert!(
        pkg_summary_gz_mtime(&h).expect("should exist") > mtime_before,
        "pkg_summary.gz should be newer after recovery"
    );
    let current = db.get_successful_packages()?;
    assert!(
        current.contains(&"top-1.0".to_string()),
        "top-1.0 should be back in successful set"
    );
    Ok(())
}
// Build a package whose "all" target backgrounds a long-running process
// (`sleep 3600 &`) that inherits the build's output descriptors, and assert
// the build still completes successfully instead of hanging on the orphaned
// writer.
#[test]
fn test_build_orphan_stdout() -> Result<()> {
let h = TestHarness::new()?;
// Create the on-disk directory for a new test/orphan-stdout package.
let pkg_dir = h.pkgsrc().join("test/orphan-stdout");
fs::create_dir_all(&pkg_dir)?;
// Append the new package to the category Makefile's existing subdir list
// so the scan phase picks it up.
let cat_makefile = h.pkgsrc().join("test/Makefile");
let existing = fs::read_to_string(&cat_makefile)?;
fs::write(
&cat_makefile,
existing.replace(
"chain-a chain-b chain-c chain-d",
"chain-a chain-b chain-c chain-d orphan-stdout",
),
)?;
// Minimal fake package Makefile. Key parts:
// - pbulk-index: emits the scan metadata one VAR=VALUE per line.
// - all: runs `sleep 3600 &`, leaving a background process alive after the
//   target returns — the condition under test.
// - stage-package-create: assembles a tiny but valid .tgz package with the
//   +CONTENTS/+COMMENT/+DESC/+SIZE_PKG/+BUILD_INFO metadata files.
// - show-var / show-vars: answer the variable queries the builder makes
//   (STAGE_PKGFILE, WRKDIR); other names get an empty line.
// NOTE: the string uses `\` line continuations and `$$` make escapes —
// formatting is load-bearing, do not reflow.
let makefile = "\
PKGNAME=orphan-stdout-1.0
pbulk-index:
\t@printf 'PKGNAME=orphan-stdout-1.0\\nALL_DEPENDS=\\nPKG_SKIP_REASON=\\nPKG_FAIL_REASON=\\nNO_BIN_ON_FTP=\\nRESTRICTED=\\nCATEGORIES=test\\nMAINTAINER=test@example.com\\nUSE_DESTDIR=yes\\nBOOTSTRAP_PKG=yes\\nUSERGROUP_PHASE=\\nSCAN_DEPENDS=\\n'
clean checksum configure stage-install create-usergroup:
\t@true
all:
\t@sleep 3600 &
stage-package-create:
\t@mkdir -p ${.CURDIR}/pkg
\t@d=$$(mktemp -d) && \\\n\
\tprintf '@name orphan-stdout-1.0\\n' > \"$$d/+CONTENTS\" && \\\n\
\tprintf 'Test package\\n' > \"$$d/+COMMENT\" && \\\n\
\tprintf 'Test package description\\n' > \"$$d/+DESC\" && \\\n\
\tprintf '0\\n' > \"$$d/+SIZE_PKG\" && \\\n\
\tprintf 'BUILD_DATE=%s\\nCATEGORIES=test\\nMACHINE_ARCH=x86_64\\nOPSYS=Test\\nOS_VERSION=1.0\\nPKGPATH=test/orphan-stdout\\nPKGTOOLS_VERSION=20210710\\n' \
\"$$(date '+%Y-%m-%d %H:%M:%S %z')\" > \"$$d/+BUILD_INFO\" && \\\n\
\t(cd \"$$d\" && COPYFILE_DISABLE=1 tar czf \"${.CURDIR}/pkg/orphan-stdout-1.0.tgz\" \
+CONTENTS +COMMENT +DESC +SIZE_PKG +BUILD_INFO) && \\\n\
\trm -rf \"$$d\"
show-var:
\t@case \"${VARNAME}\" in STAGE_PKGFILE) echo \"${.CURDIR}/pkg/orphan-stdout-1.0.tgz\" ;; esac
show-vars:
\t@for v in ${VARNAMES}; do \\\n\
\t case \"$$v\" in \\\n\
\t WRKDIR) echo \"${.CURDIR}/work\" ;; \\\n\
\t *) echo ;; \\\n\
\t esac; \\\n\
\tdone
";
fs::write(pkg_dir.join("Makefile"), makefile)?;
let config = h.load_config()?;
let db = h.open_db()?;
let state = h.run_state();
// Watchdog: force shutdown after 30s so the test cannot hang forever if
// the build waits on the orphaned process (safety net; presumably the
// build normally finishes well before this — confirm against Build).
let state_clone = state.clone();
std::thread::spawn(move || {
std::thread::sleep(std::time::Duration::from_secs(30));
state_clone.shutdown();
});
// Scan the tree so the new package is indexed and resolvable.
let sandbox = Sandbox::new(&config);
let mut scan_scope = SandboxScope::new(sandbox, state.clone());
let mut scan = Scan::new(&config);
scan.init_from_db(&db)?;
scan.start(&db, &mut scan_scope)?;
let scan_result = scan.resolve_with_report(&db, false)?;
// Build only the orphan-stdout package; everything else is filtered out.
let scanpkgs = scan_result
.buildable()
.filter(|p| p.pkgpath.as_path().to_string_lossy() == "test/orphan-stdout")
.map(|p| (p.pkgname().clone(), p.clone()))
.collect();
let pkgsrc_env = h.pkgsrc_env();
let build_sandbox = Sandbox::new(&config);
let build_scope = SandboxScope::new(build_sandbox, state.clone());
let build = Build::new(&config, pkgsrc_env, build_scope, scanpkgs);
let build_result = build.start(&state, &db)?;
// Exactly one success: the build must not be blocked by the background
// sleep holding the output pipe open.
assert_eq!(
build_result.counts().states[Success],
1,
"orphan-stdout package should build successfully"
);
Ok(())
}