use crate::scan::ResolvedIndex;
use crate::scan::ScanFailure;
use crate::status::{self, StatusMessage};
use crate::tui::{MultiProgress, format_duration};
use crate::{Config, RunContext, Sandbox};
use anyhow::{Context, bail};
use glob::Pattern;
use indexmap::IndexMap;
use pkgsrc::{PkgName, PkgPath};
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, mpsc, mpsc::Sender};
use std::time::{Duration, Instant};
use tracing::{debug, error, info, trace, warn};

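/// Renders a resolved scan index entry as the text handed to the pkg-build
/// script on stdin.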
fn format_scan_index(idx: &ResolvedIndex) -> String {
    idx.to_string()
}

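/// Outcome of a single package build.
///
/// `Failed` and `PreFailed` carry a reason string, while the `Indirect*`
/// variants carry the name of the package whose failure prevented this one
/// from being built.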
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
pub enum BuildOutcome {
    Success,
    Failed(String),
    UpToDate,
    PreFailed(String),
    IndirectFailed(String),
    IndirectPreFailed(String),
}

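/// Result of one package build, along with its duration and the directory
/// containing its logs, if any.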
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct BuildResult {
    pub pkgname: PkgName,
    pub pkgpath: Option<PkgPath>,
    pub outcome: BuildOutcome,
    pub duration: Duration,
    pub log_dir: Option<PathBuf>,
}

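/// Summary of an entire build run: the per-package results plus any
/// packages that failed during the scan phase.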
#[derive(Clone, Debug)]
pub struct BuildSummary {
    pub duration: Duration,
    pub results: Vec<BuildResult>,
    pub scan_failed: Vec<ScanFailure>,
}

impl BuildSummary {
    pub fn success_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
            .count()
    }

    pub fn failed_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
            .count()
    }

    pub fn up_to_date_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::UpToDate))
            .count()
    }

    pub fn prefailed_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::PreFailed(_)))
            .count()
    }

    pub fn indirect_failed_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectFailed(_)))
            .count()
    }

    pub fn indirect_prefailed_count(&self) -> usize {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectPreFailed(_)))
            .count()
    }

    pub fn scan_failed_count(&self) -> usize {
        self.scan_failed.len()
    }

    pub fn failed(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
            .collect()
    }

    pub fn succeeded(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
            .collect()
    }

    pub fn up_to_date(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::UpToDate))
            .collect()
    }

    pub fn prefailed(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::PreFailed(_)))
            .collect()
    }

    pub fn indirect_failed(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectFailed(_)))
            .collect()
    }

    pub fn indirect_prefailed(&self) -> Vec<&BuildResult> {
        self.results
            .iter()
            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectPreFailed(_)))
            .collect()
    }
}

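/// Orchestrates a full build run: holds the configuration, the sandbox
/// handle, the resolved scan results, and any build results cached from a
/// previous run.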
#[derive(Debug, Default)]
pub struct Build {
    config: Config,
    sandbox: Sandbox,
    scanpkgs: IndexMap<PkgName, ResolvedIndex>,
    cached: IndexMap<PkgName, BuildResult>,
}

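/// A single package build job dispatched to a worker thread; `id` is the
/// sandbox the job runs in.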
#[derive(Debug)]
struct PackageBuild {
    id: usize,
    config: Config,
    pkginfo: ResolvedIndex,
    sandbox: Sandbox,
}

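/// Helper for querying make variables (WRKDIR, DESTDIR, ...) for a package,
/// invoking make via chroot when the sandbox is enabled.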
struct MakeQuery<'a> {
    config: &'a Config,
    sandbox: &'a Sandbox,
    sandbox_id: usize,
    pkgpath: &'a PkgPath,
    env: &'a HashMap<String, String>,
}

impl<'a> MakeQuery<'a> {
    fn new(
        config: &'a Config,
        sandbox: &'a Sandbox,
        sandbox_id: usize,
        pkgpath: &'a PkgPath,
        env: &'a HashMap<String, String>,
    ) -> Self {
        Self { config, sandbox, sandbox_id, pkgpath, env }
    }

    fn var(&self, name: &str) -> Option<String> {
        let pkgdir = self.config.pkgsrc().join(self.pkgpath.as_path());

        let mut cmd = if self.sandbox.enabled() {
            let mut c = Command::new("/usr/sbin/chroot");
            c.arg(self.sandbox.path(self.sandbox_id)).arg(self.config.make());
            c
        } else {
            Command::new(self.config.make())
        };

        cmd.arg("-C")
            .arg(&pkgdir)
            .arg("show-var")
            .arg(format!("VARNAME={}", name));

        for (key, value) in self.env {
            cmd.env(key, value);
        }

        let output = cmd.output().ok()?;

        if !output.status.success() {
            return None;
        }

        let value = String::from_utf8_lossy(&output.stdout).trim().to_string();

        if value.is_empty() { None } else { Some(value) }
    }

    fn var_path(&self, name: &str) -> Option<PathBuf> {
        self.var(name).map(PathBuf::from)
    }

    fn wrkdir(&self) -> Option<PathBuf> {
        self.var_path("WRKDIR")
    }

    #[allow(dead_code)]
    fn wrksrc(&self) -> Option<PathBuf> {
        self.var_path("WRKSRC")
    }

    #[allow(dead_code)]
    fn destdir(&self) -> Option<PathBuf> {
        self.var_path("DESTDIR")
    }

    #[allow(dead_code)]
    fn prefix(&self) -> Option<PathBuf> {
        self.var_path("PREFIX")
    }

    fn resolve_path(&self, path: &Path) -> PathBuf {
        if self.sandbox.enabled() {
            self.sandbox
                .path(self.sandbox_id)
                .join(path.strip_prefix("/").unwrap_or(path))
        } else {
            path.to_path_buf()
        }
    }
}

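/// How a single pkg-build invocation finished: built, failed, or skipped
/// because the package was already up to date.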
#[derive(Debug)]
enum PackageBuildResult {
    Success,
    Failed,
    Skipped,
}

impl PackageBuild {
    fn build(
        &self,
        status_tx: &Sender<ChannelCommand>,
    ) -> anyhow::Result<PackageBuildResult> {
        let pkgname = self.pkginfo.pkgname.pkgname();
        info!(pkgname = %pkgname,
            sandbox_id = self.id,
            "Starting package build"
        );

        let Some(pkgpath) = &self.pkginfo.pkg_location else {
            error!(pkgname = %pkgname, "Could not get PKGPATH for package");
            bail!("Could not get PKGPATH for {}", pkgname);
        };

        let logdir = self.config.logdir();

        let mut envs = self.config.script_env();

        if let Some(path) = self.config.script("pkg-up-to-date") {
            envs.push((
                "PKG_UP_TO_DATE".to_string(),
                format!("{}", path.display()),
            ));
        }

        let pkg_env = match self.config.get_pkg_env(&self.pkginfo) {
            Ok(env) => {
                for (key, value) in &env {
                    envs.push((key.clone(), value.clone()));
                }
                env
            }
            Err(e) => {
                error!(pkgname = %pkgname, error = %e, "Failed to get env from Lua config");
                HashMap::new()
            }
        };

        let patterns = self.config.save_wrkdir_patterns();
        if !patterns.is_empty() {
            envs.push(("SKIP_CLEAN".to_string(), "1".to_string()));
        }

        let Some(pkg_build_script) = self.config.script("pkg-build") else {
            error!(pkgname = %pkgname, "No pkg-build script defined");
            bail!("No pkg-build script defined");
        };

        let stdin_data = format_scan_index(&self.pkginfo);

        debug!(pkgname = %pkgname,
            env_count = envs.len(),
            "Executing build scripts"
        );
        trace!(pkgname = %pkgname,
            envs = ?envs,
            stdin = %stdin_data,
            "Build environment variables"
        );

        if let Some(pre_build) = self.config.script("pre-build") {
            debug!(pkgname = %pkgname, "Running pre-build script");
            let child = self.sandbox.execute(
                self.id,
                pre_build,
                envs.clone(),
                None,
                None,
            )?;
            let output = child
                .wait_with_output()
                .context("Failed to wait for pre-build")?;
            if !output.status.success() {
                warn!(pkgname = %pkgname, exit_code = ?output.status.code(), "pre-build script failed");
            }
        }

        let (mut status_reader, status_writer) =
            status::channel().context("Failed to create status channel")?;
        let status_fd = status_writer.fd();

        let (mut output_reader, output_writer) = status::output_channel()
            .context("Failed to create output channel")?;
        let output_fd = output_writer.fd();

        envs.push(("bob_output_fd".to_string(), output_fd.to_string()));

        let mut child = self.sandbox.execute(
            self.id,
            pkg_build_script,
            envs.clone(),
            Some(&stdin_data),
            Some(status_fd),
        )?;

        status_writer.close();
        output_writer.close();

        let mut was_skipped = false;

        loop {
            for msg in status_reader.read_all() {
                match msg {
                    StatusMessage::Stage(stage) => {
                        let _ = status_tx.send(ChannelCommand::StageUpdate(
                            self.id,
                            Some(stage),
                        ));
                    }
                    StatusMessage::Skipped => {
                        was_skipped = true;
                    }
                }
            }

            let output_lines = output_reader.read_all_lines();
            if !output_lines.is_empty() {
                let _ = status_tx
                    .send(ChannelCommand::OutputLines(self.id, output_lines));
            }

            match child.try_wait() {
                Ok(Some(_status)) => break,
                Ok(None) => {
                    std::thread::sleep(Duration::from_millis(10));
                }
                Err(e) => {
                    return Err(e).context("Failed to wait for pkg-build");
                }
            }
        }

        let remaining = output_reader.read_all_lines();
        if !remaining.is_empty() {
            let _ =
                status_tx.send(ChannelCommand::OutputLines(self.id, remaining));
        }

        let _ = status_tx.send(ChannelCommand::StageUpdate(self.id, None));

        let status =
            child.wait().context("Failed to get pkg-build exit status")?;

        let result = if was_skipped {
            info!(pkgname = %pkgname,
                "pkg-build skipped (up-to-date)"
            );
            PackageBuildResult::Skipped
        } else {
            match status.code() {
                Some(0) => {
                    info!(pkgname = %pkgname,
                        "pkg-build completed successfully"
                    );
                    PackageBuildResult::Success
                }
                Some(code) => {
                    error!(pkgname = %pkgname,
                        exit_code = code,
                        "pkg-build failed"
                    );

                    if !patterns.is_empty() {
                        self.save_wrkdir_files(
                            pkgname, pkgpath, logdir, patterns, &pkg_env,
                        );
                        self.run_clean(pkgpath);
                    }
                    PackageBuildResult::Failed
                }
                None => {
                    warn!(pkgname = %pkgname,
                        "pkg-build terminated by signal"
                    );
                    PackageBuildResult::Failed
                }
            }
        };

        if let Some(post_build) = self.config.script("post-build") {
            debug!(pkgname = %pkgname, "Running post-build script");
            if let Ok(child) =
                self.sandbox.execute(self.id, post_build, envs, None, None)
            {
                match child.wait_with_output() {
                    Ok(output) if !output.status.success() => {
                        warn!(pkgname = %pkgname, exit_code = ?output.status.code(), "post-build script failed");
                    }
                    Err(e) => {
                        warn!(pkgname = %pkgname, error = %e, "Failed to wait for post-build");
                    }
                    _ => {}
                }
            }
        }

        Ok(result)
    }

    fn save_wrkdir_files(
        &self,
        pkgname: &str,
        pkgpath: &PkgPath,
        logdir: &Path,
        patterns: &[String],
        pkg_env: &HashMap<String, String>,
    ) {
        let make = MakeQuery::new(
            &self.config,
            &self.sandbox,
            self.id,
            pkgpath,
            pkg_env,
        );

        let wrkdir = match make.wrkdir() {
            Some(w) => w,
            None => {
                debug!(pkgname = %pkgname, "Could not determine WRKDIR, skipping file save");
                return;
            }
        };

        let wrkdir_path = make.resolve_path(&wrkdir);

        if !wrkdir_path.exists() {
            debug!(pkgname = %pkgname,
                wrkdir = %wrkdir_path.display(),
                "WRKDIR does not exist, skipping file save"
            );
            return;
        }

        let save_dir = logdir.join(pkgname).join("wrkdir-files");
        if let Err(e) = fs::create_dir_all(&save_dir) {
            warn!(pkgname = %pkgname,
                error = %e,
                "Failed to create wrkdir-files directory"
            );
            return;
        }

        let compiled_patterns: Vec<Pattern> = patterns
            .iter()
            .filter_map(|p| {
                Pattern::new(p).ok().or_else(|| {
                    warn!(pattern = %p, "Invalid glob pattern");
                    None
                })
            })
            .collect();

        if compiled_patterns.is_empty() {
            return;
        }

        let mut saved_count = 0;
        if let Err(e) = walk_and_save(
            &wrkdir_path,
            &wrkdir_path,
            &save_dir,
            &compiled_patterns,
            &mut saved_count,
        ) {
            warn!(pkgname = %pkgname,
                error = %e,
                "Error while saving wrkdir files"
            );
        }

        if saved_count > 0 {
            info!(pkgname = %pkgname,
                count = saved_count,
                dest = %save_dir.display(),
                "Saved wrkdir files"
            );
        }
    }

    fn run_clean(&self, pkgpath: &PkgPath) {
        let pkgdir = self.config.pkgsrc().join(pkgpath.as_path());

        let result = if self.sandbox.enabled() {
            Command::new("/usr/sbin/chroot")
                .arg(self.sandbox.path(self.id))
                .arg(self.config.make())
                .arg("-C")
                .arg(&pkgdir)
                .arg("clean")
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .status()
        } else {
            Command::new(self.config.make())
                .arg("-C")
                .arg(&pkgdir)
                .arg("clean")
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .status()
        };

        if let Err(e) = result {
            debug!(error = %e, "Failed to run bmake clean");
        }
    }
}

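/// Recursively walks `current` beneath `base`, copying every file whose
/// relative path or file name matches one of `patterns` into `save_dir` and
/// bumping `saved_count` for each file copied.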
fn walk_and_save(
    base: &Path,
    current: &Path,
    save_dir: &Path,
    patterns: &[Pattern],
    saved_count: &mut usize,
) -> std::io::Result<()> {
    if !current.is_dir() {
        return Ok(());
    }

    for entry in fs::read_dir(current)? {
        let entry = entry?;
        let path = entry.path();

        if path.is_dir() {
            walk_and_save(base, &path, save_dir, patterns, saved_count)?;
        } else if path.is_file() {
            let rel_path = path.strip_prefix(base).unwrap_or(&path);
            let rel_str = rel_path.to_string_lossy();

            for pattern in patterns {
                if pattern.matches(&rel_str)
                    || pattern.matches(
                        path.file_name()
                            .unwrap_or_default()
                            .to_string_lossy()
                            .as_ref(),
                    )
                {
                    let dest_path = save_dir.join(rel_path);
                    if let Some(parent) = dest_path.parent() {
                        fs::create_dir_all(parent)?;
                    }

                    if let Err(e) = fs::copy(&path, &dest_path) {
                        warn!(src = %path.display(),
                            dest = %dest_path.display(),
                            error = %e,
                            "Failed to copy file"
                        );
                    } else {
                        debug!(src = %path.display(),
                            dest = %dest_path.display(),
                            "Saved wrkdir file"
                        );
                        *saved_count += 1;
                    }
                    break;
                }
            }
        }
    }

    Ok(())
}

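/// Messages exchanged between the manager thread and the worker threads.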
#[derive(Debug)]
enum ChannelCommand {
    ClientReady(usize),
    ComeBackLater,
    JobData(Box<PackageBuild>),
    JobSuccess(PkgName, Duration),
    JobFailed(PkgName, Duration),
    JobSkipped(PkgName),
    JobError((PkgName, Duration, anyhow::Error)),
    Quit,
    Shutdown,
    StageUpdate(usize, Option<String>),
    OutputLines(usize, Vec<String>),
}

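/// Answer from the job queue when a worker asks for its next package.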
#[derive(Debug)]
enum BuildStatus {
    Available(PkgName),
    NoneAvailable,
    Done,
}

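/// Dependency-aware job queue driven by the manager thread.  `incoming` maps
/// each pending package to the dependencies it is still waiting on; a
/// package becomes eligible to build once that set is empty.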
#[derive(Clone, Debug)]
struct BuildJobs {
    scanpkgs: IndexMap<PkgName, ResolvedIndex>,
    incoming: HashMap<PkgName, HashSet<PkgName>>,
    running: HashSet<PkgName>,
    done: HashSet<PkgName>,
    failed: HashSet<PkgName>,
    results: Vec<BuildResult>,
    logdir: PathBuf,
    #[allow(dead_code)]
    cached_count: usize,
}

impl BuildJobs {
    fn mark_success(&mut self, pkgname: &PkgName, duration: Duration) {
        self.mark_done(pkgname, BuildOutcome::Success, duration);
    }

    fn mark_up_to_date(&mut self, pkgname: &PkgName) {
        self.mark_done(pkgname, BuildOutcome::UpToDate, Duration::ZERO);
    }

    fn mark_done(
        &mut self,
        pkgname: &PkgName,
        outcome: BuildOutcome,
        duration: Duration,
    ) {
        for dep in self.incoming.values_mut() {
            if dep.contains(pkgname) {
                dep.remove(pkgname);
            }
        }
        self.done.insert(pkgname.clone());

        let scanpkg = self.scanpkgs.get(pkgname);
        let log_dir = Some(self.logdir.join(pkgname.pkgname()));
        self.results.push(BuildResult {
            pkgname: pkgname.clone(),
            pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
            outcome,
            duration,
            log_dir,
        });
    }

    fn mark_failure(&mut self, pkgname: &PkgName, duration: Duration) {
        let mut broken: HashSet<PkgName> = HashSet::new();
        let mut to_check: Vec<PkgName> = vec![];
        to_check.push(pkgname.clone());
        loop {
            let Some(badpkg) = to_check.pop() else {
                break;
            };
            if broken.contains(&badpkg) {
                continue;
            }
            for (pkg, deps) in &self.incoming {
                if deps.contains(&badpkg) {
                    to_check.push(pkg.clone());
                }
            }
            broken.insert(badpkg);
        }
        let is_original = |p: &PkgName| p == pkgname;
        for pkg in broken {
            self.incoming.remove(&pkg);
            self.failed.insert(pkg.clone());

            let scanpkg = self.scanpkgs.get(&pkg);
            let log_dir = Some(self.logdir.join(pkg.pkgname()));
            let (outcome, dur) = if is_original(&pkg) {
                (BuildOutcome::Failed("Build failed".to_string()), duration)
            } else {
                (
                    BuildOutcome::IndirectFailed(pkgname.pkgname().to_string()),
                    Duration::ZERO,
                )
            };
            self.results.push(BuildResult {
                pkgname: pkg,
                pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
                outcome,
                duration: dur,
                log_dir,
            });
        }
    }

    #[allow(dead_code)]
    fn mark_prefailed(&mut self, pkgname: &PkgName, reason: String) {
        let mut broken: HashSet<PkgName> = HashSet::new();
        let mut to_check: Vec<PkgName> = vec![];
        to_check.push(pkgname.clone());

        loop {
            let Some(badpkg) = to_check.pop() else {
                break;
            };
            if broken.contains(&badpkg) {
                continue;
            }
            for (pkg, deps) in &self.incoming {
                if deps.contains(&badpkg) {
                    to_check.push(pkg.clone());
                }
            }
            broken.insert(badpkg);
        }

        let is_original = |p: &PkgName| p == pkgname;
        for pkg in broken {
            self.incoming.remove(&pkg);
            self.failed.insert(pkg.clone());

            let scanpkg = self.scanpkgs.get(&pkg);
            let log_dir = Some(self.logdir.join(pkg.pkgname()));
            let outcome = if is_original(&pkg) {
                BuildOutcome::PreFailed(reason.clone())
            } else {
                BuildOutcome::IndirectPreFailed(pkgname.pkgname().to_string())
            };
            self.results.push(BuildResult {
                pkgname: pkg,
                pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
                outcome,
                duration: Duration::ZERO,
                log_dir,
            });
        }
    }

    fn get_next_build(&self) -> BuildStatus {
        if self.incoming.is_empty() {
            return BuildStatus::Done;
        }

        let mut pkgs: Vec<(PkgName, usize)> = self
            .incoming
            .iter()
            .filter(|(_, v)| v.is_empty())
            .map(|(k, _)| {
                (
                    k.clone(),
                    self.scanpkgs
                        .get(k)
                        .unwrap()
                        .pbulk_weight
                        .clone()
                        .unwrap_or("100".to_string())
                        .parse()
                        .unwrap_or(100),
                )
            })
            .collect();

        if pkgs.is_empty() {
            return BuildStatus::NoneAvailable;
        }

        pkgs.sort_by_key(|&(_, weight)| std::cmp::Reverse(weight));
        BuildStatus::Available(pkgs[0].0.clone())
    }
}

impl Build {
    pub fn new(
        config: &Config,
        scanpkgs: IndexMap<PkgName, ResolvedIndex>,
    ) -> Build {
        let sandbox = Sandbox::new(config);
        info!(
            package_count = scanpkgs.len(),
            sandbox_enabled = sandbox.enabled(),
            build_threads = config.build_threads(),
            "Creating new Build instance"
        );
        for (pkgname, index) in &scanpkgs {
            debug!(pkgname = %pkgname.pkgname(),
                pkgpath = ?index.pkg_location,
                depends_count = index.depends.len(),
                depends = ?index.depends.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
                "Package in build queue"
            );
        }
        Build {
            config: config.clone(),
            sandbox,
            scanpkgs,
            cached: IndexMap::new(),
        }
    }

    pub fn load_cached(
        &mut self,
        cached: IndexMap<PkgName, BuildResult>,
    ) -> usize {
        let mut count = 0;
        for (pkgname, result) in cached {
            if self.scanpkgs.contains_key(&pkgname) {
                self.cached.insert(pkgname, result);
                count += 1;
            }
        }
        info!(cached_count = count, "Loaded cached build results");
        count
    }

    pub fn cached(&self) -> &IndexMap<PkgName, BuildResult> {
        &self.cached
    }

    pub fn start(&mut self, ctx: &RunContext) -> anyhow::Result<BuildSummary> {
        let started = Instant::now();

        info!(package_count = self.scanpkgs.len(), "Build::start() called");

        let shutdown_flag = Arc::clone(&ctx.shutdown);
        let stats = ctx.stats.clone();

        debug!("Populating BuildJobs from scanpkgs");
        let mut incoming: HashMap<PkgName, HashSet<PkgName>> = HashMap::new();
        for (pkgname, index) in &self.scanpkgs {
            let mut deps: HashSet<PkgName> = HashSet::new();
            for dep in &index.depends {
                deps.insert(dep.clone());
            }
            trace!(pkgname = %pkgname.pkgname(),
                deps_count = deps.len(),
                deps = ?deps.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
                "Adding package to incoming build queue"
            );
            incoming.insert(pkgname.clone(), deps);
        }

        let mut done: HashSet<PkgName> = HashSet::new();
        let mut failed: HashSet<PkgName> = HashSet::new();
        let mut results: Vec<BuildResult> = Vec::new();
        let mut cached_count = 0usize;

        for (pkgname, result) in &self.cached {
            match result.outcome {
                BuildOutcome::Success | BuildOutcome::UpToDate => {
                    incoming.remove(pkgname);
                    done.insert(pkgname.clone());
                    for deps in incoming.values_mut() {
                        deps.remove(pkgname);
                    }
                    results.push(result.clone());
                    cached_count += 1;
                }
                BuildOutcome::Failed(_)
                | BuildOutcome::PreFailed(_)
                | BuildOutcome::IndirectFailed(_)
                | BuildOutcome::IndirectPreFailed(_) => {
                    incoming.remove(pkgname);
                    failed.insert(pkgname.clone());
                    results.push(result.clone());
                    cached_count += 1;
                }
            }
        }

        if cached_count > 0 {
            println!("Loaded {} cached build results", cached_count);
        }

        info!(
            incoming_count = incoming.len(),
            scanpkgs_count = self.scanpkgs.len(),
            cached_count = cached_count,
            "BuildJobs populated"
        );

        let running: HashSet<PkgName> = HashSet::new();
        let logdir = self.config.logdir().clone();
        let jobs = BuildJobs {
            scanpkgs: self.scanpkgs.clone(),
            incoming,
            running,
            done,
            failed,
            results,
            logdir,
            cached_count,
        };

        if self.sandbox.enabled() {
            println!("Creating sandboxes...");
            for i in 0..self.config.build_threads() {
                if let Err(e) = self.sandbox.create(i) {
                    for j in (0..=i).rev() {
                        if let Err(destroy_err) = self.sandbox.destroy(j) {
                            eprintln!(
                                "Warning: failed to destroy sandbox {}: {}",
                                j, destroy_err
                            );
                        }
                    }
                    return Err(e);
                }
            }
        }

        println!("Building packages...");

        let progress = Arc::new(Mutex::new(
            MultiProgress::new(
                "Building",
                "Built",
                self.scanpkgs.len(),
                self.config.build_threads(),
            )
            .expect("Failed to initialize progress display"),
        ));

        if cached_count > 0 {
            if let Ok(mut p) = progress.lock() {
                p.state_mut().cached = cached_count;
            }
        }

        let stop_refresh = Arc::new(AtomicBool::new(false));

        let progress_refresh = Arc::clone(&progress);
        let stop_flag = Arc::clone(&stop_refresh);
        let shutdown_for_refresh = Arc::clone(&shutdown_flag);
        let refresh_thread = std::thread::spawn(move || {
            while !stop_flag.load(Ordering::Relaxed)
                && !shutdown_for_refresh.load(Ordering::SeqCst)
            {
                if let Ok(mut p) = progress_refresh.lock() {
                    let _ = p.poll_events();
                    let _ = p.render_throttled();
                }
                std::thread::sleep(Duration::from_millis(50));
            }
        });

        let (manager_tx, manager_rx) = mpsc::channel::<ChannelCommand>();

        let mut threads = vec![];
        let mut clients: HashMap<usize, Sender<ChannelCommand>> =
            HashMap::new();
        for i in 0..self.config.build_threads() {
            let (client_tx, client_rx) = mpsc::channel::<ChannelCommand>();
            clients.insert(i, client_tx);
            let manager_tx = manager_tx.clone();
            let thread = std::thread::spawn(move || {
                loop {
                    if manager_tx.send(ChannelCommand::ClientReady(i)).is_err()
                    {
                        break;
                    }

                    let Ok(msg) = client_rx.recv() else {
                        break;
                    };

                    match msg {
                        ChannelCommand::ComeBackLater => {
                            std::thread::sleep(Duration::from_millis(100));
                            continue;
                        }
                        ChannelCommand::JobData(pkg) => {
                            let pkgname = pkg.pkginfo.pkgname.clone();
                            let build_start = Instant::now();
                            match pkg.build(&manager_tx) {
                                Ok(PackageBuildResult::Success) => {
                                    let duration = build_start.elapsed();
                                    let _ = manager_tx.send(
                                        ChannelCommand::JobSuccess(
                                            pkgname, duration,
                                        ),
                                    );
                                }
                                Ok(PackageBuildResult::Skipped) => {
                                    let _ = manager_tx.send(
                                        ChannelCommand::JobSkipped(pkgname),
                                    );
                                }
                                Ok(PackageBuildResult::Failed) => {
                                    let duration = build_start.elapsed();
                                    let _ = manager_tx.send(
                                        ChannelCommand::JobFailed(
                                            pkgname, duration,
                                        ),
                                    );
                                }
                                Err(e) => {
                                    let duration = build_start.elapsed();
                                    let _ = manager_tx.send(
                                        ChannelCommand::JobError((
                                            pkgname, duration, e,
                                        )),
                                    );
                                }
                            }
                            continue;
                        }
                        ChannelCommand::Quit | ChannelCommand::Shutdown => {
                            break;
                        }
                        _ => todo!(),
                    }
                }
            });
            threads.push(thread);
        }

        let config = self.config.clone();
        let sandbox = self.sandbox.clone();
        let progress_clone = Arc::clone(&progress);
        let shutdown_for_manager = Arc::clone(&shutdown_flag);
        let stats_for_manager = stats.clone();
        let (results_tx, results_rx) = mpsc::channel::<Vec<BuildResult>>();
        let (interrupted_tx, interrupted_rx) = mpsc::channel::<bool>();
        let manager = std::thread::spawn(move || {
            let mut clients = clients.clone();
            let config = config.clone();
            let sandbox = sandbox.clone();
            let mut jobs = jobs.clone();
            let mut was_interrupted = false;
            let stats = stats_for_manager;

            let mut thread_packages: HashMap<usize, PkgName> = HashMap::new();

            loop {
                if shutdown_for_manager.load(Ordering::SeqCst) {
                    if let Ok(mut p) = progress_clone.lock() {
                        p.state_mut().suppress();
                    }
                    for (_, client) in clients.drain() {
                        let _ = client.send(ChannelCommand::Shutdown);
                    }
                    was_interrupted = true;
                    break;
                }

                let command =
                    match manager_rx.recv_timeout(Duration::from_millis(50)) {
                        Ok(cmd) => cmd,
                        Err(mpsc::RecvTimeoutError::Timeout) => continue,
                        Err(mpsc::RecvTimeoutError::Disconnected) => break,
                    };

                match command {
                    ChannelCommand::ClientReady(c) => {
                        let client = clients.get(&c).unwrap();
                        match jobs.get_next_build() {
                            BuildStatus::Available(pkg) => {
                                let pkginfo = jobs.scanpkgs.get(&pkg).unwrap();
                                jobs.incoming.remove(&pkg);
                                jobs.running.insert(pkg.clone());

                                thread_packages.insert(c, pkg.clone());
                                if let Ok(mut p) = progress_clone.lock() {
                                    p.clear_output_buffer(c);
                                    p.state_mut()
                                        .set_worker_active(c, pkg.pkgname());
                                    let _ = p.render_throttled();
                                }

                                let _ = client.send(ChannelCommand::JobData(
                                    Box::new(PackageBuild {
                                        id: c,
                                        config: config.clone(),
                                        pkginfo: pkginfo.clone(),
                                        sandbox: sandbox.clone(),
                                    }),
                                ));
                            }
                            BuildStatus::NoneAvailable => {
                                if let Ok(mut p) = progress_clone.lock() {
                                    p.clear_output_buffer(c);
                                    p.state_mut().set_worker_idle(c);
                                    let _ = p.render_throttled();
                                }
                                let _ =
                                    client.send(ChannelCommand::ComeBackLater);
                            }
                            BuildStatus::Done => {
                                if let Ok(mut p) = progress_clone.lock() {
                                    p.clear_output_buffer(c);
                                    p.state_mut().set_worker_idle(c);
                                    let _ = p.render_throttled();
                                }
                                let _ = client.send(ChannelCommand::Quit);
                                clients.remove(&c);
                                if clients.is_empty() {
                                    break;
                                }
                            }
                        };
                    }
                    ChannelCommand::JobSuccess(pkgname, duration) => {
                        if shutdown_for_manager.load(Ordering::SeqCst) {
                            continue;
                        }

                        if let Some(ref s) = stats {
                            let pkgpath = jobs
                                .scanpkgs
                                .get(&pkgname)
                                .and_then(|idx| idx.pkg_location.as_ref())
                                .map(|p| {
                                    p.as_path().to_string_lossy().to_string()
                                });
                            s.build(
                                pkgname.pkgname(),
                                pkgpath.as_deref(),
                                duration,
                                "success",
                            );
                        }

                        jobs.mark_success(&pkgname, duration);
                        jobs.running.remove(&pkgname);

                        if let Ok(mut p) = progress_clone.lock() {
                            let _ = p.print_status(&format!(
                                " Built {} ({})",
                                pkgname.pkgname(),
                                format_duration(duration)
                            ));
                            p.state_mut().increment_completed();
                            for (tid, pkg) in &thread_packages {
                                if pkg == &pkgname {
                                    p.clear_output_buffer(*tid);
                                    p.state_mut().set_worker_idle(*tid);
                                    break;
                                }
                            }
                            let _ = p.render_throttled();
                        }
                    }
                    ChannelCommand::JobSkipped(pkgname) => {
                        if shutdown_for_manager.load(Ordering::SeqCst) {
                            continue;
                        }

                        if let Some(ref s) = stats {
                            let pkgpath = jobs
                                .scanpkgs
                                .get(&pkgname)
                                .and_then(|idx| idx.pkg_location.as_ref())
                                .map(|p| {
                                    p.as_path().to_string_lossy().to_string()
                                });
                            s.build(
                                pkgname.pkgname(),
                                pkgpath.as_deref(),
                                Duration::ZERO,
                                "skipped",
                            );
                        }

                        jobs.mark_up_to_date(&pkgname);
                        jobs.running.remove(&pkgname);

                        if let Ok(mut p) = progress_clone.lock() {
                            let _ = p.print_status(&format!(
                                " Skipped {} (up-to-date)",
                                pkgname.pkgname()
                            ));
                            p.state_mut().increment_skipped();
                            for (tid, pkg) in &thread_packages {
                                if pkg == &pkgname {
                                    p.clear_output_buffer(*tid);
                                    p.state_mut().set_worker_idle(*tid);
                                    break;
                                }
                            }
                            let _ = p.render_throttled();
                        }
                    }
                    ChannelCommand::JobFailed(pkgname, duration) => {
                        if shutdown_for_manager.load(Ordering::SeqCst) {
                            continue;
                        }

                        if let Some(ref s) = stats {
                            let pkgpath = jobs
                                .scanpkgs
                                .get(&pkgname)
                                .and_then(|idx| idx.pkg_location.as_ref())
                                .map(|p| {
                                    p.as_path().to_string_lossy().to_string()
                                });
                            s.build(
                                pkgname.pkgname(),
                                pkgpath.as_deref(),
                                duration,
                                "failed",
                            );
                        }

                        jobs.mark_failure(&pkgname, duration);
                        jobs.running.remove(&pkgname);

                        if let Ok(mut p) = progress_clone.lock() {
                            let _ = p.print_status(&format!(
                                " Failed {} ({})",
                                pkgname.pkgname(),
                                format_duration(duration)
                            ));
                            p.state_mut().increment_failed();
                            for (tid, pkg) in &thread_packages {
                                if pkg == &pkgname {
                                    p.clear_output_buffer(*tid);
                                    p.state_mut().set_worker_idle(*tid);
                                    break;
                                }
                            }
                            let _ = p.render_throttled();
                        }
                    }
                    ChannelCommand::JobError((pkgname, duration, e)) => {
                        if shutdown_for_manager.load(Ordering::SeqCst) {
                            continue;
                        }

                        if let Some(ref s) = stats {
                            let pkgpath = jobs
                                .scanpkgs
                                .get(&pkgname)
                                .and_then(|idx| idx.pkg_location.as_ref())
                                .map(|p| {
                                    p.as_path().to_string_lossy().to_string()
                                });
                            s.build(
                                pkgname.pkgname(),
                                pkgpath.as_deref(),
                                duration,
                                "error",
                            );
                        }

                        jobs.mark_failure(&pkgname, duration);
                        jobs.running.remove(&pkgname);

                        if let Ok(mut p) = progress_clone.lock() {
                            let _ = p.print_status(&format!(
                                " Failed {} ({})",
                                pkgname.pkgname(),
                                format_duration(duration)
                            ));
                            p.state_mut().increment_failed();
                            for (tid, pkg) in &thread_packages {
                                if pkg == &pkgname {
                                    p.clear_output_buffer(*tid);
                                    p.state_mut().set_worker_idle(*tid);
                                    break;
                                }
                            }
                            let _ = p.render_throttled();
                        }
                        tracing::error!(error = %e, pkgname = %pkgname.pkgname(), "Build error");
                    }
                    ChannelCommand::StageUpdate(tid, stage) => {
                        if let Ok(mut p) = progress_clone.lock() {
                            p.state_mut()
                                .set_worker_stage(tid, stage.as_deref());
                            let _ = p.render_throttled();
                        }
                    }
                    ChannelCommand::OutputLines(tid, lines) => {
                        if let Ok(mut p) = progress_clone.lock() {
                            if let Some(buf) = p.output_buffer_mut(tid) {
                                for line in lines {
                                    buf.push(line);
                                }
                            }
                        }
                    }
                    _ => {}
                }
            }

            let _ = results_tx.send(jobs.results);
            let _ = interrupted_tx.send(was_interrupted);
        });

        threads.push(manager);
        for thread in threads {
            thread.join().expect("thread panicked");
        }

        stop_refresh.store(true, Ordering::Relaxed);
        let _ = refresh_thread.join();

        let was_interrupted = interrupted_rx.recv().unwrap_or(false);

        if let Ok(mut p) = progress.lock() {
            if was_interrupted {
                let _ = p.finish_interrupted();
            } else {
                let _ = p.finish();
            }
        }

        let results = results_rx.recv().unwrap_or_default();
        let summary = BuildSummary {
            duration: started.elapsed(),
            results,
            scan_failed: Vec::new(),
        };

        if self.sandbox.enabled() {
            self.sandbox.destroy_all(self.config.build_threads())?;
        }

        Ok(summary)
    }
}