bob/build.rs

/*
 * Copyright (c) 2025 Jonathan Perkin <jonathan@perkin.org.uk>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

//! Parallel package builds.
//!
//! This module provides the [`Build`] struct for building packages in parallel
//! across multiple sandboxes. Packages are scheduled using a dependency graph
//! to ensure correct build order.
//!
//! # Build Process
//!
//! 1. Create build sandboxes (one per build thread, as configured by
//!    `build_threads`)
//! 2. Execute the pre-build script in each sandbox
//! 3. Build packages in parallel, respecting dependencies
//! 4. Execute the post-build script after each package
//! 5. Destroy the sandboxes and generate a report
//!
//! # Build Phases
//!
//! Each package goes through these phases in turn:
//!
//! - `pre-clean` - Clean any previous build artifacts
//! - `depends` - Install required dependencies
//! - `checksum` - Verify distfile checksums
//! - `configure` - Configure the build
//! - `build` - Compile the package
//! - `install` - Install to the staging area
//! - `package` - Create the binary package
//! - `deinstall` - Test package removal (non-bootstrap packages only)
//! - `clean` - Clean up build artifacts
//!
//! # Example
//!
//! ```no_run
//! use bob::{Build, BuildOptions, Config, Database, RunContext, Scan};
//! use std::sync::Arc;
//! use std::sync::atomic::AtomicBool;
//!
//! let config = Config::load(None, false)?;
//! let db_path = config.logdir().join("bob").join("bob.db");
//! let db = Database::open(&db_path)?;
//! let mut scan = Scan::new(&config);
//! // Add packages...
//! let ctx = RunContext::new(Arc::new(AtomicBool::new(false)));
//! scan.start(&ctx, &db)?;
//! let result = scan.resolve(&db)?;
//!
//! let mut build = Build::new(&config, result.buildable, BuildOptions::default());
//! let summary = build.start(&ctx, &db)?;
//!
//! println!("Built {} packages", summary.success_count());
//! # Ok::<(), anyhow::Error>(())
//! ```

68use crate::scan::ResolvedIndex;
69use crate::scan::ScanFailure;
70use crate::tui::{MultiProgress, format_duration};
71use crate::{Config, RunContext, Sandbox};
72use anyhow::{Context, bail};
73use glob::Pattern;
74use indexmap::IndexMap;
75use pkgsrc::{PkgName, PkgPath};
76use std::collections::{HashMap, HashSet, VecDeque};
77use std::fs::{self, File, OpenOptions};
78use std::path::{Path, PathBuf};
79use std::process::{Command, ExitStatus, Stdio};
80use std::sync::atomic::{AtomicBool, Ordering};
81use std::sync::{Arc, Mutex, mpsc, mpsc::Sender};
82use std::time::{Duration, Instant};
83use tracing::{debug, error, info, trace, warn};
84
85/// Build stages in order of execution.
86#[derive(Debug, Clone, Copy, PartialEq, Eq)]
87enum Stage {
88    PreClean,
89    Depends,
90    Checksum,
91    Configure,
92    Build,
93    Install,
94    Package,
95    Deinstall,
96    Clean,
97}
98
99impl Stage {
100    fn as_str(&self) -> &'static str {
101        match self {
102            Stage::PreClean => "pre-clean",
103            Stage::Depends => "depends",
104            Stage::Checksum => "checksum",
105            Stage::Configure => "configure",
106            Stage::Build => "build",
107            Stage::Install => "install",
108            Stage::Package => "package",
109            Stage::Deinstall => "deinstall",
110            Stage::Clean => "clean",
111        }
112    }
113}
114
115/// Result of a package build.
116#[derive(Debug)]
117enum PkgBuildResult {
118    Success,
119    Failed,
120    Skipped,
121}
122
123/// How to run a command.
124#[derive(Debug, Clone, Copy)]
125enum RunAs {
126    Root,
127    User,
128}
129
130/// Callback for status updates during build.
131trait BuildCallback: Send {
132    fn stage(&mut self, stage: &str);
133}
134
135/// Package builder that executes build stages.
136struct PkgBuilder<'a> {
137    config: &'a Config,
138    sandbox: &'a Sandbox,
139    sandbox_id: usize,
140    pkginfo: &'a ResolvedIndex,
141    logdir: PathBuf,
142    build_user: Option<String>,
143    envs: Vec<(String, String)>,
144    output_tx: Option<Sender<ChannelCommand>>,
145    options: &'a BuildOptions,
146}
147
148impl<'a> PkgBuilder<'a> {
149    fn new(
150        config: &'a Config,
151        sandbox: &'a Sandbox,
152        sandbox_id: usize,
153        pkginfo: &'a ResolvedIndex,
154        envs: Vec<(String, String)>,
155        output_tx: Option<Sender<ChannelCommand>>,
156        options: &'a BuildOptions,
157    ) -> Self {
158        let logdir = config.logdir().join(pkginfo.pkgname.pkgname());
159        let build_user = config.build_user().map(|s| s.to_string());
160        Self {
161            config,
162            sandbox,
163            sandbox_id,
164            pkginfo,
165            logdir,
166            build_user,
167            envs,
168            output_tx,
169            options,
170        }
171    }
172
173    /// Run a command in the sandbox and capture its stdout.
174    fn run_cmd(&self, cmd: &Path, args: &[&str]) -> Option<String> {
175        let mut command = self.sandbox.command(self.sandbox_id, cmd);
176        command.args(args);
177        self.apply_envs(&mut command, &[]);
178        match command.output() {
179            Ok(output) if output.status.success() => {
180                Some(String::from_utf8_lossy(&output.stdout).into_owned())
181            }
182            Ok(output) => {
183                let stderr = String::from_utf8_lossy(&output.stderr);
184                debug!(
185                    cmd = %cmd.display(),
186                    exit_code = ?output.status.code(),
187                    stderr = %stderr.trim(),
188                    "command failed"
189                );
190                None
191            }
192            Err(e) => {
193                debug!(cmd = %cmd.display(), error = %e, "command execution error");
194                None
195            }
196        }
197    }
198
199    /// Check if the package is already up-to-date.
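    ///
    /// A package is considered up-to-date when the binary package already
    /// exists, every file recorded in its BUILD_INFO matches the current
    /// pkgsrc tree (by CVS Id or digest), its recorded dependency list
    /// matches the resolved dependencies, and no dependency package is
    /// newer than it.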
200    fn check_up_to_date(&self) -> bool {
201        let pkgname = self.pkginfo.pkgname.pkgname();
202        let pkgfile =
203            self.config.packages().join("All").join(format!("{}.tgz", pkgname));
204
205        // Check if package file exists
206        if !pkgfile.exists() {
207            debug!(pkgname, path = %pkgfile.display(), "package file not found");
208            return false;
209        }
210
211        let pkgfile_str = pkgfile.to_string_lossy();
212        let pkg_info = self.config.pkgtools().join("pkg_info");
213        let pkg_admin = self.config.pkgtools().join("pkg_admin");
214
215        // Get BUILD_INFO and verify source files
216        let Some(build_info) = self.run_cmd(&pkg_info, &["-qb", &pkgfile_str])
217        else {
218            debug!(pkgname, "pkg_info -qb failed or returned empty");
219            return false;
220        };
221        debug!(
222            pkgname,
223            lines = build_info.lines().count(),
224            "checking BUILD_INFO"
225        );
226
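        // Each BUILD_INFO line is a "path:id" pair, where the id is either a
        // $NetBSD$ CVS Id or a pkg_admin digest of that file. Any mismatch
        // against the current pkgsrc tree means the package must be rebuilt.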
227        for line in build_info.lines() {
228            let Some((file, file_id)) = line.split_once(':') else {
229                continue;
230            };
231            let file_id = file_id.trim();
232            if file.is_empty() || file_id.is_empty() {
233                continue;
234            }
235
236            let src_file = self.config.pkgsrc().join(file);
237            if !src_file.exists() {
238                debug!(pkgname, file, "source file missing");
239                return false;
240            }
241
242            if file_id.starts_with("$NetBSD") {
243                // CVS ID comparison - extract $NetBSD...$ from actual file
244                let Ok(content) = std::fs::read_to_string(&src_file) else {
245                    return false;
246                };
247                let id = content.lines().find_map(|line| {
248                    if let Some(start) = line.find("$NetBSD") {
249                        if let Some(end) = line[start + 1..].find('$') {
250                            return Some(&line[start..start + 1 + end + 1]);
251                        }
252                    }
253                    None
254                });
255                if id != Some(file_id) {
256                    debug!(pkgname, file, "CVS ID mismatch");
257                    return false;
258                }
259            } else {
260                // Hash comparison
261                let src_file_str = src_file.to_string_lossy();
262                let Some(hash) =
263                    self.run_cmd(&pkg_admin, &["digest", &src_file_str])
264                else {
265                    debug!(pkgname, file, "pkg_admin digest failed");
266                    return false;
267                };
268                let hash = hash.trim();
269                if hash != file_id {
270                    debug!(
271                        pkgname,
272                        file,
273                        path = %src_file.display(),
274                        expected = file_id,
275                        actual = hash,
276                        "hash mismatch"
277                    );
278                    return false;
279                }
280            }
281        }
282
283        // Get package dependencies and verify
284        let Some(pkg_deps) = self.run_cmd(&pkg_info, &["-qN", &pkgfile_str])
285        else {
286            return false;
287        };
288
289        // Build sets of recorded vs expected dependencies
290        let recorded_deps: HashSet<&str> = pkg_deps
291            .lines()
292            .map(|l| l.trim())
293            .filter(|l| !l.is_empty())
294            .collect();
295        let expected_deps: HashSet<&str> =
296            self.pkginfo.depends.iter().map(|d| d.pkgname()).collect();
297
298        // If dependency list has changed in any way, rebuild
299        if recorded_deps != expected_deps {
300            debug!(
301                pkgname,
302                recorded = recorded_deps.len(),
303                expected = expected_deps.len(),
304                "dependency list changed"
305            );
306            return false;
307        }
308
309        let pkgfile_mtime = match pkgfile.metadata().and_then(|m| m.modified())
310        {
311            Ok(t) => t,
312            Err(_) => return false,
313        };
314
315        // Check each dependency package exists and is not newer
316        for dep in &recorded_deps {
317            let dep_pkg =
318                self.config.packages().join("All").join(format!("{}.tgz", dep));
319            if !dep_pkg.exists() {
320                debug!(pkgname, dep, "dependency package missing");
321                return false;
322            }
323
324            let dep_mtime = match dep_pkg.metadata().and_then(|m| m.modified())
325            {
326                Ok(t) => t,
327                Err(_) => return false,
328            };
329
330            if dep_mtime > pkgfile_mtime {
331                debug!(pkgname, dep, "dependency is newer");
332                return false;
333            }
334        }
335
336        debug!(pkgname, "package is up-to-date");
337        true
338    }
339
340    /// Run the full build process.
341    fn build<C: BuildCallback>(
342        &self,
343        callback: &mut C,
344    ) -> anyhow::Result<PkgBuildResult> {
345        let pkgname = self.pkginfo.pkgname.pkgname();
346        let Some(pkgpath) = &self.pkginfo.pkg_location else {
347            bail!("Could not get PKGPATH for {}", pkgname);
348        };
349
350        // Check if package is already up-to-date (skip check if force rebuild)
351        if !self.options.force_rebuild && self.check_up_to_date() {
352            return Ok(PkgBuildResult::Skipped);
353        }
354
355        // Clean up and create log directory
356        if self.logdir.exists() {
357            fs::remove_dir_all(&self.logdir)?;
358        }
359        fs::create_dir_all(&self.logdir)?;
360
361        // Create work.log and chown to build_user if set
362        let work_log = self.logdir.join("work.log");
363        File::create(&work_log)?;
364        if let Some(ref user) = self.build_user {
365            let bob_log = File::options()
366                .create(true)
367                .append(true)
368                .open(self.logdir.join("bob.log"))?;
369            let bob_log_err = bob_log.try_clone()?;
370            let _ = Command::new("chown")
371                .arg(user)
372                .arg(&work_log)
373                .stdout(bob_log)
374                .stderr(bob_log_err)
375                .status();
376        }
377
378        let pkgdir = self.config.pkgsrc().join(pkgpath.as_path());
379
380        // Pre-clean
381        callback.stage(Stage::PreClean.as_str());
382        self.run_make_stage(
383            Stage::PreClean,
384            &pkgdir,
385            &["clean"],
386            RunAs::Root,
387            false,
388        )?;
389
390        // Install dependencies
391        if !self.pkginfo.depends.is_empty() {
392            callback.stage(Stage::Depends.as_str());
393            let _ = self.write_stage(Stage::Depends);
394            if !self.install_dependencies()? {
395                return Ok(PkgBuildResult::Failed);
396            }
397        }
398
399        // Checksum
400        callback.stage(Stage::Checksum.as_str());
401        if !self.run_make_stage(
402            Stage::Checksum,
403            &pkgdir,
404            &["checksum"],
405            RunAs::Root,
406            true,
407        )? {
408            return Ok(PkgBuildResult::Failed);
409        }
410
411        // Configure
412        callback.stage(Stage::Configure.as_str());
413        let configure_log = self.logdir.join("configure.log");
414        if !self.run_usergroup_if_needed(
415            Stage::Configure,
416            &pkgdir,
417            &configure_log,
418        )? {
419            return Ok(PkgBuildResult::Failed);
420        }
421        if !self.run_make_stage(
422            Stage::Configure,
423            &pkgdir,
424            &["configure"],
425            self.build_run_as(),
426            true,
427        )? {
428            return Ok(PkgBuildResult::Failed);
429        }
430
431        // Build
432        callback.stage(Stage::Build.as_str());
433        let build_log = self.logdir.join("build.log");
434        if !self.run_usergroup_if_needed(Stage::Build, &pkgdir, &build_log)? {
435            return Ok(PkgBuildResult::Failed);
436        }
437        if !self.run_make_stage(
438            Stage::Build,
439            &pkgdir,
440            &["all"],
441            self.build_run_as(),
442            true,
443        )? {
444            return Ok(PkgBuildResult::Failed);
445        }
446
447        // Install
448        callback.stage(Stage::Install.as_str());
449        let install_log = self.logdir.join("install.log");
450        if !self.run_usergroup_if_needed(
451            Stage::Install,
452            &pkgdir,
453            &install_log,
454        )? {
455            return Ok(PkgBuildResult::Failed);
456        }
457        if !self.run_make_stage(
458            Stage::Install,
459            &pkgdir,
460            &["stage-install"],
461            self.build_run_as(),
462            true,
463        )? {
464            return Ok(PkgBuildResult::Failed);
465        }
466
467        // Package
468        callback.stage(Stage::Package.as_str());
469        if !self.run_make_stage(
470            Stage::Package,
471            &pkgdir,
472            &["stage-package-create"],
473            RunAs::Root,
474            true,
475        )? {
476            return Ok(PkgBuildResult::Failed);
477        }
478
479        // Get the package file path
480        let pkgfile = self.get_make_var(&pkgdir, "STAGE_PKGFILE")?;
481
482        // Test package install (unless bootstrap package)
483        let is_bootstrap = self.pkginfo.bootstrap_pkg.as_deref() == Some("yes");
484        if !is_bootstrap {
485            if !self.pkg_add(&pkgfile)? {
486                return Ok(PkgBuildResult::Failed);
487            }
488
489            // Test package deinstall
490            callback.stage(Stage::Deinstall.as_str());
491            let _ = self.write_stage(Stage::Deinstall);
492            if !self.pkg_delete(pkgname)? {
493                return Ok(PkgBuildResult::Failed);
494            }
495        }
496
497        // Save package to packages directory
498        let packages_dir = self.config.packages().join("All");
499        fs::create_dir_all(&packages_dir)?;
500        let dest = packages_dir.join(
501            Path::new(&pkgfile)
502                .file_name()
503                .context("Invalid package file path")?,
504        );
505        // pkgfile is a path inside the sandbox; prepend sandbox path for host access
506        let host_pkgfile = if self.sandbox.enabled() {
507            self.sandbox
508                .path(self.sandbox_id)
509                .join(pkgfile.trim_start_matches('/'))
510        } else {
511            PathBuf::from(&pkgfile)
512        };
513        fs::copy(&host_pkgfile, &dest)?;
514
515        // Clean
516        callback.stage(Stage::Clean.as_str());
517        let _ = self.run_make_stage(
518            Stage::Clean,
519            &pkgdir,
520            &["clean"],
521            RunAs::Root,
522            false,
523        );
524
525        // Remove log directory on success
526        let _ = fs::remove_dir_all(&self.logdir);
527
528        Ok(PkgBuildResult::Success)
529    }
530
531    /// Determine how to run build commands.
532    fn build_run_as(&self) -> RunAs {
533        if self.build_user.is_some() { RunAs::User } else { RunAs::Root }
534    }
535
536    /// Write the current stage to a .stage file.
537    fn write_stage(&self, stage: Stage) -> anyhow::Result<()> {
538        let stage_file = self.logdir.join(".stage");
539        fs::write(&stage_file, stage.as_str())?;
540        Ok(())
541    }
542
543    /// Run a make stage with output logging.
544    fn run_make_stage(
545        &self,
546        stage: Stage,
547        pkgdir: &Path,
548        targets: &[&str],
549        run_as: RunAs,
550        include_make_flags: bool,
551    ) -> anyhow::Result<bool> {
552        // Write stage to .stage file
553        let _ = self.write_stage(stage);
554
555        let logfile = self.logdir.join(format!("{}.log", stage.as_str()));
556        let work_log = self.logdir.join("work.log");
557
558        let owned_args =
559            self.make_args(pkgdir, targets, include_make_flags, &work_log);
560
561        // Convert to slice of &str for the command
562        let args: Vec<&str> = owned_args.iter().map(|s| s.as_str()).collect();
563
564        debug!(stage = stage.as_str(), targets = ?targets, "Running make stage");
565
566        let status = self.run_command_logged(
567            self.config.make(),
568            &args,
569            run_as,
570            &logfile,
571        )?;
572
573        Ok(status.success())
574    }
575
576    /// Run a command with output logged to a file.
577    fn run_command_logged(
578        &self,
579        cmd: &Path,
580        args: &[&str],
581        run_as: RunAs,
582        logfile: &Path,
583    ) -> anyhow::Result<ExitStatus> {
584        self.run_command_logged_with_env(cmd, args, run_as, logfile, &[])
585    }
586
587    fn run_command_logged_with_env(
588        &self,
589        cmd: &Path,
590        args: &[&str],
591        run_as: RunAs,
592        logfile: &Path,
593        extra_envs: &[(&str, &str)],
594    ) -> anyhow::Result<ExitStatus> {
595        use std::io::{BufRead, BufReader, Write};
596
597        let mut log =
598            OpenOptions::new().create(true).append(true).open(logfile)?;
599
600        // Write command being executed to the log file
601        let _ = writeln!(log, "=> {:?} {:?}", cmd, args);
602        let _ = log.flush();
603
604        // Use tee-style pipe handling when output_tx is available for live view.
605        // Otherwise use direct file redirection.
606        if let Some(ref output_tx) = self.output_tx {
607            // Wrap command in shell to merge stdout/stderr with 2>&1, like the
608            // shell script's run_log function does.
609            let shell_cmd =
610                self.build_shell_command(cmd, args, run_as, extra_envs);
611            let mut child = self
612                .sandbox
613                .command(self.sandbox_id, Path::new("/bin/sh"))
614                .arg("-c")
615                .arg(&shell_cmd)
616                .stdout(Stdio::piped())
617                .stderr(Stdio::null())
618                .spawn()
619                .context("Failed to spawn shell command")?;
620
621            let stdout = child.stdout.take().unwrap();
622            let output_tx = output_tx.clone();
623            let sandbox_id = self.sandbox_id;
624
625            // Spawn thread to read from pipe and tee to file + output channel.
626            // Batch lines and throttle sends to reduce channel overhead.
627            let tee_handle = std::thread::spawn(move || {
628                let mut reader = BufReader::new(stdout);
629                let mut buf = Vec::new();
630                let mut batch = Vec::with_capacity(50);
631                let mut last_send = Instant::now();
632                let send_interval = Duration::from_millis(100);
633
634                loop {
635                    buf.clear();
636                    match reader.read_until(b'\n', &mut buf) {
637                        Ok(0) => break,
638                        Ok(_) => {}
639                        Err(_) => break,
640                    };
641                    // Write raw bytes to log file to preserve original output
642                    let _ = log.write_all(&buf);
643                    // Convert to lossy UTF-8 for live view
644                    let line = String::from_utf8_lossy(&buf);
645                    let line = line.trim_end_matches('\n').to_string();
646                    batch.push(line);
647
648                    // Send batch if interval elapsed or batch is large
649                    if last_send.elapsed() >= send_interval || batch.len() >= 50
650                    {
651                        let _ = output_tx.send(ChannelCommand::OutputLines(
652                            sandbox_id,
653                            std::mem::take(&mut batch),
654                        ));
655                        last_send = Instant::now();
656                    }
657                }
658
659                // Send remaining lines
660                if !batch.is_empty() {
661                    let _ = output_tx
662                        .send(ChannelCommand::OutputLines(sandbox_id, batch));
663                }
664            });
665
666            // Wait for command to exit
667            let status = child.wait()?;
668
669            // Reader thread will exit when pipe closes (process exits)
670            let _ = tee_handle.join();
671
672            trace!(cmd = ?cmd, status = ?status, "Command completed");
673            Ok(status)
674        } else {
675            let status =
676                self.spawn_command_to_file(cmd, args, run_as, extra_envs, log)?;
677            trace!(cmd = ?cmd, status = ?status, "Command completed");
678            Ok(status)
679        }
680    }
681
682    /// Spawn a command with stdout/stderr redirected to a file.
683    fn spawn_command_to_file(
684        &self,
685        cmd: &Path,
686        args: &[&str],
687        run_as: RunAs,
688        extra_envs: &[(&str, &str)],
689        log: File,
690    ) -> anyhow::Result<ExitStatus> {
691        // Clone file handle for stderr (stdout and stderr both go to same file)
692        let log_err = log.try_clone()?;
693
694        match run_as {
695            RunAs::Root => {
696                let mut command = self.sandbox.command(self.sandbox_id, cmd);
697                command.args(args);
698                self.apply_envs(&mut command, extra_envs);
699                command
700                    .stdout(Stdio::from(log))
701                    .stderr(Stdio::from(log_err))
702                    .status()
703                    .with_context(|| format!("Failed to run {}", cmd.display()))
704            }
705            RunAs::User => {
706                let user = self.build_user.as_ref().unwrap();
707                let mut parts = Vec::with_capacity(args.len() + 1);
708                parts.push(cmd.display().to_string());
709                parts.extend(args.iter().map(|arg| arg.to_string()));
710                let inner_cmd = parts
711                    .iter()
712                    .map(|part| Self::shell_escape(part))
713                    .collect::<Vec<_>>()
714                    .join(" ");
715                let mut command =
716                    self.sandbox.command(self.sandbox_id, Path::new("su"));
717                command.arg(user).arg("-c").arg(&inner_cmd);
718                self.apply_envs(&mut command, extra_envs);
719                command
720                    .stdout(Stdio::from(log))
721                    .stderr(Stdio::from(log_err))
722                    .status()
723                    .context("Failed to run su command")
724            }
725        }
726    }
727
728    /// Get a make variable value.
729    fn get_make_var(
730        &self,
731        pkgdir: &Path,
732        varname: &str,
733    ) -> anyhow::Result<String> {
734        let mut cmd = self.sandbox.command(self.sandbox_id, self.config.make());
735        self.apply_envs(&mut cmd, &[]);
736
737        let work_log = self.logdir.join("work.log");
738        let make_args = self.make_args(
739            pkgdir,
740            &["show-var", &format!("VARNAME={}", varname)],
741            true,
742            &work_log,
743        );
744
745        let bob_log = File::options()
746            .create(true)
747            .append(true)
748            .open(self.logdir.join("bob.log"))?;
749        let output =
750            cmd.args(&make_args).stderr(Stdio::from(bob_log)).output()?;
751
752        if !output.status.success() {
753            bail!("Failed to get make variable {}", varname);
754        }
755
756        Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
757    }
758
759    /// Install package dependencies.
760    fn install_dependencies(&self) -> anyhow::Result<bool> {
761        let deps: Vec<String> =
762            self.pkginfo.depends.iter().map(|d| d.to_string()).collect();
763
764        let pkg_path = self.config.packages().join("All");
765        let logfile = self.logdir.join("depends.log");
766
        let args: Vec<&str> = deps.iter().map(|d| d.as_str()).collect();
771
772        let status = self.run_pkg_add_with_path(&args, &pkg_path, &logfile)?;
773        Ok(status.success())
774    }
775
776    /// Run pkg_add with PKG_PATH set.
777    fn run_pkg_add_with_path(
778        &self,
779        packages: &[&str],
780        pkg_path: &Path,
781        logfile: &Path,
782    ) -> anyhow::Result<ExitStatus> {
783        let pkg_add = self.config.pkgtools().join("pkg_add");
784        let pkg_path_value = pkg_path.to_string_lossy().to_string();
785        let extra_envs = [("PKG_PATH", pkg_path_value.as_str())];
786
787        self.run_command_logged_with_env(
788            &pkg_add,
789            packages,
790            RunAs::Root,
791            logfile,
792            &extra_envs,
793        )
794    }
795
796    /// Install a package file.
797    fn pkg_add(&self, pkgfile: &str) -> anyhow::Result<bool> {
798        let pkg_add = self.config.pkgtools().join("pkg_add");
799        let logfile = self.logdir.join("package.log");
800
801        let status = self.run_command_logged(
802            &pkg_add,
803            &[pkgfile],
804            RunAs::Root,
805            &logfile,
806        )?;
807
808        Ok(status.success())
809    }
810
811    /// Delete an installed package.
812    fn pkg_delete(&self, pkgname: &str) -> anyhow::Result<bool> {
813        let pkg_delete = self.config.pkgtools().join("pkg_delete");
814        let logfile = self.logdir.join("deinstall.log");
815
816        let status = self.run_command_logged(
817            &pkg_delete,
818            &[pkgname],
819            RunAs::Root,
820            &logfile,
821        )?;
822
823        Ok(status.success())
824    }
825
826    /// Run create-usergroup if needed based on usergroup_phase.
827    fn run_usergroup_if_needed(
828        &self,
829        stage: Stage,
830        pkgdir: &Path,
831        logfile: &Path,
832    ) -> anyhow::Result<bool> {
833        let usergroup_phase =
834            self.pkginfo.usergroup_phase.as_deref().unwrap_or("");
835
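        // USERGROUP_PHASE, when set, names the build phase before which the
        // package's users and groups must be created (typically "configure",
        // "build", or "pre-install").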
836        let should_run = match stage {
837            Stage::Configure => usergroup_phase.ends_with("configure"),
838            Stage::Build => usergroup_phase.ends_with("build"),
839            Stage::Install => usergroup_phase == "pre-install",
840            _ => false,
841        };
842
843        if !should_run {
844            return Ok(true);
845        }
846
847        let mut args = vec!["-C", pkgdir.to_str().unwrap(), "create-usergroup"];
848        if stage == Stage::Configure {
849            args.push("clean");
850        }
851
852        let status = self.run_command_logged(
853            self.config.make(),
854            &args,
855            RunAs::Root,
856            logfile,
857        )?;
858        Ok(status.success())
859    }
860
861    fn make_args(
862        &self,
863        pkgdir: &Path,
864        targets: &[&str],
865        include_make_flags: bool,
866        work_log: &Path,
867    ) -> Vec<String> {
868        let mut owned_args: Vec<String> =
869            vec!["-C".to_string(), pkgdir.to_str().unwrap().to_string()];
870        owned_args.extend(targets.iter().map(|s| s.to_string()));
871
872        if include_make_flags {
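            // BATCH=1 selects a non-interactive build, and setting
            // DEPENDS_TARGET to /nonexistent makes any attempt to build a
            // missing dependency fail fast; dependencies are installed via
            // pkg_add in the depends stage instead.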
873            owned_args.push("BATCH=1".to_string());
874            owned_args.push("DEPENDS_TARGET=/nonexistent".to_string());
875
876            if let Some(ref multi_version) = self.pkginfo.multi_version {
877                for flag in multi_version {
878                    owned_args.push(flag.clone());
879                }
880            }
881
882            owned_args.push(format!("WRKLOG={}", work_log.display()));
883        }
884
885        owned_args
886    }
887
888    fn apply_envs(&self, cmd: &mut Command, extra_envs: &[(&str, &str)]) {
889        for (key, value) in &self.envs {
890            cmd.env(key, value);
891        }
892        for (key, value) in extra_envs {
893            cmd.env(key, value);
894        }
895    }
896
897    fn shell_escape(value: &str) -> String {
898        if value.is_empty() {
899            return "''".to_string();
900        }
901        if value
902            .chars()
903            .all(|c| c.is_ascii_alphanumeric() || "-_.,/:=+@".contains(c))
904        {
905            return value.to_string();
906        }
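        // Wrap the value in single quotes; any embedded single quote is
        // closed, escaped, and reopened ('\''), the standard POSIX shell
        // quoting trick.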
907        let escaped = value.replace('\'', "'\\''");
908        format!("'{}'", escaped)
909    }
910
911    /// Build a shell command string with environment, run_as handling, and 2>&1.
912    fn build_shell_command(
913        &self,
914        cmd: &Path,
915        args: &[&str],
916        run_as: RunAs,
917        extra_envs: &[(&str, &str)],
918    ) -> String {
919        let mut parts = Vec::new();
920
921        // Add environment variables
922        for (key, value) in &self.envs {
923            parts.push(format!("{}={}", key, Self::shell_escape(value)));
924        }
925        for (key, value) in extra_envs {
926            parts.push(format!("{}={}", key, Self::shell_escape(value)));
927        }
928
929        // Build the actual command
930        let cmd_str = Self::shell_escape(&cmd.to_string_lossy());
931        let args_str: Vec<String> =
932            args.iter().map(|a| Self::shell_escape(a)).collect();
933
934        match run_as {
935            RunAs::Root => {
936                parts.push(cmd_str);
937                parts.extend(args_str);
938            }
939            RunAs::User => {
940                let user = self.build_user.as_ref().unwrap();
941                let inner_cmd = std::iter::once(cmd_str)
942                    .chain(args_str)
943                    .collect::<Vec<_>>()
944                    .join(" ");
945                parts.push("su".to_string());
946                parts.push(Self::shell_escape(user));
947                parts.push("-c".to_string());
948                parts.push(Self::shell_escape(&inner_cmd));
949            }
950        }
951
952        // Merge stdout/stderr
953        parts.push("2>&1".to_string());
954        parts.join(" ")
955    }
956}
957
958/// Callback adapter that sends build updates through a channel.
959struct ChannelCallback<'a> {
960    sandbox_id: usize,
961    status_tx: &'a Sender<ChannelCommand>,
962}
963
964impl<'a> ChannelCallback<'a> {
965    fn new(sandbox_id: usize, status_tx: &'a Sender<ChannelCommand>) -> Self {
966        Self { sandbox_id, status_tx }
967    }
968}
969
970impl<'a> BuildCallback for ChannelCallback<'a> {
971    fn stage(&mut self, stage: &str) {
972        let _ = self.status_tx.send(ChannelCommand::StageUpdate(
973            self.sandbox_id,
974            Some(stage.to_string()),
975        ));
976    }
977}
978
979/// Outcome of a package build attempt.
980///
981/// Used in [`BuildResult`] to indicate whether the build succeeded, failed,
982/// or was skipped.
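///
/// # Example
///
/// A minimal sketch of inspecting an outcome (the `describe` helper is
/// illustrative only):
///
/// ```no_run
/// # use bob::BuildOutcome;
/// # fn describe(outcome: &BuildOutcome) {
/// match outcome {
///     BuildOutcome::Success => println!("built"),
///     BuildOutcome::UpToDate => println!("already up-to-date"),
///     BuildOutcome::Failed(reason) | BuildOutcome::PreFailed(reason) => {
///         println!("failed: {}", reason)
///     }
///     BuildOutcome::IndirectFailed(dep)
///     | BuildOutcome::IndirectPreFailed(dep) => {
///         println!("blocked by failed dependency {}", dep)
///     }
/// }
/// # }
/// ```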
983#[derive(Clone, Debug, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
984pub enum BuildOutcome {
985    /// Package built and packaged successfully.
986    Success,
987    /// Package build failed.
988    ///
989    /// The string contains the failure reason (e.g., "Failed in build phase").
990    Failed(String),
991    /// Package did not need to be built - we already have a binary package
992    /// for this revision.
993    UpToDate,
    /// Package is marked with `PKG_SKIP_REASON` or `PKG_FAIL_REASON` and so
    /// cannot be built.
996    ///
997    /// The string contains the skip/fail reason.
998    PreFailed(String),
999    /// Package depends on a different package that has Failed.
1000    ///
1001    /// The string contains the name of the failed dependency.
1002    IndirectFailed(String),
1003    /// Package depends on a different package that has PreFailed.
1004    ///
1005    /// The string contains the name of the pre-failed dependency.
1006    IndirectPreFailed(String),
1007}
1008
1009/// Result of building a single package.
1010///
1011/// Contains the outcome, timing, and log location for a package build.
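///
/// # Example
///
/// An illustrative sketch of reporting a failed result:
///
/// ```no_run
/// # use bob::{BuildOutcome, BuildResult};
/// # fn report(result: &BuildResult) {
/// if let BuildOutcome::Failed(reason) = &result.outcome {
///     println!("{} failed: {}", result.pkgname.pkgname(), reason);
///     if let Some(dir) = &result.log_dir {
///         println!("  logs in {}", dir.display());
///     }
/// }
/// # }
/// ```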
1012#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
1013pub struct BuildResult {
1014    /// Package name with version (e.g., `mutt-2.2.12`).
1015    pub pkgname: PkgName,
1016    /// Package path in pkgsrc (e.g., `mail/mutt`).
1017    pub pkgpath: Option<PkgPath>,
1018    /// Build outcome (success, failure, or skipped).
1019    pub outcome: BuildOutcome,
1020    /// Time spent building this package.
1021    pub duration: Duration,
1022    /// Path to build logs directory, if available.
1023    ///
1024    /// For failed builds, this contains `pre-clean.log`, `build.log`, etc.
1025    /// Successful builds clean up their log directories.
1026    pub log_dir: Option<PathBuf>,
1027}
1028
1029/// Summary of an entire build run.
1030///
1031/// Contains timing information and results for all packages.
1032///
1033/// # Example
1034///
1035/// ```no_run
1036/// # use bob::BuildSummary;
1037/// # fn example(summary: &BuildSummary) {
1038/// println!("Succeeded: {}", summary.success_count());
1039/// println!("Failed: {}", summary.failed_count());
1040/// println!("Up-to-date: {}", summary.up_to_date_count());
1041/// println!("Duration: {:?}", summary.duration);
1042///
1043/// for result in summary.failed() {
1044///     println!("  {} failed", result.pkgname.pkgname());
1045/// }
1046/// # }
1047/// ```
1048#[derive(Clone, Debug)]
1049pub struct BuildSummary {
1050    /// Total duration of the build run.
1051    pub duration: Duration,
1052    /// Results for each package.
1053    pub results: Vec<BuildResult>,
1054    /// Packages that failed to scan (bmake pbulk-index failed).
1055    pub scan_failed: Vec<ScanFailure>,
1056}
1057
1058impl BuildSummary {
1059    /// Count of successfully built packages.
1060    pub fn success_count(&self) -> usize {
1061        self.results
1062            .iter()
1063            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
1064            .count()
1065    }
1066
1067    /// Count of failed packages (direct build failures only).
1068    pub fn failed_count(&self) -> usize {
1069        self.results
1070            .iter()
1071            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
1072            .count()
1073    }
1074
1075    /// Count of up-to-date packages (already have binary package).
1076    pub fn up_to_date_count(&self) -> usize {
1077        self.results
1078            .iter()
1079            .filter(|r| matches!(r.outcome, BuildOutcome::UpToDate))
1080            .count()
1081    }
1082
1083    /// Count of pre-failed packages (PKG_SKIP_REASON/PKG_FAIL_REASON).
1084    pub fn prefailed_count(&self) -> usize {
1085        self.results
1086            .iter()
1087            .filter(|r| matches!(r.outcome, BuildOutcome::PreFailed(_)))
1088            .count()
1089    }
1090
1091    /// Count of indirect failed packages (depend on Failed).
1092    pub fn indirect_failed_count(&self) -> usize {
1093        self.results
1094            .iter()
1095            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectFailed(_)))
1096            .count()
1097    }
1098
1099    /// Count of indirect pre-failed packages (depend on PreFailed).
1100    pub fn indirect_prefailed_count(&self) -> usize {
1101        self.results
1102            .iter()
1103            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectPreFailed(_)))
1104            .count()
1105    }
1106
1107    /// Count of packages that failed to scan.
1108    pub fn scan_failed_count(&self) -> usize {
1109        self.scan_failed.len()
1110    }
1111
1112    /// Get all failed results (direct build failures only).
1113    pub fn failed(&self) -> Vec<&BuildResult> {
1114        self.results
1115            .iter()
1116            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
1117            .collect()
1118    }
1119
1120    /// Get all successful results.
1121    pub fn succeeded(&self) -> Vec<&BuildResult> {
1122        self.results
1123            .iter()
1124            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
1125            .collect()
1126    }
1127
1128    /// Get all up-to-date results.
1129    pub fn up_to_date(&self) -> Vec<&BuildResult> {
1130        self.results
1131            .iter()
1132            .filter(|r| matches!(r.outcome, BuildOutcome::UpToDate))
1133            .collect()
1134    }
1135
1136    /// Get all pre-failed results.
1137    pub fn prefailed(&self) -> Vec<&BuildResult> {
1138        self.results
1139            .iter()
1140            .filter(|r| matches!(r.outcome, BuildOutcome::PreFailed(_)))
1141            .collect()
1142    }
1143
1144    /// Get all indirect failed results.
1145    pub fn indirect_failed(&self) -> Vec<&BuildResult> {
1146        self.results
1147            .iter()
1148            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectFailed(_)))
1149            .collect()
1150    }
1151
1152    /// Get all indirect pre-failed results.
1153    pub fn indirect_prefailed(&self) -> Vec<&BuildResult> {
1154        self.results
1155            .iter()
1156            .filter(|r| matches!(r.outcome, BuildOutcome::IndirectPreFailed(_)))
1157            .collect()
1158    }
1159}
1160
1161/// Options that control build behavior.
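///
/// # Example
///
/// ```no_run
/// # use bob::BuildOptions;
/// // Force a rebuild even when an up-to-date binary package exists.
/// let options = BuildOptions { force_rebuild: true };
/// ```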
1162#[derive(Clone, Debug, Default)]
1163pub struct BuildOptions {
1164    /// Force rebuild even if package is up-to-date.
1165    pub force_rebuild: bool,
1166}
1167
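/// Parallel package builder.
///
/// A `Build` is created from a [`Config`] and the buildable packages
/// returned by `Scan::resolve`, and drives parallel builds across the
/// configured sandboxes; see the module-level example for typical usage.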
1168#[derive(Debug, Default)]
1169pub struct Build {
1170    /// Parsed [`Config`].
1171    config: Config,
1172    /// [`Sandbox`] configuration.
1173    sandbox: Sandbox,
    /// List of packages to build, as returned by `Scan::resolve`.
1175    scanpkgs: IndexMap<PkgName, ResolvedIndex>,
1176    /// Cached build results from previous run.
1177    cached: IndexMap<PkgName, BuildResult>,
1178    /// Build options.
1179    options: BuildOptions,
1180}
1181
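/// A single package build job dispatched to a worker, carrying the sandbox,
/// configuration, and resolved package metadata needed to build one package.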
1182#[derive(Debug)]
1183struct PackageBuild {
1184    id: usize,
1185    config: Config,
1186    pkginfo: ResolvedIndex,
1187    sandbox: Sandbox,
1188    options: BuildOptions,
1189}
1190
1191/// Helper for querying bmake variables with the correct environment.
1192struct MakeQuery<'a> {
1193    config: &'a Config,
1194    sandbox: &'a Sandbox,
1195    sandbox_id: usize,
1196    pkgpath: &'a PkgPath,
1197    env: &'a HashMap<String, String>,
1198}
1199
1200impl<'a> MakeQuery<'a> {
1201    fn new(
1202        config: &'a Config,
1203        sandbox: &'a Sandbox,
1204        sandbox_id: usize,
1205        pkgpath: &'a PkgPath,
1206        env: &'a HashMap<String, String>,
1207    ) -> Self {
1208        Self { config, sandbox, sandbox_id, pkgpath, env }
1209    }
1210
1211    /// Query a bmake variable value.
1212    fn var(&self, name: &str) -> Option<String> {
1213        let pkgdir = self.config.pkgsrc().join(self.pkgpath.as_path());
1214
1215        let mut cmd = self.sandbox.command(self.sandbox_id, self.config.make());
1216        cmd.arg("-C")
1217            .arg(&pkgdir)
1218            .arg("show-var")
1219            .arg(format!("VARNAME={}", name));
1220
1221        // Pass env vars that may affect the variable value
1222        for (key, value) in self.env {
1223            cmd.env(key, value);
1224        }
1225
1226        cmd.stderr(Stdio::null());
1227
1228        let output = cmd.output().ok()?;
1229
1230        if !output.status.success() {
1231            return None;
1232        }
1233
1234        let value = String::from_utf8_lossy(&output.stdout).trim().to_string();
1235
1236        if value.is_empty() { None } else { Some(value) }
1237    }
1238
1239    /// Query a bmake variable and return as PathBuf.
1240    fn var_path(&self, name: &str) -> Option<PathBuf> {
1241        self.var(name).map(PathBuf::from)
1242    }
1243
1244    /// Get the WRKDIR for this package.
1245    fn wrkdir(&self) -> Option<PathBuf> {
1246        self.var_path("WRKDIR")
1247    }
1248
1249    /// Get the WRKSRC for this package.
1250    #[allow(dead_code)]
1251    fn wrksrc(&self) -> Option<PathBuf> {
1252        self.var_path("WRKSRC")
1253    }
1254
1255    /// Get the DESTDIR for this package.
1256    #[allow(dead_code)]
1257    fn destdir(&self) -> Option<PathBuf> {
1258        self.var_path("DESTDIR")
1259    }
1260
1261    /// Get the PREFIX for this package.
1262    #[allow(dead_code)]
1263    fn prefix(&self) -> Option<PathBuf> {
1264        self.var_path("PREFIX")
1265    }
1266
1267    /// Resolve a path to its actual location on the host filesystem.
1268    /// If sandboxed, prepends the sandbox root path.
1269    fn resolve_path(&self, path: &Path) -> PathBuf {
1270        if self.sandbox.enabled() {
1271            self.sandbox
1272                .path(self.sandbox_id)
1273                .join(path.strip_prefix("/").unwrap_or(path))
1274        } else {
1275            path.to_path_buf()
1276        }
1277    }
1278}
1279
1280/// Result of a single package build attempt.
1281#[derive(Debug)]
1282enum PackageBuildResult {
1283    /// Build succeeded
1284    Success,
1285    /// Build failed
1286    Failed,
1287    /// Package was up-to-date, skipped
1288    Skipped,
1289}
1290
1291impl PackageBuild {
1292    fn build(
1293        &self,
1294        status_tx: &Sender<ChannelCommand>,
1295    ) -> anyhow::Result<PackageBuildResult> {
1296        let pkgname = self.pkginfo.pkgname.pkgname();
1297        info!(pkgname = %pkgname,
1298            sandbox_id = self.id,
1299            "Starting package build"
1300        );
1301
1302        let Some(pkgpath) = &self.pkginfo.pkg_location else {
1303            error!(pkgname = %pkgname, "Could not get PKGPATH for package");
1304            bail!("Could not get PKGPATH for {}", pkgname);
1305        };
1306
1307        let logdir = self.config.logdir();
1308
1309        // Get env vars from Lua config for wrkdir saving and build environment
1310        let pkg_env = match self.config.get_pkg_env(&self.pkginfo) {
1311            Ok(env) => env,
1312            Err(e) => {
1313                error!(pkgname = %pkgname, error = %e, "Failed to get env from Lua config");
1314                HashMap::new()
1315            }
1316        };
1317
1318        let mut envs = self.config.script_env();
1319        for (key, value) in &pkg_env {
1320            envs.push((key.clone(), value.clone()));
1321        }
1322
1323        let patterns = self.config.save_wrkdir_patterns();
1324
1325        // Run pre-build script if defined (always runs)
1326        if let Some(pre_build) = self.config.script("pre-build") {
1327            debug!(pkgname = %pkgname, "Running pre-build script");
1328            let child = self.sandbox.execute(
1329                self.id,
1330                pre_build,
1331                envs.clone(),
1332                None,
1333                None,
1334            )?;
1335            let output = child
1336                .wait_with_output()
1337                .context("Failed to wait for pre-build")?;
1338            if !output.status.success() {
1339                warn!(pkgname = %pkgname, exit_code = ?output.status.code(), "pre-build script failed");
1340            }
1341        }
1342
1343        // Run the build using PkgBuilder
1344        let builder = PkgBuilder::new(
1345            &self.config,
1346            &self.sandbox,
1347            self.id,
1348            &self.pkginfo,
1349            envs.clone(),
1350            Some(status_tx.clone()),
1351            &self.options,
1352        );
1353
1354        let mut callback = ChannelCallback::new(self.id, status_tx);
1355        let result = builder.build(&mut callback);
1356
1357        // Clear stage display
1358        let _ = status_tx.send(ChannelCommand::StageUpdate(self.id, None));
1359
1360        let result = match &result {
1361            Ok(PkgBuildResult::Success) => {
1362                info!(pkgname = %pkgname, "package build completed successfully");
1363                PackageBuildResult::Success
1364            }
1365            Ok(PkgBuildResult::Skipped) => {
1366                info!(pkgname = %pkgname, "package build skipped (up-to-date)");
1367                PackageBuildResult::Skipped
1368            }
            Ok(PkgBuildResult::Failed) | Err(_) => {
                if let Err(e) = &result {
                    error!(pkgname = %pkgname, error = %e, "package build error");
                } else {
                    error!(pkgname = %pkgname, "package build failed");
                }
                // Show cleanup stage to user
                let _ = status_tx.send(ChannelCommand::StageUpdate(
                    self.id,
                    Some("cleanup".to_string()),
                ));
                // Kill any orphaned processes in the sandbox before cleanup.
                // Failed builds may leave processes running that would block
                // subsequent commands like bmake show-var or bmake clean.
                debug!(pkgname = %pkgname, "Calling kill_processes_by_id");
                let kill_start = Instant::now();
                self.sandbox.kill_processes_by_id(self.id);
                debug!(pkgname = %pkgname, elapsed_ms = kill_start.elapsed().as_millis(), "kill_processes_by_id completed");
                // Save wrkdir files matching configured patterns, then clean up
                if !patterns.is_empty() {
                    debug!(pkgname = %pkgname, "Calling save_wrkdir_files");
                    let save_start = Instant::now();
                    self.save_wrkdir_files(
                        pkgname, pkgpath, logdir, patterns, &pkg_env,
                    );
                    debug!(pkgname = %pkgname, elapsed_ms = save_start.elapsed().as_millis(), "save_wrkdir_files completed");
                    debug!(pkgname = %pkgname, "Calling run_clean");
                } else {
                    debug!(pkgname = %pkgname, "Calling run_clean (no patterns)");
                }
                let clean_start = Instant::now();
                self.run_clean(pkgpath, &envs);
                debug!(pkgname = %pkgname, elapsed_ms = clean_start.elapsed().as_millis(), "run_clean completed");
                PackageBuildResult::Failed
            }
1437        };
1438
1439        // Run post-build script if defined (always runs regardless of result)
1440        if let Some(post_build) = self.config.script("post-build") {
1441            debug!(pkgname = %pkgname, script = %post_build.display(), "Running post-build script");
1442            match self.sandbox.execute(self.id, post_build, envs, None, None) {
1443                Ok(child) => {
1444                    debug!(pkgname = %pkgname, pid = ?child.id(), "post-build spawned, waiting");
1445                    match child.wait_with_output() {
1446                        Ok(output) => {
1447                            debug!(pkgname = %pkgname, exit_code = ?output.status.code(), "post-build completed");
1448                            if !output.status.success() {
1449                                warn!(pkgname = %pkgname, exit_code = ?output.status.code(), "post-build script failed");
1450                            }
1451                        }
1452                        Err(e) => {
1453                            warn!(pkgname = %pkgname, error = %e, "Failed to wait for post-build");
1454                        }
1455                    }
1456                }
1457                Err(e) => {
1458                    warn!(pkgname = %pkgname, error = %e, "Failed to spawn post-build script");
1459                }
1460            }
1461        }
1462
1463        Ok(result)
1464    }
1465
1466    /// Save files matching patterns from WRKDIR to logdir on build failure.
1467    fn save_wrkdir_files(
1468        &self,
1469        pkgname: &str,
1470        pkgpath: &PkgPath,
1471        logdir: &Path,
1472        patterns: &[String],
1473        pkg_env: &HashMap<String, String>,
1474    ) {
1475        let make = MakeQuery::new(
1476            &self.config,
1477            &self.sandbox,
1478            self.id,
1479            pkgpath,
1480            pkg_env,
1481        );
1482
1483        // Get WRKDIR
1484        let wrkdir = match make.wrkdir() {
1485            Some(w) => w,
1486            None => {
1487                debug!(pkgname = %pkgname, "Could not determine WRKDIR, skipping file save");
1488                return;
1489            }
1490        };
1491
1492        // Resolve to actual filesystem path
1493        let wrkdir_path = make.resolve_path(&wrkdir);
1494
1495        if !wrkdir_path.exists() {
1496            debug!(pkgname = %pkgname,
1497                wrkdir = %wrkdir_path.display(),
1498                "WRKDIR does not exist, skipping file save"
1499            );
1500            return;
1501        }
1502
1503        let save_dir = logdir.join(pkgname).join("wrkdir-files");
1504        if let Err(e) = fs::create_dir_all(&save_dir) {
1505            warn!(pkgname = %pkgname,
1506                error = %e,
1507                "Failed to create wrkdir-files directory"
1508            );
1509            return;
1510        }
1511
1512        // Compile glob patterns
        let compiled_patterns: Vec<Pattern> = patterns
            .iter()
            .filter_map(|p| match Pattern::new(p) {
                Ok(pattern) => Some(pattern),
                Err(_) => {
                    warn!(pattern = %p, "Invalid glob pattern");
                    None
                }
            })
            .collect();
1522
1523        if compiled_patterns.is_empty() {
1524            return;
1525        }
1526
1527        // Walk the wrkdir and find matching files
1528        let mut saved_count = 0;
1529        if let Err(e) = walk_and_save(
1530            &wrkdir_path,
1531            &wrkdir_path,
1532            &save_dir,
1533            &compiled_patterns,
1534            &mut saved_count,
1535        ) {
1536            warn!(pkgname = %pkgname,
1537                error = %e,
1538                "Error while saving wrkdir files"
1539            );
1540        }
1541
1542        if saved_count > 0 {
1543            info!(pkgname = %pkgname,
1544                count = saved_count,
1545                dest = %save_dir.display(),
1546                "Saved wrkdir files"
1547            );
1548        }
1549    }
1550
1551    /// Run bmake clean for a package.
1552    fn run_clean(&self, pkgpath: &PkgPath, envs: &[(String, String)]) {
1553        let pkgdir = self.config.pkgsrc().join(pkgpath.as_path());
1554
1555        let mut cmd = self.sandbox.command(self.id, self.config.make());
1556        cmd.arg("-C").arg(&pkgdir).arg("clean");
1557        for (key, value) in envs {
1558            cmd.env(key, value);
1559        }
1560        let result = cmd
1561            .stdout(std::process::Stdio::null())
1562            .stderr(std::process::Stdio::null())
1563            .status();
1564
1565        if let Err(e) = result {
1566            debug!(error = %e, "Failed to run bmake clean");
1567        }
1568    }
1569}
1570
1571/// Recursively walk a directory and save files matching patterns.
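// A note on the matching rule used below (glob crate semantics): each pattern
// is tried against the WRKDIR-relative path first and then against the bare
// file name, so a literal pattern still matches files in subdirectories.
//
//     use glob::Pattern;
//     let p = Pattern::new("config.log").unwrap();
//     assert!(!p.matches("work/config.log")); // relative path: no match
//     assert!(p.matches("config.log"));       // bare file name: match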
1572fn walk_and_save(
1573    base: &Path,
1574    current: &Path,
1575    save_dir: &Path,
1576    patterns: &[Pattern],
1577    saved_count: &mut usize,
1578) -> std::io::Result<()> {
1579    if !current.is_dir() {
1580        return Ok(());
1581    }
1582
1583    for entry in fs::read_dir(current)? {
1584        let entry = entry?;
1585        let path = entry.path();
1586
1587        if path.is_dir() {
1588            walk_and_save(base, &path, save_dir, patterns, saved_count)?;
1589        } else if path.is_file() {
1590            // Get relative path from base
1591            let rel_path = path.strip_prefix(base).unwrap_or(&path);
1592            let rel_str = rel_path.to_string_lossy();
1593
1594            // Check if any pattern matches
1595            for pattern in patterns {
1596                if pattern.matches(&rel_str)
1597                    || pattern.matches(
1598                        path.file_name()
1599                            .unwrap_or_default()
1600                            .to_string_lossy()
1601                            .as_ref(),
1602                    )
1603                {
1604                    // Create destination directory
1605                    let dest_path = save_dir.join(rel_path);
1606                    if let Some(parent) = dest_path.parent() {
1607                        fs::create_dir_all(parent)?;
1608                    }
1609
1610                    // Copy the file
1611                    if let Err(e) = fs::copy(&path, &dest_path) {
1612                        warn!(src = %path.display(),
1613                            dest = %dest_path.display(),
1614                            error = %e,
1615                            "Failed to copy file"
1616                        );
1617                    } else {
1618                        debug!(src = %path.display(),
1619                            dest = %dest_path.display(),
1620                            "Saved wrkdir file"
1621                        );
1622                        *saved_count += 1;
1623                    }
1624                    break; // Don't copy same file multiple times
1625                }
1626            }
1627        }
1628    }
1629
1630    Ok(())
1631}
1632
1633/**
1634 * Commands sent between the manager and clients.
1635 */
1636#[derive(Debug)]
1637enum ChannelCommand {
1638    /**
1639     * Client (with specified identifier) indicating they are ready for work.
1640     */
1641    ClientReady(usize),
1642    /**
1643     * Manager has no work available at the moment; try again later.
1644     */
1645    ComeBackLater,
1646    /**
1647     * Manager directing a client to build a specific package.
1648     */
1649    JobData(Box<PackageBuild>),
1650    /**
1651     * Client returning a successful package build with duration.
1652     */
1653    JobSuccess(PkgName, Duration),
1654    /**
1655     * Client returning a failed package build with duration.
1656     */
1657    JobFailed(PkgName, Duration),
1658    /**
1659     * Client returning a skipped package (up-to-date).
1660     */
1661    JobSkipped(PkgName),
1662    /**
1663     * Client returning an error during the package build.
1664     */
1665    JobError((PkgName, Duration, anyhow::Error)),
1666    /**
1667     * Manager directing a client to quit.
1668     */
1669    Quit,
1670    /**
1671     * Shutdown signal - workers should stop immediately.
1672     */
1673    Shutdown,
1674    /**
1675     * Client reporting a stage update for a build.
1676     */
1677    StageUpdate(usize, Option<String>),
1678    /**
1679     * Client reporting output lines from a build.
1680     */
1681    OutputLines(usize, Vec<String>),
1682}
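
// A condensed view of how these commands flow (this mirrors the worker loop
// in Build::start() below; `id`, `manager_tx` and `client_rx` are the
// per-worker channel endpoints):
//
//     manager_tx.send(ChannelCommand::ClientReady(id))?;
//     match client_rx.recv()? {
//         ChannelCommand::JobData(pkg) => {
//             // build, then report JobSuccess/JobFailed/JobSkipped/JobError
//         }
//         ChannelCommand::ComeBackLater => { /* sleep briefly and retry */ }
//         ChannelCommand::Quit | ChannelCommand::Shutdown => { /* exit */ }
//         _ => {}
//     }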
1683
1684/**
1685 * Return the current build job status.
1686 */
1687#[derive(Debug)]
1688enum BuildStatus {
1689    /**
1690     * The next package ordered by priority is available for building.
1691     */
1692    Available(PkgName),
1693    /**
1694     * No packages are currently available for building, i.e. all remaining
1695     * packages have at least one dependency that is still unavailable.
1696     */
1697    NoneAvailable,
1698    /**
1699     * All package builds have been completed.
1700     */
1701    Done,
1702}
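
// The manager maps each status onto a reply to the requesting client (see the
// ClientReady handler in Build::start()):
//
//     match jobs.get_next_build() {
//         BuildStatus::Available(pkg) => { /* send JobData for pkg */ }
//         BuildStatus::NoneAvailable => { /* send ComeBackLater */ }
//         BuildStatus::Done => { /* send Quit and drop the client */ }
//     }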
1703
1704#[derive(Clone, Debug)]
1705struct BuildJobs {
1706    scanpkgs: IndexMap<PkgName, ResolvedIndex>,
1707    incoming: HashMap<PkgName, HashSet<PkgName>>,
1708    /// Reverse dependency map: package -> packages that depend on it.
1709    /// Precomputed for O(1) lookup in mark_failure instead of O(n) scan.
1710    reverse_deps: HashMap<PkgName, HashSet<PkgName>>,
1711    /// Effective weight: package's PBULK_WEIGHT + sum of weights of all
1712    /// transitive dependents. Precomputed for efficient build ordering.
1713    effective_weights: HashMap<PkgName, usize>,
1714    running: HashSet<PkgName>,
1715    done: HashSet<PkgName>,
1716    failed: HashSet<PkgName>,
1717    results: Vec<BuildResult>,
1718    logdir: PathBuf,
1719    /// Number of packages loaded from cache.
1720    #[allow(dead_code)]
1721    cached_count: usize,
1722}
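
// Shape of the two dependency maps for a hypothetical queue in which bar-1.0
// and baz-1.0 both depend on foo-1.0 (package names are illustrative only):
//
//     incoming:     { foo-1.0: {}, bar-1.0: {foo-1.0}, baz-1.0: {foo-1.0} }
//     reverse_deps: { foo-1.0: {bar-1.0, baz-1.0} }
//
// A package becomes eligible for building once its incoming set is empty.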
1723
1724impl BuildJobs {
1725    /**
1726     * Mark a package as successful and remove it from pending dependencies.
1727     */
1728    fn mark_success(&mut self, pkgname: &PkgName, duration: Duration) {
1729        self.mark_done(pkgname, BuildOutcome::Success, duration);
1730    }
1731
1732    fn mark_up_to_date(&mut self, pkgname: &PkgName) {
1733        self.mark_done(pkgname, BuildOutcome::UpToDate, Duration::ZERO);
1734    }
1735
1736    /**
1737     * Mark a package as done and remove it from pending dependencies.
1738     */
1739    fn mark_done(
1740        &mut self,
1741        pkgname: &PkgName,
1742        outcome: BuildOutcome,
1743        duration: Duration,
1744    ) {
1745        /*
1746         * Remove the package from the dependency set of every package
1747         * that still lists it.  Once a package has no outstanding
1748         * dependencies remaining it is ready for building.
1749         */
1750        for dep in self.incoming.values_mut() {
1751            if dep.contains(pkgname) {
1752                dep.remove(pkgname);
1753            }
1754        }
1755        /*
1756         * The package was already removed from "incoming" when it started
1757         * building, so we only need to add it to "done".
1758         */
1759        self.done.insert(pkgname.clone());
1760
1761        // Record the result
1762        let scanpkg = self.scanpkgs.get(pkgname);
1763        let log_dir = Some(self.logdir.join(pkgname.pkgname()));
1764        self.results.push(BuildResult {
1765            pkgname: pkgname.clone(),
1766            pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
1767            outcome,
1768            duration,
1769            log_dir,
1770        });
1771    }
1772
1773    /**
1774     * Recursively mark a package and its dependents as failed.
1775     */
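    // Worked example (hypothetical names): if foo-1.0 fails after ten minutes
    // and bar-1.0 and baz-1.0 depend on it, directly or transitively, the
    // recorded results are roughly:
    //
    //     foo-1.0  BuildOutcome::Failed("Build failed"), duration = 10m
    //     bar-1.0  BuildOutcome::IndirectFailed("foo-1.0"), duration = 0
    //     baz-1.0  BuildOutcome::IndirectFailed("foo-1.0"), duration = 0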
1776    fn mark_failure(&mut self, pkgname: &PkgName, duration: Duration) {
1777        debug!(pkgname = %pkgname.pkgname(), "mark_failure called");
1778        let start = std::time::Instant::now();
1779        let mut broken: HashSet<PkgName> = HashSet::new();
1780        let mut to_check: Vec<PkgName> = vec![];
1781        to_check.push(pkgname.clone());
1782        /*
1783         * Starting from the failed package, repeatedly pop a package from
1784         * the work list, queue its dependents, and add it to broken.
1785         * Uses precomputed reverse_deps for O(1) lookup instead of O(n) scan.
1786         */
1787        loop {
1788            /* No packages left to check, we're done. */
1789            let Some(badpkg) = to_check.pop() else {
1790                break;
1791            };
1792            /* Already checked this package. */
1793            if broken.contains(&badpkg) {
1794                continue;
1795            }
1796            /* Add all packages that depend on this one. */
1797            if let Some(dependents) = self.reverse_deps.get(&badpkg) {
1798                for pkg in dependents {
1799                    to_check.push(pkg.clone());
1800                }
1801            }
1802            broken.insert(badpkg);
1803        }
1804        debug!(pkgname = %pkgname.pkgname(), broken_count = broken.len(), elapsed_ms = start.elapsed().as_millis(), "mark_failure found broken packages");
1805        /*
1806         * We now have a full HashSet of affected packages.  Remove them from
1807         * incoming and move to failed.  The original failed package has
1808         * already been removed from incoming; .remove() is a no-op for
1809         * missing keys, so that is fine.
1810         */
1811        let is_original = |p: &PkgName| p == pkgname;
1812        for pkg in broken {
1813            self.incoming.remove(&pkg);
1814            self.failed.insert(pkg.clone());
1815
1816            // Record the result
1817            let scanpkg = self.scanpkgs.get(&pkg);
1818            let log_dir = Some(self.logdir.join(pkg.pkgname()));
1819            let (outcome, dur) = if is_original(&pkg) {
1820                (BuildOutcome::Failed("Build failed".to_string()), duration)
1821            } else {
1822                (
1823                    BuildOutcome::IndirectFailed(pkgname.pkgname().to_string()),
1824                    Duration::ZERO,
1825                )
1826            };
1827            self.results.push(BuildResult {
1828                pkgname: pkg,
1829                pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
1830                outcome,
1831                duration: dur,
1832                log_dir,
1833            });
1834        }
1835        debug!(pkgname = %pkgname.pkgname(), total_results = self.results.len(), elapsed_ms = start.elapsed().as_millis(), "mark_failure completed");
1836    }
1837
1838    /**
1839     * Recursively mark a package as pre-failed and its dependents as
1840     * indirect-pre-failed.
1841     */
1842    #[allow(dead_code)]
1843    fn mark_prefailed(&mut self, pkgname: &PkgName, reason: String) {
1844        let mut broken: HashSet<PkgName> = HashSet::new();
1845        let mut to_check: Vec<PkgName> = vec![];
1846        to_check.push(pkgname.clone());
1847
1848        loop {
1849            let Some(badpkg) = to_check.pop() else {
1850                break;
1851            };
1852            if broken.contains(&badpkg) {
1853                continue;
1854            }
1855            for (pkg, deps) in &self.incoming {
1856                if deps.contains(&badpkg) {
1857                    to_check.push(pkg.clone());
1858                }
1859            }
1860            broken.insert(badpkg);
1861        }
1862
1863        let is_original = |p: &PkgName| p == pkgname;
1864        for pkg in broken {
1865            self.incoming.remove(&pkg);
1866            self.failed.insert(pkg.clone());
1867
1868            let scanpkg = self.scanpkgs.get(&pkg);
1869            let log_dir = Some(self.logdir.join(pkg.pkgname()));
1870            let outcome = if is_original(&pkg) {
1871                BuildOutcome::PreFailed(reason.clone())
1872            } else {
1873                BuildOutcome::IndirectPreFailed(pkgname.pkgname().to_string())
1874            };
1875            self.results.push(BuildResult {
1876                pkgname: pkg,
1877                pkgpath: scanpkg.and_then(|s| s.pkg_location.clone()),
1878                outcome,
1879                duration: Duration::ZERO,
1880                log_dir,
1881            });
1882        }
1883    }
1884
1885    /**
1886     * Get next package status.
1887     */
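    // Selection example (hypothetical weights): given
    //
    //     incoming = { a-1.0: {}, b-1.0: {}, c-1.0: {a-1.0} }
    //     effective_weights = { a-1.0: 300, b-1.0: 100 }
    //
    // only a-1.0 and b-1.0 are eligible (empty dependency sets), and a-1.0 is
    // returned first because it has the larger effective weight.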
1888    fn get_next_build(&self) -> BuildStatus {
1889        /*
1890         * If incoming is empty then we're done.
1891         */
1892        if self.incoming.is_empty() {
1893            return BuildStatus::Done;
1894        }
1895
1896        /*
1897         * Get all packages in incoming that are cleared for building, ordered
1898         * by effective weight (own weight + transitive dependents' weights).
1899         */
1900        let mut pkgs: Vec<(PkgName, usize)> = self
1901            .incoming
1902            .iter()
1903            .filter(|(_, v)| v.is_empty())
1904            .map(|(k, _)| {
1905                (k.clone(), *self.effective_weights.get(k).unwrap_or(&100))
1906            })
1907            .collect();
1908
1909        /*
1910         * If no packages are returned then we're still waiting for
1911         * dependencies to finish.  Clients should keep retrying until this
1912         * changes.
1913         */
1914        if pkgs.is_empty() {
1915            return BuildStatus::NoneAvailable;
1916        }
1917
1918        /*
1919         * Order packages by build weight and return the highest.
1920         */
1921        pkgs.sort_by_key(|&(_, weight)| std::cmp::Reverse(weight));
1922        BuildStatus::Available(pkgs[0].0.clone())
1923    }
1924}
1925
1926impl Build {
1927    pub fn new(
1928        config: &Config,
1929        scanpkgs: IndexMap<PkgName, ResolvedIndex>,
1930        options: BuildOptions,
1931    ) -> Build {
1932        let sandbox = Sandbox::new(config);
1933        info!(
1934            package_count = scanpkgs.len(),
1935            sandbox_enabled = sandbox.enabled(),
1936            build_threads = config.build_threads(),
1937            ?options,
1938            "Creating new Build instance"
1939        );
1940        for (pkgname, index) in &scanpkgs {
1941            debug!(pkgname = %pkgname.pkgname(),
1942                pkgpath = ?index.pkg_location,
1943                depends_count = index.depends.len(),
1944                depends = ?index.depends.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
1945                "Package in build queue"
1946            );
1947        }
1948        Build {
1949            config: config.clone(),
1950            sandbox,
1951            scanpkgs,
1952            cached: IndexMap::new(),
1953            options,
1954        }
1955    }
1956
1957    /// Load cached build results from database.
1958    ///
1959    /// Returns the number of packages loaded from cache. Only loads results
1960    /// for packages that are in our build queue.
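    ///
    /// # Example
    ///
    /// A sketch of resuming a previous run (error handling elided; `build` is
    /// a `Build` and `db` an open database handle):
    ///
    /// ```ignore
    /// let cached = build.load_cached_from_db(&db)?;
    /// if cached > 0 {
    ///     println!("Resuming with {cached} cached build results");
    /// }
    /// ```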
1961    pub fn load_cached_from_db(
1962        &mut self,
1963        db: &crate::db::Database,
1964    ) -> anyhow::Result<usize> {
1965        let mut count = 0;
1966        for pkgname in self.scanpkgs.keys() {
1967            if let Some(pkg) = db.get_package_by_name(pkgname.pkgname())? {
1968                if let Some(result) = db.get_build_result(pkg.id)? {
1969                    self.cached.insert(pkgname.clone(), result);
1970                    count += 1;
1971                }
1972            }
1973        }
1974        if count > 0 {
1975            info!(
1976                cached_count = count,
1977                "Loaded cached build results from database"
1978            );
1979        }
1980        Ok(count)
1981    }
1982
1983    /// Access completed build results.
1984    pub fn cached(&self) -> &IndexMap<PkgName, BuildResult> {
1985        &self.cached
1986    }
1987
1988    pub fn start(
1989        &mut self,
1990        ctx: &RunContext,
1991        db: &crate::db::Database,
1992    ) -> anyhow::Result<BuildSummary> {
1993        let started = Instant::now();
1994
1995        info!(package_count = self.scanpkgs.len(), "Build::start() called");
1996
1997        let shutdown_flag = Arc::clone(&ctx.shutdown);
1998        let stats = ctx.stats.clone();
1999
2000        /*
2001         * Populate BuildJobs.
2002         */
2003        debug!("Populating BuildJobs from scanpkgs");
2004        let mut incoming: HashMap<PkgName, HashSet<PkgName>> = HashMap::new();
2005        let mut reverse_deps: HashMap<PkgName, HashSet<PkgName>> =
2006            HashMap::new();
2007        for (pkgname, index) in &self.scanpkgs {
2008            let mut deps: HashSet<PkgName> = HashSet::new();
2009            for dep in &index.depends {
2010                // Only track dependencies that are in our build queue.
2011                // Dependencies outside scanpkgs are assumed to already be
2012                // installed (from a previous build) or will cause the build
2013                // to fail at runtime.
2014                if !self.scanpkgs.contains_key(dep) {
2015                    continue;
2016                }
2017                deps.insert(dep.clone());
2018                // Build reverse dependency map: dep -> packages that depend on it
2019                reverse_deps
2020                    .entry(dep.clone())
2021                    .or_default()
2022                    .insert(pkgname.clone());
2023            }
2024            trace!(pkgname = %pkgname.pkgname(),
2025                deps_count = deps.len(),
2026                deps = ?deps.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
2027                "Adding package to incoming build queue"
2028            );
2029            incoming.insert(pkgname.clone(), deps);
2030        }
2031
2032        /*
2033         * Process cached build results.
2034         */
2035        let mut done: HashSet<PkgName> = HashSet::new();
2036        let mut failed: HashSet<PkgName> = HashSet::new();
2037        let results: Vec<BuildResult> = Vec::new();
2038        let mut cached_count = 0usize;
2039
2040        for (pkgname, result) in &self.cached {
2041            match result.outcome {
2042                BuildOutcome::Success | BuildOutcome::UpToDate => {
2043                    // Completed package - remove from incoming, add to done
2044                    incoming.remove(pkgname);
2045                    done.insert(pkgname.clone());
2046                    // Remove from deps of other packages
2047                    for deps in incoming.values_mut() {
2048                        deps.remove(pkgname);
2049                    }
2050                    // Don't add to results - already in database
2051                    cached_count += 1;
2052                }
2053                BuildOutcome::Failed(_)
2054                | BuildOutcome::PreFailed(_)
2055                | BuildOutcome::IndirectFailed(_)
2056                | BuildOutcome::IndirectPreFailed(_) => {
2057                    // Failed package - remove from incoming, add to failed
2058                    incoming.remove(pkgname);
2059                    failed.insert(pkgname.clone());
2060                    // Don't add to results - already in database
2061                    cached_count += 1;
2062                }
2063            }
2064        }
2065
2066        /*
2067         * Propagate cached failures: any package in incoming that depends on
2068         * a failed package must also be marked as failed.
2069         */
2070        loop {
2071            let mut newly_failed: Vec<PkgName> = Vec::new();
2072            for (pkgname, deps) in &incoming {
2073                for dep in deps {
2074                    if failed.contains(dep) {
2075                        newly_failed.push(pkgname.clone());
2076                        break;
2077                    }
2078                }
2079            }
2080            if newly_failed.is_empty() {
2081                break;
2082            }
2083            for pkgname in newly_failed {
2084                incoming.remove(&pkgname);
2085                failed.insert(pkgname);
2086            }
2087        }
2088
2089        if cached_count > 0 {
2090            println!("Loaded {} cached build results", cached_count);
2091        }
2092
2093        info!(
2094            incoming_count = incoming.len(),
2095            scanpkgs_count = self.scanpkgs.len(),
2096            cached_count = cached_count,
2097            "BuildJobs populated"
2098        );
2099
2100        if incoming.is_empty() {
2101            return Ok(BuildSummary {
2102                duration: started.elapsed(),
2103                results,
2104                scan_failed: Vec::new(),
2105            });
2106        }
2107
2108        /*
2109         * Compute effective weights for build ordering.  The effective weight
2110         * is the package's own PBULK_WEIGHT plus the sum of weights of all
2111         * packages that transitively depend on it.  This prioritises building
2112         * packages that unblock the most downstream work.
2113         */
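        // Worked example (hypothetical names and weights): if libfoo-1.0 has
        // PBULK_WEIGHT 100 and is depended on by app-a (weight 200) and
        // app-b (weight 100), neither of which has dependents of its own, then
        //
        //     effective_weight(libfoo-1.0) = 100 + 200 + 100 = 400
        //
        // so it is scheduled ahead of packages that unblock less work.
        // Packages without a parseable PBULK_WEIGHT default to a weight of 100.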
2114        let get_weight = |pkg: &PkgName| -> usize {
2115            self.scanpkgs
2116                .get(pkg)
2117                .and_then(|idx| idx.pbulk_weight.as_ref())
2118                .and_then(|w| w.parse().ok())
2119                .unwrap_or(100)
2120        };
2121
2122        let mut effective_weights: HashMap<PkgName, usize> = HashMap::new();
2123        let mut pending: HashMap<&PkgName, usize> = incoming
2124            .keys()
2125            .map(|p| (p, reverse_deps.get(p).map_or(0, |s| s.len())))
2126            .collect();
2127        let mut queue: VecDeque<&PkgName> =
2128            pending.iter().filter(|(_, c)| **c == 0).map(|(&p, _)| p).collect();
2129        while let Some(pkg) = queue.pop_front() {
2130            let mut total = get_weight(pkg);
2131            if let Some(dependents) = reverse_deps.get(pkg) {
2132                for dep in dependents {
2133                    total += effective_weights.get(dep).unwrap_or(&0);
2134                }
2135            }
2136            effective_weights.insert(pkg.clone(), total);
2137            for dep in incoming.get(pkg).iter().flat_map(|s| s.iter()) {
2138                if let Some(c) = pending.get_mut(dep) {
2139                    *c -= 1;
2140                    if *c == 0 {
2141                        queue.push_back(dep);
2142                    }
2143                }
2144            }
2145        }
2146
2147        let running: HashSet<PkgName> = HashSet::new();
2148        let logdir = self.config.logdir().clone();
2149        let jobs = BuildJobs {
2150            scanpkgs: self.scanpkgs.clone(),
2151            incoming,
2152            reverse_deps,
2153            effective_weights,
2154            running,
2155            done,
2156            failed,
2157            results,
2158            logdir,
2159            cached_count,
2160        };
2161
2162        // Create sandboxes before starting progress display
2163        if self.sandbox.enabled() {
2164            println!("Creating sandboxes...");
2165            for i in 0..self.config.build_threads() {
2166                if let Err(e) = self.sandbox.create(i) {
2167                    // Rollback: destroy sandboxes including the failed one (may be partial)
2168                    for j in (0..=i).rev() {
2169                        if let Err(destroy_err) = self.sandbox.destroy(j) {
2170                            eprintln!(
2171                                "Warning: failed to destroy sandbox {}: {}",
2172                                j, destroy_err
2173                            );
2174                        }
2175                    }
2176                    return Err(e);
2177                }
2178            }
2179        }
2180
2181        println!("Building packages...");
2182
2183        // Set up multi-line progress display using ratatui inline viewport
2184        let progress = Arc::new(Mutex::new(
2185            MultiProgress::new(
2186                "Building",
2187                "Built",
2188                self.scanpkgs.len(),
2189                self.config.build_threads(),
2190            )
2191            .expect("Failed to initialize progress display"),
2192        ));
2193
2194        // Mark cached packages in progress display
2195        if cached_count > 0 {
2196            if let Ok(mut p) = progress.lock() {
2197                p.state_mut().cached = cached_count;
2198            }
2199        }
2200
2201        // Flag to stop the refresh thread
2202        let stop_refresh = Arc::new(AtomicBool::new(false));
2203
2204        // Spawn a thread to periodically refresh the display (for timer updates)
2205        let progress_refresh = Arc::clone(&progress);
2206        let stop_flag = Arc::clone(&stop_refresh);
2207        let shutdown_for_refresh = Arc::clone(&shutdown_flag);
2208        let refresh_thread = std::thread::spawn(move || {
2209            while !stop_flag.load(Ordering::Relaxed)
2210                && !shutdown_for_refresh.load(Ordering::SeqCst)
2211            {
2212                if let Ok(mut p) = progress_refresh.lock() {
2213                    // Check for keyboard events (like 'v' for view toggle)
2214                    let _ = p.poll_events();
2215                    let _ = p.render_throttled();
2216                }
2217                std::thread::sleep(Duration::from_millis(50));
2218            }
2219        });
2220
2221        /*
2222         * Configure a manager channel.  This is used for clients to indicate
2223         * to the manager that they are ready for work.
2224         */
2225        let (manager_tx, manager_rx) = mpsc::channel::<ChannelCommand>();
2226
2227        /*
2228         * Client threads.  Each client has its own channel to the manager,
2229         * with the client sending ready status on the manager channel, and
2230         * receiving instructions on its private channel.
2231         */
2232        let mut threads = vec![];
2233        let mut clients: HashMap<usize, Sender<ChannelCommand>> =
2234            HashMap::new();
2235        for i in 0..self.config.build_threads() {
2236            let (client_tx, client_rx) = mpsc::channel::<ChannelCommand>();
2237            clients.insert(i, client_tx);
2238            let manager_tx = manager_tx.clone();
2239            let thread = std::thread::spawn(move || {
2240                loop {
2241                    // Use send() which can fail if receiver is dropped (manager shutdown)
2242                    if manager_tx.send(ChannelCommand::ClientReady(i)).is_err()
2243                    {
2244                        break;
2245                    }
2246
2247                    let Ok(msg) = client_rx.recv() else {
2248                        break;
2249                    };
2250
2251                    match msg {
2252                        ChannelCommand::ComeBackLater => {
2253                            std::thread::sleep(Duration::from_millis(100));
2254                            continue;
2255                        }
2256                        ChannelCommand::JobData(pkg) => {
2257                            let pkgname = pkg.pkginfo.pkgname.clone();
2258                            trace!(pkgname = %pkgname.pkgname(), worker = i, "Worker starting build");
2259                            let build_start = Instant::now();
2260                            let result = pkg.build(&manager_tx);
2261                            let duration = build_start.elapsed();
2262                            trace!(pkgname = %pkgname.pkgname(), worker = i, elapsed_ms = duration.as_millis(), "Worker build() returned");
2263                            match result {
2264                                Ok(PackageBuildResult::Success) => {
2265                                    trace!(pkgname = %pkgname.pkgname(), "Worker sending JobSuccess");
2266                                    let _ = manager_tx.send(
2267                                        ChannelCommand::JobSuccess(
2268                                            pkgname, duration,
2269                                        ),
2270                                    );
2271                                }
2272                                Ok(PackageBuildResult::Skipped) => {
2273                                    trace!(pkgname = %pkgname.pkgname(), "Worker sending JobSkipped");
2274                                    let _ = manager_tx.send(
2275                                        ChannelCommand::JobSkipped(pkgname),
2276                                    );
2277                                }
2278                                Ok(PackageBuildResult::Failed) => {
2279                                    trace!(pkgname = %pkgname.pkgname(), "Worker sending JobFailed");
2280                                    let _ = manager_tx.send(
2281                                        ChannelCommand::JobFailed(
2282                                            pkgname, duration,
2283                                        ),
2284                                    );
2285                                }
2286                                Err(e) => {
2287                                    trace!(pkgname = %pkgname.pkgname(), "Worker sending JobError");
2288                                    let _ = manager_tx.send(
2289                                        ChannelCommand::JobError((
2290                                            pkgname, duration, e,
2291                                        )),
2292                                    );
2293                                }
2294                            }
2295                            continue;
2296                        }
2297                        ChannelCommand::Quit | ChannelCommand::Shutdown => {
2298                            break;
2299                        }
2300                        _ => unreachable!("unexpected command on client channel"),
2301                    }
2302                }
2303            });
2304            threads.push(thread);
2305        }
2306
2307        /*
2308         * Manager thread.  Read incoming commands from clients and reply
2309         * accordingly.  Returns the build results via a channel.
2310         */
2311        let config = self.config.clone();
2312        let sandbox = self.sandbox.clone();
2313        let options = self.options.clone();
2314        let progress_clone = Arc::clone(&progress);
2315        let shutdown_for_manager = Arc::clone(&shutdown_flag);
2316        let stats_for_manager = stats.clone();
2317        let (results_tx, results_rx) = mpsc::channel::<Vec<BuildResult>>();
2318        let (interrupted_tx, interrupted_rx) = mpsc::channel::<bool>();
2319        // Channel for completed results to save immediately
2320        let (completed_tx, completed_rx) = mpsc::channel::<BuildResult>();
2321        let manager = std::thread::spawn(move || {
2322            let mut clients = clients.clone();
2323            let config = config.clone();
2324            let sandbox = sandbox.clone();
2325            let mut jobs = jobs.clone();
2326            let mut was_interrupted = false;
2327            let stats = stats_for_manager;
2328
2329            // Track which thread is building which package
2330            let mut thread_packages: HashMap<usize, PkgName> = HashMap::new();
2331
2332            loop {
2333                // Check shutdown flag periodically
2334                if shutdown_for_manager.load(Ordering::SeqCst) {
2335                    // Suppress all further output
2336                    if let Ok(mut p) = progress_clone.lock() {
2337                        p.state_mut().suppress();
2338                    }
2339                    // Send shutdown to all remaining clients
2340                    for (_, client) in clients.drain() {
2341                        let _ = client.send(ChannelCommand::Shutdown);
2342                    }
2343                    was_interrupted = true;
2344                    break;
2345                }
2346
2347                // Use recv_timeout to check shutdown flag periodically
2348                let command =
2349                    match manager_rx.recv_timeout(Duration::from_millis(50)) {
2350                        Ok(cmd) => cmd,
2351                        Err(mpsc::RecvTimeoutError::Timeout) => continue,
2352                        Err(mpsc::RecvTimeoutError::Disconnected) => break,
2353                    };
2354
2355                match command {
2356                    ChannelCommand::ClientReady(c) => {
2357                        let client = clients.get(&c).unwrap();
2358                        match jobs.get_next_build() {
2359                            BuildStatus::Available(pkg) => {
2360                                let pkginfo = jobs.scanpkgs.get(&pkg).unwrap();
2361                                jobs.incoming.remove(&pkg);
2362                                jobs.running.insert(pkg.clone());
2363
2364                                // Update thread progress
2365                                thread_packages.insert(c, pkg.clone());
2366                                if let Ok(mut p) = progress_clone.lock() {
2367                                    p.clear_output_buffer(c);
2368                                    p.state_mut()
2369                                        .set_worker_active(c, pkg.pkgname());
2370                                    let _ = p.render_throttled();
2371                                }
2372
2373                                let _ = client.send(ChannelCommand::JobData(
2374                                    Box::new(PackageBuild {
2375                                        id: c,
2376                                        config: config.clone(),
2377                                        pkginfo: pkginfo.clone(),
2378                                        sandbox: sandbox.clone(),
2379                                        options: options.clone(),
2380                                    }),
2381                                ));
2382                            }
2383                            BuildStatus::NoneAvailable => {
2384                                if let Ok(mut p) = progress_clone.lock() {
2385                                    p.clear_output_buffer(c);
2386                                    p.state_mut().set_worker_idle(c);
2387                                    let _ = p.render_throttled();
2388                                }
2389                                let _ =
2390                                    client.send(ChannelCommand::ComeBackLater);
2391                            }
2392                            BuildStatus::Done => {
2393                                if let Ok(mut p) = progress_clone.lock() {
2394                                    p.clear_output_buffer(c);
2395                                    p.state_mut().set_worker_idle(c);
2396                                    let _ = p.render_throttled();
2397                                }
2398                                let _ = client.send(ChannelCommand::Quit);
2399                                clients.remove(&c);
2400                                if clients.is_empty() {
2401                                    break;
2402                                }
2403                            }
2404                        };
2405                    }
2406                    ChannelCommand::JobSuccess(pkgname, duration) => {
2407                        // Record stats even if shutting down
2408                        if let Some(ref s) = stats {
2409                            let pkgpath = jobs
2410                                .scanpkgs
2411                                .get(&pkgname)
2412                                .and_then(|idx| idx.pkg_location.as_ref())
2413                                .map(|p| {
2414                                    p.as_path().to_string_lossy().to_string()
2415                                });
2416                            s.build(
2417                                pkgname.pkgname(),
2418                                pkgpath.as_deref(),
2419                                duration,
2420                                "success",
2421                            );
2422                        }
2423
2424                        jobs.mark_success(&pkgname, duration);
2425                        jobs.running.remove(&pkgname);
2426
2427                        // Send result for immediate saving
2428                        if let Some(result) = jobs.results.last() {
2429                            let _ = completed_tx.send(result.clone());
2430                        }
2431
2432                        // Don't update UI if we're shutting down
2433                        if shutdown_for_manager.load(Ordering::SeqCst) {
2434                            continue;
2435                        }
2436
2437                        // Find which thread completed and mark idle
2438                        if let Ok(mut p) = progress_clone.lock() {
2439                            let _ = p.print_status(&format!(
2440                                "       Built {} ({})",
2441                                pkgname.pkgname(),
2442                                format_duration(duration)
2443                            ));
2444                            p.state_mut().increment_completed();
2445                            for (tid, pkg) in &thread_packages {
2446                                if pkg == &pkgname {
2447                                    p.clear_output_buffer(*tid);
2448                                    p.state_mut().set_worker_idle(*tid);
2449                                    break;
2450                                }
2451                            }
2452                            let _ = p.render_throttled();
2453                        }
2454                    }
2455                    ChannelCommand::JobSkipped(pkgname) => {
2456                        // Record stats even if shutting down
2457                        if let Some(ref s) = stats {
2458                            let pkgpath = jobs
2459                                .scanpkgs
2460                                .get(&pkgname)
2461                                .and_then(|idx| idx.pkg_location.as_ref())
2462                                .map(|p| {
2463                                    p.as_path().to_string_lossy().to_string()
2464                                });
2465                            s.build(
2466                                pkgname.pkgname(),
2467                                pkgpath.as_deref(),
2468                                Duration::ZERO,
2469                                "skipped",
2470                            );
2471                        }
2472
2473                        jobs.mark_up_to_date(&pkgname);
2474                        jobs.running.remove(&pkgname);
2475
2476                        // Send result for immediate saving
2477                        if let Some(result) = jobs.results.last() {
2478                            let _ = completed_tx.send(result.clone());
2479                        }
2480
2481                        // Don't update UI if we're shutting down
2482                        if shutdown_for_manager.load(Ordering::SeqCst) {
2483                            continue;
2484                        }
2485
2486                        // Find which thread completed and mark idle
2487                        if let Ok(mut p) = progress_clone.lock() {
2488                            let _ = p.print_status(&format!(
2489                                "     Skipped {} (up-to-date)",
2490                                pkgname.pkgname()
2491                            ));
2492                            p.state_mut().increment_skipped();
2493                            for (tid, pkg) in &thread_packages {
2494                                if pkg == &pkgname {
2495                                    p.clear_output_buffer(*tid);
2496                                    p.state_mut().set_worker_idle(*tid);
2497                                    break;
2498                                }
2499                            }
2500                            let _ = p.render_throttled();
2501                        }
2502                    }
2503                    ChannelCommand::JobFailed(pkgname, duration) => {
2504                        // Record stats even if shutting down
2505                        if let Some(ref s) = stats {
2506                            let pkgpath = jobs
2507                                .scanpkgs
2508                                .get(&pkgname)
2509                                .and_then(|idx| idx.pkg_location.as_ref())
2510                                .map(|p| {
2511                                    p.as_path().to_string_lossy().to_string()
2512                                });
2513                            s.build(
2514                                pkgname.pkgname(),
2515                                pkgpath.as_deref(),
2516                                duration,
2517                                "failed",
2518                            );
2519                        }
2520
2521                        let results_before = jobs.results.len();
2522                        jobs.mark_failure(&pkgname, duration);
2523                        jobs.running.remove(&pkgname);
2524
2525                        // Send all new results for immediate saving
2526                        for result in jobs.results.iter().skip(results_before) {
2527                            let _ = completed_tx.send(result.clone());
2528                        }
2529
2530                        // Don't update UI if we're shutting down
2531                        if shutdown_for_manager.load(Ordering::SeqCst) {
2532                            continue;
2533                        }
2534
2535                        // Find which thread failed and mark idle
2536                        if let Ok(mut p) = progress_clone.lock() {
2537                            let _ = p.print_status(&format!(
2538                                "      Failed {} ({})",
2539                                pkgname.pkgname(),
2540                                format_duration(duration)
2541                            ));
2542                            p.state_mut().increment_failed();
2543                            for (tid, pkg) in &thread_packages {
2544                                if pkg == &pkgname {
2545                                    p.clear_output_buffer(*tid);
2546                                    p.state_mut().set_worker_idle(*tid);
2547                                    break;
2548                                }
2549                            }
2550                            let _ = p.render_throttled();
2551                        }
2552                    }
2553                    ChannelCommand::JobError((pkgname, duration, e)) => {
2554                        // Record stats even if shutting down
2555                        if let Some(ref s) = stats {
2556                            let pkgpath = jobs
2557                                .scanpkgs
2558                                .get(&pkgname)
2559                                .and_then(|idx| idx.pkg_location.as_ref())
2560                                .map(|p| {
2561                                    p.as_path().to_string_lossy().to_string()
2562                                });
2563                            s.build(
2564                                pkgname.pkgname(),
2565                                pkgpath.as_deref(),
2566                                duration,
2567                                "error",
2568                            );
2569                        }
2570
2571                        let results_before = jobs.results.len();
2572                        jobs.mark_failure(&pkgname, duration);
2573                        jobs.running.remove(&pkgname);
2574
2575                        // Send all new results for immediate saving
2576                        for result in jobs.results.iter().skip(results_before) {
2577                            let _ = completed_tx.send(result.clone());
2578                        }
2579
2580                        // Don't update UI if we're shutting down
2581                        if shutdown_for_manager.load(Ordering::SeqCst) {
2582                            tracing::error!(error = %e, pkgname = %pkgname.pkgname(), "Build error");
2583                            continue;
2584                        }
2585
2586                        // Find which thread errored and mark idle
2587                        if let Ok(mut p) = progress_clone.lock() {
2588                            let _ = p.print_status(&format!(
2589                                "      Failed {} ({})",
2590                                pkgname.pkgname(),
2591                                format_duration(duration)
2592                            ));
2593                            p.state_mut().increment_failed();
2594                            for (tid, pkg) in &thread_packages {
2595                                if pkg == &pkgname {
2596                                    p.clear_output_buffer(*tid);
2597                                    p.state_mut().set_worker_idle(*tid);
2598                                    break;
2599                                }
2600                            }
2601                            let _ = p.render_throttled();
2602                        }
2603                        tracing::error!(error = %e, pkgname = %pkgname.pkgname(), "Build error");
2604                    }
2605                    ChannelCommand::StageUpdate(tid, stage) => {
2606                        if let Ok(mut p) = progress_clone.lock() {
2607                            p.state_mut()
2608                                .set_worker_stage(tid, stage.as_deref());
2609                            let _ = p.render_throttled();
2610                        }
2611                    }
2612                    ChannelCommand::OutputLines(tid, lines) => {
2613                        if let Ok(mut p) = progress_clone.lock() {
2614                            if let Some(buf) = p.output_buffer_mut(tid) {
2615                                for line in lines {
2616                                    buf.push(line);
2617                                }
2618                            }
2619                        }
2620                    }
2621                    _ => {}
2622                }
2623            }
2624
2625            // Send results and interrupted status back
2626            debug!(
2627                result_count = jobs.results.len(),
2628                "Manager sending results back"
2629            );
2630            let _ = results_tx.send(jobs.results);
2631            let _ = interrupted_tx.send(was_interrupted);
2632        });
2633
2634        threads.push(manager);
2635        debug!("Waiting for worker threads to complete");
2636        let join_start = Instant::now();
2637        for thread in threads {
2638            thread.join().expect("thread panicked");
2639        }
2640        debug!(
2641            elapsed_ms = join_start.elapsed().as_millis(),
2642            "Worker threads completed"
2643        );
2644
2645        // Save all completed results to database immediately
2646        let mut saved_count = 0;
2647        while let Ok(result) = completed_rx.try_recv() {
2648            if let Err(e) = db.store_build_by_name(&result) {
2649                warn!(
2650                    pkgname = %result.pkgname.pkgname(),
2651                    error = %e,
2652                    "Failed to save build result"
2653                );
2654            } else {
2655                saved_count += 1;
2656            }
2657        }
2658        if saved_count > 0 {
2659            debug!(saved_count, "Saved build results to database");
2660        }
2661
2662        // Stop the refresh thread
2663        stop_refresh.store(true, Ordering::Relaxed);
2664        let _ = refresh_thread.join();
2665
2666        // Check if we were interrupted
2667        let was_interrupted = interrupted_rx.recv().unwrap_or(false);
2668
2669        // Print appropriate summary
2670        if let Ok(mut p) = progress.lock() {
2671            if was_interrupted {
2672                let _ = p.finish_interrupted();
2673            } else {
2674                let _ = p.finish();
2675            }
2676        }
2677
2678        // Collect results from manager
2679        debug!("Collecting results from manager");
2680        let results = results_rx.recv().unwrap_or_default();
2681        debug!(result_count = results.len(), "Collected results from manager");
2682        let summary = BuildSummary {
2683            duration: started.elapsed(),
2684            results,
2685            scan_failed: Vec::new(),
2686        };
2687
2688        if self.sandbox.enabled() {
2689            debug!("Destroying sandboxes");
2690            let destroy_start = Instant::now();
2691            self.sandbox.destroy_all(self.config.build_threads())?;
2692            debug!(
2693                elapsed_ms = destroy_start.elapsed().as_millis(),
2694                "Sandboxes destroyed"
2695            );
2696        }
2697
2698        Ok(summary)
2699    }
2700}