bob/build.rs

1/*
2 * Copyright (c) 2026 Jonathan Perkin <jonathan@perkin.org.uk>
3 *
4 * Permission to use, copy, modify, and distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17//! Parallel package builds.
18//!
19//! This module provides the [`Build`] struct for building packages in parallel
20//! across multiple sandboxes. Packages are scheduled using a dependency graph
21//! to ensure correct build order.
22//!
23//! # Build Process
24//!
25//! 1. Create `build_threads` build sandboxes (one per build thread)
26//! 2. Execute pre-build script in each sandbox
27//! 3. Build packages in parallel, respecting dependencies
28//! 4. Execute post-build script after each package
29//! 5. Destroy sandboxes and generate report
30//!
31//! # Build Phases
32//!
33//! Each package goes through these phases in turn:
34//!
35//! - `pre-clean` - Clean any previous build artifacts
36//! - `depends` - Install required dependencies
37//! - `checksum` - Verify distfile checksums
38//! - `configure` - Configure the build
39//! - `build` - Compile the package
40//! - `install` - Install to staging area
41//! - `package` - Create binary package
42//! - `deinstall` - Test package removal (non-bootstrap only)
43//! - `clean` - Clean up build artifacts
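//!
//! These phases map roughly onto `bmake` targets and pkg_install commands
//! (see `PkgBuilder::build` below):
//!
//! - `pre-clean`, `clean` - `make clean`
//! - `depends` - `pkg_add` each dependency package
//! - `checksum` - `make checksum`
//! - `configure` - `make configure`
//! - `build` - `make all`
//! - `install` - `make stage-install`
//! - `package` - `make stage-package-create`
//! - `deinstall` - test `pkg_add` then `pkg_delete` of the freshly built package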
44
45use crate::config::PkgsrcEnv;
46use crate::sandbox::{
47    SHUTDOWN_POLL_INTERVAL, SandboxScope, wait_with_shutdown,
48};
49use crate::scan::{ResolvedPackage, SkipReason, SkippedCounts};
50use crate::tui::{MultiProgress, REFRESH_INTERVAL, format_duration};
51use crate::{Config, RunContext, Sandbox};
52use anyhow::{Context, bail};
53use crossterm::event;
54use glob::Pattern;
55use indexmap::IndexMap;
56use pkgsrc::{PkgName, PkgPath};
57use std::collections::{HashMap, HashSet, VecDeque};
58use std::fs::{self, File, OpenOptions};
59use std::path::{Path, PathBuf};
60use std::process::{Command, ExitStatus, Stdio};
61use std::sync::atomic::{AtomicBool, Ordering};
62use std::sync::{Arc, Mutex, mpsc, mpsc::Sender};
63use std::time::{Duration, Instant};
64use tracing::{debug, error, info, info_span, trace, warn};
65
66/// How often to batch and send build output lines to the UI channel.
67/// This sets the floor on log display latency: output cannot appear
68/// faster than this regardless of UI refresh rate. 100ms (10fps) is
69/// imperceptible for build logs while reducing channel overhead.
70const OUTPUT_BATCH_INTERVAL: Duration = Duration::from_millis(100);
71
72/// How long a worker thread sleeps when told no work is available.
73/// This prevents busy-spinning when all pending builds are blocked on
74/// dependencies. 100ms balances responsiveness with CPU efficiency.
75const WORKER_BACKOFF_INTERVAL: Duration = Duration::from_millis(100);
76
77/// Build stages in order of execution.
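/// The name returned by `as_str` doubles as the stage's log file name
/// (`<stage>.log`) in the package log directory, and is written to the
/// `.stage` marker file as each stage starts.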
78#[derive(Debug, Clone, Copy, PartialEq, Eq)]
79enum Stage {
80    PreClean,
81    Depends,
82    Checksum,
83    Configure,
84    Build,
85    Install,
86    Package,
87    Deinstall,
88    Clean,
89}
90
91impl Stage {
92    fn as_str(&self) -> &'static str {
93        match self {
94            Stage::PreClean => "pre-clean",
95            Stage::Depends => "depends",
96            Stage::Checksum => "checksum",
97            Stage::Configure => "configure",
98            Stage::Build => "build",
99            Stage::Install => "install",
100            Stage::Package => "package",
101            Stage::Deinstall => "deinstall",
102            Stage::Clean => "clean",
103        }
104    }
105}
106
107/// Result of a package build.
108#[derive(Debug)]
109enum PkgBuildResult {
110    Success,
111    Failed,
112    Skipped,
113}
114
115/// How to run a command.
116#[derive(Debug, Clone, Copy)]
117enum RunAs {
118    Root,
119    User,
120}
121
122/// Callback for status updates during build.
123trait BuildCallback: Send {
124    fn stage(&mut self, stage: &str);
125}
126
127/// Session-level build data shared across all package builds.
128#[derive(Debug)]
129struct BuildSession {
130    config: Config,
131    pkgsrc_env: PkgsrcEnv,
132    sandbox: Sandbox,
133    options: BuildOptions,
134    shutdown: Arc<AtomicBool>,
135}
136
137/// Package builder that executes build stages.
138struct PkgBuilder<'a> {
139    session: &'a BuildSession,
140    sandbox_id: usize,
141    pkginfo: &'a ResolvedPackage,
142    logdir: PathBuf,
143    build_user: Option<String>,
144    envs: Vec<(String, String)>,
145    output_tx: Option<Sender<ChannelCommand>>,
146}
147
148impl<'a> PkgBuilder<'a> {
149    fn new(
150        session: &'a BuildSession,
151        sandbox_id: usize,
152        pkginfo: &'a ResolvedPackage,
153        envs: Vec<(String, String)>,
154        output_tx: Option<Sender<ChannelCommand>>,
155    ) -> Self {
156        let logdir =
157            session.config.logdir().join(pkginfo.index.pkgname.pkgname());
158        let build_user = session.config.build_user().map(|s| s.to_string());
159        Self {
160            session,
161            sandbox_id,
162            pkginfo,
163            logdir,
164            build_user,
165            envs,
166            output_tx,
167        }
168    }
169
170    /// Run a command in the sandbox and capture its stdout.
171    fn run_cmd(&self, cmd: &Path, args: &[&str]) -> Option<String> {
172        let mut command = self.session.sandbox.command(self.sandbox_id, cmd);
173        command.args(args);
174        self.apply_envs(&mut command, &[]);
175        match command.output() {
176            Ok(output) if output.status.success() => {
177                Some(String::from_utf8_lossy(&output.stdout).into_owned())
178            }
179            Ok(output) => {
180                let stderr = String::from_utf8_lossy(&output.stderr);
181                debug!(
182                    cmd = %cmd.display(),
183                    exit_code = ?output.status.code(),
184                    stderr = %stderr.trim(),
185                    "command failed"
186                );
187                None
188            }
189            Err(e) => {
190                debug!(cmd = %cmd.display(), error = %e, "command execution error");
191                None
192            }
193        }
194    }
195
196    /// Check if the package is already up-to-date.
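    ///
    /// A package counts as up-to-date when the binary package exists, the
    /// source files recorded in its BUILD_INFO are unchanged (matching CVS Id
    /// or `pkg_admin digest`), its recorded dependencies match the resolved
    /// dependency list, and no dependency package is newer than it.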
197    fn check_up_to_date(&self) -> anyhow::Result<bool> {
198        let pkgname = self.pkginfo.index.pkgname.pkgname();
199        let pkgfile = self
200            .session
201            .pkgsrc_env
202            .packages
203            .join("All")
204            .join(format!("{}.tgz", pkgname));
205
206        // Check if package file exists
207        if !pkgfile.exists() {
208            debug!(path = %pkgfile.display(), "Package file not found");
209            return Ok(false);
210        }
211
212        let pkgfile_str = pkgfile.to_string_lossy();
213        let pkg_info = self.session.pkgsrc_env.pkgtools.join("pkg_info");
214        let pkg_admin = self.session.pkgsrc_env.pkgtools.join("pkg_admin");
215
216        // Get BUILD_INFO and verify source files
217        let Some(build_info) = self.run_cmd(&pkg_info, &["-qb", &pkgfile_str])
218        else {
219            debug!("pkg_info -qb failed or returned empty");
220            return Ok(false);
221        };
222        debug!(lines = build_info.lines().count(), "Checking BUILD_INFO");
223
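        // Each BUILD_INFO line is expected to have the form "<file>:<file_id>",
        // where <file> is relative to the pkgsrc tree and <file_id> is either
        // a "$NetBSD...$" CVS Id or a pkg_admin digest of that file.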
224        for line in build_info.lines() {
225            let Some((file, file_id)) = line.split_once(':') else {
226                continue;
227            };
228            let file_id = file_id.trim();
229            if file.is_empty() || file_id.is_empty() {
230                continue;
231            }
232
233            let src_file = self.session.config.pkgsrc().join(file);
234            if !src_file.exists() {
235                debug!(file, "Source file missing");
236                return Ok(false);
237            }
238
239            if file_id.starts_with("$NetBSD") {
240                // CVS ID comparison - extract $NetBSD...$ from actual file
241                let Ok(content) = std::fs::read_to_string(&src_file) else {
242                    return Ok(false);
243                };
244                let id = content.lines().find_map(|line| {
245                    if let Some(start) = line.find("$NetBSD") {
246                        if let Some(end) = line[start + 1..].find('$') {
247                            return Some(&line[start..start + 1 + end + 1]);
248                        }
249                    }
250                    None
251                });
252                if id != Some(file_id) {
253                    debug!(file, "CVS ID mismatch");
254                    return Ok(false);
255                }
256            } else {
257                // Hash comparison
258                let src_file_str = src_file.to_string_lossy();
259                let Some(hash) =
260                    self.run_cmd(&pkg_admin, &["digest", &src_file_str])
261                else {
262                    debug!(file, "pkg_admin digest failed");
263                    return Ok(false);
264                };
265                let hash = hash.trim();
266                if hash != file_id {
267                    debug!(
268                        file,
269                        path = %src_file.display(),
270                        expected = file_id,
271                        actual = hash,
272                        "Hash mismatch"
273                    );
274                    return Ok(false);
275                }
276            }
277        }
278
279        // Get package dependencies and verify
280        let Some(pkg_deps) = self.run_cmd(&pkg_info, &["-qN", &pkgfile_str])
281        else {
282            return Ok(false);
283        };
284
285        // Build sets of recorded vs expected dependencies
286        let recorded_deps: HashSet<&str> = pkg_deps
287            .lines()
288            .map(|l| l.trim())
289            .filter(|l| !l.is_empty())
290            .collect();
291        let expected_deps: HashSet<&str> =
292            self.pkginfo.depends().iter().map(|d| d.pkgname()).collect();
293
294        // If dependency list has changed in any way, rebuild
295        if recorded_deps != expected_deps {
296            debug!(
297                recorded = recorded_deps.len(),
298                expected = expected_deps.len(),
299                "Dependency list changed"
300            );
301            return Ok(false);
302        }
303
304        let pkgfile_mtime = match pkgfile.metadata().and_then(|m| m.modified())
305        {
306            Ok(t) => t,
307            Err(_) => return Ok(false),
308        };
309
310        // Check each dependency package exists and is not newer
311        for dep in &recorded_deps {
312            let dep_pkg = self
313                .session
314                .pkgsrc_env
315                .packages
316                .join("All")
317                .join(format!("{}.tgz", dep));
318            if !dep_pkg.exists() {
319                debug!(dep, "Dependency package missing");
320                return Ok(false);
321            }
322
323            let dep_mtime = match dep_pkg.metadata().and_then(|m| m.modified())
324            {
325                Ok(t) => t,
326                Err(_) => return Ok(false),
327            };
328
329            if dep_mtime > pkgfile_mtime {
330                debug!(dep, "Dependency is newer");
331                return Ok(false);
332            }
333        }
334
335        debug!("Package is up-to-date");
336        Ok(true)
337    }
338
339    /// Run the full build process.
340    fn build<C: BuildCallback>(
341        &self,
342        callback: &mut C,
343    ) -> anyhow::Result<PkgBuildResult> {
344        let pkgname_str = self.pkginfo.pkgname().pkgname();
345        let pkgpath = &self.pkginfo.pkgpath;
346
347        // Check if package is already up-to-date (skip check if force rebuild)
348        if !self.session.options.force_rebuild && self.check_up_to_date()? {
349            return Ok(PkgBuildResult::Skipped);
350        }
351
352        // Clean up and create log directory
353        if self.logdir.exists() {
354            fs::remove_dir_all(&self.logdir)?;
355        }
356        fs::create_dir_all(&self.logdir)?;
357
358        // Create work.log and chown to build_user if set
359        let work_log = self.logdir.join("work.log");
360        File::create(&work_log)?;
361        if let Some(ref user) = self.build_user {
362            let bob_log = File::options()
363                .create(true)
364                .append(true)
365                .open(self.logdir.join("bob.log"))?;
366            let bob_log_err = bob_log.try_clone()?;
367            let _ = Command::new("chown")
368                .arg(user)
369                .arg(&work_log)
370                .stdout(bob_log)
371                .stderr(bob_log_err)
372                .status();
373        }
374
375        let pkgdir = self.session.config.pkgsrc().join(pkgpath.as_path());
376
377        // Pre-clean
378        callback.stage(Stage::PreClean.as_str());
379        self.run_make_stage(
380            Stage::PreClean,
381            &pkgdir,
382            &["clean"],
383            RunAs::Root,
384            false,
385        )?;
386
387        // Install dependencies
388        if !self.pkginfo.depends().is_empty() {
389            callback.stage(Stage::Depends.as_str());
390            let _ = self.write_stage(Stage::Depends);
391            if !self.install_dependencies()? {
392                return Ok(PkgBuildResult::Failed);
393            }
394        }
395
396        // Checksum
397        callback.stage(Stage::Checksum.as_str());
398        if !self.run_make_stage(
399            Stage::Checksum,
400            &pkgdir,
401            &["checksum"],
402            RunAs::Root,
403            true,
404        )? {
405            return Ok(PkgBuildResult::Failed);
406        }
407
408        // Configure
409        callback.stage(Stage::Configure.as_str());
410        let configure_log = self.logdir.join("configure.log");
411        if !self.run_usergroup_if_needed(
412            Stage::Configure,
413            &pkgdir,
414            &configure_log,
415        )? {
416            return Ok(PkgBuildResult::Failed);
417        }
418        if !self.run_make_stage(
419            Stage::Configure,
420            &pkgdir,
421            &["configure"],
422            self.build_run_as(),
423            true,
424        )? {
425            return Ok(PkgBuildResult::Failed);
426        }
427
428        // Build
429        callback.stage(Stage::Build.as_str());
430        let build_log = self.logdir.join("build.log");
431        if !self.run_usergroup_if_needed(Stage::Build, &pkgdir, &build_log)? {
432            return Ok(PkgBuildResult::Failed);
433        }
434        if !self.run_make_stage(
435            Stage::Build,
436            &pkgdir,
437            &["all"],
438            self.build_run_as(),
439            true,
440        )? {
441            return Ok(PkgBuildResult::Failed);
442        }
443
444        // Install
445        callback.stage(Stage::Install.as_str());
446        let install_log = self.logdir.join("install.log");
447        if !self.run_usergroup_if_needed(
448            Stage::Install,
449            &pkgdir,
450            &install_log,
451        )? {
452            return Ok(PkgBuildResult::Failed);
453        }
454        if !self.run_make_stage(
455            Stage::Install,
456            &pkgdir,
457            &["stage-install"],
458            self.build_run_as(),
459            true,
460        )? {
461            return Ok(PkgBuildResult::Failed);
462        }
463
464        // Package
465        callback.stage(Stage::Package.as_str());
466        if !self.run_make_stage(
467            Stage::Package,
468            &pkgdir,
469            &["stage-package-create"],
470            RunAs::Root,
471            true,
472        )? {
473            return Ok(PkgBuildResult::Failed);
474        }
475
476        // Get the package file path
477        let pkgfile = self.get_make_var(&pkgdir, "STAGE_PKGFILE")?;
478
479        // Test package install (unless bootstrap package)
480        let is_bootstrap = self.pkginfo.bootstrap_pkg() == Some("yes");
481        if !is_bootstrap {
482            if !self.pkg_add(&pkgfile)? {
483                return Ok(PkgBuildResult::Failed);
484            }
485
486            // Test package deinstall
487            callback.stage(Stage::Deinstall.as_str());
488            let _ = self.write_stage(Stage::Deinstall);
489            if !self.pkg_delete(pkgname_str)? {
490                return Ok(PkgBuildResult::Failed);
491            }
492        }
493
494        // Save package to packages directory
495        let packages_dir = self.session.pkgsrc_env.packages.join("All");
496        fs::create_dir_all(&packages_dir)?;
497        let dest = packages_dir.join(
498            Path::new(&pkgfile)
499                .file_name()
500                .context("Invalid package file path")?,
501        );
502        // pkgfile is a path inside the sandbox; prepend sandbox path for host access
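        // e.g. a sandbox path of /work/pkg/foo-1.0.tgz maps to
        // <sandbox root>/work/pkg/foo-1.0.tgz on the host (paths illustrative).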
503        let host_pkgfile = if self.session.sandbox.enabled() {
504            self.session
505                .sandbox
506                .path(self.sandbox_id)
507                .join(pkgfile.trim_start_matches('/'))
508        } else {
509            PathBuf::from(&pkgfile)
510        };
511        fs::copy(&host_pkgfile, &dest)?;
512
513        // Clean
514        callback.stage(Stage::Clean.as_str());
515        let _ = self.run_make_stage(
516            Stage::Clean,
517            &pkgdir,
518            &["clean"],
519            RunAs::Root,
520            false,
521        );
522
523        // Remove log directory on success
524        let _ = fs::remove_dir_all(&self.logdir);
525
526        Ok(PkgBuildResult::Success)
527    }
528
529    /// Determine how to run build commands.
530    fn build_run_as(&self) -> RunAs {
531        if self.build_user.is_some() { RunAs::User } else { RunAs::Root }
532    }
533
534    /// Write the current stage to a .stage file.
535    fn write_stage(&self, stage: Stage) -> anyhow::Result<()> {
536        let stage_file = self.logdir.join(".stage");
537        fs::write(&stage_file, stage.as_str())?;
538        Ok(())
539    }
540
541    /// Run a make stage with output logging.
542    fn run_make_stage(
543        &self,
544        stage: Stage,
545        pkgdir: &Path,
546        targets: &[&str],
547        run_as: RunAs,
548        include_make_flags: bool,
549    ) -> anyhow::Result<bool> {
550        // Write stage to .stage file
551        let _ = self.write_stage(stage);
552
553        let logfile = self.logdir.join(format!("{}.log", stage.as_str()));
554        let work_log = self.logdir.join("work.log");
555
556        let owned_args =
557            self.make_args(pkgdir, targets, include_make_flags, &work_log);
558
559        // Convert to slice of &str for the command
560        let args: Vec<&str> = owned_args.iter().map(|s| s.as_str()).collect();
561
562        info!(stage = stage.as_str(), "Running make stage");
563
564        let status = self.run_command_logged(
565            self.session.config.make(),
566            &args,
567            run_as,
568            &logfile,
569        )?;
570
571        Ok(status.success())
572    }
573
574    /// Run a command with output logged to a file.
575    fn run_command_logged(
576        &self,
577        cmd: &Path,
578        args: &[&str],
579        run_as: RunAs,
580        logfile: &Path,
581    ) -> anyhow::Result<ExitStatus> {
582        self.run_command_logged_with_env(cmd, args, run_as, logfile, &[])
583    }
584
585    fn run_command_logged_with_env(
586        &self,
587        cmd: &Path,
588        args: &[&str],
589        run_as: RunAs,
590        logfile: &Path,
591        extra_envs: &[(&str, &str)],
592    ) -> anyhow::Result<ExitStatus> {
593        use std::io::{BufRead, BufReader, Write};
594
595        let mut log =
596            OpenOptions::new().create(true).append(true).open(logfile)?;
597
598        // Write command being executed to the log file
599        let _ = writeln!(log, "=> {:?} {:?}", cmd, args);
600        let _ = log.flush();
601
602        // Use tee-style pipe handling when output_tx is available for live view.
603        // Otherwise use direct file redirection.
604        if let Some(ref output_tx) = self.output_tx {
605            // Wrap command in shell to merge stdout/stderr with 2>&1, like the
606            // shell script's run_log function does.
607            let shell_cmd =
608                self.build_shell_command(cmd, args, run_as, extra_envs);
609            let mut child = self
610                .session
611                .sandbox
612                .command(self.sandbox_id, Path::new("/bin/sh"))
613                .arg("-c")
614                .arg(&shell_cmd)
615                .stdout(Stdio::piped())
616                .stderr(Stdio::null())
617                .spawn()
618                .context("Failed to spawn shell command")?;
619
620            let stdout = child.stdout.take().unwrap();
621            let output_tx = output_tx.clone();
622            let sandbox_id = self.sandbox_id;
623
624            // Spawn thread to read from pipe and tee to file + output channel.
625            // Batch lines and throttle sends to reduce channel overhead.
626            let tee_handle = std::thread::spawn(move || {
627                let mut reader = BufReader::new(stdout);
628                let mut buf = Vec::new();
629                let mut batch = Vec::with_capacity(50);
630                let mut last_send = Instant::now();
631                let send_interval = OUTPUT_BATCH_INTERVAL;
632
633                loop {
634                    buf.clear();
635                    match reader.read_until(b'\n', &mut buf) {
636                        Ok(0) => break,
637                        Ok(_) => {}
638                        Err(_) => break,
639                    };
640                    // Write raw bytes to log file to preserve original output
641                    let _ = log.write_all(&buf);
642                    // Convert to lossy UTF-8 for live view
643                    let line = String::from_utf8_lossy(&buf);
644                    let line = line.trim_end_matches('\n').to_string();
645                    batch.push(line);
646
647                    // Send batch if interval elapsed or batch is large
648                    if last_send.elapsed() >= send_interval || batch.len() >= 50
649                    {
650                        let _ = output_tx.send(ChannelCommand::OutputLines(
651                            sandbox_id,
652                            std::mem::take(&mut batch),
653                        ));
654                        last_send = Instant::now();
655                    }
656                }
657
658                // Send remaining lines
659                if !batch.is_empty() {
660                    let _ = output_tx
661                        .send(ChannelCommand::OutputLines(sandbox_id, batch));
662                }
663            });
664
665            let status =
666                wait_with_shutdown(&mut child, &self.session.shutdown)?;
667
668            // Reader thread will exit when pipe closes (process exits)
669            let _ = tee_handle.join();
670
671            trace!(cmd = ?cmd, status = ?status, "Command completed");
672            Ok(status)
673        } else {
674            let status =
675                self.spawn_command_to_file(cmd, args, run_as, extra_envs, log)?;
676            trace!(cmd = ?cmd, status = ?status, "Command completed");
677            Ok(status)
678        }
679    }
680
681    /// Spawn a command with stdout/stderr redirected to a file.
682    fn spawn_command_to_file(
683        &self,
684        cmd: &Path,
685        args: &[&str],
686        run_as: RunAs,
687        extra_envs: &[(&str, &str)],
688        log: File,
689    ) -> anyhow::Result<ExitStatus> {
690        // Clone file handle for stderr (stdout and stderr both go to same file)
691        let log_err = log.try_clone()?;
692
693        match run_as {
694            RunAs::Root => {
695                let mut command =
696                    self.session.sandbox.command(self.sandbox_id, cmd);
697                command.args(args);
698                self.apply_envs(&mut command, extra_envs);
699                let mut child = command
700                    .stdout(Stdio::from(log))
701                    .stderr(Stdio::from(log_err))
702                    .spawn()
703                    .with_context(|| {
704                        format!("Failed to spawn {}", cmd.display())
705                    })?;
706                wait_with_shutdown(&mut child, &self.session.shutdown)
707            }
708            RunAs::User => {
709                let user = self.build_user.as_ref().unwrap();
710                let mut parts = Vec::with_capacity(args.len() + 1);
711                parts.push(cmd.display().to_string());
712                parts.extend(args.iter().map(|arg| arg.to_string()));
713                let inner_cmd = parts
714                    .iter()
715                    .map(|part| Self::shell_escape(part))
716                    .collect::<Vec<_>>()
717                    .join(" ");
718                let mut command = self
719                    .session
720                    .sandbox
721                    .command(self.sandbox_id, Path::new("su"));
722                command.arg(user).arg("-c").arg(&inner_cmd);
723                self.apply_envs(&mut command, extra_envs);
724                let mut child = command
725                    .stdout(Stdio::from(log))
726                    .stderr(Stdio::from(log_err))
727                    .spawn()
728                    .context("Failed to spawn su command")?;
729                wait_with_shutdown(&mut child, &self.session.shutdown)
730            }
731        }
732    }
733
734    /// Get a make variable value.
735    fn get_make_var(
736        &self,
737        pkgdir: &Path,
738        varname: &str,
739    ) -> anyhow::Result<String> {
740        let mut cmd = self
741            .session
742            .sandbox
743            .command(self.sandbox_id, self.session.config.make());
744        self.apply_envs(&mut cmd, &[]);
745
746        let work_log = self.logdir.join("work.log");
747        let make_args = self.make_args(
748            pkgdir,
749            &["show-var", &format!("VARNAME={}", varname)],
750            true,
751            &work_log,
752        );
753
754        let bob_log = File::options()
755            .create(true)
756            .append(true)
757            .open(self.logdir.join("bob.log"))?;
758        let output =
759            cmd.args(&make_args).stderr(Stdio::from(bob_log)).output()?;
760
761        if !output.status.success() {
762            bail!("Failed to get make variable {}", varname);
763        }
764
765        Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
766    }
767
768    /// Install package dependencies.
769    fn install_dependencies(&self) -> anyhow::Result<bool> {
770        let deps: Vec<String> =
771            self.pkginfo.depends().iter().map(|d| d.to_string()).collect();
772
773        let pkg_path = self.session.pkgsrc_env.packages.join("All");
774        let logfile = self.logdir.join("depends.log");
775
776        let mut args = vec![];
777        for dep in &deps {
778            args.push(dep.as_str());
779        }
780
781        let status = self.run_pkg_add_with_path(&args, &pkg_path, &logfile)?;
782        Ok(status.success())
783    }
784
785    /// Run pkg_add with PKG_PATH set.
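    ///
    /// Roughly equivalent to running, inside the sandbox:
    /// `PKG_PATH=<pkg_path> pkg_add -K <PKG_DBDIR> <package> ...`,
    /// with output appended to `logfile`.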
786    fn run_pkg_add_with_path(
787        &self,
788        packages: &[&str],
789        pkg_path: &Path,
790        logfile: &Path,
791    ) -> anyhow::Result<ExitStatus> {
792        let pkg_add = self.session.pkgsrc_env.pkgtools.join("pkg_add");
793        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
794        let pkg_path_value = pkg_path.to_string_lossy().to_string();
795        let extra_envs = [("PKG_PATH", pkg_path_value.as_str())];
796
797        let mut args = vec!["-K", &*pkg_dbdir];
798        args.extend(packages.iter().copied());
799
800        self.run_command_logged_with_env(
801            &pkg_add,
802            &args,
803            RunAs::Root,
804            logfile,
805            &extra_envs,
806        )
807    }
808
809    /// Install a package file.
810    fn pkg_add(&self, pkgfile: &str) -> anyhow::Result<bool> {
811        let pkg_add = self.session.pkgsrc_env.pkgtools.join("pkg_add");
812        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
813        let logfile = self.logdir.join("package.log");
814
815        let status = self.run_command_logged(
816            &pkg_add,
817            &["-K", &*pkg_dbdir, pkgfile],
818            RunAs::Root,
819            &logfile,
820        )?;
821
822        Ok(status.success())
823    }
824
825    /// Delete an installed package.
826    fn pkg_delete(&self, pkgname: &str) -> anyhow::Result<bool> {
827        let pkg_delete = self.session.pkgsrc_env.pkgtools.join("pkg_delete");
828        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
829        let logfile = self.logdir.join("deinstall.log");
830
831        let status = self.run_command_logged(
832            &pkg_delete,
833            &["-K", &*pkg_dbdir, pkgname],
834            RunAs::Root,
835            &logfile,
836        )?;
837
838        Ok(status.success())
839    }
840
841    /// Run create-usergroup if needed based on usergroup_phase.
842    fn run_usergroup_if_needed(
843        &self,
844        stage: Stage,
845        pkgdir: &Path,
846        logfile: &Path,
847    ) -> anyhow::Result<bool> {
848        let usergroup_phase = self.pkginfo.usergroup_phase().unwrap_or("");
849
850        let should_run = match stage {
851            Stage::Configure => usergroup_phase.ends_with("configure"),
852            Stage::Build => usergroup_phase.ends_with("build"),
853            Stage::Install => usergroup_phase == "pre-install",
854            _ => false,
855        };
856
857        if !should_run {
858            return Ok(true);
859        }
860
861        let mut args = vec!["-C", pkgdir.to_str().unwrap(), "create-usergroup"];
862        if stage == Stage::Configure {
863            args.push("clean");
864        }
865
866        let status = self.run_command_logged(
867            self.session.config.make(),
868            &args,
869            RunAs::Root,
870            logfile,
871        )?;
872        Ok(status.success())
873    }
874
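    /// Assemble the `make` argument list: `-C <pkgdir> <targets...>`, plus
    /// `BATCH=1`, `DEPENDS_TARGET=/nonexistent`, any multi-version flags, and
    /// `WRKLOG=<work.log>` when `include_make_flags` is set.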
875    fn make_args(
876        &self,
877        pkgdir: &Path,
878        targets: &[&str],
879        include_make_flags: bool,
880        work_log: &Path,
881    ) -> Vec<String> {
882        let mut owned_args: Vec<String> =
883            vec!["-C".to_string(), pkgdir.to_str().unwrap().to_string()];
884        owned_args.extend(targets.iter().map(|s| s.to_string()));
885
886        if include_make_flags {
887            owned_args.push("BATCH=1".to_string());
888            owned_args.push("DEPENDS_TARGET=/nonexistent".to_string());
889
890            if let Some(multi_version) = self.pkginfo.multi_version() {
891                for flag in multi_version {
892                    owned_args.push(flag.clone());
893                }
894            }
895
896            owned_args.push(format!("WRKLOG={}", work_log.display()));
897        }
898
899        owned_args
900    }
901
902    fn apply_envs(&self, cmd: &mut Command, extra_envs: &[(&str, &str)]) {
903        for (key, value) in &self.envs {
904            cmd.env(key, value);
905        }
906        for (key, value) in extra_envs {
907            cmd.env(key, value);
908        }
909    }
910
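    /// Quote a value for safe interpolation into a `/bin/sh` command line.
    /// Plain tokens (e.g. `BATCH=1`) pass through unchanged; anything else is
    /// wrapped in single quotes with embedded quotes escaped, so `it's`
    /// becomes `'it'\''s'`.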
911    fn shell_escape(value: &str) -> String {
912        if value.is_empty() {
913            return "''".to_string();
914        }
915        if value
916            .chars()
917            .all(|c| c.is_ascii_alphanumeric() || "-_.,/:=+@".contains(c))
918        {
919            return value.to_string();
920        }
921        let escaped = value.replace('\'', "'\\''");
922        format!("'{}'", escaped)
923    }
924
925    /// Build a shell command string with environment, run_as handling, and 2>&1.
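    ///
    /// For example (paths and values illustrative), a user build of the `all`
    /// target produces roughly
    /// `VAR='value' su builduser -c '<make> -C <pkgdir> all BATCH=1 ...' 2>&1`,
    /// while a root build drops the `su` wrapper and runs the command directly.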
926    fn build_shell_command(
927        &self,
928        cmd: &Path,
929        args: &[&str],
930        run_as: RunAs,
931        extra_envs: &[(&str, &str)],
932    ) -> String {
933        let mut parts = Vec::new();
934
935        // Add environment variables
936        for (key, value) in &self.envs {
937            parts.push(format!("{}={}", key, Self::shell_escape(value)));
938        }
939        for (key, value) in extra_envs {
940            parts.push(format!("{}={}", key, Self::shell_escape(value)));
941        }
942
943        // Build the actual command
944        let cmd_str = Self::shell_escape(&cmd.to_string_lossy());
945        let args_str: Vec<String> =
946            args.iter().map(|a| Self::shell_escape(a)).collect();
947
948        match run_as {
949            RunAs::Root => {
950                parts.push(cmd_str);
951                parts.extend(args_str);
952            }
953            RunAs::User => {
954                let user = self.build_user.as_ref().unwrap();
955                let inner_cmd = std::iter::once(cmd_str)
956                    .chain(args_str)
957                    .collect::<Vec<_>>()
958                    .join(" ");
959                parts.push("su".to_string());
960                parts.push(Self::shell_escape(user));
961                parts.push("-c".to_string());
962                parts.push(Self::shell_escape(&inner_cmd));
963            }
964        }
965
966        // Merge stdout/stderr
967        parts.push("2>&1".to_string());
968        parts.join(" ")
969    }
970}
971
972/// Callback adapter that sends build updates through a channel.
973struct ChannelCallback<'a> {
974    sandbox_id: usize,
975    status_tx: &'a Sender<ChannelCommand>,
976}
977
978impl<'a> ChannelCallback<'a> {
979    fn new(sandbox_id: usize, status_tx: &'a Sender<ChannelCommand>) -> Self {
980        Self { sandbox_id, status_tx }
981    }
982}
983
984impl<'a> BuildCallback for ChannelCallback<'a> {
985    fn stage(&mut self, stage: &str) {
986        let _ = self.status_tx.send(ChannelCommand::StageUpdate(
987            self.sandbox_id,
988            Some(stage.to_string()),
989        ));
990    }
991}
992
993/// Outcome of a package build attempt.
994///
995/// Used in [`BuildResult`] to indicate whether the build succeeded, failed,
996/// or was skipped.
997#[derive(Clone, Debug, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
998pub enum BuildOutcome {
999    /// Package built and packaged successfully.
1000    Success,
1001    /// Package build failed.
1002    ///
1003    /// The string contains the failure reason (e.g., "Failed in build phase").
1004    Failed(String),
1005    /// Package did not need to be built - we already have a binary package
1006    /// for this revision.
1007    UpToDate,
1008    /// Package was not built due to a scan-phase failure.
1009    ///
1010    /// Contains the reason for skipping.
1011    Skipped(SkipReason),
1012}
1013
1014/// Result of building a single package.
1015///
1016/// Contains the outcome, timing, and log location for a package build.
1017#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
1018pub struct BuildResult {
1019    /// Package name with version (e.g., `mutt-2.2.12`).
1020    pub pkgname: PkgName,
1021    /// Package path in pkgsrc (e.g., `mail/mutt`).
1022    pub pkgpath: Option<PkgPath>,
1023    /// Build outcome (success, failure, or skipped).
1024    pub outcome: BuildOutcome,
1025    /// Time spent building this package.
1026    pub duration: Duration,
1027    /// Path to build logs directory, if available.
1028    ///
1029    /// For failed builds, this contains `pre-clean.log`, `build.log`, etc.
1030    /// Successful builds clean up their log directories.
1031    pub log_dir: Option<PathBuf>,
1032}
1033
1034/// Counts of build results by outcome category.
1035#[derive(Clone, Debug, Default)]
1036pub struct BuildCounts {
1037    /// Packages that built successfully.
1038    pub success: usize,
1039    /// Packages that failed to build.
1040    pub failed: usize,
1041    /// Packages already up-to-date (binary package exists).
1042    pub up_to_date: usize,
1043    /// Packages that were skipped.
1044    pub skipped: SkippedCounts,
1045    /// Packages that failed to scan.
1046    pub scanfail: usize,
1047}
1048
1049/// Summary of an entire build run.
1050#[derive(Clone, Debug)]
1051pub struct BuildSummary {
1052    /// Total duration of the build run.
1053    pub duration: Duration,
1054    /// Results for each package.
1055    pub results: Vec<BuildResult>,
1056    /// Packages that failed to scan (pkgpath, error message).
1057    pub scanfail: Vec<(PkgPath, String)>,
1058}
1059
1060impl BuildSummary {
1061    /// Compute all outcome counts in a single pass.
1062    pub fn counts(&self) -> BuildCounts {
1063        let mut c =
1064            BuildCounts { scanfail: self.scanfail.len(), ..Default::default() };
1065        for r in &self.results {
1066            match &r.outcome {
1067                BuildOutcome::Success => c.success += 1,
1068                BuildOutcome::Failed(_) => c.failed += 1,
1069                BuildOutcome::UpToDate => c.up_to_date += 1,
1070                BuildOutcome::Skipped(SkipReason::PkgSkip(_)) => {
1071                    c.skipped.pkg_skip += 1
1072                }
1073                BuildOutcome::Skipped(SkipReason::PkgFail(_)) => {
1074                    c.skipped.pkg_fail += 1
1075                }
1076                BuildOutcome::Skipped(SkipReason::UnresolvedDep(_)) => {
1077                    c.skipped.unresolved += 1
1078                }
1079                BuildOutcome::Skipped(SkipReason::IndirectFail(_)) => {
1080                    c.skipped.indirect_fail += 1
1081                }
1082                BuildOutcome::Skipped(SkipReason::IndirectSkip(_)) => {
1083                    c.skipped.indirect_skip += 1
1084                }
1085            }
1086        }
1087        c
1088    }
1089
1090    /// Get all failed results (direct build failures only).
1091    pub fn failed(&self) -> Vec<&BuildResult> {
1092        self.results
1093            .iter()
1094            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
1095            .collect()
1096    }
1097
1098    /// Get all successful results.
1099    pub fn succeeded(&self) -> Vec<&BuildResult> {
1100        self.results
1101            .iter()
1102            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
1103            .collect()
1104    }
1105
1106    /// Get all skipped results.
1107    pub fn skipped(&self) -> Vec<&BuildResult> {
1108        self.results
1109            .iter()
1110            .filter(|r| matches!(r.outcome, BuildOutcome::Skipped(_)))
1111            .collect()
1112    }
1113}
1114
1115/// Options that control build behavior.
1116#[derive(Clone, Debug, Default)]
1117pub struct BuildOptions {
1118    /// Force rebuild even if package is up-to-date.
1119    pub force_rebuild: bool,
1120}
1121
1122#[derive(Debug)]
1123pub struct Build {
1124    /// Parsed [`Config`].
1125    config: Config,
1126    /// Pkgsrc environment variables.
1127    pkgsrc_env: PkgsrcEnv,
1128    /// Sandbox scope - owns created sandboxes, destroys on drop.
1129    scope: SandboxScope,
1130    /// List of packages to build, as produced by `Scan::resolve`.
1131    scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1132    /// Cached build results from previous run.
1133    cached: IndexMap<PkgName, BuildResult>,
1134    /// Build options.
1135    options: BuildOptions,
1136}
1137
1138/// Per-package build task sent to worker threads.
1139#[derive(Debug)]
1140struct PackageBuild {
1141    session: Arc<BuildSession>,
1142    sandbox_id: usize,
1143    pkginfo: ResolvedPackage,
1144}
1145
1146/// Helper for querying bmake variables with the correct environment.
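///
/// For example, `var("WRKDIR")` runs
/// `<make> -C <pkgsrc>/<pkgpath> show-var VARNAME=WRKDIR` inside the sandbox
/// with the package environment applied, returning the trimmed output, or
/// `None` if the command fails or prints nothing.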
1147struct MakeQuery<'a> {
1148    session: &'a BuildSession,
1149    sandbox_id: usize,
1150    pkgpath: &'a PkgPath,
1151    env: &'a HashMap<String, String>,
1152}
1153
1154impl<'a> MakeQuery<'a> {
1155    fn new(
1156        session: &'a BuildSession,
1157        sandbox_id: usize,
1158        pkgpath: &'a PkgPath,
1159        env: &'a HashMap<String, String>,
1160    ) -> Self {
1161        Self { session, sandbox_id, pkgpath, env }
1162    }
1163
1164    /// Query a bmake variable value.
1165    fn var(&self, name: &str) -> Option<String> {
1166        let pkgdir = self.session.config.pkgsrc().join(self.pkgpath.as_path());
1167
1168        let mut cmd = self
1169            .session
1170            .sandbox
1171            .command(self.sandbox_id, self.session.config.make());
1172        cmd.arg("-C")
1173            .arg(&pkgdir)
1174            .arg("show-var")
1175            .arg(format!("VARNAME={}", name));
1176
1177        // Pass env vars that may affect the variable value
1178        for (key, value) in self.env {
1179            cmd.env(key, value);
1180        }
1181
1182        cmd.stderr(Stdio::null());
1183
1184        let output = cmd.output().ok()?;
1185
1186        if !output.status.success() {
1187            return None;
1188        }
1189
1190        let value = String::from_utf8_lossy(&output.stdout).trim().to_string();
1191
1192        if value.is_empty() { None } else { Some(value) }
1193    }
1194
1195    /// Query a bmake variable and return as PathBuf.
1196    fn var_path(&self, name: &str) -> Option<PathBuf> {
1197        self.var(name).map(PathBuf::from)
1198    }
1199
1200    /// Get the WRKDIR for this package.
1201    fn wrkdir(&self) -> Option<PathBuf> {
1202        self.var_path("WRKDIR")
1203    }
1204
1205    /// Resolve a path to its actual location on the host filesystem.
1206    /// If sandboxed, prepends the sandbox root path.
1207    fn resolve_path(&self, path: &Path) -> PathBuf {
1208        if self.session.sandbox.enabled() {
1209            self.session
1210                .sandbox
1211                .path(self.sandbox_id)
1212                .join(path.strip_prefix("/").unwrap_or(path))
1213        } else {
1214            path.to_path_buf()
1215        }
1216    }
1217}
1218
1219/// Result of a single package build attempt.
1220#[derive(Debug)]
1221enum PackageBuildResult {
1222    /// Build succeeded
1223    Success,
1224    /// Build failed
1225    Failed,
1226    /// Package was up-to-date, skipped
1227    Skipped,
1228}
1229
1230impl std::fmt::Display for PackageBuildResult {
1231    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1232        match self {
1233            Self::Success => write!(f, "success"),
1234            Self::Failed => write!(f, "failed"),
1235            Self::Skipped => write!(f, "skipped"),
1236        }
1237    }
1238}
1239
1240impl PackageBuild {
1241    fn build(
1242        &self,
1243        status_tx: &Sender<ChannelCommand>,
1244    ) -> anyhow::Result<PackageBuildResult> {
1245        let pkgname = self.pkginfo.index.pkgname.pkgname();
1246        info!("Starting package build");
1247
1248        let pkgpath = &self.pkginfo.pkgpath;
1249
1250        let logdir = self.session.config.logdir();
1251
1252        // Get env vars from Lua config for wrkdir saving and build environment
1253        let pkg_env = match self.session.config.get_pkg_env(&self.pkginfo) {
1254            Ok(env) => env,
1255            Err(e) => {
1256                error!(error = %e, "Failed to get env from Lua config");
1257                HashMap::new()
1258            }
1259        };
1260
1261        let mut envs =
1262            self.session.config.script_env(Some(&self.session.pkgsrc_env));
1263        for (key, value) in &pkg_env {
1264            envs.push((key.clone(), value.clone()));
1265        }
1266
1267        let patterns = self.session.config.save_wrkdir_patterns();
1268
1269        // Run pre-build script if defined (always runs)
1270        if !self.session.sandbox.run_pre_build(
1271            self.sandbox_id,
1272            &self.session.config,
1273            envs.clone(),
1274        )? {
1275            warn!("pre-build script failed");
1276        }
1277
1278        // Run the build using PkgBuilder
1279        let builder = PkgBuilder::new(
1280            &self.session,
1281            self.sandbox_id,
1282            &self.pkginfo,
1283            envs.clone(),
1284            Some(status_tx.clone()),
1285        );
1286
1287        let mut callback = ChannelCallback::new(self.sandbox_id, status_tx);
1288        let result = builder.build(&mut callback);
1289
1290        // Clear stage display
1291        let _ =
1292            status_tx.send(ChannelCommand::StageUpdate(self.sandbox_id, None));
1293
1294        let result = match &result {
1295            Ok(PkgBuildResult::Success) => {
1296                info!("Package build completed successfully");
1297                PackageBuildResult::Success
1298            }
1299            Ok(PkgBuildResult::Skipped) => {
1300                info!("Package build skipped (up-to-date)");
1301                PackageBuildResult::Skipped
1302            }
1303            Ok(PkgBuildResult::Failed) => {
1304                error!("Package build failed");
1305                // Show cleanup stage to user
1306                let _ = status_tx.send(ChannelCommand::StageUpdate(
1307                    self.sandbox_id,
1308                    Some("cleanup".to_string()),
1309                ));
1310                // Kill any orphaned processes in the sandbox before cleanup.
1311                // Failed builds may leave processes running that would block
1312                // subsequent commands like bmake show-var or bmake clean.
1313                let kill_start = Instant::now();
1314                self.session.sandbox.kill_processes_by_id(self.sandbox_id);
1315                trace!(
1316                    elapsed_ms = kill_start.elapsed().as_millis(),
1317                    "kill_processes_by_id completed"
1318                );
1319                // Save wrkdir files matching configured patterns, then clean up
1320                if !patterns.is_empty() {
1321                    let save_start = Instant::now();
1322                    self.save_wrkdir_files(
1323                        pkgname, pkgpath, logdir, patterns, &pkg_env,
1324                    );
1325                    trace!(
1326                        elapsed_ms = save_start.elapsed().as_millis(),
1327                        "save_wrkdir_files completed"
1328                    );
1329                    let clean_start = Instant::now();
1330                    self.run_clean(pkgpath, &envs);
1331                    trace!(
1332                        elapsed_ms = clean_start.elapsed().as_millis(),
1333                        "run_clean completed"
1334                    );
1335                } else {
1336                    let clean_start = Instant::now();
1337                    self.run_clean(pkgpath, &envs);
1338                    trace!(
1339                        elapsed_ms = clean_start.elapsed().as_millis(),
1340                        "run_clean completed"
1341                    );
1342                }
1343                PackageBuildResult::Failed
1344            }
1345            Err(e) => {
1346                error!(error = %e, "Package build error");
1347                // Show cleanup stage to user
1348                let _ = status_tx.send(ChannelCommand::StageUpdate(
1349                    self.sandbox_id,
1350                    Some("cleanup".to_string()),
1351                ));
1352                // Kill any orphaned processes in the sandbox before cleanup.
1353                // Failed builds may leave processes running that would block
1354                // subsequent commands like bmake show-var or bmake clean.
1355                let kill_start = Instant::now();
1356                self.session.sandbox.kill_processes_by_id(self.sandbox_id);
1357                trace!(
1358                    elapsed_ms = kill_start.elapsed().as_millis(),
1359                    "kill_processes_by_id completed"
1360                );
1361                // Save wrkdir files matching configured patterns, then clean up
1362                if !patterns.is_empty() {
1363                    let save_start = Instant::now();
1364                    self.save_wrkdir_files(
1365                        pkgname, pkgpath, logdir, patterns, &pkg_env,
1366                    );
1367                    trace!(
1368                        elapsed_ms = save_start.elapsed().as_millis(),
1369                        "save_wrkdir_files completed"
1370                    );
1371                    let clean_start = Instant::now();
1372                    self.run_clean(pkgpath, &envs);
1373                    trace!(
1374                        elapsed_ms = clean_start.elapsed().as_millis(),
1375                        "run_clean completed"
1376                    );
1377                } else {
1378                    let clean_start = Instant::now();
1379                    self.run_clean(pkgpath, &envs);
1380                    trace!(
1381                        elapsed_ms = clean_start.elapsed().as_millis(),
1382                        "run_clean completed"
1383                    );
1384                }
1385                PackageBuildResult::Failed
1386            }
1387        };
1388
1389        // Run post-build script if defined (always runs regardless of result)
1390        match self.session.sandbox.run_post_build(
1391            self.sandbox_id,
1392            &self.session.config,
1393            envs,
1394        ) {
1395            Ok(true) => {}
1396            Ok(false) => warn!("post-build script failed"),
1397            Err(e) => {
1398                warn!(error = %e, "post-build script error")
1399            }
1400        }
1401
1402        Ok(result)
1403    }
1404
1405    /// Save files matching patterns from WRKDIR to logdir on build failure.
1406    fn save_wrkdir_files(
1407        &self,
1408        pkgname: &str,
1409        pkgpath: &PkgPath,
1410        logdir: &Path,
1411        patterns: &[String],
1412        pkg_env: &HashMap<String, String>,
1413    ) {
1414        let make =
1415            MakeQuery::new(&self.session, self.sandbox_id, pkgpath, pkg_env);
1416
1417        // Get WRKDIR
1418        let wrkdir = match make.wrkdir() {
1419            Some(w) => w,
1420            None => {
1421                debug!(pkgname = %pkgname, "Could not determine WRKDIR, skipping file save");
1422                return;
1423            }
1424        };
1425
1426        // Resolve to actual filesystem path
1427        let wrkdir_path = make.resolve_path(&wrkdir);
1428
1429        if !wrkdir_path.exists() {
1430            debug!(pkgname = %pkgname,
1431                wrkdir = %wrkdir_path.display(),
1432                "WRKDIR does not exist, skipping file save"
1433            );
1434            return;
1435        }
1436
1437        let save_dir = logdir.join(pkgname).join("wrkdir-files");
1438        if let Err(e) = fs::create_dir_all(&save_dir) {
1439            warn!(pkgname = %pkgname,
1440                error = %e,
1441                "Failed to create wrkdir-files directory"
1442            );
1443            return;
1444        }
1445
1446        // Compile glob patterns
1447        let compiled_patterns: Vec<Pattern> = patterns
1448            .iter()
1449            .filter_map(|p| {
1450                Pattern::new(p).ok().or_else(|| {
1451                    warn!(pattern = %p, "Invalid glob pattern");
1452                    None
1453                })
1454            })
1455            .collect();
1456
1457        if compiled_patterns.is_empty() {
1458            return;
1459        }
1460
1461        // Walk the wrkdir and find matching files
1462        let mut saved_count = 0;
1463        if let Err(e) = walk_and_save(
1464            &wrkdir_path,
1465            &wrkdir_path,
1466            &save_dir,
1467            &compiled_patterns,
1468            &mut saved_count,
1469        ) {
1470            warn!(pkgname = %pkgname,
1471                error = %e,
1472                "Error while saving wrkdir files"
1473            );
1474        }
1475
1476        if saved_count > 0 {
1477            info!(pkgname = %pkgname,
1478                count = saved_count,
1479                dest = %save_dir.display(),
1480                "Saved wrkdir files"
1481            );
1482        }
1483    }
1484
1485    /// Run bmake clean for a package.
1486    fn run_clean(&self, pkgpath: &PkgPath, envs: &[(String, String)]) {
1487        let pkgdir = self.session.config.pkgsrc().join(pkgpath.as_path());
1488
1489        let mut cmd = self
1490            .session
1491            .sandbox
1492            .command(self.sandbox_id, self.session.config.make());
1493        cmd.arg("-C").arg(&pkgdir).arg("clean");
1494        for (key, value) in envs {
1495            cmd.env(key, value);
1496        }
1497        let result = cmd
1498            .stdout(std::process::Stdio::null())
1499            .stderr(std::process::Stdio::null())
1500            .status();
1501
1502        if let Err(e) = result {
1503            debug!(error = %e, "Failed to run bmake clean");
1504        }
1505    }
1506}
1507
1508/// Recursively walk a directory and save files matching patterns.
1509fn walk_and_save(
1510    base: &Path,
1511    current: &Path,
1512    save_dir: &Path,
1513    patterns: &[Pattern],
1514    saved_count: &mut usize,
1515) -> std::io::Result<()> {
1516    if !current.is_dir() {
1517        return Ok(());
1518    }
1519
1520    for entry in fs::read_dir(current)? {
1521        let entry = entry?;
1522        let path = entry.path();
1523
1524        if path.is_dir() {
1525            walk_and_save(base, &path, save_dir, patterns, saved_count)?;
1526        } else if path.is_file() {
1527            // Get relative path from base
1528            let rel_path = path.strip_prefix(base).unwrap_or(&path);
1529            let rel_str = rel_path.to_string_lossy();
1530
1531            // Check if any pattern matches
1532            for pattern in patterns {
1533                if pattern.matches(&rel_str)
1534                    || pattern.matches(
1535                        path.file_name()
1536                            .unwrap_or_default()
1537                            .to_string_lossy()
1538                            .as_ref(),
1539                    )
1540                {
1541                    // Create destination directory
1542                    let dest_path = save_dir.join(rel_path);
1543                    if let Some(parent) = dest_path.parent() {
1544                        fs::create_dir_all(parent)?;
1545                    }
1546
1547                    // Copy the file
1548                    if let Err(e) = fs::copy(&path, &dest_path) {
1549                        warn!(src = %path.display(),
1550                            dest = %dest_path.display(),
1551                            error = %e,
1552                            "Failed to copy file"
1553                        );
1554                    } else {
1555                        debug!(src = %path.display(),
1556                            dest = %dest_path.display(),
1557                            "Saved wrkdir file"
1558                        );
1559                        *saved_count += 1;
1560                    }
1561                    break; // Don't copy same file multiple times
1562                }
1563            }
1564        }
1565    }
1566
1567    Ok(())
1568}
1569
1570/**
1571 * Commands sent between the manager and clients.
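 *
 * The typical exchange: a client announces itself with `ClientReady`, the
 * manager replies with `JobData`, `ComeBackLater`, or `Quit`, and the client
 * reports the outcome via `JobSuccess`, `JobFailed`, `JobSkipped`, or
 * `JobError`.  `StageUpdate` and `OutputLines` carry per-sandbox progress and
 * log output for display.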
1572 */
1573#[derive(Debug)]
1574enum ChannelCommand {
1575    /**
1576     * Client (with the specified identifier) indicating it is ready for work.
1577     */
1578    ClientReady(usize),
1579    /**
1580     * Manager has no work available at the moment, try again later.
1581     */
1582    ComeBackLater,
1583    /**
1584     * Manager directing a client to build a specific package.
1585     */
1586    JobData(Box<PackageBuild>),
1587    /**
1588     * Client returning a successful package build with duration.
1589     */
1590    JobSuccess(PkgName, Duration),
1591    /**
1592     * Client returning a failed package build with duration.
1593     */
1594    JobFailed(PkgName, Duration),
1595    /**
1596     * Client returning a skipped package (up-to-date).
1597     */
1598    JobSkipped(PkgName),
1599    /**
1600     * Client returning an error during the package build.
1601     */
1602    JobError((PkgName, Duration, anyhow::Error)),
1603    /**
1604     * Manager directing a client to quit.
1605     */
1606    Quit,
1607    /**
1608     * Shutdown signal - workers should stop immediately.
1609     */
1610    Shutdown,
1611    /**
1612     * Client reporting a stage update for a build.
1613     */
1614    StageUpdate(usize, Option<String>),
1615    /**
1616     * Client reporting output lines from a build.
1617     */
1618    OutputLines(usize, Vec<String>),
1619}
1620
1621/**
1622 * The current build job status, as returned by get_next_build().
1623 */
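/**
 * The manager maps each status onto a reply to the requesting client:
 * `Available` results in a `JobData` command, `NoneAvailable` in
 * `ComeBackLater`, and `Done` in `Quit`.
 */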
1624#[derive(Debug)]
1625enum BuildStatus {
1626    /**
1627     * The next package ordered by priority is available for building.
1628     */
1629    Available(PkgName),
1630    /**
1631     * No packages are currently available for building, i.e. all remaining
1632     * packages have at least one dependency that is still unavailable.
1633     */
1634    NoneAvailable,
1635    /**
1636     * All package builds have been completed.
1637     */
1638    Done,
1639}
1640
1641#[derive(Clone, Debug)]
1642struct BuildJobs {
1643    scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1644    incoming: HashMap<PkgName, HashSet<PkgName>>,
1645    /// Reverse dependency map: package -> packages that depend on it.
1646    /// Precomputed for O(1) lookup in mark_failure instead of O(n) scan.
1647    reverse_deps: HashMap<PkgName, HashSet<PkgName>>,
1648    /// Effective weight: package's PBULK_WEIGHT + sum of weights of all
1649    /// transitive dependents. Precomputed for efficient build ordering.
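    ///
    /// For example, with hypothetical weights: if B (50) and C (50) depend
    /// on A (100), and D (10) depends on B, then A's effective weight is
    /// 100 + (50 + 10) + 50 = 210, so building A unblocks the most
    /// downstream work and is scheduled first.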
1650    effective_weights: HashMap<PkgName, usize>,
1651    running: HashSet<PkgName>,
1652    done: HashSet<PkgName>,
1653    failed: HashSet<PkgName>,
1654    results: Vec<BuildResult>,
1655    logdir: PathBuf,
1656}
1657
1658impl BuildJobs {
1659    /**
1660     * Mark a package as successful and remove it from pending dependencies.
1661     */
1662    fn mark_success(&mut self, pkgname: &PkgName, duration: Duration) {
1663        self.mark_done(pkgname, BuildOutcome::Success, duration);
1664    }
1665
1666    fn mark_up_to_date(&mut self, pkgname: &PkgName) {
1667        self.mark_done(pkgname, BuildOutcome::UpToDate, Duration::ZERO);
1668    }
1669
1670    /**
1671     * Mark a package as done and remove it from pending dependencies.
1672     */
1673    fn mark_done(
1674        &mut self,
1675        pkgname: &PkgName,
1676        outcome: BuildOutcome,
1677        duration: Duration,
1678    ) {
1679        /*
1680         * Remove the package from the list of dependencies in all
1681         * packages it is listed in.  Once a package has no outstanding
1682         * dependencies remaining it is ready for building.
1683         */
1684        for deps in self.incoming.values_mut() {
1685            deps.remove(pkgname);
1686        }
1689        /*
1690         * The package was already removed from "incoming" when it started
1691         * building, so we only need to add it to "done".
1692         */
1693        self.done.insert(pkgname.clone());
1694
1695        // Record the result
1696        let scanpkg = self.scanpkgs.get(pkgname);
1697        let log_dir = Some(self.logdir.join(pkgname.pkgname()));
1698        self.results.push(BuildResult {
1699            pkgname: pkgname.clone(),
1700            pkgpath: scanpkg.map(|s| s.pkgpath.clone()),
1701            outcome,
1702            duration,
1703            log_dir,
1704        });
1705    }
1706
1707    /**
1708     * Recursively mark a package and its dependents as failed.
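     *
     * For example, if A fails, B depends on A, and C depends on B, then B
     * and C are both recorded as Skipped(IndirectFail) with the failure
     * attributed to A.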
1709     */
1710    fn mark_failure(&mut self, pkgname: &PkgName, duration: Duration) {
1711        trace!(pkgname = %pkgname.pkgname(), "mark_failure called");
1712        let start = std::time::Instant::now();
1713        let mut broken: HashSet<PkgName> = HashSet::new();
1714        let mut to_check: Vec<PkgName> = vec![];
1715        to_check.push(pkgname.clone());
1716        /*
1717         * Starting with the original failed package, recursively loop through
1718         * adding any packages that depend on it, adding them to broken.
1719         * Uses precomputed reverse_deps for O(1) lookup instead of O(n) scan.
1720         */
1721        loop {
1722            /* No packages left to check, we're done. */
1723            let Some(badpkg) = to_check.pop() else {
1724                break;
1725            };
1726            /* Already checked this package. */
1727            if broken.contains(&badpkg) {
1728                continue;
1729            }
1730            /* Add all packages that depend on this one. */
1731            if let Some(dependents) = self.reverse_deps.get(&badpkg) {
1732                for pkg in dependents {
1733                    to_check.push(pkg.clone());
1734                }
1735            }
1736            broken.insert(badpkg);
1737        }
1738        trace!(pkgname = %pkgname.pkgname(),
            broken_count = broken.len(),
            elapsed_ms = start.elapsed().as_millis(),
            "mark_failure found broken packages"
        );
1739        /*
1740         * We now have a full HashSet of affected packages.  Remove them from
1741         * incoming and move to failed.  The original failed package has
1742         * already been removed from incoming; HashMap::remove() is simply
1743         * a no-op in that case.
1744         */
1745        let is_original = |p: &PkgName| p == pkgname;
1746        for pkg in broken {
1747            self.incoming.remove(&pkg);
1748            self.failed.insert(pkg.clone());
1749
1750            // Record the result
1751            let scanpkg = self.scanpkgs.get(&pkg);
1752            let log_dir = Some(self.logdir.join(pkg.pkgname()));
1753            let (outcome, dur) = if is_original(&pkg) {
1754                (BuildOutcome::Failed("Build failed".to_string()), duration)
1755            } else {
1756                (
1757                    BuildOutcome::Skipped(SkipReason::IndirectFail(format!(
1758                        "dependency {} failed",
1759                        pkgname.pkgname()
1760                    ))),
1761                    Duration::ZERO,
1762                )
1763            };
1764            self.results.push(BuildResult {
1765                pkgname: pkg,
1766                pkgpath: scanpkg.map(|s| s.pkgpath.clone()),
1767                outcome,
1768                duration: dur,
1769                log_dir,
1770            });
1771        }
1772        trace!(pkgname = %pkgname.pkgname(),
            total_results = self.results.len(),
            elapsed_ms = start.elapsed().as_millis(),
            "mark_failure completed"
        );
1773    }
1774
1775    /**
1776     * Return the next build status; ready packages are ordered by weight.
1777     */
1778    fn get_next_build(&self) -> BuildStatus {
1779        /*
1780         * If incoming is empty then we're done.
1781         */
1782        if self.incoming.is_empty() {
1783            return BuildStatus::Done;
1784        }
1785
1786        /*
1787         * Get all packages in incoming that are cleared for building, ordered
1788         * by effective weight (own weight + transitive dependents' weights).
1789         */
1790        let mut pkgs: Vec<(PkgName, usize)> = self
1791            .incoming
1792            .iter()
1793            .filter(|(_, v)| v.is_empty())
1794            .map(|(k, _)| {
1795                (k.clone(), *self.effective_weights.get(k).unwrap_or(&100))
1796            })
1797            .collect();
1798
1799        /*
1800         * If no packages are returned then we're still waiting for
1801         * dependencies to finish.  Clients should keep retrying until this
1802         * changes.
1803         */
1804        if pkgs.is_empty() {
1805            return BuildStatus::NoneAvailable;
1806        }
1807
1808        /*
1809         * Order packages by build weight and return the highest.
1810         */
1811        pkgs.sort_by_key(|&(_, weight)| std::cmp::Reverse(weight));
1812        BuildStatus::Available(pkgs[0].0.clone())
1813    }
1814}
1815
1816impl Build {
1817    pub fn new(
1818        config: &Config,
1819        pkgsrc_env: PkgsrcEnv,
1820        scope: SandboxScope,
1821        scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1822        options: BuildOptions,
1823    ) -> Build {
1824        info!(
1825            package_count = scanpkgs.len(),
1826            sandbox_enabled = scope.enabled(),
1827            build_threads = config.build_threads(),
1828            ?options,
1829            "Creating new Build instance"
1830        );
1831        for (pkgname, index) in &scanpkgs {
1832            debug!(pkgname = %pkgname.pkgname(),
1833                pkgpath = ?index.pkgpath,
1834                depends_count = index.depends().len(),
1835                depends = ?index.depends().iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
1836                "Package in build queue"
1837            );
1838        }
1839        Build {
1840            config: config.clone(),
1841            pkgsrc_env,
1842            scope,
1843            scanpkgs,
1844            cached: IndexMap::new(),
1845            options,
1846        }
1847    }
1848
1849    /// Load cached build results from database.
1850    ///
1851    /// Returns the number of packages loaded from cache. Only loads results
1852    /// for packages that are in our build queue.
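    ///
    /// A typical call sequence (illustrative; the surrounding variables are
    /// assumed to exist):
    ///
    /// ```ignore
    /// let mut build =
    ///     Build::new(&config, pkgsrc_env, scope, scanpkgs, options);
    /// let cached = build.load_cached_from_db(&db)?;
    /// let summary = build.start(&ctx, &db)?;
    /// ```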
1853    pub fn load_cached_from_db(
1854        &mut self,
1855        db: &crate::db::Database,
1856    ) -> anyhow::Result<usize> {
1857        let mut count = 0;
1858        for pkgname in self.scanpkgs.keys() {
1859            if let Some(pkg) = db.get_package_by_name(pkgname.pkgname())? {
1860                if let Some(result) = db.get_build_result(pkg.id)? {
1861                    self.cached.insert(pkgname.clone(), result);
1862                    count += 1;
1863                }
1864            }
1865        }
1866        if count > 0 {
1867            info!(
1868                cached_count = count,
1869                "Loaded cached build results from database"
1870            );
1871        }
1872        Ok(count)
1873    }
1874
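    /// Run all package builds to completion (or until interrupted) and
    /// return a summary of the results.  Results that complete are saved
    /// to the database so that a subsequent run can pick them up again via
    /// [`load_cached_from_db`](Self::load_cached_from_db).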
1875    pub fn start(
1876        &mut self,
1877        ctx: &RunContext,
1878        db: &crate::db::Database,
1879    ) -> anyhow::Result<BuildSummary> {
1880        let started = Instant::now();
1881
1882        info!(package_count = self.scanpkgs.len(), "Build::start() called");
1883
1884        let shutdown_flag = Arc::clone(&ctx.shutdown);
1885
1886        /*
1887         * Populate BuildJobs.
1888         */
1889        debug!("Populating BuildJobs from scanpkgs");
1890        let mut incoming: HashMap<PkgName, HashSet<PkgName>> = HashMap::new();
1891        let mut reverse_deps: HashMap<PkgName, HashSet<PkgName>> =
1892            HashMap::new();
1893        for (pkgname, index) in &self.scanpkgs {
1894            let mut deps: HashSet<PkgName> = HashSet::new();
1895            for dep in index.depends() {
1896                // Only track dependencies that are in our build queue.
1897                // Dependencies outside scanpkgs are assumed to already be
1898                // installed (from a previous build) or will cause the build
1899                // to fail at runtime.
1900                if !self.scanpkgs.contains_key(dep) {
1901                    continue;
1902                }
1903                deps.insert(dep.clone());
1904                // Build reverse dependency map: dep -> packages that depend on it
1905                reverse_deps
1906                    .entry(dep.clone())
1907                    .or_default()
1908                    .insert(pkgname.clone());
1909            }
1910            trace!(pkgname = %pkgname.pkgname(),
1911                deps_count = deps.len(),
1912                deps = ?deps.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
1913                "Adding package to incoming build queue"
1914            );
1915            incoming.insert(pkgname.clone(), deps);
1916        }
1917
1918        /*
1919         * Process cached build results.
1920         */
1921        let mut done: HashSet<PkgName> = HashSet::new();
1922        let mut failed: HashSet<PkgName> = HashSet::new();
1923        let results: Vec<BuildResult> = Vec::new();
1924        let mut cached_count = 0usize;
1925
1926        for (pkgname, result) in &self.cached {
1927            match result.outcome {
1928                BuildOutcome::Success | BuildOutcome::UpToDate => {
1929                    // Completed package - remove from incoming, add to done
1930                    incoming.remove(pkgname);
1931                    done.insert(pkgname.clone());
1932                    // Remove from deps of other packages
1933                    for deps in incoming.values_mut() {
1934                        deps.remove(pkgname);
1935                    }
1936                    // Don't add to results - already in database
1937                    cached_count += 1;
1938                }
1939                BuildOutcome::Failed(_) | BuildOutcome::Skipped(_) => {
1940                    // Failed package - remove from incoming, add to failed
1941                    incoming.remove(pkgname);
1942                    failed.insert(pkgname.clone());
1943                    // Don't add to results - already in database
1944                    cached_count += 1;
1945                }
1946            }
1947        }
1948
1949        /*
1950         * Propagate cached failures: any package in incoming that depends on
1951         * a failed package must also be marked as failed.
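         * This repeats until no new failures are found, since a package
         * that newly fails can in turn break packages that depend on it.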
1952         */
1953        loop {
1954            let mut newly_failed: Vec<PkgName> = Vec::new();
1955            for (pkgname, deps) in &incoming {
1956                for dep in deps {
1957                    if failed.contains(dep) {
1958                        newly_failed.push(pkgname.clone());
1959                        break;
1960                    }
1961                }
1962            }
1963            if newly_failed.is_empty() {
1964                break;
1965            }
1966            for pkgname in newly_failed {
1967                incoming.remove(&pkgname);
1968                failed.insert(pkgname);
1969            }
1970        }
1971
1972        if cached_count > 0 {
1973            println!("Loaded {} cached build results", cached_count);
1974        }
1975
1976        info!(
1977            incoming_count = incoming.len(),
1978            scanpkgs_count = self.scanpkgs.len(),
1979            cached_count = cached_count,
1980            "BuildJobs populated"
1981        );
1982
1983        if incoming.is_empty() {
1984            // Guard is dropped when Build goes out of scope, destroying sandboxes
1985            return Ok(BuildSummary {
1986                duration: started.elapsed(),
1987                results,
1988                scanfail: Vec::new(),
1989            });
1990        }
1991
1992        /*
1993         * Compute effective weights for build ordering.  The effective weight
1994         * is the package's own PBULK_WEIGHT plus the sum of weights of all
1995         * packages that transitively depend on it.  This prioritises building
1996         * packages that unblock the most downstream work.
1997         */
1998        let get_weight = |pkg: &PkgName| -> usize {
1999            self.scanpkgs
2000                .get(pkg)
2001                .and_then(|idx| idx.pbulk_weight())
2002                .and_then(|w| w.parse().ok())
2003                .unwrap_or(100)
2004        };
2005
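        /*
         * Walk the reverse dependency graph in topological order: start with
         * packages that nothing depends on, and only compute a package's
         * effective weight once all of its dependents have been processed
         * ("pending" tracks the number of unprocessed dependents).
         */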
2006        let mut effective_weights: HashMap<PkgName, usize> = HashMap::new();
2007        let mut pending: HashMap<&PkgName, usize> = incoming
2008            .keys()
2009            .map(|p| (p, reverse_deps.get(p).map_or(0, |s| s.len())))
2010            .collect();
2011        let mut queue: VecDeque<&PkgName> =
2012            pending.iter().filter(|(_, c)| **c == 0).map(|(&p, _)| p).collect();
2013        while let Some(pkg) = queue.pop_front() {
2014            let mut total = get_weight(pkg);
2015            if let Some(dependents) = reverse_deps.get(pkg) {
2016                for dep in dependents {
2017                    total += effective_weights.get(dep).unwrap_or(&0);
2018                }
2019            }
2020            effective_weights.insert(pkg.clone(), total);
2021            for dep in incoming.get(pkg).iter().flat_map(|s| s.iter()) {
2022                if let Some(c) = pending.get_mut(dep) {
2023                    *c -= 1;
2024                    if *c == 0 {
2025                        queue.push_back(dep);
2026                    }
2027                }
2028            }
2029        }
2030
2031        let running: HashSet<PkgName> = HashSet::new();
2032        let logdir = self.config.logdir().clone();
2033        let jobs = BuildJobs {
2034            scanpkgs: self.scanpkgs.clone(),
2035            incoming,
2036            reverse_deps,
2037            effective_weights,
2038            running,
2039            done,
2040            failed,
2041            results,
2042            logdir,
2043        };
2044
2045        println!("Building packages...");
2046
2047        // Set up multi-line progress display using ratatui inline viewport
2048        let progress = Arc::new(Mutex::new(
2049            MultiProgress::new(
2050                "Building",
2051                "Built",
2052                self.scanpkgs.len(),
2053                self.config.build_threads(),
2054            )
2055            .expect("Failed to initialize progress display"),
2056        ));
2057
2058        // Mark cached packages in progress display
2059        if cached_count > 0 {
2060            if let Ok(mut p) = progress.lock() {
2061                p.state_mut().cached = cached_count;
2062            }
2063        }
2064
2065        // Flag to stop the refresh thread
2066        let stop_refresh = Arc::new(AtomicBool::new(false));
2067
2068        // Spawn a thread to periodically refresh the display (for timer updates)
2069        let progress_refresh = Arc::clone(&progress);
2070        let stop_flag = Arc::clone(&stop_refresh);
2071        let shutdown_for_refresh = Arc::clone(&shutdown_flag);
2072        let refresh_thread = std::thread::spawn(move || {
2073            while !stop_flag.load(Ordering::Relaxed)
2074                && !shutdown_for_refresh.load(Ordering::SeqCst)
2075            {
2076                // Poll outside lock to avoid blocking main thread
2077                let has_event = event::poll(REFRESH_INTERVAL).unwrap_or(false);
2078
2079                if let Ok(mut p) = progress_refresh.lock() {
2080                    if has_event {
2081                        let _ = p.handle_event();
2082                    }
2083                    let _ = p.render();
2084                }
2085            }
2086        });
2087
2088        /*
2089         * Configure a manager channel.  This is used for clients to indicate
2090         * to the manager that they are ready for work.
2091         */
2092        let (manager_tx, manager_rx) = mpsc::channel::<ChannelCommand>();
2093
2094        /*
2095         * Client threads.  Each client has its own channel to the manager,
2096         * with the client sending ready status on the manager channel, and
2097         * receiving instructions on its private channel.
2098         */
2099        let mut threads = vec![];
2100        let mut clients: HashMap<usize, Sender<ChannelCommand>> =
2101            HashMap::new();
2102        for i in 0..self.config.build_threads() {
2103            let (client_tx, client_rx) = mpsc::channel::<ChannelCommand>();
2104            clients.insert(i, client_tx);
2105            let manager_tx = manager_tx.clone();
2106            let shutdown_for_worker = Arc::clone(&shutdown_flag);
2107            let thread = std::thread::spawn(move || {
2108                loop {
2109                    if shutdown_for_worker.load(Ordering::SeqCst) {
2110                        break;
2111                    }
2112
2113                    // Use send() which can fail if receiver is dropped (manager shutdown)
2114                    if manager_tx.send(ChannelCommand::ClientReady(i)).is_err()
2115                    {
2116                        break;
2117                    }
2118
2119                    let Ok(msg) = client_rx.recv() else {
2120                        break;
2121                    };
2122
2123                    match msg {
2124                        ChannelCommand::ComeBackLater => {
2125                            std::thread::sleep(WORKER_BACKOFF_INTERVAL);
2126                            continue;
2127                        }
2128                        ChannelCommand::JobData(pkg) => {
2129                            let pkgname = pkg.pkginfo.index.pkgname.clone();
2130                            let pkgpath = &pkg.pkginfo.pkgpath;
2131                            let span = info_span!(
2132                                "build",
2133                                sandbox_id = pkg.sandbox_id,
2134                                pkgpath = %pkgpath,
2135                                pkgname = %pkgname.pkgname(),
2136                            );
2137                            let _guard = span.enter();
2138
2139                            let build_start = Instant::now();
2140                            let result = pkg.build(&manager_tx);
2141                            let duration = build_start.elapsed();
2142                            trace!(
2143                                elapsed_ms = duration.as_millis(),
2144                                result = %result.as_ref().map_or("error".to_string(), |r| r.to_string()),
2145                                "Build finished"
2146                            );
2147
2148                            match result {
2149                                Ok(PackageBuildResult::Success) => {
2150                                    let _ = manager_tx.send(
2151                                        ChannelCommand::JobSuccess(
2152                                            pkgname, duration,
2153                                        ),
2154                                    );
2155                                }
2156                                Ok(PackageBuildResult::Skipped) => {
2157                                    let _ = manager_tx.send(
2158                                        ChannelCommand::JobSkipped(pkgname),
2159                                    );
2160                                }
2161                                Ok(PackageBuildResult::Failed) => {
2162                                    let _ = manager_tx.send(
2163                                        ChannelCommand::JobFailed(
2164                                            pkgname, duration,
2165                                        ),
2166                                    );
2167                                }
2168                                Err(e) => {
2169                                    // Don't report errors caused by shutdown
2170                                    if !shutdown_for_worker
2171                                        .load(Ordering::SeqCst)
2172                                    {
2173                                        let _ = manager_tx.send(
2174                                            ChannelCommand::JobError((
2175                                                pkgname, duration, e,
2176                                            )),
2177                                        );
2178                                    }
2179                                }
2180                            }
2181
2182                            if shutdown_for_worker.load(Ordering::SeqCst) {
2183                                break;
2184                            }
2185                            continue;
2186                        }
2187                        ChannelCommand::Quit | ChannelCommand::Shutdown => {
2188                            break;
2189                        }
2190                        _ => unreachable!("unexpected message sent to worker"),
2191                    }
2192                }
2193            });
2194            threads.push(thread);
2195        }
2196
2197        /*
2198         * Manager thread.  Read incoming commands from clients and reply
2199         * accordingly.  Returns the build results via a channel.
2200         */
2201        let session = Arc::new(BuildSession {
2202            config: self.config.clone(),
2203            pkgsrc_env: self.pkgsrc_env.clone(),
2204            sandbox: self.scope.sandbox().clone(),
2205            options: self.options.clone(),
2206            shutdown: Arc::clone(&shutdown_flag),
2207        });
2208        let progress_clone = Arc::clone(&progress);
2209        let shutdown_for_manager = Arc::clone(&shutdown_flag);
2210        let (results_tx, results_rx) = mpsc::channel::<Vec<BuildResult>>();
2211        let (interrupted_tx, interrupted_rx) = mpsc::channel::<bool>();
2212        // Channel for completed results, saved to the database after join
2213        let (completed_tx, completed_rx) = mpsc::channel::<BuildResult>();
2214        let manager = std::thread::spawn(move || {
2215            let mut clients = clients;
2216            let mut jobs = jobs;
2217            let mut was_interrupted = false;
2218
2219            // Track which thread is building which package
2220            let mut thread_packages: HashMap<usize, PkgName> = HashMap::new();
2221
2222            loop {
2223                // Check shutdown flag periodically
2224                if shutdown_for_manager.load(Ordering::SeqCst) {
2225                    // Suppress all further output
2226                    if let Ok(mut p) = progress_clone.lock() {
2227                        p.state_mut().suppress();
2228                    }
2229                    // Send shutdown to all remaining clients
2230                    for (_, client) in clients.drain() {
2231                        let _ = client.send(ChannelCommand::Shutdown);
2232                    }
2233                    was_interrupted = true;
2234                    break;
2235                }
2236
2237                let command =
2238                    match manager_rx.recv_timeout(SHUTDOWN_POLL_INTERVAL) {
2239                        Ok(cmd) => cmd,
2240                        Err(mpsc::RecvTimeoutError::Timeout) => continue,
2241                        Err(mpsc::RecvTimeoutError::Disconnected) => break,
2242                    };
2243
2244                match command {
2245                    ChannelCommand::ClientReady(c) => {
2246                        let client = clients.get(&c).unwrap();
2247                        match jobs.get_next_build() {
2248                            BuildStatus::Available(pkg) => {
2249                                let pkginfo = jobs.scanpkgs.get(&pkg).unwrap();
2250                                jobs.incoming.remove(&pkg);
2251                                jobs.running.insert(pkg.clone());
2252
2253                                // Update thread progress
2254                                thread_packages.insert(c, pkg.clone());
2255                                if let Ok(mut p) = progress_clone.lock() {
2256                                    p.clear_output_buffer(c);
2257                                    p.state_mut()
2258                                        .set_worker_active(c, pkg.pkgname());
2259                                    let _ = p.render();
2260                                }
2261
2262                                let _ = client.send(ChannelCommand::JobData(
2263                                    Box::new(PackageBuild {
2264                                        session: Arc::clone(&session),
2265                                        sandbox_id: c,
2266                                        pkginfo: pkginfo.clone(),
2267                                    }),
2268                                ));
2269                            }
2270                            BuildStatus::NoneAvailable => {
2271                                if let Ok(mut p) = progress_clone.lock() {
2272                                    p.clear_output_buffer(c);
2273                                    p.state_mut().set_worker_idle(c);
2274                                    let _ = p.render();
2275                                }
2276                                let _ =
2277                                    client.send(ChannelCommand::ComeBackLater);
2278                            }
2279                            BuildStatus::Done => {
2280                                if let Ok(mut p) = progress_clone.lock() {
2281                                    p.clear_output_buffer(c);
2282                                    p.state_mut().set_worker_idle(c);
2283                                    let _ = p.render();
2284                                }
2285                                let _ = client.send(ChannelCommand::Quit);
2286                                clients.remove(&c);
2287                                if clients.is_empty() {
2288                                    break;
2289                                }
2290                            }
2291                        };
2292                    }
2293                    ChannelCommand::JobSuccess(pkgname, duration) => {
2294                        jobs.mark_success(&pkgname, duration);
2295                        jobs.running.remove(&pkgname);
2296
2297                        // Send result for immediate saving
2298                        if let Some(result) = jobs.results.last() {
2299                            let _ = completed_tx.send(result.clone());
2300                        }
2301
2302                        // Find which thread completed and mark idle
2303                        if let Ok(mut p) = progress_clone.lock() {
2304                            let _ = p.print_status(&format!(
2305                                "       Built {} ({})",
2306                                pkgname.pkgname(),
2307                                format_duration(duration)
2308                            ));
2309                            p.state_mut().increment_completed();
2310                            for (tid, pkg) in &thread_packages {
2311                                if pkg == &pkgname {
2312                                    p.clear_output_buffer(*tid);
2313                                    p.state_mut().set_worker_idle(*tid);
2314                                    break;
2315                                }
2316                            }
2317                            let _ = p.render();
2318                        }
2319                    }
2320                    ChannelCommand::JobSkipped(pkgname) => {
2321                        jobs.mark_up_to_date(&pkgname);
2322                        jobs.running.remove(&pkgname);
2323
2324                        // Send result for immediate saving
2325                        if let Some(result) = jobs.results.last() {
2326                            let _ = completed_tx.send(result.clone());
2327                        }
2328
2329                        // Find which thread completed and mark idle
2330                        if let Ok(mut p) = progress_clone.lock() {
2331                            let _ = p.print_status(&format!(
2332                                "     Skipped {} (up-to-date)",
2333                                pkgname.pkgname()
2334                            ));
2335                            p.state_mut().increment_skipped();
2336                            for (tid, pkg) in &thread_packages {
2337                                if pkg == &pkgname {
2338                                    p.clear_output_buffer(*tid);
2339                                    p.state_mut().set_worker_idle(*tid);
2340                                    break;
2341                                }
2342                            }
2343                            let _ = p.render();
2344                        }
2345                    }
2346                    ChannelCommand::JobFailed(pkgname, duration) => {
2347                        let results_before = jobs.results.len();
2348                        jobs.mark_failure(&pkgname, duration);
2349                        jobs.running.remove(&pkgname);
2350
2351                        // Send all new results for immediate saving
2352                        for result in jobs.results.iter().skip(results_before) {
2353                            let _ = completed_tx.send(result.clone());
2354                        }
2355
2356                        // Find which thread failed and mark idle
2357                        if let Ok(mut p) = progress_clone.lock() {
2358                            let _ = p.print_status(&format!(
2359                                "      Failed {} ({})",
2360                                pkgname.pkgname(),
2361                                format_duration(duration)
2362                            ));
2363                            p.state_mut().increment_failed();
2364                            for (tid, pkg) in &thread_packages {
2365                                if pkg == &pkgname {
2366                                    p.clear_output_buffer(*tid);
2367                                    p.state_mut().set_worker_idle(*tid);
2368                                    break;
2369                                }
2370                            }
2371                            let _ = p.render();
2372                        }
2373                    }
2374                    ChannelCommand::JobError((pkgname, duration, e)) => {
2375                        let results_before = jobs.results.len();
2376                        jobs.mark_failure(&pkgname, duration);
2377                        jobs.running.remove(&pkgname);
2378
2379                        // Send all new results for immediate saving
2380                        for result in jobs.results.iter().skip(results_before) {
2381                            let _ = completed_tx.send(result.clone());
2382                        }
2383
2384                        // Find which thread errored and mark idle
2385                        if let Ok(mut p) = progress_clone.lock() {
2386                            let _ = p.print_status(&format!(
2387                                "      Failed {} ({})",
2388                                pkgname.pkgname(),
2389                                format_duration(duration)
2390                            ));
2391                            p.state_mut().increment_failed();
2392                            for (tid, pkg) in &thread_packages {
2393                                if pkg == &pkgname {
2394                                    p.clear_output_buffer(*tid);
2395                                    p.state_mut().set_worker_idle(*tid);
2396                                    break;
2397                                }
2398                            }
2399                            let _ = p.render();
2400                        }
2401                        error!(error = %e,
                            pkgname = %pkgname.pkgname(),
                            "Build error"
                        );
2402                    }
2403                    ChannelCommand::StageUpdate(tid, stage) => {
2404                        if let Ok(mut p) = progress_clone.lock() {
2405                            p.state_mut()
2406                                .set_worker_stage(tid, stage.as_deref());
2407                            let _ = p.render();
2408                        }
2409                    }
2410                    ChannelCommand::OutputLines(tid, lines) => {
2411                        if let Ok(mut p) = progress_clone.lock() {
2412                            if let Some(buf) = p.output_buffer_mut(tid) {
2413                                for line in lines {
2414                                    buf.push(line);
2415                                }
2416                            }
2417                        }
2418                    }
2419                    _ => {}
2420                }
2421            }
2422
2423            // Send results and interrupted status back
2424            debug!(
2425                result_count = jobs.results.len(),
2426                "Manager sending results back"
2427            );
2428            let _ = results_tx.send(jobs.results);
2429            let _ = interrupted_tx.send(was_interrupted);
2430        });
2431
2432        threads.push(manager);
2433        debug!("Waiting for worker threads to complete");
2434        let join_start = Instant::now();
2435        for thread in threads {
2436            thread.join().expect("thread panicked");
2437        }
2438        debug!(
2439            elapsed_ms = join_start.elapsed().as_millis(),
2440            "Worker threads completed"
2441        );
2442
2443        // Save all completed results to database.
2444        // Important: We save results even on interrupt - these are builds that
2445        // COMPLETED before the interrupt, and should be preserved. Only builds
2446        // that were in-progress when interrupted are excluded (they never sent
2447        // a result to the channel).
2448        let mut saved_count = 0;
2449        while let Ok(result) = completed_rx.try_recv() {
2450            if let Err(e) = db.store_build_by_name(&result) {
2451                warn!(
2452                    pkgname = %result.pkgname.pkgname(),
2453                    error = %e,
2454                    "Failed to save build result"
2455                );
2456            } else {
2457                saved_count += 1;
2458            }
2459        }
2460        if saved_count > 0 {
2461            debug!(saved_count, "Saved build results to database");
2462        }
2463
2464        // Stop the refresh thread
2465        stop_refresh.store(true, Ordering::Relaxed);
2466        let _ = refresh_thread.join();
2467
2468        // Check if we were interrupted
2469        let was_interrupted = interrupted_rx.recv().unwrap_or(false);
2470
2471        // Print appropriate summary
2472        if let Ok(mut p) = progress.lock() {
2473            if was_interrupted {
2474                let _ = p.finish_interrupted();
2475            } else {
2476                let _ = p.finish();
2477            }
2478        }
2479
2480        // Collect results from manager
2481        debug!("Collecting results from manager");
2482        let results = results_rx.recv().unwrap_or_default();
2483        debug!(result_count = results.len(), "Collected results from manager");
2484        let summary = BuildSummary {
2485            duration: started.elapsed(),
2486            results,
2487            scanfail: Vec::new(),
2488        };
2489
2490        // Guard is dropped when Build goes out of scope, destroying sandboxes
2491        Ok(summary)
2492    }
2493}