
bob/build.rs

1/*
2 * Copyright (c) 2026 Jonathan Perkin <jonathan@perkin.org.uk>
3 *
4 * Permission to use, copy, modify, and distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17//! Parallel package builds.
18//!
19//! This module provides the [`Build`] struct for building packages in parallel
20//! across multiple sandboxes. Packages are scheduled using a dependency graph
21//! to ensure correct build order.
22//!
23//! # Build Process
24//!
25//! 1. Create build sandboxes (one per build thread, `build_threads` in total)
26//! 2. Execute pre-build script in each sandbox
27//! 3. Build packages in parallel, respecting dependencies
28//! 4. Execute post-build script after each package
29//! 5. Destroy sandboxes and generate report
30//!
31//! # Build Phases
32//!
33//! Each package goes through these phases in turn:
34//!
35//! - `pre-clean` - Clean any previous build artifacts
36//! - `depends` - Install required dependencies
37//! - `checksum` - Verify distfile checksums
38//! - `configure` - Configure the build
39//! - `build` - Compile the package
40//! - `install` - Install to staging area
41//! - `package` - Create binary package
42//! - `deinstall` - Test package removal (non-bootstrap only)
43//! - `clean` - Clean up build artifacts
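//!
//! # Build Logs
//!
//! Each package logs to its own directory under the configured log
//! directory. That directory holds one log per stage (`pre-clean.log`,
//! `build.log`, `package.log`, and so on), a `work.log` capturing `WRKLOG`
//! output, a `bob.log` for internal helper commands, and a `.stage` file
//! recording the stage currently in progress. The directory is removed when
//! the package builds successfully and kept on failure, optionally with a
//! `wrkdir-files/` snapshot of files matching the configured save patterns.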
44
45use crate::config::PkgsrcEnv;
46use crate::sandbox::{SHUTDOWN_POLL_INTERVAL, SandboxScope, wait_with_shutdown};
47use crate::scan::{ResolvedPackage, SkipReason, SkippedCounts};
48use crate::tui::{MultiProgress, REFRESH_INTERVAL, format_duration};
49use crate::{Config, RunContext, Sandbox};
50use anyhow::{Context, bail};
51use crossterm::event;
52use glob::Pattern;
53use indexmap::IndexMap;
54use pkgsrc::{PkgName, PkgPath};
55use std::collections::{HashMap, HashSet, VecDeque};
56use std::fs::{self, File, OpenOptions};
57use std::path::{Path, PathBuf};
58use std::process::{Command, ExitStatus, Stdio};
59use std::sync::atomic::{AtomicBool, Ordering};
60use std::sync::{Arc, Mutex, mpsc, mpsc::Sender};
61use std::time::{Duration, Instant};
62use tracing::{debug, error, info, info_span, trace, warn};
63
64/// How often to batch and send build output lines to the UI channel.
65/// This is the floor on log display responsiveness — output cannot appear
66/// faster than this regardless of UI refresh rate. 100ms (10fps) is
67/// imperceptible for build logs while reducing channel overhead.
68const OUTPUT_BATCH_INTERVAL: Duration = Duration::from_millis(100);
69
70/// How long a worker thread sleeps when told no work is available.
71/// This prevents busy-spinning when all pending builds are blocked on
72/// dependencies. 100ms balances responsiveness with CPU efficiency.
73const WORKER_BACKOFF_INTERVAL: Duration = Duration::from_millis(100);
74
75/// Build stages in order of execution.
76#[derive(Debug, Clone, Copy, PartialEq, Eq)]
77enum Stage {
78    PreClean,
79    Depends,
80    Checksum,
81    Configure,
82    Build,
83    Install,
84    Package,
85    Deinstall,
86    Clean,
87}
88
89impl Stage {
90    fn as_str(&self) -> &'static str {
91        match self {
92            Stage::PreClean => "pre-clean",
93            Stage::Depends => "depends",
94            Stage::Checksum => "checksum",
95            Stage::Configure => "configure",
96            Stage::Build => "build",
97            Stage::Install => "install",
98            Stage::Package => "package",
99            Stage::Deinstall => "deinstall",
100            Stage::Clean => "clean",
101        }
102    }
103}
104
105/// Result of a package build.
106#[derive(Debug)]
107enum PkgBuildResult {
108    Success,
109    Failed,
110    Skipped,
111}
112
113/// How to run a command.
114#[derive(Debug, Clone, Copy)]
115enum RunAs {
116    Root,
117    User,
118}
119
120/// Callback for status updates during build.
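/// The build loop invokes [`BuildCallback::stage`] as each build phase
/// starts, allowing the caller to surface progress; [`ChannelCallback`]
/// below forwards these updates as [`ChannelCommand::StageUpdate`] messages
/// over the status channel.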
121trait BuildCallback: Send {
122    fn stage(&mut self, stage: &str);
123}
124
125/// Session-level build data shared across all package builds.
126#[derive(Debug)]
127struct BuildSession {
128    config: Config,
129    pkgsrc_env: PkgsrcEnv,
130    sandbox: Sandbox,
131    options: BuildOptions,
132    shutdown: Arc<AtomicBool>,
133}
134
135/// Package builder that executes build stages.
136struct PkgBuilder<'a> {
137    session: &'a BuildSession,
138    sandbox_id: usize,
139    pkginfo: &'a ResolvedPackage,
140    logdir: PathBuf,
141    build_user: Option<String>,
142    envs: Vec<(String, String)>,
143    output_tx: Option<Sender<ChannelCommand>>,
144}
145
146impl<'a> PkgBuilder<'a> {
147    fn new(
148        session: &'a BuildSession,
149        sandbox_id: usize,
150        pkginfo: &'a ResolvedPackage,
151        envs: Vec<(String, String)>,
152        output_tx: Option<Sender<ChannelCommand>>,
153    ) -> Self {
154        let logdir = session
155            .config
156            .logdir()
157            .join(pkginfo.index.pkgname.pkgname());
158        let build_user = session.config.build_user().map(|s| s.to_string());
159        Self {
160            session,
161            sandbox_id,
162            pkginfo,
163            logdir,
164            build_user,
165            envs,
166            output_tx,
167        }
168    }
169
170    /// Run a command in the sandbox and capture its stdout.
171    fn run_cmd(&self, cmd: &Path, args: &[&str]) -> Option<String> {
172        let mut command = self.session.sandbox.command(self.sandbox_id, cmd);
173        command.args(args);
174        self.apply_envs(&mut command, &[]);
175        match command.output() {
176            Ok(output) if output.status.success() => {
177                Some(String::from_utf8_lossy(&output.stdout).into_owned())
178            }
179            Ok(output) => {
180                let stderr = String::from_utf8_lossy(&output.stderr);
181                debug!(
182                    cmd = %cmd.display(),
183                    exit_code = ?output.status.code(),
184                    stderr = %stderr.trim(),
185                    "command failed"
186                );
187                None
188            }
189            Err(e) => {
190                debug!(cmd = %cmd.display(), error = %e, "command execution error");
191                None
192            }
193        }
194    }
195
196    /// Check if the package is already up-to-date.
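    ///
    /// The existing binary package's build information (via `pkg_info -qb`)
    /// is expected to contain `file:identifier` lines, where the identifier
    /// is either a CVS `$NetBSD...$` tag or a `pkg_admin digest` hash of the
    /// corresponding pkgsrc file, for example (illustrative values only):
    ///
    /// ```text
    /// mail/mutt/Makefile:$NetBSD: Makefile,v 1.1 2024/01/01 00:00:00 user Exp $
    /// mail/mutt/distinfo:ca39a3ee5e6b4b0d3255bfef95601890afd80709
    /// ```
    ///
    /// If any referenced file is missing or differs, or the recorded
    /// dependency list has changed, or a dependency package is newer than
    /// the binary package, the package is considered out of date.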
197    fn check_up_to_date(&self) -> anyhow::Result<bool> {
198        let pkgname = self.pkginfo.index.pkgname.pkgname();
199        let pkgfile = self
200            .session
201            .pkgsrc_env
202            .packages
203            .join("All")
204            .join(format!("{}.tgz", pkgname));
205
206        // Check if package file exists
207        if !pkgfile.exists() {
208            debug!(path = %pkgfile.display(), "Package file not found");
209            return Ok(false);
210        }
211
212        let pkgfile_str = pkgfile.to_string_lossy();
213        let pkg_info = self.session.pkgsrc_env.pkgtools.join("pkg_info");
214        let pkg_admin = self.session.pkgsrc_env.pkgtools.join("pkg_admin");
215
216        // Get BUILD_INFO and verify source files
217        let Some(build_info) = self.run_cmd(&pkg_info, &["-qb", &pkgfile_str]) else {
218            debug!("pkg_info -qb failed or returned empty");
219            return Ok(false);
220        };
221        debug!(lines = build_info.lines().count(), "Checking BUILD_INFO");
222
223        for line in build_info.lines() {
224            let Some((file, file_id)) = line.split_once(':') else {
225                continue;
226            };
227            let file_id = file_id.trim();
228            if file.is_empty() || file_id.is_empty() {
229                continue;
230            }
231
232            let src_file = self.session.config.pkgsrc().join(file);
233            if !src_file.exists() {
234                debug!(file, "Source file missing");
235                return Ok(false);
236            }
237
238            if file_id.starts_with("$NetBSD") {
239                // CVS ID comparison - extract $NetBSD...$ from actual file
240                let Ok(content) = std::fs::read_to_string(&src_file) else {
241                    return Ok(false);
242                };
243                let id = content.lines().find_map(|line| {
244                    if let Some(start) = line.find("$NetBSD") {
245                        if let Some(end) = line[start + 1..].find('$') {
246                            return Some(&line[start..start + 1 + end + 1]);
247                        }
248                    }
249                    None
250                });
251                if id != Some(file_id) {
252                    debug!(file, "CVS ID mismatch");
253                    return Ok(false);
254                }
255            } else {
256                // Hash comparison
257                let src_file_str = src_file.to_string_lossy();
258                let Some(hash) = self.run_cmd(&pkg_admin, &["digest", &src_file_str]) else {
259                    debug!(file, "pkg_admin digest failed");
260                    return Ok(false);
261                };
262                let hash = hash.trim();
263                if hash != file_id {
264                    debug!(
265                        file,
266                        path = %src_file.display(),
267                        expected = file_id,
268                        actual = hash,
269                        "Hash mismatch"
270                    );
271                    return Ok(false);
272                }
273            }
274        }
275
276        // Get package dependencies and verify
277        let Some(pkg_deps) = self.run_cmd(&pkg_info, &["-qN", &pkgfile_str]) else {
278            return Ok(false);
279        };
280
281        // Build sets of recorded vs expected dependencies
282        let recorded_deps: HashSet<&str> = pkg_deps
283            .lines()
284            .map(|l| l.trim())
285            .filter(|l| !l.is_empty())
286            .collect();
287        let expected_deps: HashSet<&str> =
288            self.pkginfo.depends().iter().map(|d| d.pkgname()).collect();
289
290        // If dependency list has changed in any way, rebuild
291        if recorded_deps != expected_deps {
292            debug!(
293                recorded = recorded_deps.len(),
294                expected = expected_deps.len(),
295                "Dependency list changed"
296            );
297            return Ok(false);
298        }
299
300        let pkgfile_mtime = match pkgfile.metadata().and_then(|m| m.modified()) {
301            Ok(t) => t,
302            Err(_) => return Ok(false),
303        };
304
305        // Check each dependency package exists and is not newer
306        for dep in &recorded_deps {
307            let dep_pkg = self
308                .session
309                .pkgsrc_env
310                .packages
311                .join("All")
312                .join(format!("{}.tgz", dep));
313            if !dep_pkg.exists() {
314                debug!(dep, "Dependency package missing");
315                return Ok(false);
316            }
317
318            let dep_mtime = match dep_pkg.metadata().and_then(|m| m.modified()) {
319                Ok(t) => t,
320                Err(_) => return Ok(false),
321            };
322
323            if dep_mtime > pkgfile_mtime {
324                debug!(dep, "Dependency is newer");
325                return Ok(false);
326            }
327        }
328
329        debug!("Package is up-to-date");
330        Ok(true)
331    }
332
333    /// Run the full build process.
334    fn build<C: BuildCallback>(&self, callback: &mut C) -> anyhow::Result<PkgBuildResult> {
335        let pkgname_str = self.pkginfo.pkgname().pkgname();
336        let pkgpath = &self.pkginfo.pkgpath;
337
338        // Check if package is already up-to-date (skip check if force rebuild)
339        if !self.session.options.force_rebuild && self.check_up_to_date()? {
340            return Ok(PkgBuildResult::Skipped);
341        }
342
343        // Clean up and create log directory
344        if self.logdir.exists() {
345            fs::remove_dir_all(&self.logdir)?;
346        }
347        fs::create_dir_all(&self.logdir)?;
348
349        // Create work.log and chown to build_user if set
350        let work_log = self.logdir.join("work.log");
351        File::create(&work_log)?;
352        if let Some(ref user) = self.build_user {
353            let bob_log = File::options()
354                .create(true)
355                .append(true)
356                .open(self.logdir.join("bob.log"))?;
357            let bob_log_err = bob_log.try_clone()?;
358            let _ = Command::new("chown")
359                .arg(user)
360                .arg(&work_log)
361                .stdout(bob_log)
362                .stderr(bob_log_err)
363                .status();
364        }
365
366        let pkgdir = self.session.config.pkgsrc().join(pkgpath.as_path());
367
368        // Pre-clean
369        callback.stage(Stage::PreClean.as_str());
370        self.run_make_stage(Stage::PreClean, &pkgdir, &["clean"], RunAs::Root, false)?;
371
372        // Install dependencies
373        if !self.pkginfo.depends().is_empty() {
374            callback.stage(Stage::Depends.as_str());
375            let _ = self.write_stage(Stage::Depends);
376            if !self.install_dependencies()? {
377                return Ok(PkgBuildResult::Failed);
378            }
379        }
380
381        // Checksum
382        callback.stage(Stage::Checksum.as_str());
383        if !self.run_make_stage(Stage::Checksum, &pkgdir, &["checksum"], RunAs::Root, true)? {
384            return Ok(PkgBuildResult::Failed);
385        }
386
387        // Configure
388        callback.stage(Stage::Configure.as_str());
389        let configure_log = self.logdir.join("configure.log");
390        if !self.run_usergroup_if_needed(Stage::Configure, &pkgdir, &configure_log)? {
391            return Ok(PkgBuildResult::Failed);
392        }
393        if !self.run_make_stage(
394            Stage::Configure,
395            &pkgdir,
396            &["configure"],
397            self.build_run_as(),
398            true,
399        )? {
400            return Ok(PkgBuildResult::Failed);
401        }
402
403        // Build
404        callback.stage(Stage::Build.as_str());
405        let build_log = self.logdir.join("build.log");
406        if !self.run_usergroup_if_needed(Stage::Build, &pkgdir, &build_log)? {
407            return Ok(PkgBuildResult::Failed);
408        }
409        if !self.run_make_stage(Stage::Build, &pkgdir, &["all"], self.build_run_as(), true)? {
410            return Ok(PkgBuildResult::Failed);
411        }
412
413        // Install
414        callback.stage(Stage::Install.as_str());
415        let install_log = self.logdir.join("install.log");
416        if !self.run_usergroup_if_needed(Stage::Install, &pkgdir, &install_log)? {
417            return Ok(PkgBuildResult::Failed);
418        }
419        if !self.run_make_stage(
420            Stage::Install,
421            &pkgdir,
422            &["stage-install"],
423            self.build_run_as(),
424            true,
425        )? {
426            return Ok(PkgBuildResult::Failed);
427        }
428
429        // Package
430        callback.stage(Stage::Package.as_str());
431        if !self.run_make_stage(
432            Stage::Package,
433            &pkgdir,
434            &["stage-package-create"],
435            RunAs::Root,
436            true,
437        )? {
438            return Ok(PkgBuildResult::Failed);
439        }
440
441        // Get the package file path
442        let pkgfile = self.get_make_var(&pkgdir, "STAGE_PKGFILE")?;
443
444        // Test package install (unless bootstrap package)
445        let is_bootstrap = self.pkginfo.bootstrap_pkg() == Some("yes");
446        if !is_bootstrap {
447            if !self.pkg_add(&pkgfile)? {
448                return Ok(PkgBuildResult::Failed);
449            }
450
451            // Test package deinstall
452            callback.stage(Stage::Deinstall.as_str());
453            let _ = self.write_stage(Stage::Deinstall);
454            if !self.pkg_delete(pkgname_str)? {
455                return Ok(PkgBuildResult::Failed);
456            }
457        }
458
459        // Save package to packages directory
460        let packages_dir = self.session.pkgsrc_env.packages.join("All");
461        fs::create_dir_all(&packages_dir)?;
462        let dest = packages_dir.join(
463            Path::new(&pkgfile)
464                .file_name()
465                .context("Invalid package file path")?,
466        );
467        // pkgfile is a path inside the sandbox; prepend sandbox path for host access
468        let host_pkgfile = if self.session.sandbox.enabled() {
469            self.session
470                .sandbox
471                .path(self.sandbox_id)
472                .join(pkgfile.trim_start_matches('/'))
473        } else {
474            PathBuf::from(&pkgfile)
475        };
476        fs::copy(&host_pkgfile, &dest)?;
477
478        // Clean
479        callback.stage(Stage::Clean.as_str());
480        let _ = self.run_make_stage(Stage::Clean, &pkgdir, &["clean"], RunAs::Root, false);
481
482        // Remove log directory on success
483        let _ = fs::remove_dir_all(&self.logdir);
484
485        Ok(PkgBuildResult::Success)
486    }
487
488    /// Determine how to run build commands.
489    fn build_run_as(&self) -> RunAs {
490        if self.build_user.is_some() {
491            RunAs::User
492        } else {
493            RunAs::Root
494        }
495    }
496
497    /// Write the current stage to a .stage file.
498    fn write_stage(&self, stage: Stage) -> anyhow::Result<()> {
499        let stage_file = self.logdir.join(".stage");
500        fs::write(&stage_file, stage.as_str())?;
501        Ok(())
502    }
503
504    /// Run a make stage with output logging.
505    fn run_make_stage(
506        &self,
507        stage: Stage,
508        pkgdir: &Path,
509        targets: &[&str],
510        run_as: RunAs,
511        include_make_flags: bool,
512    ) -> anyhow::Result<bool> {
513        // Write stage to .stage file
514        let _ = self.write_stage(stage);
515
516        let logfile = self.logdir.join(format!("{}.log", stage.as_str()));
517        let work_log = self.logdir.join("work.log");
518
519        let owned_args = self.make_args(pkgdir, targets, include_make_flags, &work_log);
520
521        // Convert to slice of &str for the command
522        let args: Vec<&str> = owned_args.iter().map(|s| s.as_str()).collect();
523
524        info!(stage = stage.as_str(), "Running make stage");
525
526        let status =
527            self.run_command_logged(self.session.config.make(), &args, run_as, &logfile)?;
528
529        Ok(status.success())
530    }
531
532    /// Run a command with output logged to a file.
533    fn run_command_logged(
534        &self,
535        cmd: &Path,
536        args: &[&str],
537        run_as: RunAs,
538        logfile: &Path,
539    ) -> anyhow::Result<ExitStatus> {
540        self.run_command_logged_with_env(cmd, args, run_as, logfile, &[])
541    }
542
543    fn run_command_logged_with_env(
544        &self,
545        cmd: &Path,
546        args: &[&str],
547        run_as: RunAs,
548        logfile: &Path,
549        extra_envs: &[(&str, &str)],
550    ) -> anyhow::Result<ExitStatus> {
551        use std::io::{BufRead, BufReader, Write};
552
553        let mut log = OpenOptions::new().create(true).append(true).open(logfile)?;
554
555        // Write command being executed to the log file
556        let _ = writeln!(log, "=> {:?} {:?}", cmd, args);
557        let _ = log.flush();
558
559        // Use tee-style pipe handling when output_tx is available for live view.
560        // Otherwise use direct file redirection.
561        if let Some(ref output_tx) = self.output_tx {
562            // Wrap command in shell to merge stdout/stderr with 2>&1, like the
563            // shell script's run_log function does.
564            let shell_cmd = self.build_shell_command(cmd, args, run_as, extra_envs);
565            let mut child = self
566                .session
567                .sandbox
568                .command(self.sandbox_id, Path::new("/bin/sh"))
569                .arg("-c")
570                .arg(&shell_cmd)
571                .stdout(Stdio::piped())
572                .stderr(Stdio::null())
573                .spawn()
574                .context("Failed to spawn shell command")?;
575
576            let stdout = child.stdout.take().unwrap();
577            let output_tx = output_tx.clone();
578            let sandbox_id = self.sandbox_id;
579
580            // Spawn thread to read from pipe and tee to file + output channel.
581            // Batch lines and throttle sends to reduce channel overhead.
582            let tee_handle = std::thread::spawn(move || {
583                let mut reader = BufReader::new(stdout);
584                let mut buf = Vec::new();
585                let mut batch = Vec::with_capacity(50);
586                let mut last_send = Instant::now();
587                let send_interval = OUTPUT_BATCH_INTERVAL;
588
589                loop {
590                    buf.clear();
591                    match reader.read_until(b'\n', &mut buf) {
592                        Ok(0) => break,
593                        Ok(_) => {}
594                        Err(_) => break,
595                    };
596                    // Write raw bytes to log file to preserve original output
597                    let _ = log.write_all(&buf);
598                    // Convert to lossy UTF-8 for live view
599                    let line = String::from_utf8_lossy(&buf);
600                    let line = line.trim_end_matches('\n').to_string();
601                    batch.push(line);
602
603                    // Send batch if interval elapsed or batch is large
604                    if last_send.elapsed() >= send_interval || batch.len() >= 50 {
605                        let _ = output_tx.send(ChannelCommand::OutputLines(
606                            sandbox_id,
607                            std::mem::take(&mut batch),
608                        ));
609                        last_send = Instant::now();
610                    }
611                }
612
613                // Send remaining lines
614                if !batch.is_empty() {
615                    let _ = output_tx.send(ChannelCommand::OutputLines(sandbox_id, batch));
616                }
617            });
618
619            let status = wait_with_shutdown(&mut child, &self.session.shutdown)?;
620
621            // Reader thread will exit when pipe closes (process exits)
622            let _ = tee_handle.join();
623
624            trace!(?cmd, ?status, "Command completed");
625            Ok(status)
626        } else {
627            let status = self.spawn_command_to_file(cmd, args, run_as, extra_envs, log)?;
628            trace!(?cmd, ?status, "Command completed");
629            Ok(status)
630        }
631    }
632
633    /// Spawn a command with stdout/stderr redirected to a file.
634    fn spawn_command_to_file(
635        &self,
636        cmd: &Path,
637        args: &[&str],
638        run_as: RunAs,
639        extra_envs: &[(&str, &str)],
640        log: File,
641    ) -> anyhow::Result<ExitStatus> {
642        // Clone file handle for stderr (stdout and stderr both go to same file)
643        let log_err = log.try_clone()?;
644
645        match run_as {
646            RunAs::Root => {
647                let mut command = self.session.sandbox.command(self.sandbox_id, cmd);
648                command.args(args);
649                self.apply_envs(&mut command, extra_envs);
650                let mut child = command
651                    .stdout(Stdio::from(log))
652                    .stderr(Stdio::from(log_err))
653                    .spawn()
654                    .with_context(|| format!("Failed to spawn {}", cmd.display()))?;
655                wait_with_shutdown(&mut child, &self.session.shutdown)
656            }
657            RunAs::User => {
658                let user = self.build_user.as_ref().unwrap();
659                let mut parts = Vec::with_capacity(args.len() + 1);
660                parts.push(cmd.display().to_string());
661                parts.extend(args.iter().map(|arg| arg.to_string()));
662                let inner_cmd = parts
663                    .iter()
664                    .map(|part| Self::shell_escape(part))
665                    .collect::<Vec<_>>()
666                    .join(" ");
667                let mut command = self
668                    .session
669                    .sandbox
670                    .command(self.sandbox_id, Path::new("su"));
671                command.arg(user).arg("-c").arg(&inner_cmd);
672                self.apply_envs(&mut command, extra_envs);
673                let mut child = command
674                    .stdout(Stdio::from(log))
675                    .stderr(Stdio::from(log_err))
676                    .spawn()
677                    .context("Failed to spawn su command")?;
678                wait_with_shutdown(&mut child, &self.session.shutdown)
679            }
680        }
681    }
682
683    /// Get a make variable value.
684    fn get_make_var(&self, pkgdir: &Path, varname: &str) -> anyhow::Result<String> {
685        let mut cmd = self
686            .session
687            .sandbox
688            .command(self.sandbox_id, self.session.config.make());
689        self.apply_envs(&mut cmd, &[]);
690
691        let work_log = self.logdir.join("work.log");
692        let make_args = self.make_args(
693            pkgdir,
694            &["show-var", &format!("VARNAME={}", varname)],
695            true,
696            &work_log,
697        );
698
699        let bob_log = File::options()
700            .create(true)
701            .append(true)
702            .open(self.logdir.join("bob.log"))?;
703        let output = cmd.args(&make_args).stderr(Stdio::from(bob_log)).output()?;
704
705        if !output.status.success() {
706            bail!("Failed to get make variable {}", varname);
707        }
708
709        Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
710    }
711
712    /// Install package dependencies.
713    fn install_dependencies(&self) -> anyhow::Result<bool> {
714        let deps: Vec<String> = self
715            .pkginfo
716            .depends()
717            .iter()
718            .map(|d| d.to_string())
719            .collect();
720
721        let pkg_path = self.session.pkgsrc_env.packages.join("All");
722        let logfile = self.logdir.join("depends.log");
723
724        let mut args = vec![];
725        for dep in &deps {
726            args.push(dep.as_str());
727        }
728
729        let status = self.run_pkg_add_with_path(&args, &pkg_path, &logfile)?;
730        Ok(status.success())
731    }
732
733    /// Run pkg_add with PKG_PATH set.
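    ///
    /// Roughly equivalent (illustrative) to running
    /// `PKG_PATH=<packages>/All pkg_add -K <pkg_dbdir> <pkg>...` inside the
    /// sandbox, with stdout and stderr appended to the given log file.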
734    fn run_pkg_add_with_path(
735        &self,
736        packages: &[&str],
737        pkg_path: &Path,
738        logfile: &Path,
739    ) -> anyhow::Result<ExitStatus> {
740        let pkg_add = self.session.pkgsrc_env.pkgtools.join("pkg_add");
741        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
742        let pkg_path_value = pkg_path.to_string_lossy().to_string();
743        let extra_envs = [("PKG_PATH", pkg_path_value.as_str())];
744
745        let mut args = vec!["-K", &*pkg_dbdir];
746        args.extend(packages.iter().copied());
747
748        self.run_command_logged_with_env(&pkg_add, &args, RunAs::Root, logfile, &extra_envs)
749    }
750
751    /// Install a package file.
752    fn pkg_add(&self, pkgfile: &str) -> anyhow::Result<bool> {
753        let pkg_add = self.session.pkgsrc_env.pkgtools.join("pkg_add");
754        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
755        let logfile = self.logdir.join("package.log");
756
757        let status = self.run_command_logged(
758            &pkg_add,
759            &["-K", &*pkg_dbdir, pkgfile],
760            RunAs::Root,
761            &logfile,
762        )?;
763
764        Ok(status.success())
765    }
766
767    /// Delete an installed package.
768    fn pkg_delete(&self, pkgname: &str) -> anyhow::Result<bool> {
769        let pkg_delete = self.session.pkgsrc_env.pkgtools.join("pkg_delete");
770        let pkg_dbdir = self.session.pkgsrc_env.pkg_dbdir.to_string_lossy();
771        let logfile = self.logdir.join("deinstall.log");
772
773        let status = self.run_command_logged(
774            &pkg_delete,
775            &["-K", &*pkg_dbdir, pkgname],
776            RunAs::Root,
777            &logfile,
778        )?;
779
780        Ok(status.success())
781    }
782
783    /// Run create-usergroup if needed based on usergroup_phase.
784    fn run_usergroup_if_needed(
785        &self,
786        stage: Stage,
787        pkgdir: &Path,
788        logfile: &Path,
789    ) -> anyhow::Result<bool> {
790        let usergroup_phase = self.pkginfo.usergroup_phase().unwrap_or("");
791
792        let should_run = match stage {
793            Stage::Configure => usergroup_phase.ends_with("configure"),
794            Stage::Build => usergroup_phase.ends_with("build"),
795            Stage::Install => usergroup_phase == "pre-install",
796            _ => false,
797        };
798
799        if !should_run {
800            return Ok(true);
801        }
802
803        let mut args = vec!["-C", pkgdir.to_str().unwrap(), "create-usergroup"];
804        if stage == Stage::Configure {
805            args.push("clean");
806        }
807
808        let status =
809            self.run_command_logged(self.session.config.make(), &args, RunAs::Root, logfile)?;
810        Ok(status.success())
811    }
812
813    fn make_args(
814        &self,
815        pkgdir: &Path,
816        targets: &[&str],
817        include_make_flags: bool,
818        work_log: &Path,
819    ) -> Vec<String> {
820        let mut owned_args: Vec<String> =
821            vec!["-C".to_string(), pkgdir.to_str().unwrap().to_string()];
822        owned_args.extend(targets.iter().map(|s| s.to_string()));
823
824        if include_make_flags {
825            owned_args.push("BATCH=1".to_string());
826            owned_args.push("DEPENDS_TARGET=/nonexistent".to_string());
827
828            if let Some(multi_version) = self.pkginfo.multi_version() {
829                for flag in multi_version {
830                    owned_args.push(flag.clone());
831                }
832            }
833
834            owned_args.push(format!("WRKLOG={}", work_log.display()));
835        }
836
837        owned_args
838    }
839
840    fn apply_envs(&self, cmd: &mut Command, extra_envs: &[(&str, &str)]) {
841        for (key, value) in &self.envs {
842            cmd.env(key, value);
843        }
844        for (key, value) in extra_envs {
845            cmd.env(key, value);
846        }
847    }
848
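    /// Quote a value for safe interpolation into a `/bin/sh` command line.
    /// Plain words consisting only of alphanumerics and `-_.,/:=+@` (for
    /// example `BATCH=1` or `/usr/bin/make`) pass through unchanged; anything
    /// else is wrapped in single quotes with embedded quotes rewritten, so
    /// `it's` becomes `'it'\''s'`.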
849    fn shell_escape(value: &str) -> String {
850        if value.is_empty() {
851            return "''".to_string();
852        }
853        if value
854            .chars()
855            .all(|c| c.is_ascii_alphanumeric() || "-_.,/:=+@".contains(c))
856        {
857            return value.to_string();
858        }
859        let escaped = value.replace('\'', "'\\''");
860        format!("'{}'", escaped)
861    }
862
863    /// Build a shell command string with environment, run_as handling, and 2>&1.
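    ///
    /// The result (illustrative) has the shape
    /// `KEY=value <make> -C <pkgdir> <target> BATCH=1 ... 2>&1` for a root
    /// build, or the same command wrapped as `su <user> -c '...' 2>&1` when
    /// a build user is configured, with values and arguments shell-escaped.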
864    fn build_shell_command(
865        &self,
866        cmd: &Path,
867        args: &[&str],
868        run_as: RunAs,
869        extra_envs: &[(&str, &str)],
870    ) -> String {
871        let mut parts = Vec::new();
872
873        // Add environment variables
874        for (key, value) in &self.envs {
875            parts.push(format!("{}={}", key, Self::shell_escape(value)));
876        }
877        for (key, value) in extra_envs {
878            parts.push(format!("{}={}", key, Self::shell_escape(value)));
879        }
880
881        // Build the actual command
882        let cmd_str = Self::shell_escape(&cmd.to_string_lossy());
883        let args_str: Vec<String> = args.iter().map(|a| Self::shell_escape(a)).collect();
884
885        match run_as {
886            RunAs::Root => {
887                parts.push(cmd_str);
888                parts.extend(args_str);
889            }
890            RunAs::User => {
891                let user = self.build_user.as_ref().unwrap();
892                let inner_cmd = std::iter::once(cmd_str)
893                    .chain(args_str)
894                    .collect::<Vec<_>>()
895                    .join(" ");
896                parts.push("su".to_string());
897                parts.push(Self::shell_escape(user));
898                parts.push("-c".to_string());
899                parts.push(Self::shell_escape(&inner_cmd));
900            }
901        }
902
903        // Merge stdout/stderr
904        parts.push("2>&1".to_string());
905        parts.join(" ")
906    }
907}
908
909/// Callback adapter that sends build updates through a channel.
910struct ChannelCallback<'a> {
911    sandbox_id: usize,
912    status_tx: &'a Sender<ChannelCommand>,
913}
914
915impl<'a> ChannelCallback<'a> {
916    fn new(sandbox_id: usize, status_tx: &'a Sender<ChannelCommand>) -> Self {
917        Self {
918            sandbox_id,
919            status_tx,
920        }
921    }
922}
923
924impl<'a> BuildCallback for ChannelCallback<'a> {
925    fn stage(&mut self, stage: &str) {
926        let _ = self.status_tx.send(ChannelCommand::StageUpdate(
927            self.sandbox_id,
928            Some(stage.to_string()),
929        ));
930    }
931}
932
933/// Outcome of a package build attempt.
934///
935/// Used in [`BuildResult`] to indicate whether the build succeeded, failed,
936/// or was skipped.
937#[derive(Clone, Debug, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
938pub enum BuildOutcome {
939    /// Package built and packaged successfully.
940    Success,
941    /// Package build failed.
942    ///
943    /// The string contains the failure reason (e.g., "Failed in build phase").
944    Failed(String),
945    /// Package did not need to be built - we already have a binary package
946    /// for this revision.
947    UpToDate,
948    /// Package was not built due to a scan-phase failure.
949    ///
950    /// Contains the reason for skipping.
951    Skipped(SkipReason),
952}
953
954/// Result of building a single package.
955///
956/// Contains the outcome, timing, and log location for a package build.
957#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
958pub struct BuildResult {
959    /// Package name with version (e.g., `mutt-2.2.12`).
960    pub pkgname: PkgName,
961    /// Package path in pkgsrc (e.g., `mail/mutt`).
962    pub pkgpath: Option<PkgPath>,
963    /// Build outcome (success, failure, or skipped).
964    pub outcome: BuildOutcome,
965    /// Time spent building this package.
966    pub duration: Duration,
967    /// Path to build logs directory, if available.
968    ///
969    /// For failed builds, this contains `pre-clean.log`, `build.log`, etc.
970    /// Successful builds clean up their log directories.
971    pub log_dir: Option<PathBuf>,
972}
973
974/// Counts of build results by outcome category.
975#[derive(Clone, Debug, Default)]
976pub struct BuildCounts {
977    /// Packages that built successfully.
978    pub success: usize,
979    /// Packages that failed to build.
980    pub failed: usize,
981    /// Packages already up-to-date (binary package exists).
982    pub up_to_date: usize,
983    /// Packages that were skipped.
984    pub skipped: SkippedCounts,
985    /// Packages that failed to scan.
986    pub scanfail: usize,
987}
988
989/// Summary of an entire build run.
990#[derive(Clone, Debug)]
991pub struct BuildSummary {
992    /// Total duration of the build run.
993    pub duration: Duration,
994    /// Results for each package.
995    pub results: Vec<BuildResult>,
996    /// Packages that failed to scan (pkgpath, error message).
997    pub scanfail: Vec<(PkgPath, String)>,
998}
999
1000impl BuildSummary {
1001    /// Compute all outcome counts in a single pass.
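    ///
    /// For example, given a `summary: BuildSummary` from a completed run
    /// (illustrative, hence `ignore`):
    ///
    /// ```ignore
    /// let counts = summary.counts();
    /// println!(
    ///     "{} built, {} failed, {} up-to-date",
    ///     counts.success, counts.failed, counts.up_to_date
    /// );
    /// ```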
1002    pub fn counts(&self) -> BuildCounts {
1003        let mut c = BuildCounts {
1004            scanfail: self.scanfail.len(),
1005            ..Default::default()
1006        };
1007        for r in &self.results {
1008            match &r.outcome {
1009                BuildOutcome::Success => c.success += 1,
1010                BuildOutcome::Failed(_) => c.failed += 1,
1011                BuildOutcome::UpToDate => c.up_to_date += 1,
1012                BuildOutcome::Skipped(SkipReason::PkgSkip(_)) => c.skipped.pkg_skip += 1,
1013                BuildOutcome::Skipped(SkipReason::PkgFail(_)) => c.skipped.pkg_fail += 1,
1014                BuildOutcome::Skipped(SkipReason::UnresolvedDep(_)) => c.skipped.unresolved += 1,
1015                BuildOutcome::Skipped(SkipReason::IndirectFail(_)) => c.skipped.indirect_fail += 1,
1016                BuildOutcome::Skipped(SkipReason::IndirectSkip(_)) => c.skipped.indirect_skip += 1,
1017            }
1018        }
1019        c
1020    }
1021
1022    /// Get all failed results (direct build failures only).
1023    pub fn failed(&self) -> Vec<&BuildResult> {
1024        self.results
1025            .iter()
1026            .filter(|r| matches!(r.outcome, BuildOutcome::Failed(_)))
1027            .collect()
1028    }
1029
1030    /// Get all successful results.
1031    pub fn succeeded(&self) -> Vec<&BuildResult> {
1032        self.results
1033            .iter()
1034            .filter(|r| matches!(r.outcome, BuildOutcome::Success))
1035            .collect()
1036    }
1037
1038    /// Get all skipped results.
1039    pub fn skipped(&self) -> Vec<&BuildResult> {
1040        self.results
1041            .iter()
1042            .filter(|r| matches!(r.outcome, BuildOutcome::Skipped(_)))
1043            .collect()
1044    }
1045}
1046
1047/// Options that control build behavior.
1048#[derive(Clone, Debug, Default)]
1049pub struct BuildOptions {
1050    /// Force rebuild even if package is up-to-date.
1051    pub force_rebuild: bool,
1052}
1053
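/// Orchestrates a parallel package build across one or more sandboxes,
/// scheduling packages from the dependency graph produced by the scan phase.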
1054#[derive(Debug)]
1055pub struct Build {
1056    /// Parsed [`Config`].
1057    config: Config,
1058    /// Pkgsrc environment variables.
1059    pkgsrc_env: PkgsrcEnv,
1060    /// Sandbox scope - owns created sandboxes, destroys on drop.
1061    scope: SandboxScope,
1062    /// List of packages to build, as produced by `Scan::resolve`.
1063    scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1064    /// Cached build results from previous run.
1065    cached: IndexMap<PkgName, BuildResult>,
1066    /// Build options.
1067    options: BuildOptions,
1068}
1069
1070/// Per-package build task sent to worker threads.
1071#[derive(Debug)]
1072struct PackageBuild {
1073    session: Arc<BuildSession>,
1074    sandbox_id: usize,
1075    pkginfo: ResolvedPackage,
1076}
1077
1078/// Helper for querying bmake variables with the correct environment.
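///
/// For example (illustrative), `var("WRKDIR")` runs
/// `bmake -C <pkgsrc>/<pkgpath> show-var VARNAME=WRKDIR` inside the sandbox
/// with the package's environment applied, returning the trimmed output, or
/// `None` if the command fails or prints nothing.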
1079struct MakeQuery<'a> {
1080    session: &'a BuildSession,
1081    sandbox_id: usize,
1082    pkgpath: &'a PkgPath,
1083    env: &'a HashMap<String, String>,
1084}
1085
1086impl<'a> MakeQuery<'a> {
1087    fn new(
1088        session: &'a BuildSession,
1089        sandbox_id: usize,
1090        pkgpath: &'a PkgPath,
1091        env: &'a HashMap<String, String>,
1092    ) -> Self {
1093        Self {
1094            session,
1095            sandbox_id,
1096            pkgpath,
1097            env,
1098        }
1099    }
1100
1101    /// Query a bmake variable value.
1102    fn var(&self, name: &str) -> Option<String> {
1103        let pkgdir = self.session.config.pkgsrc().join(self.pkgpath.as_path());
1104
1105        let mut cmd = self
1106            .session
1107            .sandbox
1108            .command(self.sandbox_id, self.session.config.make());
1109        cmd.arg("-C")
1110            .arg(&pkgdir)
1111            .arg("show-var")
1112            .arg(format!("VARNAME={}", name));
1113
1114        // Pass env vars that may affect the variable value
1115        for (key, value) in self.env {
1116            cmd.env(key, value);
1117        }
1118
1119        cmd.stderr(Stdio::null());
1120
1121        let output = cmd.output().ok()?;
1122
1123        if !output.status.success() {
1124            return None;
1125        }
1126
1127        let value = String::from_utf8_lossy(&output.stdout).trim().to_string();
1128
1129        if value.is_empty() { None } else { Some(value) }
1130    }
1131
1132    /// Query a bmake variable and return as PathBuf.
1133    fn var_path(&self, name: &str) -> Option<PathBuf> {
1134        self.var(name).map(PathBuf::from)
1135    }
1136
1137    /// Get the WRKDIR for this package.
1138    fn wrkdir(&self) -> Option<PathBuf> {
1139        self.var_path("WRKDIR")
1140    }
1141
1142    /// Resolve a path to its actual location on the host filesystem.
1143    /// If sandboxed, prepends the sandbox root path.
1144    fn resolve_path(&self, path: &Path) -> PathBuf {
1145        if self.session.sandbox.enabled() {
1146            self.session
1147                .sandbox
1148                .path(self.sandbox_id)
1149                .join(path.strip_prefix("/").unwrap_or(path))
1150        } else {
1151            path.to_path_buf()
1152        }
1153    }
1154}
1155
1156/// Result of a single package build attempt.
1157#[derive(Debug)]
1158enum PackageBuildResult {
1159    /// Build succeeded
1160    Success,
1161    /// Build failed
1162    Failed,
1163    /// Package was up-to-date, skipped
1164    Skipped,
1165}
1166
1167impl std::fmt::Display for PackageBuildResult {
1168    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1169        match self {
1170            Self::Success => write!(f, "success"),
1171            Self::Failed => write!(f, "failed"),
1172            Self::Skipped => write!(f, "skipped"),
1173        }
1174    }
1175}
1176
1177impl PackageBuild {
1178    fn build(&self, status_tx: &Sender<ChannelCommand>) -> anyhow::Result<PackageBuildResult> {
1179        let pkgname = self.pkginfo.index.pkgname.pkgname();
1180        info!("Starting package build");
1181
1182        let pkgpath = &self.pkginfo.pkgpath;
1183
1184        let logdir = self.session.config.logdir();
1185
1186        // Get env vars from Lua config for wrkdir saving and build environment
1187        let pkg_env = match self.session.config.get_pkg_env(&self.pkginfo) {
1188            Ok(env) => env,
1189            Err(e) => {
1190                error!(error = %e, "Failed to get env from Lua config");
1191                HashMap::new()
1192            }
1193        };
1194
1195        let mut envs = self
1196            .session
1197            .config
1198            .script_env(Some(&self.session.pkgsrc_env));
1199        for (key, value) in &pkg_env {
1200            envs.push((key.clone(), value.clone()));
1201        }
1202
1203        let patterns = self.session.config.save_wrkdir_patterns();
1204
1205        // Run pre-build script if defined (always runs)
1206        if !self.session.sandbox.run_pre_build(
1207            self.sandbox_id,
1208            &self.session.config,
1209            envs.clone(),
1210        )? {
1211            warn!("pre-build script failed");
1212        }
1213
1214        // Run the build using PkgBuilder
1215        let builder = PkgBuilder::new(
1216            &self.session,
1217            self.sandbox_id,
1218            &self.pkginfo,
1219            envs.clone(),
1220            Some(status_tx.clone()),
1221        );
1222
1223        let mut callback = ChannelCallback::new(self.sandbox_id, status_tx);
1224        let result = builder.build(&mut callback);
1225
1226        // Clear stage display
1227        let _ = status_tx.send(ChannelCommand::StageUpdate(self.sandbox_id, None));
1228
1229        let result = match &result {
1230            Ok(PkgBuildResult::Success) => {
1231                info!("Package build completed successfully");
1232                PackageBuildResult::Success
1233            }
1234            Ok(PkgBuildResult::Skipped) => {
1235                info!("Package build skipped (up-to-date)");
1236                PackageBuildResult::Skipped
1237            }
1238            Ok(PkgBuildResult::Failed) => {
1239                error!("Package build failed");
1240                // Show cleanup stage to user
1241                let _ = status_tx.send(ChannelCommand::StageUpdate(
1242                    self.sandbox_id,
1243                    Some("cleanup".to_string()),
1244                ));
1245                // Kill any orphaned processes in the sandbox before cleanup.
1246                // Failed builds may leave processes running that would block
1247                // subsequent commands like bmake show-var or bmake clean.
1248                let kill_start = Instant::now();
1249                self.session.sandbox.kill_processes_by_id(self.sandbox_id);
1250                trace!(
1251                    elapsed_ms = kill_start.elapsed().as_millis(),
1252                    "kill_processes_by_id completed"
1253                );
1254                // Save wrkdir files matching configured patterns, then clean up
1255                if !patterns.is_empty() {
1256                    let save_start = Instant::now();
1257                    self.save_wrkdir_files(pkgname, pkgpath, logdir, patterns, &pkg_env);
1258                    trace!(
1259                        elapsed_ms = save_start.elapsed().as_millis(),
1260                        "save_wrkdir_files completed"
1261                    );
1262                    let clean_start = Instant::now();
1263                    self.run_clean(pkgpath, &envs);
1264                    trace!(
1265                        elapsed_ms = clean_start.elapsed().as_millis(),
1266                        "run_clean completed"
1267                    );
1268                } else {
1269                    let clean_start = Instant::now();
1270                    self.run_clean(pkgpath, &envs);
1271                    trace!(
1272                        elapsed_ms = clean_start.elapsed().as_millis(),
1273                        "run_clean completed"
1274                    );
1275                }
1276                PackageBuildResult::Failed
1277            }
1278            Err(e) => {
1279                error!(error = %e, "Package build error");
1280                // Show cleanup stage to user
1281                let _ = status_tx.send(ChannelCommand::StageUpdate(
1282                    self.sandbox_id,
1283                    Some("cleanup".to_string()),
1284                ));
1285                // Kill any orphaned processes in the sandbox before cleanup.
1286                // Failed builds may leave processes running that would block
1287                // subsequent commands like bmake show-var or bmake clean.
1288                let kill_start = Instant::now();
1289                self.session.sandbox.kill_processes_by_id(self.sandbox_id);
1290                trace!(
1291                    elapsed_ms = kill_start.elapsed().as_millis(),
1292                    "kill_processes_by_id completed"
1293                );
1294                // Save wrkdir files matching configured patterns, then clean up
1295                if !patterns.is_empty() {
1296                    let save_start = Instant::now();
1297                    self.save_wrkdir_files(pkgname, pkgpath, logdir, patterns, &pkg_env);
1298                    trace!(
1299                        elapsed_ms = save_start.elapsed().as_millis(),
1300                        "save_wrkdir_files completed"
1301                    );
1302                    let clean_start = Instant::now();
1303                    self.run_clean(pkgpath, &envs);
1304                    trace!(
1305                        elapsed_ms = clean_start.elapsed().as_millis(),
1306                        "run_clean completed"
1307                    );
1308                } else {
1309                    let clean_start = Instant::now();
1310                    self.run_clean(pkgpath, &envs);
1311                    trace!(
1312                        elapsed_ms = clean_start.elapsed().as_millis(),
1313                        "run_clean completed"
1314                    );
1315                }
1316                PackageBuildResult::Failed
1317            }
1318        };
1319
1320        // Run post-build script if defined (always runs regardless of result)
1321        match self
1322            .session
1323            .sandbox
1324            .run_post_build(self.sandbox_id, &self.session.config, envs)
1325        {
1326            Ok(true) => {}
1327            Ok(false) => warn!("post-build script failed"),
1328            Err(e) => {
1329                warn!(error = %e, "post-build script error")
1330            }
1331        }
1332
1333        Ok(result)
1334    }
1335
1336    /// Save files matching patterns from WRKDIR to logdir on build failure.
1337    fn save_wrkdir_files(
1338        &self,
1339        pkgname: &str,
1340        pkgpath: &PkgPath,
1341        logdir: &Path,
1342        patterns: &[String],
1343        pkg_env: &HashMap<String, String>,
1344    ) {
1345        let make = MakeQuery::new(&self.session, self.sandbox_id, pkgpath, pkg_env);
1346
1347        // Get WRKDIR
1348        let wrkdir = match make.wrkdir() {
1349            Some(w) => w,
1350            None => {
1351                debug!(%pkgname, "Could not determine WRKDIR, skipping file save");
1352                return;
1353            }
1354        };
1355
1356        // Resolve to actual filesystem path
1357        let wrkdir_path = make.resolve_path(&wrkdir);
1358
1359        if !wrkdir_path.exists() {
1360            debug!(%pkgname, wrkdir = %wrkdir_path.display(), "WRKDIR does not exist, skipping file save");
1361            return;
1362        }
1363
1364        let save_dir = logdir.join(pkgname).join("wrkdir-files");
1365        if let Err(e) = fs::create_dir_all(&save_dir) {
1366            warn!(%pkgname, error = %e, "Failed to create wrkdir-files directory");
1367            return;
1368        }
1369
1370        // Compile glob patterns
1371        let compiled_patterns: Vec<Pattern> = patterns
1372            .iter()
1373            .filter_map(|p| {
1374                Pattern::new(p).ok().or_else(|| {
1375                    warn!(pattern = %p, "Invalid glob pattern");
1376                    None
1377                })
1378            })
1379            .collect();
1380
1381        if compiled_patterns.is_empty() {
1382            return;
1383        }
1384
1385        // Walk the wrkdir and find matching files
1386        let mut saved_count = 0;
1387        if let Err(e) = walk_and_save(
1388            &wrkdir_path,
1389            &wrkdir_path,
1390            &save_dir,
1391            &compiled_patterns,
1392            &mut saved_count,
1393        ) {
1394            warn!(%pkgname, error = %e, "Error while saving wrkdir files");
1395        }
1396
1397        if saved_count > 0 {
1398            info!(%pkgname, count = saved_count, dest = %save_dir.display(), "Saved wrkdir files");
1399        }
1400    }
1401
1402    /// Run bmake clean for a package.
1403    fn run_clean(&self, pkgpath: &PkgPath, envs: &[(String, String)]) {
1404        let pkgdir = self.session.config.pkgsrc().join(pkgpath.as_path());
1405
1406        let mut cmd = self
1407            .session
1408            .sandbox
1409            .command(self.sandbox_id, self.session.config.make());
1410        cmd.arg("-C").arg(&pkgdir).arg("clean");
1411        for (key, value) in envs {
1412            cmd.env(key, value);
1413        }
1414        let result = cmd
1415            .stdout(std::process::Stdio::null())
1416            .stderr(std::process::Stdio::null())
1417            .status();
1418
1419        if let Err(e) = result {
1420            debug!(error = %e, "Failed to run bmake clean");
1421        }
1422    }
1423}
1424
1425/// Recursively walk a directory and save files matching patterns.
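///
/// Each regular file is matched against the patterns twice: once by its path
/// relative to `base` (e.g. `work/config.log`) and once by its bare file
/// name (`config.log`), so a pattern such as `config.log` matches however
/// deeply the file is nested. Matching files are copied into `save_dir`
/// with their relative layout preserved.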
1426fn walk_and_save(
1427    base: &Path,
1428    current: &Path,
1429    save_dir: &Path,
1430    patterns: &[Pattern],
1431    saved_count: &mut usize,
1432) -> std::io::Result<()> {
1433    if !current.is_dir() {
1434        return Ok(());
1435    }
1436
1437    for entry in fs::read_dir(current)? {
1438        let entry = entry?;
1439        let path = entry.path();
1440
1441        if path.is_dir() {
1442            walk_and_save(base, &path, save_dir, patterns, saved_count)?;
1443        } else if path.is_file() {
1444            // Get relative path from base
1445            let rel_path = path.strip_prefix(base).unwrap_or(&path);
1446            let rel_str = rel_path.to_string_lossy();
1447
1448            // Check if any pattern matches
1449            for pattern in patterns {
1450                if pattern.matches(&rel_str)
1451                    || pattern.matches(
1452                        path.file_name()
1453                            .unwrap_or_default()
1454                            .to_string_lossy()
1455                            .as_ref(),
1456                    )
1457                {
1458                    // Create destination directory
1459                    let dest_path = save_dir.join(rel_path);
1460                    if let Some(parent) = dest_path.parent() {
1461                        fs::create_dir_all(parent)?;
1462                    }
1463
1464                    // Copy the file
1465                    if let Err(e) = fs::copy(&path, &dest_path) {
1466                        warn!(src = %path.display(),
1467                            dest = %dest_path.display(),
1468                            error = %e,
1469                            "Failed to copy file"
1470                        );
1471                    } else {
1472                        debug!(src = %path.display(),
1473                            dest = %dest_path.display(),
1474                            "Saved wrkdir file"
1475                        );
1476                        *saved_count += 1;
1477                    }
1478                    break; // Don't copy same file multiple times
1479                }
1480            }
1481        }
1482    }
1483
1484    Ok(())
1485}
1486
1487/**
1488 * Commands sent between the manager and clients.
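 *
 * A typical exchange (illustrative): a worker announces itself with
 * `ClientReady`, and the manager answers with `JobData` when a package is
 * ready to build, `ComeBackLater` when every pending package is still
 * blocked on a dependency, or `Quit` when nothing remains. While building,
 * the worker streams `StageUpdate` and `OutputLines` messages, and finishes
 * with `JobSuccess`, `JobFailed`, `JobSkipped`, or `JobError`.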
1489 */
1490#[derive(Debug)]
1491enum ChannelCommand {
1492    /**
1493     * Client (with specified identifier) indicating they are ready for work.
1494     */
1495    ClientReady(usize),
1496    /**
1497     * Manager has no work available at the moment; the client should try again later.
1498     */
1499    ComeBackLater,
1500    /**
1501     * Manager directing a client to build a specific package.
1502     */
1503    JobData(Box<PackageBuild>),
1504    /**
1505     * Client returning a successful package build with duration.
1506     */
1507    JobSuccess(PkgName, Duration),
1508    /**
1509     * Client returning a failed package build with duration.
1510     */
1511    JobFailed(PkgName, Duration),
1512    /**
1513     * Client returning a skipped package (up-to-date).
1514     */
1515    JobSkipped(PkgName),
1516    /**
1517     * Client returning an error during the package build.
1518     */
1519    JobError((PkgName, Duration, anyhow::Error)),
1520    /**
1521     * Manager directing a client to quit.
1522     */
1523    Quit,
1524    /**
1525     * Shutdown signal - workers should stop immediately.
1526     */
1527    Shutdown,
1528    /**
1529     * Client reporting a stage update for a build.
1530     */
1531    StageUpdate(usize, Option<String>),
1532    /**
1533     * Client reporting output lines from a build.
1534     */
1535    OutputLines(usize, Vec<String>),
1536}
1537
1538/**
1539 * Return the current build job status.
1540 */
1541#[derive(Debug)]
1542enum BuildStatus {
1543    /**
1544     * The next package ordered by priority is available for building.
1545     */
1546    Available(PkgName),
1547    /**
1548     * No packages are currently available for building, i.e. all remaining
1549     * packages have at least one dependency that is still unavailable.
1550     */
1551    NoneAvailable,
1552    /**
1553     * All package builds have been completed.
1554     */
1555    Done,
1556}
1557
1558#[derive(Clone, Debug)]
1559struct BuildJobs {
1560    scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1561    incoming: HashMap<PkgName, HashSet<PkgName>>,
1562    /// Reverse dependency map: package -> packages that depend on it.
1563    /// Precomputed for O(1) lookup in mark_failure instead of O(n) scan.
1564    reverse_deps: HashMap<PkgName, HashSet<PkgName>>,
1565    /// Effective weight: package's PBULK_WEIGHT + sum of weights of all
1566    /// transitive dependents. Precomputed for efficient build ordering.
1567    effective_weights: HashMap<PkgName, usize>,
1568    running: HashSet<PkgName>,
1569    done: HashSet<PkgName>,
1570    failed: HashSet<PkgName>,
1571    results: Vec<BuildResult>,
1572    logdir: PathBuf,
1573}
1574
1575impl BuildJobs {
1576    /**
1577     * Mark a package as successful and remove it from pending dependencies.
1578     */
1579    fn mark_success(&mut self, pkgname: &PkgName, duration: Duration) {
1580        self.mark_done(pkgname, BuildOutcome::Success, duration);
1581    }
1582
1583    fn mark_up_to_date(&mut self, pkgname: &PkgName) {
1584        self.mark_done(pkgname, BuildOutcome::UpToDate, Duration::ZERO);
1585    }
1586
1587    /**
1588     * Mark a package as done and remove it from pending dependencies.
1589     */
1590    fn mark_done(&mut self, pkgname: &PkgName, outcome: BuildOutcome, duration: Duration) {
1591        /*
1592         * Remove the package from the pending-dependency set of every
1593         * package that still lists it.  Once a package has no outstanding
1594         * dependencies it is ready for building.
1595         */
1596        for dep in self.incoming.values_mut() {
1597            if dep.contains(pkgname) {
1598                dep.remove(pkgname);
1599            }
1600        }
1601        /*
1602         * The package was already removed from "incoming" when it started
1603         * building, so we only need to add it to "done".
1604         */
1605        self.done.insert(pkgname.clone());
1606
1607        // Record the result
1608        let scanpkg = self.scanpkgs.get(pkgname);
1609        let log_dir = Some(self.logdir.join(pkgname.pkgname()));
1610        self.results.push(BuildResult {
1611            pkgname: pkgname.clone(),
1612            pkgpath: scanpkg.map(|s| s.pkgpath.clone()),
1613            outcome,
1614            duration,
1615            log_dir,
1616        });
1617    }
1618
1619    /**
1620     * Recursively mark a package and its dependents as failed.
1621     */
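    /**
     * The traversal below only consults the precomputed `reverse_deps` map.
     * As a stand-alone sketch of the same worklist idea, with made-up
     * package names in place of real [`PkgName`]s:
     *
     * ```
     * use std::collections::{HashMap, HashSet};
     *
     * // reverse_deps: package -> packages that depend on it.
     * let mut reverse_deps: HashMap<&str, HashSet<&str>> = HashMap::new();
     * reverse_deps.insert("libfoo", ["libbar", "app"].into_iter().collect());
     * reverse_deps.insert("libbar", ["app"].into_iter().collect());
     *
     * // Starting from the failed package, collect everything that
     * // transitively depends on it.
     * let mut broken: HashSet<&str> = HashSet::new();
     * let mut to_check = vec!["libfoo"];
     * while let Some(bad) = to_check.pop() {
     *     if !broken.insert(bad) {
     *         continue; // already visited
     *     }
     *     if let Some(dependents) = reverse_deps.get(bad) {
     *         to_check.extend(dependents.iter().copied());
     *     }
     * }
     * assert_eq!(broken.len(), 3); // libfoo, libbar, and app are all marked
     * ```
     */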
1622    fn mark_failure(&mut self, pkgname: &PkgName, duration: Duration) {
1623        trace!(pkgname = %pkgname.pkgname(), "mark_failure called");
1624        let start = std::time::Instant::now();
1625        let mut broken: HashSet<PkgName> = HashSet::new();
1626        let mut to_check: Vec<PkgName> = vec![];
1627        to_check.push(pkgname.clone());
1628        /*
1629         * Starting with the original failed package, recursively loop through
1630         * adding any packages that depend on it, adding them to broken.
1631         * Uses precomputed reverse_deps for O(1) lookup instead of O(n) scan.
1632         */
1633        loop {
1634            /* No packages left to check, we're done. */
1635            let Some(badpkg) = to_check.pop() else {
1636                break;
1637            };
1638            /* Already checked this package. */
1639            if broken.contains(&badpkg) {
1640                continue;
1641            }
1642            /* Add all packages that depend on this one. */
1643            if let Some(dependents) = self.reverse_deps.get(&badpkg) {
1644                for pkg in dependents {
1645                    to_check.push(pkg.clone());
1646                }
1647            }
1648            broken.insert(badpkg);
1649        }
1650        trace!(pkgname = %pkgname.pkgname(), broken_count = broken.len(), elapsed_ms = start.elapsed().as_millis(), "mark_failure found broken packages");
1651        /*
1652         * We now have a full HashSet of affected packages.  Remove them from
1653         * incoming and move them to failed.  The original failed package has
1654         * already been removed from incoming; .remove() is simply a no-op in
1655         * that case.
1656         */
1657        let is_original = |p: &PkgName| p == pkgname;
1658        for pkg in broken {
1659            self.incoming.remove(&pkg);
1660            self.failed.insert(pkg.clone());
1661
1662            // Record the result
1663            let scanpkg = self.scanpkgs.get(&pkg);
1664            let log_dir = Some(self.logdir.join(pkg.pkgname()));
1665            let (outcome, dur) = if is_original(&pkg) {
1666                (BuildOutcome::Failed("Build failed".to_string()), duration)
1667            } else {
1668                (
1669                    BuildOutcome::Skipped(SkipReason::IndirectFail(format!(
1670                        "dependency {} failed",
1671                        pkgname.pkgname()
1672                    ))),
1673                    Duration::ZERO,
1674                )
1675            };
1676            self.results.push(BuildResult {
1677                pkgname: pkg,
1678                pkgpath: scanpkg.map(|s| s.pkgpath.clone()),
1679                outcome,
1680                duration: dur,
1681                log_dir,
1682            });
1683        }
1684        trace!(pkgname = %pkgname.pkgname(), total_results = self.results.len(), elapsed_ms = start.elapsed().as_millis(), "mark_failure completed");
1685    }
1686
1687    /**
1688     * Get next package status.
1689     */
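    /**
     * Selection is a filter-and-sort: take every package whose pending
     * dependency set is empty, then pick the one with the highest effective
     * weight (100 is the fallback weight).  A stand-alone sketch of that
     * rule on plain maps, with made-up package names and weights:
     *
     * ```
     * use std::collections::{HashMap, HashSet};
     *
     * let mut incoming: HashMap<&str, HashSet<&str>> = HashMap::new();
     * incoming.insert("libfoo", HashSet::new());                 // ready
     * incoming.insert("libbar", HashSet::new());                 // ready
     * incoming.insert("app", ["libfoo"].into_iter().collect());  // blocked
     *
     * let weights: HashMap<&str, usize> =
     *     [("libfoo", 350), ("libbar", 100), ("app", 250)].into_iter().collect();
     *
     * let mut ready: Vec<(&str, usize)> = incoming
     *     .iter()
     *     .filter(|(_, deps)| deps.is_empty())
     *     .map(|(pkg, _)| (*pkg, *weights.get(pkg).unwrap_or(&100)))
     *     .collect();
     * ready.sort_by_key(|&(_, w)| std::cmp::Reverse(w));
     *
     * assert_eq!(ready.first().map(|&(p, _)| p), Some("libfoo"));
     * ```
     */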
1690    fn get_next_build(&self) -> BuildStatus {
1691        /*
1692         * If incoming is empty then we're done.
1693         */
1694        if self.incoming.is_empty() {
1695            return BuildStatus::Done;
1696        }
1697
1698        /*
1699         * Get all packages in incoming that are cleared for building, ordered
1700         * by effective weight (own weight + transitive dependents' weights).
1701         */
1702        let mut pkgs: Vec<(PkgName, usize)> = self
1703            .incoming
1704            .iter()
1705            .filter(|(_, v)| v.is_empty())
1706            .map(|(k, _)| (k.clone(), *self.effective_weights.get(k).unwrap_or(&100)))
1707            .collect();
1708
1709        /*
1710         * If no packages are returned then we're still waiting for
1711         * dependencies to finish.  Clients should keep retrying until this
1712         * changes.
1713         */
1714        if pkgs.is_empty() {
1715            return BuildStatus::NoneAvailable;
1716        }
1717
1718        /*
1719         * Order packages by build weight and return the highest.
1720         */
1721        pkgs.sort_by_key(|&(_, weight)| std::cmp::Reverse(weight));
1722        BuildStatus::Available(pkgs[0].0.clone())
1723    }
1724}
1725
1726impl Build {
1727    pub fn new(
1728        config: &Config,
1729        pkgsrc_env: PkgsrcEnv,
1730        scope: SandboxScope,
1731        scanpkgs: IndexMap<PkgName, ResolvedPackage>,
1732        options: BuildOptions,
1733    ) -> Build {
1734        info!(
1735            package_count = scanpkgs.len(),
1736            sandbox_enabled = scope.enabled(),
1737            build_threads = config.build_threads(),
1738            ?options,
1739            "Creating new Build instance"
1740        );
1741        for (pkgname, index) in &scanpkgs {
1742            debug!(pkgname = %pkgname.pkgname(),
1743                pkgpath = ?index.pkgpath,
1744                depends_count = index.depends().len(),
1745                depends = ?index.depends().iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
1746                "Package in build queue"
1747            );
1748        }
1749        Build {
1750            config: config.clone(),
1751            pkgsrc_env,
1752            scope,
1753            scanpkgs,
1754            cached: IndexMap::new(),
1755            options,
1756        }
1757    }
1758
1759    /// Load cached build results from database.
1760    ///
1761    /// Returns the number of packages loaded from cache. Only loads results
1762    /// for packages that are in our build queue.
1763    pub fn load_cached_from_db(&mut self, db: &crate::db::Database) -> anyhow::Result<usize> {
1764        let mut count = 0;
1765        for pkgname in self.scanpkgs.keys() {
1766            if let Some(pkg) = db.get_package_by_name(pkgname.pkgname())? {
1767                if let Some(result) = db.get_build_result(pkg.id)? {
1768                    self.cached.insert(pkgname.clone(), result);
1769                    count += 1;
1770                }
1771            }
1772        }
1773        if count > 0 {
1774            info!(
1775                cached_count = count,
1776                "Loaded cached build results from database"
1777            );
1778        }
1779        Ok(count)
1780    }
1781
1782    pub fn start(
1783        &mut self,
1784        ctx: &RunContext,
1785        db: &crate::db::Database,
1786    ) -> anyhow::Result<BuildSummary> {
1787        let started = Instant::now();
1788
1789        info!(package_count = self.scanpkgs.len(), "Build::start() called");
1790
1791        let shutdown_flag = Arc::clone(&ctx.shutdown);
1792
1793        /*
1794         * Populate BuildJobs.
1795         */
1796        debug!("Populating BuildJobs from scanpkgs");
1797        let mut incoming: HashMap<PkgName, HashSet<PkgName>> = HashMap::new();
1798        let mut reverse_deps: HashMap<PkgName, HashSet<PkgName>> = HashMap::new();
1799        for (pkgname, index) in &self.scanpkgs {
1800            let mut deps: HashSet<PkgName> = HashSet::new();
1801            for dep in index.depends() {
1802                // Only track dependencies that are in our build queue.
1803                // Dependencies outside scanpkgs are assumed to already be
1804                // installed (from a previous build) or will cause the build
1805                // to fail at runtime.
1806                if !self.scanpkgs.contains_key(dep) {
1807                    continue;
1808                }
1809                deps.insert(dep.clone());
1810                // Build reverse dependency map: dep -> packages that depend on it
1811                reverse_deps
1812                    .entry(dep.clone())
1813                    .or_default()
1814                    .insert(pkgname.clone());
1815            }
1816            trace!(pkgname = %pkgname.pkgname(),
1817                deps_count = deps.len(),
1818                deps = ?deps.iter().map(|d| d.pkgname()).collect::<Vec<_>>(),
1819                "Adding package to incoming build queue"
1820            );
1821            incoming.insert(pkgname.clone(), deps);
1822        }
1823
1824        /*
1825         * Process cached build results.
1826         */
1827        let mut done: HashSet<PkgName> = HashSet::new();
1828        let mut failed: HashSet<PkgName> = HashSet::new();
1829        let results: Vec<BuildResult> = Vec::new();
1830        let mut cached_count = 0usize;
1831
1832        for (pkgname, result) in &self.cached {
1833            match result.outcome {
1834                BuildOutcome::Success | BuildOutcome::UpToDate => {
1835                    // Completed package - remove from incoming, add to done
1836                    incoming.remove(pkgname);
1837                    done.insert(pkgname.clone());
1838                    // Remove from deps of other packages
1839                    for deps in incoming.values_mut() {
1840                        deps.remove(pkgname);
1841                    }
1842                    // Don't add to results - already in database
1843                    cached_count += 1;
1844                }
1845                BuildOutcome::Failed(_) | BuildOutcome::Skipped(_) => {
1846                    // Failed package - remove from incoming, add to failed
1847                    incoming.remove(pkgname);
1848                    failed.insert(pkgname.clone());
1849                    // Don't add to results - already in database
1850                    cached_count += 1;
1851                }
1852            }
1853        }
1854
1855        /*
1856         * Propagate cached failures: any package in incoming that depends on
1857         * a failed package must also be marked as failed.
1858         */
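        /*
         * For example: if a cached run recorded libfoo as failed, and libbar
         * depends on libfoo while app depends on libbar, this loop pulls
         * both libbar and app out of incoming and marks them failed before
         * any workers are started.  (Package names here are illustrative.)
         */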
1859        loop {
1860            let mut newly_failed: Vec<PkgName> = Vec::new();
1861            for (pkgname, deps) in &incoming {
1862                for dep in deps {
1863                    if failed.contains(dep) {
1864                        newly_failed.push(pkgname.clone());
1865                        break;
1866                    }
1867                }
1868            }
1869            if newly_failed.is_empty() {
1870                break;
1871            }
1872            for pkgname in newly_failed {
1873                incoming.remove(&pkgname);
1874                failed.insert(pkgname);
1875            }
1876        }
1877
1878        if cached_count > 0 {
1879            println!("Loaded {} cached build results", cached_count);
1880        }
1881
1882        info!(
1883            incoming_count = incoming.len(),
1884            scanpkgs_count = self.scanpkgs.len(),
1885            cached_count = cached_count,
1886            "BuildJobs populated"
1887        );
1888
1889        if incoming.is_empty() {
1890            return Ok(BuildSummary {
1891                duration: started.elapsed(),
1892                results,
1893                scanfail: Vec::new(),
1894            });
1895        }
1896
1897        // Only create sandboxes when there's actual work to do
1898        self.scope.ensure(self.config.build_threads())?;
1899
1900        /*
1901         * Compute effective weights for build ordering.  The effective weight
1902         * is the package's own PBULK_WEIGHT plus the sum of weights of all
1903         * packages that transitively depend on it.  This prioritises building
1904         * packages that unblock the most downstream work.
1905         */
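        /*
         * Illustrative numbers: if libA (own weight 100) is depended on by
         * libB (200) and app (50), and app also depends on libB, then the
         * pass below computes app = 50, libB = 200 + 50 = 250, and
         * libA = 100 + 250 + 50 = 400, so libA is scheduled ahead of
         * packages that unblock less work.
         */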
1906        let get_weight = |pkg: &PkgName| -> usize {
1907            self.scanpkgs
1908                .get(pkg)
1909                .and_then(|idx| idx.pbulk_weight())
1910                .and_then(|w| w.parse().ok())
1911                .unwrap_or(100)
1912        };
1913
1914        let mut effective_weights: HashMap<PkgName, usize> = HashMap::new();
1915        let mut pending: HashMap<&PkgName, usize> = incoming
1916            .keys()
1917            .map(|p| (p, reverse_deps.get(p).map_or(0, |s| s.len())))
1918            .collect();
1919        let mut queue: VecDeque<&PkgName> = pending
1920            .iter()
1921            .filter(|(_, c)| **c == 0)
1922            .map(|(&p, _)| p)
1923            .collect();
1924        while let Some(pkg) = queue.pop_front() {
1925            let mut total = get_weight(pkg);
1926            if let Some(dependents) = reverse_deps.get(pkg) {
1927                for dep in dependents {
1928                    total += effective_weights.get(dep).unwrap_or(&0);
1929                }
1930            }
1931            effective_weights.insert(pkg.clone(), total);
1932            for dep in incoming.get(pkg).iter().flat_map(|s| s.iter()) {
1933                if let Some(c) = pending.get_mut(dep) {
1934                    *c -= 1;
1935                    if *c == 0 {
1936                        queue.push_back(dep);
1937                    }
1938                }
1939            }
1940        }
1941
1942        let running: HashSet<PkgName> = HashSet::new();
1943        let logdir = self.config.logdir().clone();
1944        let jobs = BuildJobs {
1945            scanpkgs: self.scanpkgs.clone(),
1946            incoming,
1947            reverse_deps,
1948            effective_weights,
1949            running,
1950            done,
1951            failed,
1952            results,
1953            logdir,
1954        };
1955
1956        println!("Building packages...");
1957
1958        // Set up multi-line progress display using ratatui inline viewport
1959        let progress = Arc::new(Mutex::new(
1960            MultiProgress::new(
1961                "Building",
1962                "Built",
1963                self.scanpkgs.len(),
1964                self.config.build_threads(),
1965            )
1966            .expect("Failed to initialize progress display"),
1967        ));
1968
1969        // Mark cached packages in progress display
1970        if cached_count > 0 {
1971            if let Ok(mut p) = progress.lock() {
1972                p.state_mut().cached = cached_count;
1973            }
1974        }
1975
1976        // Flag to stop the refresh thread
1977        let stop_refresh = Arc::new(AtomicBool::new(false));
1978
1979        // Spawn a thread to periodically refresh the display (for timer updates)
1980        let progress_refresh = Arc::clone(&progress);
1981        let stop_flag = Arc::clone(&stop_refresh);
1982        let shutdown_for_refresh = Arc::clone(&shutdown_flag);
1983        let refresh_thread = std::thread::spawn(move || {
1984            while !stop_flag.load(Ordering::Relaxed) && !shutdown_for_refresh.load(Ordering::SeqCst)
1985            {
1986                // Poll outside lock to avoid blocking main thread
1987                let has_event = event::poll(REFRESH_INTERVAL).unwrap_or(false);
1988
1989                if let Ok(mut p) = progress_refresh.lock() {
1990                    if has_event {
1991                        let _ = p.handle_event();
1992                    }
1993                    let _ = p.render();
1994                }
1995            }
1996        });
1997
1998        /*
1999         * Configure a manager channel.  This is used for clients to indicate
2000         * to the manager that they are ready for work.
2001         */
2002        let (manager_tx, manager_rx) = mpsc::channel::<ChannelCommand>();
2003
2004        /*
2005         * Client threads.  Each client has its own channel to the manager,
2006         * with the client sending ready status on the manager channel, and
2007         * receiving instructions on its private channel.
2008         */
2009        let mut threads = vec![];
2010        let mut clients: HashMap<usize, Sender<ChannelCommand>> = HashMap::new();
2011        for i in 0..self.config.build_threads() {
2012            let (client_tx, client_rx) = mpsc::channel::<ChannelCommand>();
2013            clients.insert(i, client_tx);
2014            let manager_tx = manager_tx.clone();
2015            let shutdown_for_worker = Arc::clone(&shutdown_flag);
2016            let thread = std::thread::spawn(move || {
2017                loop {
2018                    if shutdown_for_worker.load(Ordering::SeqCst) {
2019                        break;
2020                    }
2021
2022                    // Use send() which can fail if receiver is dropped (manager shutdown)
2023                    if manager_tx.send(ChannelCommand::ClientReady(i)).is_err() {
2024                        break;
2025                    }
2026
2027                    let Ok(msg) = client_rx.recv() else {
2028                        break;
2029                    };
2030
2031                    match msg {
2032                        ChannelCommand::ComeBackLater => {
2033                            std::thread::sleep(WORKER_BACKOFF_INTERVAL);
2034                            continue;
2035                        }
2036                        ChannelCommand::JobData(pkg) => {
2037                            let pkgname = pkg.pkginfo.index.pkgname.clone();
2038                            let pkgpath = &pkg.pkginfo.pkgpath;
2039                            let span = info_span!(
2040                                "build",
2041                                sandbox_id = pkg.sandbox_id,
2042                                pkgpath = %pkgpath,
2043                                pkgname = %pkgname.pkgname(),
2044                            );
2045                            let _guard = span.enter();
2046
2047                            let build_start = Instant::now();
2048                            let result = pkg.build(&manager_tx);
2049                            let duration = build_start.elapsed();
2050                            trace!(
2051                                elapsed_ms = duration.as_millis(),
2052                                result = %result.as_ref().map_or("error".to_string(), |r| r.to_string()),
2053                                "Build finished"
2054                            );
2055
2056                            match result {
2057                                Ok(PackageBuildResult::Success) => {
2058                                    let _ = manager_tx
2059                                        .send(ChannelCommand::JobSuccess(pkgname, duration));
2060                                }
2061                                Ok(PackageBuildResult::Skipped) => {
2062                                    let _ = manager_tx.send(ChannelCommand::JobSkipped(pkgname));
2063                                }
2064                                Ok(PackageBuildResult::Failed) => {
2065                                    let _ = manager_tx
2066                                        .send(ChannelCommand::JobFailed(pkgname, duration));
2067                                }
2068                                Err(e) => {
2069                                    // Don't report errors caused by shutdown
2070                                    if !shutdown_for_worker.load(Ordering::SeqCst) {
2071                                        let _ = manager_tx
2072                                            .send(ChannelCommand::JobError((pkgname, duration, e)));
2073                                    }
2074                                }
2075                            }
2076
2077                            if shutdown_for_worker.load(Ordering::SeqCst) {
2078                                break;
2079                            }
2080                            continue;
2081                        }
2082                        ChannelCommand::Quit | ChannelCommand::Shutdown => {
2083                            break;
2084                        }
2085                        _ => todo!(),
2086                    }
2087                }
2088            });
2089            threads.push(thread);
2090        }
2091
2092        /*
2093         * Manager thread.  Read incoming commands from clients and reply
2094         * accordingly.  Returns the build results via a channel.
2095         */
2096        let session = Arc::new(BuildSession {
2097            config: self.config.clone(),
2098            pkgsrc_env: self.pkgsrc_env.clone(),
2099            sandbox: self.scope.sandbox().clone(),
2100            options: self.options.clone(),
2101            shutdown: Arc::clone(&shutdown_flag),
2102        });
2103        let progress_clone = Arc::clone(&progress);
2104        let shutdown_for_manager = Arc::clone(&shutdown_flag);
2105        let (results_tx, results_rx) = mpsc::channel::<Vec<BuildResult>>();
2106        let (interrupted_tx, interrupted_rx) = mpsc::channel::<bool>();
2107        // Channel for completed results to save immediately
2108        let (completed_tx, completed_rx) = mpsc::channel::<BuildResult>();
2109        let manager = std::thread::spawn(move || {
2110            let mut clients = clients.clone();
2111            let mut jobs = jobs.clone();
2112            let mut was_interrupted = false;
2113
2114            // Track which thread is building which package
2115            let mut thread_packages: HashMap<usize, PkgName> = HashMap::new();
2116
2117            loop {
2118                // Check shutdown flag periodically
2119                if shutdown_for_manager.load(Ordering::SeqCst) {
2120                    // Suppress all further output
2121                    if let Ok(mut p) = progress_clone.lock() {
2122                        p.state_mut().suppress();
2123                    }
2124                    // Send shutdown to all remaining clients
2125                    for (_, client) in clients.drain() {
2126                        let _ = client.send(ChannelCommand::Shutdown);
2127                    }
2128                    was_interrupted = true;
2129                    break;
2130                }
2131
2132                let command = match manager_rx.recv_timeout(SHUTDOWN_POLL_INTERVAL) {
2133                    Ok(cmd) => cmd,
2134                    Err(mpsc::RecvTimeoutError::Timeout) => continue,
2135                    Err(mpsc::RecvTimeoutError::Disconnected) => break,
2136                };
2137
2138                match command {
2139                    ChannelCommand::ClientReady(c) => {
2140                        let client = clients.get(&c).unwrap();
2141                        match jobs.get_next_build() {
2142                            BuildStatus::Available(pkg) => {
2143                                let pkginfo = jobs.scanpkgs.get(&pkg).unwrap();
2144                                jobs.incoming.remove(&pkg);
2145                                jobs.running.insert(pkg.clone());
2146
2147                                // Update thread progress
2148                                thread_packages.insert(c, pkg.clone());
2149                                if let Ok(mut p) = progress_clone.lock() {
2150                                    p.clear_output_buffer(c);
2151                                    p.state_mut().set_worker_active(c, pkg.pkgname());
2152                                    let _ = p.render();
2153                                }
2154
2155                                let _ =
2156                                    client.send(ChannelCommand::JobData(Box::new(PackageBuild {
2157                                        session: Arc::clone(&session),
2158                                        sandbox_id: c,
2159                                        pkginfo: pkginfo.clone(),
2160                                    })));
2161                            }
2162                            BuildStatus::NoneAvailable => {
2163                                if let Ok(mut p) = progress_clone.lock() {
2164                                    p.clear_output_buffer(c);
2165                                    p.state_mut().set_worker_idle(c);
2166                                    let _ = p.render();
2167                                }
2168                                let _ = client.send(ChannelCommand::ComeBackLater);
2169                            }
2170                            BuildStatus::Done => {
2171                                if let Ok(mut p) = progress_clone.lock() {
2172                                    p.clear_output_buffer(c);
2173                                    p.state_mut().set_worker_idle(c);
2174                                    let _ = p.render();
2175                                }
2176                                let _ = client.send(ChannelCommand::Quit);
2177                                clients.remove(&c);
2178                                if clients.is_empty() {
2179                                    break;
2180                                }
2181                            }
2182                        };
2183                    }
2184                    ChannelCommand::JobSuccess(pkgname, duration) => {
2185                        jobs.mark_success(&pkgname, duration);
2186                        jobs.running.remove(&pkgname);
2187
2188                        // Send result for immediate saving
2189                        if let Some(result) = jobs.results.last() {
2190                            let _ = completed_tx.send(result.clone());
2191                        }
2192
2193                        // Find which thread completed and mark idle
2194                        if let Ok(mut p) = progress_clone.lock() {
2195                            let _ = p.print_status(&format!(
2196                                "       Built {} ({})",
2197                                pkgname.pkgname(),
2198                                format_duration(duration)
2199                            ));
2200                            p.state_mut().increment_completed();
2201                            for (tid, pkg) in &thread_packages {
2202                                if pkg == &pkgname {
2203                                    p.clear_output_buffer(*tid);
2204                                    p.state_mut().set_worker_idle(*tid);
2205                                    break;
2206                                }
2207                            }
2208                            let _ = p.render();
2209                        }
2210                    }
2211                    ChannelCommand::JobSkipped(pkgname) => {
2212                        jobs.mark_up_to_date(&pkgname);
2213                        jobs.running.remove(&pkgname);
2214
2215                        // Send result for immediate saving
2216                        if let Some(result) = jobs.results.last() {
2217                            let _ = completed_tx.send(result.clone());
2218                        }
2219
2220                        // Find which thread completed and mark idle
2221                        if let Ok(mut p) = progress_clone.lock() {
2222                            let _ = p.print_status(&format!(
2223                                "     Skipped {} (up-to-date)",
2224                                pkgname.pkgname()
2225                            ));
2226                            p.state_mut().increment_skipped();
2227                            for (tid, pkg) in &thread_packages {
2228                                if pkg == &pkgname {
2229                                    p.clear_output_buffer(*tid);
2230                                    p.state_mut().set_worker_idle(*tid);
2231                                    break;
2232                                }
2233                            }
2234                            let _ = p.render();
2235                        }
2236                    }
2237                    ChannelCommand::JobFailed(pkgname, duration) => {
2238                        let results_before = jobs.results.len();
2239                        jobs.mark_failure(&pkgname, duration);
2240                        jobs.running.remove(&pkgname);
2241
2242                        // Send all new results for immediate saving
2243                        for result in jobs.results.iter().skip(results_before) {
2244                            let _ = completed_tx.send(result.clone());
2245                        }
2246
2247                        // Find which thread failed and mark idle
2248                        if let Ok(mut p) = progress_clone.lock() {
2249                            let _ = p.print_status(&format!(
2250                                "      Failed {} ({})",
2251                                pkgname.pkgname(),
2252                                format_duration(duration)
2253                            ));
2254                            p.state_mut().increment_failed();
2255                            for (tid, pkg) in &thread_packages {
2256                                if pkg == &pkgname {
2257                                    p.clear_output_buffer(*tid);
2258                                    p.state_mut().set_worker_idle(*tid);
2259                                    break;
2260                                }
2261                            }
2262                            let _ = p.render();
2263                        }
2264                    }
2265                    ChannelCommand::JobError((pkgname, duration, e)) => {
2266                        let results_before = jobs.results.len();
2267                        jobs.mark_failure(&pkgname, duration);
2268                        jobs.running.remove(&pkgname);
2269
2270                        // Send all new results for immediate saving
2271                        for result in jobs.results.iter().skip(results_before) {
2272                            let _ = completed_tx.send(result.clone());
2273                        }
2274
2275                        // Find which thread errored and mark idle
2276                        if let Ok(mut p) = progress_clone.lock() {
2277                            let _ = p.print_status(&format!(
2278                                "      Failed {} ({})",
2279                                pkgname.pkgname(),
2280                                format_duration(duration)
2281                            ));
2282                            p.state_mut().increment_failed();
2283                            for (tid, pkg) in &thread_packages {
2284                                if pkg == &pkgname {
2285                                    p.clear_output_buffer(*tid);
2286                                    p.state_mut().set_worker_idle(*tid);
2287                                    break;
2288                                }
2289                            }
2290                            let _ = p.render();
2291                        }
2292                        tracing::error!(error = %e, pkgname = %pkgname.pkgname(), "Build error");
2293                    }
2294                    ChannelCommand::StageUpdate(tid, stage) => {
2295                        if let Ok(mut p) = progress_clone.lock() {
2296                            p.state_mut().set_worker_stage(tid, stage.as_deref());
2297                            let _ = p.render();
2298                        }
2299                    }
2300                    ChannelCommand::OutputLines(tid, lines) => {
2301                        if let Ok(mut p) = progress_clone.lock() {
2302                            if let Some(buf) = p.output_buffer_mut(tid) {
2303                                for line in lines {
2304                                    buf.push(line);
2305                                }
2306                            }
2307                        }
2308                    }
2309                    _ => {}
2310                }
2311            }
2312
2313            // Send results and interrupted status back
2314            debug!(
2315                result_count = jobs.results.len(),
2316                "Manager sending results back"
2317            );
2318            let _ = results_tx.send(jobs.results);
2319            let _ = interrupted_tx.send(was_interrupted);
2320        });
2321
2322        threads.push(manager);
2323        debug!("Waiting for worker threads to complete");
2324        let join_start = Instant::now();
2325        for thread in threads {
2326            thread.join().expect("thread panicked");
2327        }
2328        debug!(
2329            elapsed_ms = join_start.elapsed().as_millis(),
2330            "Worker threads completed"
2331        );
2332
2333        // Save all completed results to database.
2334        // Important: We save results even on interrupt - these are builds that
2335        // COMPLETED before the interrupt, and should be preserved. Only builds
2336        // that were in-progress when interrupted are excluded (they never sent
2337        // a result to the channel).
2338        let mut saved_count = 0;
2339        while let Ok(result) = completed_rx.try_recv() {
2340            if let Err(e) = db.store_build_by_name(&result) {
2341                warn!(
2342                    pkgname = %result.pkgname.pkgname(),
2343                    error = %e,
2344                    "Failed to save build result"
2345                );
2346            } else {
2347                saved_count += 1;
2348            }
2349        }
2350        if saved_count > 0 {
2351            debug!(saved_count, "Saved build results to database");
2352        }
2353
2354        // Stop the refresh thread
2355        stop_refresh.store(true, Ordering::Relaxed);
2356        let _ = refresh_thread.join();
2357
2358        // Check if we were interrupted
2359        let was_interrupted = interrupted_rx.recv().unwrap_or(false);
2360
2361        // Print appropriate summary
2362        if let Ok(mut p) = progress.lock() {
2363            if was_interrupted {
2364                let _ = p.finish_interrupted();
2365            } else {
2366                let _ = p.finish();
2367            }
2368        }
2369
2370        // Collect results from manager
2371        debug!("Collecting results from manager");
2372        let results = results_rx.recv().unwrap_or_default();
2373        debug!(
2374            result_count = results.len(),
2375            "Collected results from manager"
2376        );
2377        let summary = BuildSummary {
2378            duration: started.elapsed(),
2379            results,
2380            scanfail: Vec::new(),
2381        };
2382
2383        // Guard is dropped when Build goes out of scope, destroying sandboxes
2384        Ok(summary)
2385    }
2386}