use crate::args::Arg;
use crate::chroot::Chroot;
use crate::clean::clean_untracked;
use crate::completion::update_aur_cache;
use crate::config::{Config, LocalRepos, Mode, Op, Sign, YesNoAll, YesNoAsk};
use crate::devel::{fetch_devel_info, load_devel_info, save_devel_info, DevelInfo};
use crate::download::{self, Bases};
use crate::fmt::{color_repo, print_indent, print_install, print_install_verbose};
use crate::keys::check_pgp_keys;
use crate::print_error;
use crate::upgrade::get_upgrades;
use crate::util::{ask, get_provider, repo_aur_pkgs, split_repo_aur_targets, NumberMenu};
use crate::{args, exec, news, printtr, repo, RaurHandle};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::env::var;
use std::ffi::OsStr;
use std::fs::{read_dir, read_link, OpenOptions};
use std::io::{stdin, stdout, BufRead, Read, Write};
use std::path::Path;
use std::process::{Command, Stdio};
use std::sync::atomic::Ordering;
use alpm::{Alpm, Depend, Version};
use alpm_utils::depends::{satisfies_dep, satisfies_provide, satisfies_provide_nover};
use alpm_utils::{DbListExt, Targ};
use ansi_term::Style;
use anyhow::{bail, ensure, Context, Result};
use args::Args;
use aur_depends::{Actions, AurPackage, Base, Conflict, Flags, RepoPackage, Resolver};
use log::debug;
use raur::Cache;
use srcinfo::Srcinfo;
use tr::tr;
/// Outcome of preparing a build run.
#[derive(SmartDefault, PartialEq, Eq, Debug)]
enum Status {
    /// Preparation succeeded; proceed with building/installing.
    #[default]
    Ok,
    /// Nothing needs to be built or installed.
    NothingToDo,
    /// Abort the operation and exit with the contained status code.
    Stop(i32),
}
/// State accumulated for one build/install run, threaded between
/// `prepare_build`, `build_install_pkgbuilds` and `build_cleanup`.
#[derive(SmartDefault, Debug)]
struct BuildInfo {
    // Result of the install steps performed so far; defaults to Ok(0).
    #[default(Ok(0))]
    err: Result<i32>,
    // Overall status decided during prepare_build.
    status: Status,
    // Package bases that still need to be built.
    build: Vec<aur_depends::Base>,
    // Parsed .SRCINFO files keyed by pkgbase.
    srcinfos: HashMap<String, Srcinfo>,
    // pkgbase -> name of the local repo the built package should be added to.
    aur_repos: HashMap<String, String>,
    // AUR package bases being handled in this run.
    bases: Bases,
    // Names of packages involved in (inner) conflicts; used to decide when
    // pacman needs interactive confirmation.
    conflicts: HashSet<String>,
    // Make dependencies to remove once the install finishes.
    remove_make: Vec<String>,
    // Bases whose build failed (reported by build_cleanup).
    failed: Vec<aur_depends::Base>,
    // True once a queued install contains a conflicting package.
    conflict: bool,
}
impl BuildInfo {
    /// Build info signalling that there is nothing to build or install.
    fn nothing_to_do() -> Self {
        Self {
            status: Status::NothingToDo,
            ..Self::default()
        }
    }

    /// Build info signalling that the operation should stop with exit code 1.
    fn stop() -> Self {
        Self {
            status: Status::Stop(1),
            ..Self::default()
        }
    }
}
/// Run pacman with only the refresh (`-y`) flags and no targets, honouring
/// how many times `-y` was passed on the command line.
fn early_refresh(config: &Config) -> Result<()> {
    let mut args = config.pacman_globals();
    let refresh_count = config.args.count("y", "refresh");
    (0..refresh_count).for_each(|_| {
        args.arg("y");
    });
    args.targets.clear();
    exec::pacman(config, &args)?.success()?;
    Ok(())
}
/// Hand the given targets straight to pacman using the configured sync args.
fn early_pacman(config: &Config, targets: Vec<String>) -> Result<()> {
    let mut args = config.pacman_args();
    args.targets.clear();
    args.targets(targets.iter().map(String::as_str));
    exec::pacman(config, &args)?.success()?;
    Ok(())
}
/// Whether the repo sysupgrade/refresh should be deferred until after the
/// AUR packages are built (only in chroot mode with repos enabled).
fn upgrade_later(config: &Config) -> bool {
    if config.mode == Mode::Aur || !config.chroot {
        return false;
    }
    config.args.has_arg("u", "sysupgrade") || config.args.has_arg("y", "refresh")
}
/// Copy the pacman flags that must survive into a nested pacman invocation
/// (`--overwrite`, `--ignore`, `--assume-installed`, `--dbonly`, `-d`).
pub fn copy_sync_args<'a>(config: &'a Config, args: &mut Args<&'a str>) {
    for a in &config.args.args {
        if matches!(&*a.key, "overwrite" | "ignore") {
            args.push(&a.key, a.value.as_deref());
        }
    }

    for assume in &config.assume_installed {
        args.push("assume-installed", Some(assume.as_str()));
    }

    if config.args.has_arg("dbonly", "dbonly") {
        args.arg("dbonly");
    }

    let nodeps = config.args.count("d", "nodeps");
    for _ in 0..nodeps {
        args.arg("d");
    }
}
/// Build (and optionally install) the PKGBUILD in the current directory.
///
/// Generates the srcinfo via `makepkg --printsrcinfo`, resolves its depends
/// and makedepends, builds/installs those first, then downloads sources and
/// builds the local package (in a chroot when configured), signs the
/// artifacts, adds them to the default local repo if one exists, and finally
/// installs them with pacman when `config.install` is set.
/// Returns the exit code to propagate to the caller.
pub async fn build_pkgbuild(config: &mut Config) -> Result<i32> {
    let mut cache = Cache::new();
    let mut ret = 0;
    let c = config.color;
    // First configured architecture; used to filter arch-specific depends.
    let arch = config
        .alpm
        .architectures()
        .first()
        .context(tr!("no architecture"))?;
    let dir = std::env::current_dir()?;
    if nix::unistd::getuid().is_root() {
        bail!(tr!("can't build package as root"));
    }
    // Generate and parse the .SRCINFO for the PKGBUILD in the current dir.
    let output = exec::makepkg_output(config, dir.as_path(), &["--printsrcinfo"])?;
    let srcinfo = Srcinfo::parse_buf(output.stdout.as_slice())
        .context(tr!("failed to parse srcinfo generated by makepkg"))?;
    // Runtime depends of every split package that apply to this arch.
    let deps = srcinfo
        .pkgs
        .iter()
        .flat_map(|pkg| pkg.depends.iter().filter(|d| d.supports(arch)))
        .flat_map(|av| &av.vec)
        .collect::<Vec<_>>();
    // Makedepends of the base that apply to this arch.
    let make_deps = srcinfo
        .base
        .makedepends
        .iter()
        .filter(|d| d.supports(arch))
        .flat_map(|av| &av.vec)
        .collect::<Vec<_>>();
    // Keep sudo alive in the background so long builds don't re-prompt.
    if !config.sudo_loop.is_empty() {
        let mut flags = config.sudo_flags.clone();
        flags.extend(config.sudo_loop.clone());
        exec::spawn_sudo(config.sudo_bin.clone(), flags)?;
    }
    config.set_op_args_globals(Op::Sync);
    let flags = flags(config);
    let resolver = resolver(config, &config.alpm, &config.raur, &mut cache, flags);
    println!(
        "{} {}",
        c.action.paint("::"),
        c.bold.paint(tr!("Resolving dependencies..."))
    );
    let actions = resolver.resolve_depends(&deps, &make_deps).await?;
    debug!("{:#?}", actions);
    // Resolve/confirm the dependency build; Some(&srcinfo) lets missing deps
    // satisfied by this very PKGBUILD be filtered out in check_actions.
    let mut build_info =
        prepare_build(config, HashMap::new(), &mut cache, actions, Some(&srcinfo)).await?;
    if let Status::Stop(ret) = build_info.status {
        return Ok(ret);
    }
    // Build and install the AUR dependencies before the local package.
    if !build_info.build.is_empty() {
        if build_info.err.is_ok() {
            let err = build_install_pkgbuilds(config, &mut build_info).await;
            build_info.err = err;
        }
        if build_info.err.is_ok() && config.chroot {
            let err = chroot_install(config, &build_info, &[]);
            build_info.err = err;
        }
        ret = build_cleanup(config, &build_info)?;
    }
    build_info.err?;
    let chroot = chroot(config);
    let (_, repo) = repo::repo_aur_dbs(config);
    let default_repo = repo.first();
    // When a local repo exists, make makepkg drop packages straight into it.
    if let Some(repo) = default_repo {
        let file = repo::file(&repo).unwrap();
        repo::init(config, file, repo.name())?;
        std::env::set_var("PKGDEST", file);
    }
    if config.chroot {
        if !chroot.exists() {
            chroot.create(config, &["base-devel"])?;
        } else {
            chroot.update()?;
        }
    }
    // Download + extract sources (chroot: one step; otherwise verifysource
    // then an extract-only makepkg pass).
    if config.chroot {
        chroot
            .build(&dir, &["-c"], &["-ofA"])
            .context(tr!("failed to download sources"))?;
    } else {
        let mut args = vec!["--verifysource", "-Af"];
        if !config.keep_src {
            args.push("-Cc");
        }
        exec::makepkg(config, &dir, &args)?
            .success()
            .context(tr!("failed to download sources"))?;
        let mut args = vec!["-ofA"];
        if !config.keep_src {
            args.push("-C");
        }
        exec::makepkg(config, &dir, &args)?
            .success()
            .context(tr!("failed to build"))?;
    }
    printtr!("parsing pkg list...");
    let (mut pkgdest, _) = parse_package_list(config, &dir)?;
    // Rebuild when any expected artifact is missing or rebuild is forced.
    let build = !pkgdest.values().all(|p| Path::new(p).exists())
        || matches!(config.rebuild, YesNoAll::Yes | YesNoAll::All);
    if build {
        if config.chroot {
            chroot
                .build(
                    &dir,
                    &[],
                    &["-feA", "--noconfirm", "--noprepare", "--holdver"],
                )
                .context(tr!("failed to build"))?;
        } else {
            let mut args = vec!["-feA", "--noconfirm", "--noprepare", "--holdver"];
            if !config.keep_src {
                args.push("-c");
            }
            exec::makepkg(config, &dir, &args)?
                .success()
                .context(tr!("failed to build"))?;
        }
    } else {
        println!(
            "{} {}",
            c.warning.paint("::"),
            tr!(
                "{}-{} is up to date -- skipping build",
                srcinfo.base.pkgbase,
                srcinfo.base.pkgver,
            )
        )
    }
    // Drop entries whose package file was never produced (e.g. debug pkgs).
    pkgdest.retain(|_, v| Path::new(v).exists());
    let paths = pkgdest.values().map(|s| s.as_str()).collect::<Vec<_>>();
    sign_pkg(config, &paths, build)?;
    // Add the built packages to a local repo: the repo the pkgbase already
    // lives in if known, otherwise the default local repo.
    let db = repo::repo_aur_dbs(config).1;
    if let Some(default_repo) = default_repo {
        let pkgs = pkgdest.values().collect::<Vec<_>>();
        let r = if let Some(repo) = db
            .pkg(srcinfo.base.pkgbase.as_str())
            .ok()
            .and_then(|pkg| pkg.db())
        {
            repo
        } else {
            default_repo
        };
        let path = repo::file(&r).unwrap();
        let name = r.name().to_string();
        drop(repo);
        repo::add(config, path, &name, &pkgs)?;
        drop(db);
        repo::refresh(config, &[name])?;
    }
    if config.install {
        let mut args = config.pacman_globals();
        copy_sync_args(config, &mut args);
        // chroot builds install by name from the local repo (-S); otherwise
        // install the package files directly (-U).
        if config.chroot {
            args.op("sync");
            args.targets = pkgdest.keys().map(|s| s.as_str()).collect();
        } else {
            args.op("upgrade");
            args.targets = pkgdest.values().map(|s| s.as_str()).collect();
        }
        if config.args.has_arg("asexplicit", "asexplicit") {
            args.arg("asexplicit");
        } else if config.args.has_arg("asdeps", "asdeps") {
            args.arg("asdeps");
        }
        let code = exec::pacman(config, &args)?.code();
        return Ok(code);
    }
    Ok(ret)
}
/// Top-level handler for `-S`/install operations.
///
/// Splits targets into repo and AUR targets, optionally shows news and runs
/// pacman early for pure-repo work, computes sysupgrades, resolves the
/// remaining targets, then builds/installs AUR packages via `prepare_build`
/// and `build_install_pkgbuilds`. Returns the exit code to propagate.
pub async fn install(config: &mut Config, targets_str: &[String]) -> Result<i32> {
    let mut cache = Cache::new();
    let flags = flags(config);
    let c = config.color;
    // Keep sudo alive in the background so long builds don't re-prompt.
    if !config.sudo_loop.is_empty() {
        let mut flags = config.sudo_flags.clone();
        flags.extend(config.sudo_loop.clone());
        exec::spawn_sudo(config.sudo_bin.clone(), flags)?;
    }
    // Show Arch news before a sysupgrade; failure to fetch is non-fatal.
    if config.news_on_upgrade && config.args.has_arg("u", "sysupgrade") {
        let mut ret = 0;
        match news::news(config).await {
            Ok(v) => ret = v,
            Err(err) => eprintln!(
                "{} {}: {}",
                c.error.paint(tr!("error:")),
                tr!("could not get news",),
                err
            ),
        }
        if ret != 1 && !ask(config, &tr!("Proceed with installation?"), true) {
            return Ok(1);
        }
    }
    config.set_op_args_globals(Op::Sync);
    config.targets = targets_str.to_vec();
    config.args.targets = config.targets.clone();
    let targets = args::parse_targets(targets_str);
    let (mut repo_targets, aur_targets) = split_repo_aur_targets(config, &targets)?;
    let mut done_something = false;
    let mut ran_pacman = false;
    if targets_str.is_empty()
        && !config.args.has_arg("u", "sysupgrade")
        && !config.args.has_arg("y", "refresh")
    {
        bail!(tr!("no targets specified (use -h for help)"));
    }
    // Hand repo work to pacman up front unless we're doing a combined
    // upgrade or building in a chroot.
    if config.mode != Mode::Aur {
        if config.combined_upgrade {
            if config.args.has_arg("y", "refresh") {
                early_refresh(config)?;
            }
        } else if !config.chroot
            && ((config.args.has_arg("y", "refresh")
                || config.args.has_arg("u", "sysupgrade")
                || !repo_targets.is_empty())
                || config.mode == Mode::Repo)
        {
            let targets = repo_targets.iter().map(|t| t.to_string()).collect();
            repo_targets.clear();
            done_something = true;
            ran_pacman = true;
            early_pacman(config, targets)?;
        }
    }
    if targets_str.is_empty()
        && !config.args.has_arg("u", "sysupgrade")
        && !config.args.has_arg("y", "refresh")
    {
        return Ok(0);
    }
    config.init_alpm()?;
    let mut resolver = resolver(config, &config.alpm, &config.raur, &mut cache, flags);
    // Calculate sysupgrades; skipped repo packages become --ignore args so
    // pacman does not pull them back in.
    let upgrades = if config.args.has_arg("u", "sysupgrade") {
        let upgrades = get_upgrades(config, &mut resolver).await?;
        for pkg in &upgrades.repo_skip {
            let arg = Arg {
                key: "ignore".to_string(),
                value: Some(pkg.to_string()),
            };
            config.args.args.push(arg);
        }
        // NOTE(review): this resets done_something even if early_pacman ran
        // above — looks intentional (upgrade menu supersedes it) but verify.
        done_something = false;
        upgrades
    } else {
        Default::default()
    };
    // Merge explicit targets with the packages kept by the upgrade menu.
    let mut targets = repo_targets;
    targets.extend(&aur_targets);
    targets.extend(upgrades.aur_keep.iter().map(|p| Targ {
        repo: Some(config.aur_namespace()),
        pkg: p,
    }));
    targets.extend(upgrades.repo_keep.iter().map(Targ::from));
    // Pure repo operation: no AUR involvement, just run pacman.
    if config.mode != Mode::Aur
        && aur_targets.is_empty()
        && upgrades.aur_keep.is_empty()
        && !ran_pacman
    {
        print_warnings(config, &cache, None);
        let mut args = config.pacman_args();
        let targets = targets.iter().map(|t| t.to_string()).collect::<Vec<_>>();
        args.targets = targets.iter().map(|s| s.as_str()).collect();
        if config.combined_upgrade {
            args.remove("y").remove("refresh");
        }
        if !args.targets.is_empty()
            || args.has_arg("u", "sysupgrade")
            || args.has_arg("y", "refresh")
        {
            let code = exec::pacman(config, &args)?.code();
            return Ok(code);
        }
        return Ok(0);
    }
    if targets.is_empty() && !upgrade_later(config) {
        print_warnings(config, &cache, None);
        if !done_something {
            printtr!(" there is nothing to do");
        }
        return Ok(0);
    }
    println!(
        "{} {}",
        c.action.paint("::"),
        c.bold.paint(tr!("Resolving dependencies..."))
    );
    let actions = resolver.resolve_targets(&targets).await?;
    debug!("{:#?}", actions);
    // Repo packages the user explicitly asked for; installed separately in
    // chroot mode via chroot_install.
    let repo_targs = actions
        .install
        .iter()
        .filter(|p| p.target)
        .map(|p| p.pkg.name().to_string())
        .collect::<Vec<_>>();
    let mut build_info =
        prepare_build(config, upgrades.aur_repos, &mut cache, actions, None).await?;
    if let Status::Stop(ret) = build_info.status {
        return Ok(ret);
    }
    if build_info.status == Status::NothingToDo && !upgrade_later(config) {
        printtr!(" there is nothing to do");
        return Ok(0);
    }
    if !build_info.build.is_empty() && build_info.err.is_ok() {
        let err = build_install_pkgbuilds(config, &mut build_info).await;
        build_info.err = err;
    }
    if build_info.err.is_ok() && config.chroot {
        let err = chroot_install(config, &build_info, &repo_targs);
        build_info.err = err;
    }
    let ret = build_cleanup(config, &build_info)?;
    build_info.err?;
    Ok(ret)
}
/// Validate resolved actions and prepare everything needed to build.
///
/// Checks conflicts/missing deps, prints the install summary, prompts for
/// review, downloads PKGBUILDs, runs the pre-build command, warns about
/// incompatible architectures, optionally fetches PGP keys, and installs the
/// plain repo packages (non-chroot). Returns a `BuildInfo` describing what
/// remains to be built, or one with a `NothingToDo`/`Stop` status.
async fn prepare_build(
    config: &Config,
    aur_repos: HashMap<String, String>,
    cache: &mut Cache,
    mut actions: Actions<'_>,
    srcinfo: Option<&Srcinfo>,
) -> Result<BuildInfo> {
    if !actions.build.is_empty() && nix::unistd::getuid().is_root() {
        bail!(tr!("can't install AUR package as root"));
    }
    let conflicts = check_actions(config, &mut actions, srcinfo)?;
    print_warnings(config, cache, Some(&actions));
    if actions.build.is_empty() && actions.install.is_empty() {
        return Ok(BuildInfo::nothing_to_do());
    }
    if config.pacman.verbose_pkg_lists {
        print_install_verbose(config, &actions);
    } else {
        print_install(config, &actions);
    }
    // Decide whether make deps get removed afterwards (never in chroot mode,
    // where they only exist inside the chroot).
    let has_make = if !config.chroot
        && (actions.iter_build_pkgs().any(|p| p.make) || actions.install.iter().any(|p| p.make))
    {
        if config.remove_make == YesNoAsk::Ask {
            ask(
                config,
                &tr!("Remove make dependencies after install?"),
                false,
            )
        } else {
            config.remove_make == YesNoAsk::Yes
        }
    } else {
        false
    };
    // With review enabled the "proceed" prompt comes later (after the diff);
    // otherwise confirm the installation right away.
    if !config.skip_review && !actions.build.is_empty() {
        if !ask(config, &tr!("Proceed to review?"), true) {
            return Ok(BuildInfo::stop());
        }
    } else if !ask(config, &tr!("Proceed with installation?"), true) {
        return Ok(BuildInfo::stop());
    }
    // Nothing to build: just install the repo packages and finish.
    if actions.build.is_empty() {
        let err = if !config.chroot {
            repo_install(config, &actions.install)
        } else {
            Ok(0)
        };
        let bi = BuildInfo {
            err,
            ..Default::default()
        };
        return Ok(bi);
    }
    let bases = actions.iter_build_pkgs().map(|p| p.pkg.clone()).collect();
    let srcinfos = download_pkgbuilds(config, &bases).await?;
    // Run the user's PreBuildCommand in each PKGBUILD dir, exporting
    // PKGBASE/VERSION for the duration.
    if let Some(ref pb_cmd) = config.pre_build_command {
        for base in &bases.bases {
            let dir = config.fetch.clone_dir.join(base.package_base());
            std::env::set_var("PKGBASE", base.package_base());
            std::env::set_var("VERSION", base.version());
            let mut cmd = Command::new("sh");
            cmd.current_dir(dir).arg("-c").arg(pb_cmd);
            exec::command(&mut cmd)?;
        }
        std::env::remove_var("PKGBASE");
        std::env::remove_var("VERSION");
    }
    if !config.skip_review {
        let ret = review(config, &actions)?;
        if ret != 0 {
            let mut bi = BuildInfo::stop();
            bi.status = Status::Stop(ret);
            return Ok(bi);
        }
    }
    let arch = config
        .alpm
        .architectures()
        .first()
        .context(tr!("no architecture"))?;
    // Packages that list neither "any" nor the local arch.
    let incompatible = srcinfos
        .values()
        .flat_map(|s| &s.pkgs)
        .filter(|p| !p.arch.iter().any(|a| a == "any") && !p.arch.iter().any(|a| a == arch))
        .collect::<Vec<_>>();
    if !incompatible.is_empty() {
        let c = config.color;
        println!(
            "{} {}",
            c.error.paint("::"),
            c.bold.paint(tr!(
                "The following packages are not compatible with your architecture:"
            ))
        );
        print!("    ");
        print_indent(
            Style::new(),
            0,
            4,
            config.cols,
            "  ",
            incompatible.iter().map(|i| i.pkgname.as_str()),
        );
        if !ask(
            config,
            &tr!("Would you like to try build them anyway?"),
            true,
        ) {
            return Ok(BuildInfo::stop());
        }
    }
    if config.pgp_fetch {
        check_pgp_keys(config, &bases, &srcinfos)?;
    }
    // Install repo dependencies now (non-chroot); chroot mode installs
    // everything later via chroot_install.
    let err = if !config.chroot {
        repo_install(config, &actions.install)
    } else {
        Ok(0)
    };
    update_aur_list(config);
    // Flatten outer + inner conflicts into one name set for do_install.
    let conflicts = conflicts
        .0
        .iter()
        .map(|c| c.pkg.clone())
        .chain(conflicts.1.iter().map(|c| c.pkg.clone()))
        .collect::<HashSet<_>>();
    let build = actions.build;
    let mut remove_make = Vec::new();
    if has_make {
        remove_make.extend(
            actions
                .install
                .iter()
                .filter(|p| p.make)
                .map(|p| p.pkg.name().to_string())
                .collect::<Vec<_>>(),
        );
        remove_make.extend(
            build
                .iter()
                .flat_map(|b| &b.pkgs)
                .filter(|p| p.make)
                .map(|p| p.pkg.name.clone()),
        );
    }
    Ok(BuildInfo {
        err,
        status: Status::Ok,
        build,
        srcinfos,
        aur_repos,
        bases,
        conflicts,
        remove_make,
        failed: Vec::new(),
        conflict: false,
    })
}
/// Post-build cleanup: remove make dependencies, clean untracked files from
/// build dirs when configured, and error out if any base failed to build.
/// Returns 1 if a cleanup step failed, 0 otherwise.
fn build_cleanup(config: &Config, bi: &BuildInfo) -> Result<i32> {
    let mut ret = 0;

    if !bi.remove_make.is_empty() {
        let mut args = config.pacman_globals();
        args.op("remove").arg("noconfirm");
        args.targets = bi.remove_make.iter().map(String::as_str).collect();
        if let Err(err) = exec::pacman(config, &args) {
            print_error(config.color.error, err);
            ret = 1;
        }
    }

    if config.clean_after {
        for base in &bi.build {
            let path = config.build_dir.join(base.package_base());
            if let Err(err) = clean_untracked(config, &path) {
                print_error(config.color.error, err);
                ret = 1;
            }
        }
    }

    if !bi.failed.is_empty() {
        let failed: Vec<_> = bi.failed.iter().map(ToString::to_string).collect();
        bail!(tr!("packages failed to build: {}", failed.join("  ")));
    }

    Ok(ret)
}
/// Download PKGBUILDs for all bases and return their parsed .SRCINFOs keyed
/// by pkgbase.
///
/// Pre-existing .SRCINFO files are parsed first so the downloader knows
/// which bases are already present; after downloading, any still-missing
/// srcinfo is parsed, and a base with no .SRCINFO at all is an error.
async fn download_pkgbuilds(config: &Config, bases: &Bases) -> Result<HashMap<String, Srcinfo>> {
    let mut srcinfos = HashMap::new();
    // First pass: parse whatever .SRCINFO files already exist. Parse errors
    // are ignored here; the base is treated as missing and handled below.
    for base in &bases.bases {
        let path = config.build_dir.join(base.package_base()).join(".SRCINFO");
        if path.exists() {
            let srcinfo = Srcinfo::parse_file(path);
            if let Ok(srcinfo) = srcinfo {
                srcinfos.insert(srcinfo.base.pkgbase.to_string(), srcinfo);
            }
        }
    }
    download::new_aur_pkgbuilds(config, bases, &srcinfos).await?;
    // Second pass: parse the srcinfos of freshly downloaded bases. This time
    // a parse failure is fatal since there is no other source for them.
    for base in &bases.bases {
        if srcinfos.contains_key(base.package_base()) {
            continue;
        }
        let path = config.build_dir.join(base.package_base()).join(".SRCINFO");
        if path.exists() {
            if let Entry::Vacant(vacant) = srcinfos.entry(base.package_base().to_string()) {
                let srcinfo = Srcinfo::parse_file(path)
                    .with_context(|| tr!("failed to parse srcinfo for '{}'", base))?;
                vacant.insert(srcinfo);
            }
        } else {
            bail!(tr!("could not find .SRCINFO for '{}'", base.package_base()));
        }
    }
    Ok(srcinfos)
}
/// Let the user review the fetched PKGBUILDs before building.
///
/// With a file manager configured, opens it on a view of the packages;
/// otherwise pipes diffs (for previously seen packages) and full file
/// contents (for new ones) through a pager, using bat for highlighting when
/// available. Returns 0 to proceed or 1 if the user aborted.
fn review<'a>(config: &Config, actions: &Actions<'a>) -> Result<i32> {
    let c = config.color;
    let pkgs = actions
        .build
        .iter()
        .map(|b| b.package_base())
        .collect::<Vec<_>>();
    if !config.no_confirm {
        if let Some(ref fm) = config.fm {
            // _view keeps the temp dir alive until the prompt is answered.
            let _view = file_manager(config, fm, &pkgs)?;
            if !ask(config, &tr!("Proceed with installation?"), true) {
                return Ok(1);
            }
            if config.save_changes {
                config.fetch.commit(&pkgs, "paru save changes")?;
            }
        } else {
            let unseen = config.fetch.unseen(&pkgs)?;
            let has_diff = config.fetch.has_diff(&unseen)?;
            // Anything to show: either diffs, or unseen pkgs with no diff
            // (whose files are printed in full).
            let printed = !has_diff.is_empty() || unseen.iter().any(|p| !has_diff.contains(p));
            let diffs = config.fetch.diff(&has_diff, config.color.enabled)?;
            if printed {
                // Pager: PagerCmd > $PARU_PAGER > $PAGER > less.
                let pager = config
                    .pager_cmd
                    .clone()
                    .or_else(|| var("PARU_PAGER").ok())
                    .or_else(|| var("PAGER").ok())
                    .unwrap_or_else(|| "less".to_string());
                // The pager may exit early; don't die on SIGPIPE meanwhile.
                exec::RAISE_SIGPIPE.store(false, Ordering::Relaxed);
                let mut command = Command::new("sh");
                if std::env::var("LESS").is_err() {
                    command.env("LESS", "SRXF");
                }
                let mut command = command
                    .arg("-c")
                    .arg(&pager)
                    .stdin(Stdio::piped())
                    .spawn()
                    .with_context(|| format!("{} {}", tr!("failed to run:"), pager))?;
                let mut stdin = command.stdin.take().unwrap();
                // Write failures are ignored throughout: the user may quit
                // the pager before all content is written.
                for diff in diffs {
                    let _ = stdin.write_all(diff.as_bytes());
                    let _ = stdin.write_all(b"\n\n\n");
                }
                let bat = config.color.enabled
                    && Command::new(&config.bat_bin).arg("-V").output().is_ok();
                let mut buf = Vec::new();
                // Unseen packages without a diff: dump every file in their
                // build dir (skipping .git and .SRCINFO).
                for pkg in &unseen {
                    if !has_diff.contains(pkg) {
                        let path = config.build_dir.join(pkg);
                        for file in read_dir(&path)
                            .with_context(|| tr!("failed to read dir: {}", path.display()))?
                        {
                            let file = file?;
                            if file.file_type()?.is_dir()
                                && file.path().file_name() == Some(OsStr::new(".git"))
                            {
                                continue;
                            }
                            if file.file_type()?.is_file()
                                && file.path().file_name() == Some(OsStr::new(".SRCINFO"))
                            {
                                continue;
                            }
                            if file.file_type()?.is_dir() {
                                let s =
                                    tr!("{} is a directory\n\n", file.path().display().to_string());
                                let _ = write!(stdin, "{}", c.bold.paint(s));
                                continue;
                            }
                            if file.file_type()?.is_symlink() {
                                let s = format!(
                                    "{} -> {}\n\n\n",
                                    file.path().display(),
                                    read_link(file.path())?.display()
                                );
                                let _ = write!(stdin, "{}", c.bold.paint(s));
                                continue;
                            }
                            let _ = writeln!(
                                stdin,
                                "{}",
                                c.bold.paint(file.path().display().to_string())
                            );
                            if bat {
                                let output = Command::new(&config.bat_bin)
                                    .arg("-pp")
                                    .arg("--color=always")
                                    .arg(file.path())
                                    .args(&config.bat_flags)
                                    .output()
                                    .with_context(|| {
                                        format!(
                                            "{} {} {}",
                                            tr!("failed to run:"),
                                            config.bat_bin,
                                            file.path().display()
                                        )
                                    })?;
                                let _ = stdin.write_all(&output.stdout);
                            } else {
                                let mut pkgbbuild = OpenOptions::new()
                                    .read(true)
                                    .open(&file.path())
                                    .with_context(|| {
                                        tr!("failed to open: {}", file.path().display().to_string())
                                    })?;
                                buf.clear();
                                pkgbbuild.read_to_end(&mut buf)?;
                                // Only print valid UTF-8; binary files get a
                                // placeholder line instead.
                                let _ = match std::str::from_utf8(&buf) {
                                    Ok(_) => stdin.write_all(&buf),
                                    Err(_) => {
                                        write!(
                                            stdin,
                                            "{}",
                                            tr!(
                                                "binary file: {}",
                                                file.path().display().to_string()
                                            )
                                        )
                                    }
                                };
                            }
                            let _ = stdin.write_all(b"\n\n");
                        }
                    }
                }
                // Close the pipe so the pager sees EOF, then wait for it.
                drop(stdin);
                command
                    .wait()
                    .with_context(|| format!("{} {}", tr!("failed to run:"), pager))?;
                exec::RAISE_SIGPIPE.store(true, Ordering::Relaxed);
                if !ask(config, &tr!("Proceed with installation?"), true) {
                    return Ok(1);
                }
            } else {
                printtr!(" nothing new to review");
            }
        }
    }
    config.fetch.mark_seen(&pkgs)?;
    Ok(0)
}
/// Open the configured file manager on a temporary view of the packages so
/// the user can inspect them. Returns the temp dir, which must be kept alive
/// for as long as the review lasts.
fn file_manager(config: &Config, fm: &str, pkgs: &[&str]) -> Result<tempfile::TempDir> {
    let has_diff = config.fetch.has_diff(pkgs)?;
    config.fetch.save_diffs(&has_diff)?;
    let view = config.fetch.make_view(pkgs, &has_diff)?;

    let mut cmd = Command::new(fm);
    cmd.args(&config.fm_flags)
        .arg(view.path())
        .current_dir(view.path());
    let status = cmd
        .status()
        .with_context(|| tr!("failed to execute file manager: {}", fm))?;

    ensure!(
        status.success(),
        tr!("file manager did not execute successfully")
    );
    Ok(view)
}
/// Install the given repo packages with pacman, then fix their install
/// reason (explicit vs dependency) via asdeps/asexp. Returns 0 on success.
fn repo_install(config: &Config, install: &[RepoPackage]) -> Result<i32> {
    if install.is_empty() {
        return Ok(0);
    }
    let mut deps = Vec::new();
    let mut exp = Vec::new();
    // Fully qualified repo/name targets so the right db is used.
    let targets = install
        .iter()
        .map(|p| format!("{}/{}", p.pkg.db().unwrap().name(), p.pkg.name()))
        .collect::<Vec<_>>();
    let mut args = config.pacman_args();
    // Strip flags that were meant for the overall operation, not this
    // internal install (reason flags are re-applied below via asdeps/asexp).
    args.remove("asdeps")
        .remove("asdep")
        .remove("asexplicit")
        .remove("asexp")
        .remove("y")
        .remove("i")
        .remove("refresh");
    args.targets = targets.iter().map(|s| s.as_str()).collect();
    if !config.combined_upgrade || config.mode == Mode::Aur {
        args.remove("u").remove("sysupgrade");
    }
    // Decide the install reason: honor explicit --asexplicit/--asdeps;
    // otherwise new packages are explicit if targeted, deps otherwise.
    if config.globals.has_arg("asexplicit", "asexp") {
        exp.extend(install.iter().map(|p| p.pkg.name()));
    } else if config.globals.has_arg("asdeps", "asdep") {
        deps.extend(install.iter().map(|p| p.pkg.name()));
    } else {
        for pkg in install {
            // Only adjust the reason for packages not already installed.
            if config.alpm.localdb().pkg(pkg.pkg.name()).is_err() {
                if pkg.target {
                    exp.push(pkg.pkg.name())
                } else {
                    deps.push(pkg.pkg.name())
                }
            }
        }
    }
    exec::pacman(config, &args)?.success()?;
    asdeps(config, &deps)?;
    asexp(config, &exp)?;
    Ok(0)
}
/// Validate resolved actions before building.
///
/// Errors on duplicate targets and unsatisfiable dependencies (after
/// discounting anything provided by the local PKGBUILD's `srcinfo`, when
/// given), warns about up-to-date packages, and calculates/prints package
/// conflicts. Returns `(conflicts, inner_conflicts)`.
fn check_actions(
    config: &Config,
    actions: &mut Actions,
    srcinfo: Option<&Srcinfo>,
) -> Result<(Vec<Conflict>, Vec<Conflict>)> {
    let c = config.color;
    let dups = actions.duplicate_targets();
    ensure!(
        dups.is_empty(),
        tr!("duplicate packages: {}", dups.join(" "))
    );
    let arch = config
        .alpm
        .architectures()
        .first()
        .context(tr!("no architecture"))?;
    // When building a local PKGBUILD, deps it provides itself (by provide or
    // by package name/version) are not actually missing — drop them.
    if !actions.missing.is_empty() {
        if let Some(srcinfo) = srcinfo {
            let provides = srcinfo
                .pkgs
                .iter()
                .flat_map(|p| &p.provides)
                .filter(|v| v.supports(arch))
                .flat_map(|v| &v.vec);
            let names = srcinfo.pkgs.iter().map(|p| &p.pkgname);
            for provide in provides {
                actions.missing.retain(|m| {
                    !satisfies_provide(Depend::new(m.dep.as_str()), Depend::new(provide.as_str()))
                })
            }
            for name in names {
                actions.missing.retain(|m| {
                    !satisfies_dep(
                        Depend::new(m.dep.as_str()),
                        name,
                        Version::new(srcinfo.version()),
                    )
                })
            }
        }
    }
    if !actions.missing.is_empty() {
        let mut err = tr!("could not find all required packages:");
        for missing in &actions.missing {
            if missing.stack.is_empty() {
                // Missing package was requested directly.
                err.push_str(&format!("\n    {} (target)", c.error.paint(&missing.dep)));
            } else {
                // Show the dependency chain that wanted it.
                let stack = missing.stack.join(" -> ");
                err.push_str(&tr!(
                    "\n    {missing} (wanted by: {stack})",
                    missing = c.error.paint(&missing.dep),
                    stack = stack
                ));
            };
        }
        bail!("{}", err);
    }
    for pkg in &actions.unneeded {
        eprintln!(
            "{} {}",
            c.warning.paint("::"),
            tr!("{}-{} is up to date -- skipping", pkg.name, pkg.version)
        );
    }
    if actions.build.is_empty() {
        return Ok((Vec::new(), Vec::new()));
    }
    // Download-only chroot runs never install, so conflicts don't matter.
    if config.chroot && config.args.has_arg("w", "downloadonly") {
        return Ok((Vec::new(), Vec::new()));
    }
    println!(
        "{} {}",
        c.action.paint("::"),
        c.bold.paint(tr!("Calculating conflicts..."))
    );
    let conflicts = actions.calculate_conflicts(!config.chroot);
    println!(
        "{} {}",
        c.action.paint("::"),
        c.bold.paint(tr!("Calculating inner conflicts..."))
    );
    let inner_conflicts = actions.calculate_inner_conflicts(!config.chroot);
    if !conflicts.is_empty() || !inner_conflicts.is_empty() {
        eprintln!();
    }
    if !inner_conflicts.is_empty() {
        eprintln!(
            "{} {}",
            c.error.paint("::"),
            c.bold.paint(tr!("Inner conflicts found:"))
        );
        for conflict in &inner_conflicts {
            eprint!("    {}: ", conflict.pkg);
            for conflict in &conflict.conflicting {
                eprint!("{}", conflict.pkg);
                if let Some(conflict) = &conflict.conflict {
                    eprint!(" ({})", conflict);
                }
                eprint!("  ");
            }
            eprintln!();
        }
        eprintln!();
    }
    if !conflicts.is_empty() {
        eprintln!(
            "{} {}",
            c.error.paint("::"),
            c.bold.paint(tr!("Conflicts found:"))
        );
        for conflict in &conflicts {
            eprint!("    {}: ", conflict.pkg);
            for conflict in &conflict.conflicting {
                eprint!("{}", conflict.pkg);
                if let Some(conflict) = &conflict.conflict {
                    eprint!(" ({})", conflict);
                }
                eprint!("  ");
            }
            eprintln!();
        }
        eprintln!();
    }
    // Without UseAsk, conflicting installs need manual confirmation, which
    // is impossible with --noconfirm.
    if (!conflicts.is_empty() || !inner_conflicts.is_empty()) && !config.use_ask {
        eprintln!(
            "{} {}",
            c.warning.paint("::"),
            c.bold.paint(tr!(
                "Conflicting packages will have to be confirmed manually"
            ))
        );
        if config.no_confirm {
            bail!(tr!("can not install conflicting packages with --noconfirm"));
        }
    }
    Ok((conflicts, inner_conflicts))
}
/// Flush the queued built packages: install them with `pacman -U`, persist
/// devel info, and fix install reasons. Clears the queues afterwards.
///
/// `conflict` controls confirmation: with conflicts and UseAsk the
/// ConflictPkg bit is OR'd into pacman's --ask value so pacman auto-answers;
/// without conflicts --noconfirm is used.
fn do_install(
    config: &Config,
    deps: &mut Vec<&str>,
    exp: &mut Vec<&str>,
    install_queue: &mut Vec<String>,
    conflict: bool,
    devel_info: &mut DevelInfo,
) -> Result<()> {
    if !install_queue.is_empty() {
        let mut args = config.pacman_globals();
        // Declared before `args` is mutated so the formatted string outlives
        // the &str pushed into args below.
        let ask;
        args.op("upgrade");
        copy_sync_args(config, &mut args);
        for _ in 0..args.count("d", "nodeps") {
            args.arg("d");
        }
        if conflict {
            if config.use_ask {
                // Merge ConflictPkg into an existing --ask value, or add one.
                if let Some(arg) = args.args.iter_mut().find(|a| a.key == "ask") {
                    let num = arg.value.unwrap_or_default();
                    let mut num = num.parse::<i32>().unwrap_or_default();
                    num |= alpm::QuestionType::ConflictPkg as i32;
                    ask = num.to_string();
                    arg.value = Some(ask.as_str());
                } else {
                    let value = alpm::QuestionType::ConflictPkg as i32;
                    ask = value.to_string();
                    args.push_value("ask", ask.as_str());
                }
            }
        } else {
            args.arg("noconfirm");
        }
        debug!("flushing install queue");
        args.targets = install_queue.iter().map(|s| s.as_str()).collect();
        exec::pacman(config, &args)?.success()?;
        // Persist devel info only after the packages actually installed.
        if config.devel {
            save_devel_info(config, devel_info)?;
        }
        asdeps(config, deps)?;
        asexp(config, exp)?;
        deps.clear();
        exp.clear();
        install_queue.clear();
    }
    Ok(())
}
/// Build the `Chroot` helper from config, falling back to the system-wide
/// pacman/makepkg configuration files when none are configured.
fn chroot(config: &Config) -> Chroot {
    let pacman_conf = config
        .pacman_conf
        .clone()
        .unwrap_or_else(|| "/etc/pacman.conf".to_string());
    let makepkg_conf = config
        .makepkg_conf
        .clone()
        .unwrap_or_else(|| "/etc/makepkg.conf".to_string());

    Chroot {
        path: config.chroot_dir.clone(),
        pacman_conf,
        makepkg_conf,
        mflags: config.mflags.clone(),
        // Local repo files are mounted read-only; the cache dir read-write.
        ro: repo::all_files(config),
        rw: config.pacman.cache_dir.clone(),
    }
}
/// Build every base in `bi.build` in order, then flush any remaining install
/// queue (non-chroot). Failed bases accumulate in `bi.failed` unless
/// `fail_fast` is set, in which case the first error is returned directly.
async fn build_install_pkgbuilds<'a>(config: &mut Config, bi: &mut BuildInfo) -> Result<i32> {
    let mut deps = Vec::new();
    let mut exp = Vec::new();
    let mut install_queue = Vec::new();
    let chroot = chroot(config);
    // Devel info: previously saved state plus freshly fetched commit info.
    let (mut devel_info, mut new_devel_info) = if config.devel {
        printtr!("fetching devel info...");
        (
            load_devel_info(config)?.unwrap_or_default(),
            fetch_devel_info(config, &bi.bases, &bi.srcinfos).await?,
        )
    } else {
        (DevelInfo::default(), DevelInfo::default())
    };
    let (_, repo) = repo::repo_aur_dbs(config);
    let default_repo = repo.first();
    // When a local repo exists, make makepkg drop packages straight into it.
    if let Some(repo) = default_repo {
        let file = repo::file(&repo).unwrap();
        repo::init(config, file, repo.name())?;
        std::env::set_var("PKGDEST", file);
    }
    if config.chroot {
        if !chroot.exists() {
            chroot.create(config, &["base-devel"])?;
        } else {
            chroot.update()?;
        }
    }
    // Owned copy so the alpm db handle can be dropped before the build loop.
    let repo_server =
        default_repo.map(|r| (r.name().to_string(), repo::file(&r).unwrap().to_string()));
    drop(repo);
    for base in bi.build.iter_mut() {
        // Assume failure: push first, pop again only when the build succeeds.
        bi.failed.push(base.clone());
        let repo_server = repo_server
            .as_ref()
            .map(|rs| (rs.0.as_str(), rs.1.as_str()));
        let err = build_install_pkgbuild(
            config,
            &bi.aur_repos,
            &bi.conflicts,
            &chroot,
            base,
            &mut deps,
            &mut exp,
            &mut install_queue,
            &mut bi.conflict,
            &mut devel_info,
            &mut new_devel_info,
            repo_server,
        );
        match err {
            Ok(_) => {
                bi.failed.pop().unwrap();
            }
            Err(e) => {
                if config.fail_fast {
                    // The error is returned directly, so don't also report
                    // this base through bi.failed.
                    bi.failed.pop().unwrap();
                    return Err(e);
                }
                print_error(config.color.error, e);
            }
        }
    }
    // In chroot mode installation happens later via chroot_install.
    if !config.chroot {
        do_install(
            config,
            &mut deps,
            &mut exp,
            &mut install_queue,
            bi.conflict,
            &mut devel_info,
        )?;
    }
    Ok(0)
}
/// Detach-sign the given package files with gpg when signing is enabled.
///
/// Existing `.sig` files are regenerated when `delete_sig` is true and left
/// untouched otherwise.
fn sign_pkg(config: &Config, paths: &[&str], delete_sig: bool) -> Result<()> {
    if config.sign == Sign::No {
        return Ok(());
    }

    let c = config.color;
    println!(
        "{} {}",
        c.action.paint("::"),
        c.bold.paint(tr!("Signing packages..."))
    );

    for path in paths {
        let sig = format!("{}.sig", path);
        if Path::new(&sig).exists() {
            if !delete_sig {
                // Keep the existing signature for an unchanged package.
                continue;
            }
            std::fs::remove_file(&sig)?;
        }

        let mut cmd = Command::new("gpg");
        cmd.args(["--detach-sign", "--no-armor", "--batch"]);
        if let Sign::Key(ref k) = config.sign {
            cmd.arg("-u").arg(k);
        }
        cmd.arg("--output").arg(&sig).arg(path);
        exec::command(&mut cmd)?;
    }

    Ok(())
}
/// True for characters that start a version constraint in a dependency
/// string (`<`, `=`, `>`).
fn is_ver_char(c: char) -> bool {
    c == '<' || c == '=' || c == '>'
}
/// Strip the version constraint from a dependency string when `trim` is set
/// (e.g. "pkg>=1.2" -> "pkg"); otherwise return the string unchanged.
fn trim_dep_ver(dep: &str, trim: bool) -> &str {
    if !trim {
        return dep;
    }
    match dep.split_once(|c| matches!(c, '<' | '=' | '>')) {
        Some((name, _)) => name,
        None => dep,
    }
}
/// Return the dependencies of `base` (depends + makedepends + optionally
/// checkdepends) that are not satisfied by the *local* database, discounting
/// anything covered by alpm's assume-installed list.
///
/// With `-d` passed at least once, version constraints are ignored.
fn deps_not_satisfied(config: &Config, base: &Base) -> Result<Vec<String>> {
    let nover = config.args.count("d", "nodeps") > 0;
    // Fresh alpm handle so packages installed earlier in this run are seen.
    let db = config.new_alpm()?;
    let db = db.localdb().pkgs();
    let mut res = base
        .pkgs
        .iter()
        .flat_map(|pkg| {
            // checkdepends only matter when the check() step will run.
            let check = if config.no_check {
                None
            } else {
                Some(&pkg.pkg.check_depends)
            };
            pkg.pkg
                .depends
                .iter()
                .chain(&pkg.pkg.make_depends)
                .chain(check.into_iter().flatten())
        })
        .filter(|dep| {
            db.find_satisfier(trim_dep_ver(dep.as_str(), nover))
                .is_none()
        })
        .map(|dep| dep.to_string())
        .collect::<Vec<_>>();
    // Drop deps covered by assume-installed, matching with or without
    // version depending on the -d flag.
    if nover {
        res.retain(|dep| {
            !config
                .alpm
                .assume_installed()
                .iter()
                .any(|provide| satisfies_provide_nover(Depend::new(dep.as_str()), provide))
        });
    } else {
        res.retain(|dep| {
            !config
                .alpm
                .assume_installed()
                .iter()
                .any(|provide| satisfies_provide(Depend::new(dep.as_str()), provide))
        });
    }
    Ok(res)
}
/// Return the dependencies of `base` (depends + makedepends + optionally
/// checkdepends) that no *sync* database can satisfy — i.e. what a chroot
/// build could not pull in from the repos.
///
/// With `-d` passed at least once, version constraints are ignored.
/// Unlike `deps_not_satisfied`, the assume-installed list is not consulted.
fn deps_not_satisfied_by_repo(config: &Config, base: &Base) -> Result<Vec<String>> {
    let nover = config.args.count("d", "nodeps") > 0;
    // Fresh alpm handle so repos refreshed earlier in this run are seen.
    let db = config.new_alpm()?;
    let db = db.syncdbs();
    let res = base
        .pkgs
        .iter()
        .flat_map(|pkg| {
            // checkdepends only matter when the check() step will run.
            let check = if config.no_check {
                None
            } else {
                Some(&pkg.pkg.check_depends)
            };
            pkg.pkg
                .depends
                .iter()
                .chain(&pkg.pkg.make_depends)
                .chain(check.into_iter().flatten())
        })
        .filter(|dep| {
            db.find_satisfier(trim_dep_ver(dep.as_str(), nover))
                .is_none()
        })
        .map(|dep| dep.to_string())
        .collect();
    Ok(res)
}
#[allow(clippy::too_many_arguments)]
fn build_install_pkgbuild<'a>(
config: &mut Config,
aur_repos: &HashMap<String, String>,
conflicts: &HashSet<String>,
chroot: &Chroot,
base: &'a mut Base,
deps: &mut Vec<&'a str>,
exp: &mut Vec<&'a str>,
install_queue: &mut Vec<String>,
conflict: &mut bool,
devel_info: &mut DevelInfo,
new_devel_info: &mut DevelInfo,
repo: Option<(&str, &str)>,
) -> Result<()> {
let c = config.color;
let mut debug_paths = HashMap::new();
let dir = config.build_dir.join(base.package_base());
if !config.chroot && (!config.batch_install || !deps_not_satisfied(config, base)?.is_empty()) {
do_install(config, deps, exp, install_queue, *conflict, devel_info)?;
*conflict = false;
}
let missing = if config.args.count("d", "nodeps") > 1 {
Vec::new()
} else if config.chroot {
deps_not_satisfied_by_repo(config, base)?
} else {
deps_not_satisfied(config, base)?
};
if !missing.is_empty() {
bail!(tr!(
"can't build {base}, deps not satisfied: {deps}",
base = base,
deps = missing.join(" ")
));
}
if config.chroot {
chroot
.build(&dir, &["-c"], &["-ofA"])
.with_context(|| tr!("failed to download sources for '{}'", base))?;
} else {
let mut args = vec!["--verifysource", "-Af"];
if !config.keep_src {
args.push("-Cc");
}
exec::makepkg(config, &dir, &args)?
.success()
.with_context(|| tr!("failed to download sources for '{}'", base))?;
let mut args = vec!["-ofA"];
if !config.keep_src {
args.push("-C");
}
exec::makepkg(config, &dir, &args)?
.success()
.with_context(|| tr!("failed to build '{}'", base))?;
}
printtr!("{}: parsing pkg list...", base);
let (mut pkgdest, version) = parse_package_list(config, &dir)?;
if !base.pkgs.iter().all(|p| pkgdest.contains_key(&p.pkg.name)) {
bail!(tr!("package list does not match srcinfo"));
}
if config.install_debug {
let mut debug = Vec::new();
for dest in pkgdest.values() {
let file = dest.rsplit('/').next().unwrap();
for pkg in &base.pkgs {
let debug_pkg = format!("{}-debug-", pkg.pkg.name);
if file.starts_with(&debug_pkg) {
let debug_pkg = format!("{}-debug", pkg.pkg.name);
let mut pkg = pkg.clone();
let mut raur_pkg = (*pkg.pkg).clone();
raur_pkg.name = debug_pkg;
pkg.pkg = raur_pkg.into();
debug_paths.insert(pkg.pkg.name.clone(), dest.clone());
debug.push(pkg);
}
}
}
base.pkgs.extend(debug);
}
let needs_build = needs_build(config, base, &pkgdest, &version);
if needs_build {
if config.chroot {
chroot
.build(
&dir,
&[],
&["-feA", "--noconfirm", "--noprepare", "--holdver"],
)
.with_context(|| tr!("failed to build '{}'", base))?;
} else {
let mut args = vec!["-feA", "--noconfirm", "--noprepare", "--holdver"];
if !config.keep_src {
args.push("-c");
}
exec::makepkg(config, &dir, &args)?
.success()
.with_context(|| tr!("failed to build '{}'", base))?;
}
} else {
println!(
"{} {}",
c.warning.paint("::"),
tr!(
"{}-{} is up to date -- skipping build",
base.package_base(),
base.pkgs[0].pkg.version
)
)
}
for (pkg, path) in &debug_paths {
if !Path::new(path).exists() {
base.pkgs.retain(|p| p.pkg.name != *pkg);
} else {
printtr!("adding {} to the install list", pkg);
}
}
let paths = base
.pkgs
.iter()
.filter_map(|p| pkgdest.get(&p.pkg.name))
.chain(debug_paths.values())
.map(|s| s.as_str())
.collect::<Vec<_>>();
sign_pkg(config, &paths, needs_build)?;
if let Some(ref repo) = repo {
if let Some(repo) = aur_repos.get(base.package_base()) {
let repo = config
.alpm
.syncdbs()
.iter()
.find(|db| db.name() == *repo)
.unwrap();
let path = repo::file(&repo).unwrap();
let name = repo.name().to_string();
repo::add(config, path, &name, &paths)?;
repo::refresh(config, &[name])?;
} else {
let path = repo.1;
repo::add(config, path, repo.0, &paths)?;
repo::refresh(config, &[repo.0])?;
}
if let Some(info) = new_devel_info.info.remove(base.package_base()) {
devel_info
.info
.insert(base.package_base().to_string(), info);
} else {
devel_info.info.remove(base.package_base());
}
if config.devel {
save_devel_info(config, devel_info)?;
}
}
for pkg in &base.pkgs {
if !needs_install(config, base, &version, pkg) {
continue;
}
if config.args.has_arg("asexplicit", "asexp") {
exp.push(pkg.pkg.name.as_str());
} else if config.args.has_arg("asdeps", "asdeps") {
deps.push(pkg.pkg.name.as_str());
} else if config.alpm.localdb().pkg(&*pkg.pkg.name).is_err() {
if pkg.target {
exp.push(pkg.pkg.name.as_str())
} else {
deps.push(pkg.pkg.name.as_str())
}
}
let path = pkgdest.remove(&pkg.pkg.name).with_context(|| {
tr!(
"could not find package '{pkg}' in package list for '{base}'",
pkg = pkg.pkg.name,
base = base
)
})?;
*conflict |= base
.pkgs
.iter()
.any(|p| conflicts.contains(p.pkg.name.as_str()));
install_queue.push(path);
}
if repo.is_none() {
if let Some(info) = new_devel_info.info.remove(base.package_base()) {
devel_info
.info
.insert(base.package_base().to_string(), info);
} else {
devel_info.info.remove(base.package_base());
}
}
Ok(())
}
/// Install everything that was built in chroot mode, plus any plain repo
/// targets, with a single `pacman -S` invocation.
///
/// Returns `Ok(0)` immediately when chroot mode is disabled; the install is
/// also skipped entirely when `-w`/`--downloadonly` was given.
fn chroot_install(config: &Config, bi: &BuildInfo, repo_targs: &[String]) -> Result<i32> {
    if !config.chroot {
        return Ok(0);
    }

    if !config.args.has_arg("w", "downloadonly") {
        // Explicit targets: target packages from every base that did not fail
        // to build.
        let mut targets = bi
            .build
            .iter()
            .filter(|b| {
                !bi.failed
                    .iter()
                    .any(|f| b.package_base() == f.package_base())
            })
            .flat_map(|b| &b.pkgs)
            .filter(|p| p.target)
            .map(|p| p.pkg.name.as_str())
            .collect::<Vec<_>>();

        // During a sysupgrade only reinstall targets that are already
        // installed locally.
        if config.args.has_arg("u", "sysupgrade") {
            targets.retain(|&p| config.alpm.localdb().pkg(p).is_ok());
        }

        targets.extend(repo_targs.iter().map(|s| s.as_str()));

        let mut args = config.pacman_globals();
        args.op("sync");
        copy_sync_args(config, &mut args);
        if config.args.has_arg("asexplicit", "asexp") {
            args.arg("asexplicit");
        } else if config.args.has_arg("asdeps", "asdeps") {
            // Fixed: the second name here was "asdep", inconsistent with the
            // "asdeps" spelling used by the equivalent check elsewhere in this
            // file.
            args.arg("asdeps");
        }

        // Forward -y/-u to pacman unless we are in AUR-only mode.
        if config.mode != Mode::Aur {
            for _ in 0..config.args.count("y", "refresh") {
                args.arg("y");
            }
            for _ in 0..config.args.count("u", "sysupgrade") {
                args.arg("u");
            }
        }
        args.targets = targets;

        // NOTE(review): presumably the transaction was already confirmed up
        // front, so --noconfirm is passed unless there are conflicts or a
        // non-combined sysupgrade still needs pacman's own prompt — confirm.
        if !bi.conflict
            && !bi.build.is_empty()
            && (!config.args.has_arg("u", "sysupgrade")
                || config.combined_upgrade
                || config.mode == Mode::Aur)
        {
            args.arg("noconfirm");
        }

        // Only invoke pacman when it would actually do something.
        if !args.targets.is_empty()
            || config.args.has_arg("u", "sysupgrade")
            || config.args.has_arg("y", "refresh")
        {
            exec::pacman(config, &args)?.success()?;
        }
    }

    Ok(0)
}
fn asdeps(config: &Config, pkgs: &[&str]) -> Result<()> {
if pkgs.is_empty() {
return Ok(());
}
let mut args = config.pacman_globals();
args.op("database")
.arg("asdeps")
.targets(pkgs.iter().cloned());
let output = exec::pacman_output(config, &args)?;
ensure!(
output.status.success(),
"{}",
String::from_utf8_lossy(&output.stderr)
);
Ok(())
}
fn asexp(config: &Config, pkgs: &[&str]) -> Result<()> {
if pkgs.is_empty() {
return Ok(());
}
let mut args = config.pacman_globals();
args.op("database")
.arg("asexplicit")
.targets(pkgs.iter().cloned());
let output = exec::pacman_output(config, &args)?;
ensure!(
output.status.success(),
"{}",
String::from_utf8_lossy(&output.stderr)
);
Ok(())
}
/// Run `makepkg --packagelist` in `dir` and map each package name to the
/// full destination path makepkg will produce for it.
///
/// Also returns the `pkgver-pkgrel` string parsed from the file names (the
/// value seen on the last listed entry).
fn parse_package_list(config: &Config, dir: &Path) -> Result<(HashMap<String, String>, String)> {
    let output = exec::makepkg_output(config, dir, &["--packagelist"])?;
    let stdout = String::from_utf8(output.stdout).context("pkgdest is not utf8")?;

    let mut pkgdests = HashMap::new();
    let mut version = String::new();

    for line in stdout.trim().lines() {
        // File name layout: <name>-<pkgver>-<pkgrel>-<arch>.<ext>, where
        // <name> itself may contain dashes — hence the split from the end.
        let file = line.rsplit('/').next().unwrap();
        let parts: Vec<&str> = file.split('-').collect();
        ensure!(
            parts.len() >= 4,
            "{}",
            tr!("can't find package name in packagelist: {}", line)
        );

        let name_end = parts.len() - 3;
        let pkgname = parts[..name_end].join("-");
        version = parts[name_end..parts.len() - 1].join("-");
        pkgdests.insert(pkgname, line.to_string());
    }

    Ok((pkgdests, version))
}
/// Translate the parsed command line and config into `aur_depends::Flags`,
/// mirroring `-d`/`--nocheck` into the makepkg flag list so the build step
/// agrees with dependency resolution.
pub fn flags(config: &mut Config) -> aur_depends::Flags {
    let mut flags = Flags::new();

    if config.args.has_arg("needed", "needed") {
        flags |= Flags::NEEDED;
    }
    // -uu enables downgrades.
    if config.args.count("u", "sysupgrade") > 1 {
        flags |= Flags::ENABLE_DOWNGRADE;
    }

    // -d relaxes dependency version requirements; -dd skips dependency
    // resolution entirely. -d is forwarded to makepkg as well.
    let nodeps = config.args.count("d", "nodeps");
    if nodeps > 0 {
        flags |= Flags::NO_DEP_VERSION;
        config.mflags.push("-d".to_string());
    }
    if nodeps > 1 {
        flags |= Flags::NO_DEPS;
    }

    if config.no_check {
        flags.remove(Flags::CHECK_DEPENDS);
        config.mflags.push("--nocheck".into());
    }

    match config.mode {
        Mode::Aur => flags |= Flags::AUR_ONLY,
        Mode::Repo => flags |= Flags::REPO_ONLY,
        _ => (),
    }

    if !config.provides {
        flags.remove(Flags::TARGET_PROVIDES | Flags::MISSING_PROVIDES);
    }
    if config.op == Op::Yay {
        flags.remove(Flags::TARGET_PROVIDES);
    }
    if config.repos != LocalRepos::None {
        flags |= Flags::LOCAL_REPO;
    }

    flags
}
/// Build an `aur_depends::Resolver` wired with interactive callbacks:
/// a numbered group-member selection menu, and — only when `-u`/`--sysupgrade`
/// is absent — a provider selection menu. Both read from stdin unless
/// no_confirm is set (then the default "all"/first choice applies).
fn resolver<'a, 'b>(
    config: &Config,
    alpm: &'a Alpm,
    raur: &'b RaurHandle,
    cache: &'b mut Cache,
    flags: Flags,
) -> Resolver<'a, 'b, RaurHandle> {
    // Cloned/copied out of config so the `move` closures below can own them.
    let devel_suffixes = config.devel_suffixes.clone();
    let c = config.color;
    let no_confirm = config.no_confirm;
    let mut resolver = aur_depends::Resolver::new(alpm, cache, raur, flags)
        .custom_aur_namespace(Some(config.aur_namespace().to_string()))
        // A package is treated as devel (always re-fetched) when its name
        // ends with one of the configured suffixes (e.g. "-git").
        .devel_pkgs(move |pkg| devel_suffixes.iter().any(|suff| pkg.ends_with(suff)))
        .group_callback(move |groups| {
            let total: usize = groups.iter().map(|g| g.group.packages().len()).sum();
            let mut pkgs = Vec::new();
            println!(
                "{} {} {}:",
                c.action.paint("::"),
                c.bold.paint(tr!("There are {} members in group", total)),
                c.group.paint(groups[0].group.name()),
            );
            // Print a repository heading whenever the repo changes between
            // consecutive groups.
            let mut repo = String::new();
            for group in groups {
                if group.db.name() != repo {
                    repo = group.db.name().to_string();
                    println!(
                        "{} {} {}",
                        c.action.paint("::"),
                        c.bold.paint(tr!("Repository")),
                        color_repo(c.enabled, group.db.name())
                    );
                    print!(" ");
                }
                // NOTE(review): display numbering restarts at 1 for every
                // group here, but the selection loop below numbers packages
                // globally across all groups — confirm these are meant to
                // line up.
                let mut n = 1;
                for pkg in group.group.packages() {
                    print!("{}) {} ", n, pkg.name());
                    n += 1;
                }
            }
            print!("{}", tr!("\n\nEnter a selection (default=all): "));
            let _ = stdout().lock().flush();
            let stdin = stdin();
            let mut stdin = stdin.lock();
            let mut input = String::new();
            input.clear();
            // With no_confirm the input stays empty, which NumberMenu treats
            // as "select everything".
            if !no_confirm {
                let _ = stdin.read_line(&mut input);
            }
            let menu = NumberMenu::new(input.trim());
            // Global numbering across all groups for the selection check.
            let mut n = 1;
            for pkg in groups.iter().flat_map(|g| g.group.packages()) {
                if menu.contains(n, "") {
                    pkgs.push(pkg);
                }
                n += 1;
            }
            pkgs
        });
    // NOTE(review): the provider menu is only installed outside of a
    // sysupgrade — presumably to avoid interactive prompts during bulk
    // upgrades; confirm against caller expectations.
    if !config.args.has_arg("u", "sysupgrade") {
        resolver = resolver.provider_callback(move |dep, pkgs| {
            let prompt = tr!(
                "There are {n} providers available for {pkg}:",
                n = pkgs.len(),
                pkg = dep
            );
            println!("{} {}", c.action.paint("::"), c.bold.paint(prompt));
            println!(
                "{} {} {}:",
                c.action.paint("::"),
                c.bold.paint(tr!("Repository")),
                color_repo(c.enabled, "AUR")
            );
            print!(" ");
            for (n, pkg) in pkgs.iter().enumerate() {
                print!("{}) {} ", n + 1, pkg);
            }
            // With no_confirm this falls through to the default provider.
            get_provider(pkgs.len(), no_confirm)
        });
    }
    resolver
}
/// Returns true when `pkg` is a debug-symbol package, i.e. its name is
/// exactly `<pkgbase>-debug` for its own package base.
fn is_debug(pkg: alpm::Package) -> bool {
    match pkg.base() {
        // strip_suffix removes exactly one "-debug" suffix. The previous
        // trim_end_matches stripped the suffix repeatedly, which
        // misclassified a debug package whose base itself ends in "-debug"
        // (e.g. name "foo-debug-debug", base "foo-debug" trimmed to "foo").
        Some(base) => pkg
            .name()
            .strip_suffix("-debug")
            .map_or(false, |stem| stem == base),
        None => false,
    }
}
/// Print AUR-related warnings: foreign installed packages that are missing
/// from the AUR, flagged out of date, or orphaned (only during -u), plus
/// out-of-date/orphan warnings for the packages about to be built.
fn print_warnings(config: &Config, cache: &Cache, actions: Option<&Actions>) {
    if config.mode == Mode::Repo {
        return;
    }

    let mut warnings = crate::download::Warnings::default();

    if config.args.has_arg("u", "sysupgrade") {
        let (_, pkgs) = repo_aur_pkgs(config);

        // Debug packages are filtered out of every warning category.
        let foreign = pkgs
            .iter()
            .filter(|pkg| !is_debug(**pkg))
            .collect::<Vec<_>>();

        warnings.missing = foreign
            .iter()
            .map(|pkg| pkg.name())
            .filter(|name| !cache.contains(*name))
            .filter(|name| !config.no_warn.is_match(name))
            .collect();

        warnings.ood = foreign
            .iter()
            .filter_map(|pkg| cache.get(pkg.name()))
            .filter(|pkg| pkg.out_of_date.is_some())
            .map(|pkg| pkg.name.as_str())
            .filter(|name| !config.no_warn.is_match(name))
            .collect();

        warnings.orphans = foreign
            .iter()
            .filter_map(|pkg| cache.get(pkg.name()))
            .filter(|pkg| pkg.maintainer.is_none())
            .map(|pkg| pkg.name.as_str())
            .filter(|name| !config.no_warn.is_match(name))
            .collect();
    }

    if let Some(actions) = actions {
        // To-be-built packages contribute to the out-of-date and orphan
        // warnings as well.
        let build_pkgs = || actions.iter_build_pkgs().map(|pkg| &pkg.pkg);

        warnings.ood.extend(
            build_pkgs()
                .filter(|pkg| pkg.out_of_date.is_some())
                .filter(|pkg| !config.no_warn.is_match(&pkg.name))
                .map(|pkg| pkg.name.as_str()),
        );
        warnings.orphans.extend(
            build_pkgs()
                .filter(|pkg| pkg.maintainer.is_none())
                .filter(|pkg| !config.no_warn.is_match(&pkg.name))
                .map(|pkg| pkg.name.as_str()),
        );
    }

    warnings.missing.sort_unstable();
    warnings.ood.sort_unstable();
    warnings.ood.dedup();
    warnings.orphans.sort_unstable();
    warnings.orphans.dedup();

    warnings.all(config.color, config.cols);
}
/// Decide whether a base needs to be (re)built.
///
/// Forced by --rebuild (targets only) / --rebuild=all; with --needed a base
/// whose packages are all already present at `version` (in the local repos
/// when enabled, otherwise in localdb) is skipped with a notice. Otherwise a
/// build is needed when any expected package file is missing.
fn needs_build(
    config: &Config,
    base: &Base,
    pkgdest: &HashMap<String, String>,
    version: &str,
) -> bool {
    let forced = match config.rebuild {
        YesNoAll::All => true,
        YesNoAll::Yes => base.pkgs.iter().any(|p| p.target),
        _ => false,
    };
    if forced {
        return true;
    }

    if config.args.has_arg("needed", "needed") {
        let c = config.color;

        let all_installed = if config.repos != LocalRepos::None {
            let (_, repos) = repo::repo_aur_dbs(config);
            base.pkgs.iter().all(|p| {
                matches!(
                    repos.pkg(p.pkg.name.as_str()),
                    Ok(pkg) if pkg.version() == version
                )
            })
        } else {
            base.pkgs.iter().all(|p| {
                config
                    .alpm
                    .localdb()
                    .pkg(&*p.pkg.name)
                    .map(|pkg| pkg.version() == version)
                    .unwrap_or(false)
            })
        };

        if all_installed {
            println!(
                "{} {}",
                c.warning.paint("::"),
                tr!(
                    "{}-{} is up to date -- skipping",
                    base.package_base(),
                    base.pkgs[0].pkg.version
                )
            );
            return false;
        }
    }

    // Rebuild when any expected output file does not exist yet.
    base.pkgs
        .iter()
        .any(|p| !Path::new(pkgdest.get(&p.pkg.name).unwrap()).exists())
}
/// With --needed, skip installing a package that is already installed at
/// `version` (printing a notice); in every other case returns true.
fn needs_install(config: &Config, base: &Base, version: &str, pkg: &AurPackage) -> bool {
    if !config.args.has_arg("needed", "needed") {
        return true;
    }

    let up_to_date = match config.alpm.localdb().pkg(&*pkg.pkg.name) {
        Ok(local) => local.version().as_str() == version,
        Err(_) => false,
    };

    if up_to_date {
        let c = config.color;
        println!(
            "{} {}",
            c.warning.paint("::"),
            tr!(
                "{}-{} is up to date -- skipping install",
                base.package_base(),
                base.pkgs[0].pkg.version
            )
        );
        return false;
    }

    true
}
/// Kick off a background refresh of the cached AUR package-name list used
/// for shell completion. Fire-and-forget: the task is detached and any
/// failure is deliberately ignored.
fn update_aur_list(config: &Config) {
    let url = config.aur_url.clone();
    let cache_dir = config.cache_dir.clone();
    let interval = config.completion_interval;

    tokio::spawn(async move {
        // Best effort: a stale completion cache is not fatal.
        let _ = update_aur_cache(&url, &cache_dir, Some(interval)).await;
    });
}