use std::collections::HashSet;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use clap::{Args, Subcommand};
use futures::TryFutureExt;
use glob::Pattern;
use tempfile::TempDir;
use tokio::runtime::Runtime;
use crate::config;
use crate::config::PrefixPaths;
use crate::diagnostic::Warnings;
use crate::error::*;
use crate::git::Git;
use crate::progress::{GitProgressOps, ProgressHandler};
use crate::sess::{DependencySource, Session};
use crate::{fmt_path, fmt_pkg, stageln};
/// One mapping between a path prefix in the upstream repository and a path
/// prefix in this repository, together with the directory holding its patches.
#[derive(Clone)]
pub struct PatchLink {
    /// Directory containing the `*.patch` files for this link; `None` means
    /// there is nowhere to store or read patches for it.
    pub patch_dir: Option<PathBuf>,
    /// Path prefix inside the upstream repository (source side).
    pub from_prefix: PathBuf,
    /// Path prefix inside this repository's target directory (destination side).
    pub to_prefix: PathBuf,
    /// Destination paths already claimed by more specific links; this link
    /// skips them when diffing/patching.
    pub exclude: Vec<PathBuf>,
}
// Command-line arguments of `bender vendor`. Intentionally commented with
// `//` instead of `///`: the help strings are set explicitly via `#[command]`,
// and doc comments on clap-derive items can leak into the generated help text.
#[derive(Args, Debug)]
#[command(
    about = "Copy source code from upstream external repositories into this repository",
    long_about = "Copy source code from upstream external repositories into this repository. Functions similar to the lowrisc vendor.py script.",
    after_help = "Type 'bender vendor <SUBCOMMAND> --help' for more information about a vendor subcommand.",
    subcommand_required = true,
    arg_required_else_help = true
)]
pub struct VendorArgs {
    // The selected vendor subcommand (diff, init, or patch).
    #[command(subcommand)]
    pub vendor_subcommand: VendorSubcommand,
}
// Subcommands of `bender vendor`. `//` comments are used deliberately: clap
// derive turns `///` doc comments into CLI help/about text, which would
// change the tool's observable help output.
#[derive(Subcommand, Debug)]
pub enum VendorSubcommand {
    // Show differences between the local tree and the patched upstream.
    Diff {
        // Optional flag that turns a non-empty diff into an error. When a
        // value is supplied it replaces the default error message (see `run`).
        #[arg(long, num_args(0..=1), alias="err_on_diff")]
        err_on_diff: Option<String>,
    },
    #[command(
        long_about = "(Re-)initialize the external dependencies. Copies the upstream files into the target directories and applies existing patches."
    )]
    Init {
        // Skip applying existing patches after copying the upstream files.
        #[arg(short, long, alias = "no_patch")]
        no_patch: bool,
    },
    // Record local modifications as a new patch file in the patch directory.
    Patch {
        #[arg(
            long,
            help = "Generate a plain diff instead of a format-patch.",
            long_help = "Generate a plain diff instead of a format-patch. Includes all local changes (not only the staged ones)."
        )]
        plain: bool,
        // Commit message used for the generated format-patch.
        #[arg(short, long)]
        message: Option<String>,
    },
}
/// Entry point for the `bender vendor` command.
///
/// For every `vendor_package` in the manifest: materialize the upstream
/// (either a local path used in place, or a fresh git clone pinned to a
/// commit hash in a temporary directory), derive the `PatchLink` mappings,
/// and dispatch to the requested subcommand (`diff`, `init`, or `patch`).
pub fn run(sess: &Session, args: &VendorArgs) -> Result<()> {
    // All git operations are async; drive them with a dedicated runtime.
    let rt = Runtime::new()?;
    for vendor_package in &sess.manifest.vendor_package {
        let dep_src = DependencySource::from(&vendor_package.upstream);
        // Scratch directory for the upstream checkout; deleted when dropped
        // at the end of this loop iteration.
        let tmp_dir = TempDir::new()?;
        let tmp_path = tmp_dir.path();
        let dep_path = match dep_src {
            // A plain path upstream is used in place, no clone needed.
            DependencySource::Path(path) => path,
            DependencySource::Git(ref url) => {
                let git = Git::new(tmp_path, &sess.config.git, sess.git_throttle.clone());
                rt.block_on(async {
                    let pb = ProgressHandler::new(
                        sess.multiprogress.clone(),
                        GitProgressOps::Clone,
                        vendor_package.name.as_str(),
                    );
                    // Clone the upstream into the scratch directory.
                    git.clone().spawn_with(|c| c.arg("clone").arg(url).arg("."), Some(pb))
                        .map_err(move |cause| {
                            // A failed clone of an ssh URL is most often an
                            // authentication issue; emit a targeted warning.
                            Warnings::GitInitFailed {
                                is_ssh: url.contains("git@"),
                            }.emit();
                            Error::chain(
                                format!("Failed to initialize git database in {:?}.", tmp_path),
                                cause,
                            )
                        }).await?;
                    // Only explicit git revisions are accepted so the checkout
                    // cannot drift when upstream branches move.
                    let rev_hash = match vendor_package.upstream {
                        config::Dependency::GitRevision { ref rev, .. } => Ok(rev),
                        _ => Err(Error::new("Please ensure your vendor reference is a commit hash to avoid upstream changes impacting your checkout")),
                    }?;
                    let pb = ProgressHandler::new(
                        sess.multiprogress.clone(),
                        GitProgressOps::Checkout,
                        vendor_package.name.as_str(),
                    );
                    git.clone().spawn_with(|c| c.arg("checkout").arg(rev_hash), Some(pb)).await?;
                    // Require a full commit hash: it must round-trip unchanged
                    // through `git rev-parse --verify <rev>^{commit}`.
                    if *rev_hash != git.spawn_with(|c| c.arg("rev-parse").arg("--verify").arg(format!("{}^{{commit}}", rev_hash)), None).await?.trim_end_matches('\n') {
                        Err(Error::new("Please ensure your vendor reference is a commit hash to avoid upstream changes impacting your checkout"))
                    } else {
                        Ok(())
                    }
                })?;
                tmp_path.to_path_buf()
            }
            DependencySource::Registry => unimplemented!(),
        };
        // Translate the manifest's mapping entries into patch links.
        let mut patch_links: Vec<PatchLink> = Vec::new();
        for link in vendor_package.mapping.clone() {
            patch_links.push(PatchLink {
                patch_dir: link.patch_dir,
                from_prefix: link.from,
                to_prefix: link.to,
                exclude: vec![],
            })
        }
        // Without explicit mappings, fall back to a single link covering the
        // whole package, using the package-level patch directory.
        let patch_links = {
            match patch_links[..] {
                [] => vec![PatchLink {
                    patch_dir: vendor_package.patch_dir.clone(),
                    from_prefix: PathBuf::from(""),
                    to_prefix: PathBuf::from(""),
                    exclude: vec![],
                }],
                _ => patch_links,
            }
        };
        // Order links most-specific first: file targets before directory
        // targets, then deeper target paths before shallower ones.
        let mut sorted_links: Vec<_> = patch_links.clone();
        sorted_links.sort_by(|a, b| {
            let a_is_file = a.to_prefix.is_file();
            let b_is_file = b.to_prefix.is_file();
            if a_is_file != b_is_file {
                return b_is_file.cmp(&a_is_file);
            }
            let a_depth = a.to_prefix.iter().count();
            let b_depth = b.to_prefix.iter().count();
            b_depth.cmp(&a_depth)
        });
        // Each broader link excludes the target prefixes already claimed by
        // the more specific links processed before it.
        let mut seen_paths: HashSet<PathBuf> = HashSet::new();
        for patch_link in sorted_links.iter_mut() {
            patch_link.exclude = seen_paths
                .iter()
                .filter(|path| path.starts_with(&patch_link.to_prefix))
                .cloned()
                .collect();
            seen_paths.insert(patch_link.to_prefix.clone());
        }
        let git = Git::new(tmp_path, &sess.config.git, sess.git_throttle.clone());
        match &args.vendor_subcommand {
            VendorSubcommand::Diff { err_on_diff } => {
                // Bring the scratch checkout up to date with the existing
                // patches before comparing it against the local tree.
                sorted_links
                    .clone()
                    .into_iter()
                    .try_for_each(|patch_link| {
                        apply_patches(&rt, git.clone(), vendor_package.name.clone(), patch_link)
                            .map(|_| ())
                    })?;
                rt.block_on(git.clone().add_all())?;
                sorted_links.into_iter().try_for_each(|patch_link| {
                    let get_diff = diff(&rt, git.clone(), vendor_package, patch_link, dep_path.clone())
                        .map_err(|cause| Error::chain("Failed to get diff.", cause))?;
                    if !get_diff.is_empty() {
                        let _ = write!(std::io::stdout(), "{}", get_diff);
                        // With --err-on-diff, a non-empty diff is an error; a
                        // supplied value overrides the default message.
                        if err_on_diff.is_some() {
                            let err_msg: Option<&String> = err_on_diff.as_ref();
                            let err_msg = match err_msg {
                                Some(err_msg) => err_msg.to_string(),
                                _ => "Found differences, please patch (e.g. using bender vendor patch).".to_string()
                            };
                            return Err(Error::new(err_msg))
                        }
                    }
                    Ok(())
                })
            }
            VendorSubcommand::Init { no_patch } => {
                // Initialize broadest links first (reverse of sorted order) so
                // more specific links can overwrite their subsets afterwards.
                sorted_links.into_iter().rev().try_for_each(|patch_link| {
                    let target_path = patch_link
                        .clone()
                        .to_prefix
                        .prefix_paths(&vendor_package.target_dir)?;
                    // Clear the previous checkout before copying fresh files.
                    if target_path.exists() {
                        if target_path.is_dir() {
                            std::fs::remove_dir_all(target_path.clone())
                        } else {
                            std::fs::remove_file(target_path.clone())
                        }
                        .map_err(|cause| {
                            Error::chain(format!("Failed to remove {:?}.", target_path), cause)
                        })?;
                    }
                    let result = init(
                        &rt,
                        git.clone(),
                        vendor_package,
                        patch_link,
                        dep_path.clone(),
                        *no_patch,
                    );
                    stageln!(
                        "Copied",
                        "{} files from upstream",
                        fmt_pkg!(vendor_package.name)
                    );
                    result
                })
            }
            VendorSubcommand::Patch { plain, message } => {
                // Replay the existing patches onto the scratch checkout so the
                // new patch only captures changes beyond them.
                let mut num_patches = 0;
                sorted_links
                    .clone()
                    .into_iter()
                    .try_for_each(|patch_link| {
                        apply_patches(&rt, git.clone(), vendor_package.name.clone(), patch_link)
                            .map(|num| num_patches += num)
                    })
                    .map_err(|cause| Error::chain("Failed to apply patch.", cause))?;
                // Commit the replayed patches so the subsequent diff/commit
                // only contains the new local modifications.
                if num_patches > 0 {
                    rt.block_on(git.clone().add_all())?;
                    rt.block_on(git.clone().commit(Some(&"pre-patch".to_string())))?;
                }
                sorted_links.into_iter().try_for_each(|patch_link| {
                    match patch_link.patch_dir.clone() {
                        Some(patch_dir) => {
                            if *plain {
                                // Plain diff: includes all local changes.
                                let get_diff = diff(
                                    &rt,
                                    git.clone(),
                                    vendor_package,
                                    patch_link,
                                    dep_path.clone(),
                                )
                                .map_err(|cause| Error::chain("Failed to get diff.", cause))?;
                                gen_plain_patch(get_diff, patch_dir, false)
                            } else {
                                // format-patch: only the staged changes.
                                gen_format_patch(
                                    &rt,
                                    sess,
                                    git.clone(),
                                    patch_link,
                                    vendor_package.target_dir.clone(),
                                    message.as_ref(),
                                )
                            }
                        }
                        None => {
                            // A link without a patch directory has nowhere to
                            // store a patch; warn and continue.
                            Warnings::NoPatchDir {
                                vendor_pkg: vendor_package.name.clone(),
                                from_prefix: patch_link.from_prefix.clone(),
                                to_prefix: patch_link.to_prefix.clone(),
                            }
                            .emit();
                            Ok(())
                        }
                    }
                })
            }
        }?;
    }
    Ok(())
}
/// (Re-)initialize one patch link: apply its patches to the scratch git
/// checkout (unless `no_patch`), then copy the mapped files from the upstream
/// checkout at `dep_path` into the package's target directory.
///
/// * `git` — handle on the scratch upstream checkout.
/// * `patch_link` — the mapping (and patch directory) being initialized.
/// * `no_patch` — when `true`, skip applying existing patches.
pub fn init(
    rt: &Runtime,
    git: Git,
    vendor_package: &config::VendorPackage,
    patch_link: PatchLink,
    dep_path: impl AsRef<Path>,
    no_patch: bool,
) -> Result<()> {
    let dep_path = dep_path.as_ref();
    // Absolute destination (inside this repo) and source (inside upstream).
    let link_to = patch_link
        .to_prefix
        .clone()
        .prefix_paths(&vendor_package.target_dir)?;
    let link_from = patch_link.from_prefix.clone().prefix_paths(dep_path)?;
    std::fs::create_dir_all(link_to.parent().unwrap()).map_err(|cause| {
        Error::chain(
            format!("Failed to create directory {:?}", link_to.parent()),
            cause,
        )
    })?;
    // Patch the upstream checkout first so the files copied below already
    // carry the local modifications.
    if !no_patch {
        apply_patches(
            rt,
            git.clone(),
            vendor_package.name.clone(),
            patch_link.clone(),
        )?;
    }
    // Warn about include entries that do not exist upstream.
    for path in vendor_package.include_from_upstream.clone() {
        if !PathBuf::from(extend_paths(std::slice::from_ref(&path), dep_path, true)?[0].clone())
            .exists()
        {
            Warnings::NotInUpstream { path }.emit();
        }
    }
    match link_from.is_dir() {
        // Directory link: recursive copy honoring include/exclude globs.
        true => copy_recursively(
            &link_from,
            &link_to,
            &extend_paths(&vendor_package.include_from_upstream, dep_path, false)?,
            &vendor_package
                .exclude_from_upstream
                .clone()
                .into_iter()
                .map(|excl| format!("{}/{}", &dep_path.to_str().unwrap(), &excl))
                .collect(),
        )?,
        // Single-file link: plain copy, or a warning if missing upstream.
        false => {
            if link_from.exists() {
                std::fs::copy(&link_from, &link_to).map_err(|cause| {
                    Error::chain(
                        format!(
                            "Failed to copy {} to {}.",
                            link_from.to_str().unwrap(),
                            link_to.to_str().unwrap(),
                        ),
                        cause,
                    )
                })?;
            } else {
                Warnings::NotInUpstream {
                    path: link_from.to_str().unwrap().to_string(),
                }
                .emit();
            }
        }
    };
    Ok(())
}
/// Apply all `*.patch` files of a patch link to the scratch git checkout, in
/// case-insensitive lexicographic file-name order, and return how many were
/// applied.
///
/// Returns `Ok(0)` immediately when the link has no patch directory.
pub fn apply_patches(
    rt: &Runtime,
    git: Git,
    package_name: String,
    patch_link: PatchLink,
) -> Result<usize> {
    let patch_dir = match &patch_link.patch_dir {
        Some(patch_dir) => patch_dir,
        None => return Ok(0),
    };
    std::fs::create_dir_all(patch_dir).map_err(|cause| {
        Error::chain(format!("Failed to create directory {patch_dir:?}"), cause)
    })?;
    // Collect only `*.patch` files; the `is_some()` guard keeps files without
    // an extension from panicking the `unwrap()` below.
    let mut patches = std::fs::read_dir(patch_dir)?
        .map(move |f| f.unwrap().path())
        .filter(|f| f.extension().is_some())
        .filter(|f| f.extension().unwrap() == "patch")
        .collect::<Vec<_>>();
    // File names encode the application order (NNNN-... prefixes).
    patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
    rt.block_on(async {
        for patch in &patches {
            git.clone()
                .spawn_with(
                    |c| {
                        // A link pointing at a single file applies the patch
                        // from the file's parent directory and restricts it to
                        // that one file via `--include`.
                        let is_file = patch_link
                            .from_prefix
                            .clone()
                            .prefix_paths(git.path)
                            .unwrap()
                            .is_file();
                        let current_patch_target = if is_file {
                            patch_link.from_prefix.parent().unwrap().to_str().unwrap()
                        } else {
                            patch_link.from_prefix.as_path().to_str().unwrap()
                        };
                        c.arg("apply")
                            .arg("--directory")
                            .arg(current_patch_target)
                            .arg("-p1")
                            .arg(patch);
                        if is_file {
                            let file_path = patch_link.from_prefix.to_str().unwrap();
                            c.arg("--include").arg(file_path);
                        }
                        c
                    },
                    None,
                )
                .await
                .map_err(|cause| {
                    Error::chain(format!("Failed to apply patch {patch:?}."), cause)
                })?;
            stageln!(
                "Patched",
                "{} with {}",
                fmt_pkg!(package_name),
                fmt_path!(patch.display())
            );
        }
        Ok::<(), Error>(())
    })?;
    Ok(patches.len())
}
/// Produce a diff between the local tree and the (patched) upstream checkout
/// for one patch link.
///
/// Copies the local files over the scratch checkout first (reverse direction
/// of `init`), then runs `git diff` relative to the link's upstream prefix
/// and returns its textual output.
pub fn diff(
    rt: &Runtime,
    git: Git,
    vendor_package: &config::VendorPackage,
    patch_link: PatchLink,
    dep_path: impl AsRef<Path>,
) -> Result<String> {
    let link_from = patch_link
        .from_prefix
        .clone()
        .prefix_paths(dep_path.as_ref())?;
    let link_to = patch_link
        .to_prefix
        .clone()
        .prefix_paths(vendor_package.target_dir.as_ref())?;
    // The local target must exist (created by `bender vendor init`).
    if !&link_to.exists() {
        return Err(Error::new(format!(
            "Could not find {}. Did you run bender vendor init?",
            link_to.to_str().unwrap()
        )));
    }
    // Overlay the local files onto the checkout so `git diff` sees them.
    match &link_to.is_dir() {
        true => copy_recursively(
            &link_to,
            &link_from,
            &extend_paths(
                &vendor_package.include_from_upstream,
                &vendor_package.target_dir,
                false,
            )?,
            &vendor_package
                .exclude_from_upstream
                .clone()
                .into_iter()
                .map(|excl| format!("{}/{}", &vendor_package.target_dir.to_str().unwrap(), &excl))
                .collect(),
        )?,
        false => {
            std::fs::copy(&link_to, &link_from).map_err(|cause| {
                Error::chain(
                    format!(
                        "Failed to copy {} to {}.",
                        link_to.to_str().unwrap(),
                        link_from.to_str().unwrap(),
                    ),
                    cause,
                )
            })?;
        }
    };
    // `--relative` limits the diff to the link's upstream prefix and strips
    // that prefix from the reported paths.
    rt.block_on(async {
        git.spawn_with(
            |c| {
                c.arg("diff").arg(format!(
                    "--relative={}",
                    patch_link
                        .from_prefix
                        .to_str()
                        .expect("Failed to convert from_prefix to string.")
                ))
            },
            None,
        )
        .await
    })
}
/// Write the accumulated `diff` into a new numbered plain-diff patch file
/// (`NNNN-bender-vendor.patch`) inside `patch_dir`.
///
/// * `diff` — unified diff text; nothing is written when it is empty.
/// * `patch_dir` — directory collecting the patch files; created if missing.
/// * `no_patch` — when `true`, all existing patch files are removed first and
///   numbering restarts at `0001`.
///
/// Returns an error when an existing patch file name does not begin with four
/// digits (required for deterministic ordering).
pub fn gen_plain_patch(diff: String, patch_dir: impl AsRef<Path>, no_patch: bool) -> Result<()> {
    if !diff.is_empty() {
        std::fs::create_dir_all(patch_dir.as_ref()).map_err(|cause| {
            Error::chain(
                format!("Failed to create directory {:?}", patch_dir.as_ref()),
                cause,
            )
        })?;
        // Collect existing `*.patch` files. Guard with `is_some()` first so a
        // file without any extension (e.g. `README`) does not panic the
        // `unwrap()`; this mirrors the filtering in `apply_patches` and
        // `gen_format_patch`.
        let mut patches = std::fs::read_dir(patch_dir.as_ref())?
            .map(move |f| f.unwrap().path())
            .filter(|f| f.extension().is_some())
            .filter(|f| f.extension().unwrap() == "patch")
            .collect::<Vec<_>>();
        patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
        let new_patch = if no_patch || patches.is_empty() {
            // Start over: drop any previous patches and begin at 0001.
            for patch_file in patches {
                std::fs::remove_file(patch_file)?;
            }
            "0001-bender-vendor.patch".to_string()
        } else {
            // NOTE(review): `&s[..4]` assumes the first four characters are
            // single-byte; a patch name starting with non-ASCII would panic
            // here — confirm whether such names can occur.
            let leading_numbers = patches
                .iter()
                .map(|file_path| file_path.file_name().unwrap().to_str().unwrap())
                .map(|s| &s[..4])
                .collect::<Vec<_>>();
            if !leading_numbers
                .iter()
                .all(|s| s.chars().all(char::is_numeric))
            {
                Err(Error::new(format!(
                    "Please ensure all patches start with four numbers for proper ordering in {}",
                    patch_dir.as_ref().to_str().unwrap()
                )))?;
            }
            // Continue the sequence after the highest existing number.
            let max_number = leading_numbers
                .iter()
                .map(|s| s.parse::<i32>().unwrap())
                .max()
                .unwrap();
            format!("{:04}-bender-vendor.patch", max_number + 1)
        };
        std::fs::write(patch_dir.as_ref().join(new_patch), diff)?;
    }
    Ok(())
}
/// Generate a numbered `git format-patch` file from the *staged* local
/// changes of one patch link and store it in the link's patch directory.
///
/// Precondition (enforced by the caller in `run`): `patch_link.patch_dir`
/// is `Some` — this function unwraps it.
pub fn gen_format_patch(
    rt: &Runtime,
    sess: &Session,
    git: Git,
    patch_link: PatchLink,
    target_dir: impl AsRef<Path>,
    message: Option<&String>,
) -> Result<()> {
    // Absolute path of the link's target inside this repository.
    let to_path = patch_link
        .to_prefix
        .clone()
        .prefix_paths(target_dir.as_ref())?;
    if !&to_path.exists() {
        return Err(Error::new(format!(
            "Could not find {}. Did you run bender vendor init?",
            to_path.to_str().unwrap()
        )));
    }
    // Git handle rooted at the target (or its parent for a file link), used
    // to read the locally staged changes.
    let git_parent = Git::new(
        if to_path.is_dir() {
            &to_path
        } else {
            to_path.parent().unwrap()
        },
        &sess.config.git,
        sess.git_throttle.clone(),
    );
    let from_path_relative = if to_path.is_dir() {
        patch_link.from_prefix.clone()
    } else {
        patch_link.from_prefix.parent().unwrap().to_path_buf()
    };
    let patch_dir = patch_link.patch_dir.clone().unwrap();
    // For a file link, restrict the diff to that single file.
    let include_pathspec = if !to_path.is_dir() {
        patch_link
            .to_prefix
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_string()
    } else {
        ".".to_string()
    };
    // Exclude paths already claimed by more specific links (`:!` pathspec).
    let exclude_pathspecs: Vec<String> = patch_link
        .exclude
        .iter()
        .map(|path| format!(":!{}", path.to_str().unwrap()))
        .collect();
    // `--cached`: only staged changes participate; unstaged edits are
    // deliberately ignored for format-patch generation.
    let mut diff_args = vec![
        "diff".to_string(),
        "--relative".to_string(),
        "--cached".to_string(),
    ];
    diff_args.push(include_pathspec);
    for exclude_path in exclude_pathspecs {
        diff_args.push(exclude_path);
    }
    let get_diff_cached = rt
        .block_on(async { git_parent.spawn_with(|c| c.args(&diff_args), None).await })
        .map_err(|cause| Error::chain("Failed to generate diff", cause))?;
    if !get_diff_cached.is_empty() {
        // NOTE(review): `keep()` persists the temp dir beyond this function,
        // so it is never cleaned up here — confirm this is intentional.
        let tmp_format_dir = TempDir::new()?;
        let tmp_format_path = tmp_format_dir.keep();
        let diff_cached_path = tmp_format_path.join("staged.diff");
        std::fs::write(diff_cached_path.clone(), get_diff_cached)?;
        // Replay the staged diff onto the (already patched) upstream checkout
        // and stage everything there.
        rt.block_on(async {
            git.clone().spawn_with(|c| {
                c.arg("apply")
                    .arg("--directory")
                    .arg(&from_path_relative)
                    .arg("-p1")
                    .arg(&diff_cached_path)
            }, None)
            .and_then(|_| git.clone().spawn_with(|c| c.arg("add").arg("--all"), None))
            .await
        }).map_err(|cause| Error::chain("Could not apply staged changes on top of patched upstream repository. Did you commit all previously patched modifications?", cause))?;
        // Commit in the scratch checkout; this commit is what format-patch
        // exports below.
        rt.block_on(git.clone().commit(message))?;
        std::fs::create_dir_all(patch_dir.clone()).map_err(|cause| {
            Error::chain(
                format!("Failed to create directory {:?}", patch_dir.clone()),
                cause,
            )
        })?;
        // Determine the next patch number from existing `NNNN-*.patch` files.
        let mut patches = std::fs::read_dir(patch_dir.clone())?
            .map(move |f| f.unwrap().path())
            .filter(|f| f.extension().is_some())
            .filter(|f| f.extension().unwrap() == "patch")
            .collect::<Vec<_>>();
        patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
        let max_number = if patches.is_empty() {
            0
        } else {
            let leading_numbers = patches
                .iter()
                .map(|file_path| file_path.file_name().unwrap().to_str().unwrap())
                .map(|s| &s[..4])
                .collect::<Vec<_>>();
            if !leading_numbers
                .iter()
                .all(|s| s.chars().all(char::is_numeric))
            {
                Err(Error::new(format!(
                    "Please ensure all patches start with four numbers for proper ordering in {}",
                    patch_dir.to_str().unwrap()
                )))?;
            }
            leading_numbers
                .iter()
                .map(|s| s.parse::<i32>().unwrap())
                .max()
                .unwrap()
        };
        // Export HEAD (the commit created above) as the next numbered patch.
        rt.block_on(async {
            git.spawn_with(
                |c| {
                    c.arg("format-patch")
                        .arg("-o")
                        .arg(patch_dir.to_str().unwrap())
                        .arg("-1")
                        .arg(format!("--start-number={}", max_number + 1))
                        .arg(format!(
                            "--relative={}",
                            from_path_relative.to_str().unwrap()
                        ))
                        .arg("HEAD")
                },
                None,
            )
            .await
        })?;
    }
    Ok(())
}
/// Recursively copy `source` into `destination`, keeping only entries that
/// match (an ancestor of) one of the `includes` glob patterns and none of the
/// `ignore` glob patterns.
///
/// Symlinks that point at directories are recreated as symlinks rather than
/// followed; everything else is copied byte-for-byte.
pub fn copy_recursively(
    source: impl AsRef<Path> + std::fmt::Debug,
    destination: impl AsRef<Path> + std::fmt::Debug,
    includes: &Vec<String>,
    ignore: &Vec<String>,
) -> Result<()> {
    std::fs::create_dir_all(&destination).map_err(|cause| {
        Error::chain(
            format!("Failed to create directory {:?}", &destination),
            cause,
        )
    })?;
    for entry in std::fs::read_dir(source)? {
        let entry = entry?;
        // Keep the entry if any ancestor of an include pattern matches it —
        // this lets directories on the way to an included file be descended
        // into — unless an ignore pattern matches.
        // NOTE(review): `Pattern::new(...).unwrap()` panics on an invalid
        // glob coming from the manifest; consider surfacing this as an error.
        if !includes.iter().any(|include| {
            PathBuf::from(include).ancestors().any(|include_path| {
                Pattern::new(include_path.to_str().unwrap())
                    .unwrap()
                    .matches_path(&entry.path())
            })
        }) || ignore.iter().any(|ignore_path| {
            Pattern::new(ignore_path)
                .unwrap()
                .matches_path(&entry.path())
        }) {
            continue;
        }
        let filetype = entry.file_type()?;
        // File type after resolving symlinks, used to detect dir symlinks.
        let canonical_path_filetype =
            std::fs::metadata(std::fs::canonicalize(entry.path()).map_err(|cause| {
                Error::chain(
                    format!(
                        "Failed to canonicalize {:?}.",
                        entry.path().to_str().unwrap()
                    ),
                    cause,
                )
            })?)?
            .file_type();
        if filetype.is_dir() {
            copy_recursively(
                entry.path(),
                destination.as_ref().join(entry.file_name()),
                includes,
                ignore,
            )?;
        } else if filetype.is_symlink() && canonical_path_filetype.is_dir() {
            // Recreate directory symlinks instead of copying their contents.
            let orig = std::fs::read_link(entry.path());
            symlink_dir(orig.unwrap(), destination.as_ref().join(entry.file_name()))?;
        } else {
            std::fs::copy(entry.path(), destination.as_ref().join(entry.file_name())).map_err(
                |cause| {
                    Error::chain(
                        format!(
                            "Failed to copy {} to {}.",
                            entry.path().to_str().unwrap(),
                            destination
                                .as_ref()
                                .join(entry.file_name())
                                .to_str()
                                .unwrap()
                        ),
                        cause,
                    )
                },
            )?;
        }
    }
    Ok(())
}
/// Prefix every include pattern with `prefix` and render it as a string.
///
/// A pattern that resolves to an existing directory is additionally extended
/// with a `/**` suffix so it matches everything beneath it — unless
/// `dir_only` is set, in which case the bare directory path is kept.
pub fn extend_paths(
    include_from_upstream: &[String],
    prefix: impl AsRef<Path>,
    dir_only: bool,
) -> Result<Vec<String>> {
    let prefix = prefix.as_ref();
    let mut extended = Vec::with_capacity(include_from_upstream.len());
    for pattern in include_from_upstream {
        let prefixed = PathBuf::from(pattern).prefix_paths(prefix)?;
        // Existing directories become recursive globs (dir/**).
        let rendered = if prefixed.is_dir() && !dir_only {
            prefixed.join("**")
        } else {
            prefixed
        };
        extended.push(String::from(rendered.to_str().unwrap()));
    }
    Ok(extended)
}
/// Create a symlink at `q` pointing to `p` (Unix implementation).
#[cfg(unix)]
fn symlink_dir(p: PathBuf, q: PathBuf) -> Result<()> {
    std::os::unix::fs::symlink(p, q)?;
    Ok(())
}
/// Create a directory symlink at `q` pointing to `p` (Windows implementation).
#[cfg(windows)]
fn symlink_dir(p: PathBuf, q: PathBuf) -> Result<()> {
    std::os::windows::fs::symlink_dir(p, q)?;
    Ok(())
}