use crate::config::PrefixPaths;
use crate::futures::TryFutureExt;
use clap::{Arg, ArgAction, ArgMatches, Command};
use futures::future::{self};
use tokio::runtime::Runtime;
use crate::config;
use crate::error::*;
use crate::git::Git;
use crate::sess::{DependencySource, Session};
use glob::Pattern;
use std::path::Path;
use std::path::PathBuf;
use tempdir::TempDir;
/// A single mapping between a path prefix in the upstream repository and a
/// path prefix in the local target tree, plus the directory holding the
/// patches that apply to that mapping.
#[derive(Clone)]
pub struct PatchLink {
    /// Directory containing the `*.patch` files for this mapping, if any.
    pub patch_dir: Option<PathBuf>,
    /// Path prefix inside the upstream repository (copy source).
    pub from_prefix: PathBuf,
    /// Path prefix inside the local target directory (copy destination).
    pub to_prefix: PathBuf,
}
/// Build the clap command-line definition for `bender vendor` and its
/// `diff`, `init`, and `patch` subcommands.
pub fn new() -> Command {
    // Each subcommand is assembled separately and then attached to the
    // top-level `vendor` command.
    let diff_cmd = Command::new("diff")
        .about("Display a diff of the local tree and the upstream tree with patches applied.")
        .arg(
            Arg::new("err_on_diff")
                .long("err_on_diff")
                .short('e')
                .num_args(0..=1)
                .help("Return error code 1 when a diff is encountered. (Optional) override the error message by providing a value."),
        );
    let init_cmd = Command::new("init")
        .about("(Re-)initialize the external dependencies. Copies the upstream files into the target directories and applies existing patches.")
        .arg(
            Arg::new("no_patch")
                .short('n')
                .action(ArgAction::SetTrue)
                .long("no_patch")
                .help("Do not apply patches when initializing dependencies"),
        );
    let patch_cmd = Command::new("patch")
        .about("Generate a patch file from staged local changes")
        .arg(
            Arg::new("plain")
                .action(ArgAction::SetTrue)
                .long("plain")
                .help("Generate a plain diff instead of a format-patch. Includes all local changes (not only the staged ones)."),
        )
        .arg(
            Arg::new("message")
                .long("message")
                .short('m')
                .num_args(1)
                .action(ArgAction::Append)
                .help("The message to be associated with the format-patch."),
        );
    Command::new("vendor")
        .subcommand_required(true)
        .arg_required_else_help(true)
        .about("Copy source code from upstream external repositories into this repository. Functions similar to the lowrisc vendor.py script. Type bender vendor <SUBCOMMAND> --help for more information about the subcommands.")
        .subcommand(diff_cmd)
        .subcommand(init_cmd)
        .subcommand(patch_cmd)
}
/// Execute the `bender vendor` command: for every vendor package in the
/// manifest, fetch the upstream sources into a temporary checkout and run
/// the selected subcommand (`diff`, `init`, or `patch`) on each mapping.
pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
    let rt = Runtime::new()?;
    for vendor_package in &sess.manifest.vendor_package {
        // Resolve the upstream source; git upstreams are cloned into a
        // temporary directory that lives until the end of this iteration
        // (the TempDir is removed on drop).
        let dep_src = DependencySource::from(&vendor_package.upstream);
        let tmp_dir = TempDir::new(&vendor_package.name)?;
        let tmp_path = tmp_dir.path();
        let dep_path = match dep_src {
            DependencySource::Path(path) => path,
            DependencySource::Git(ref url) => {
                let git = Git::new(tmp_path, &sess.config.git);
                rt.block_on(async {
                    // `future::lazy` defers the status message until the
                    // chain is polled, keeping it ordered before the clone.
                    future::lazy(|_| {
                        stageln!("Cloning", "{} ({})", vendor_package.name, url);
                        Ok(())
                    })
                    .and_then(|_| git.spawn_with(|c| c.arg("clone").arg(url).arg(".")))
                    .map_err(move |cause| {
                        if url.contains("git@") {
                            warnln!("Please ensure your public ssh key is added to the git server.");
                        }
                        warnln!("Please ensure the url is correct and you have access to the repository.");
                        Error::chain(
                            format!("Failed to initialize git database in {:?}.", tmp_path),
                            cause,
                        )
                    })
                    .and_then(|_| git.spawn_with(|c| c.arg("checkout").arg(match vendor_package.upstream {
                        config::Dependency::GitRevision(_, ref rev) => rev,
                        _ => unimplemented!(),
                    })))
                    .and_then(|_| async {
                        // Require a full commit hash so that later upstream
                        // branch or tag movement cannot change the checkout.
                        let rev_hash = match vendor_package.upstream {
                            config::Dependency::GitRevision(_, ref rev) => rev,
                            _ => unimplemented!(),
                        };
                        if *rev_hash != git.spawn_with(|c| c.arg("rev-parse").arg("--verify").arg(format!("{}^{{commit}}", rev_hash))).await?.trim_end_matches('\n') {
                            Err(Error::new("Please ensure your vendor reference is a commit hash to avoid upstream changes impacting your checkout"))
                        } else {
                            Ok(())
                        }
                    })
                    .await
                })?;
                tmp_path.to_path_buf()
            }
            DependencySource::Registry => unimplemented!(),
        };
        // Collect the package's path mappings; when no explicit mapping is
        // given, fall back to a single whole-tree mapping that uses the
        // package-level patch directory.
        let mut patch_links: Vec<PatchLink> = Vec::new();
        for link in vendor_package.mapping.clone() {
            patch_links.push(PatchLink {
                patch_dir: link.patch_dir,
                from_prefix: link.from,
                to_prefix: link.to,
            })
        }
        let patch_links = {
            match patch_links[..] {
                [] => vec![PatchLink {
                    patch_dir: vendor_package.patch_dir.clone(),
                    from_prefix: PathBuf::from(""),
                    to_prefix: PathBuf::from(""),
                }],
                _ => patch_links,
            }
        };
        let git = Git::new(tmp_path, &sess.config.git);
        match matches.subcommand() {
            Some(("diff", matches)) => {
                // Patch the upstream checkout, stage everything, then diff
                // the local tree against it.
                patch_links.clone().into_iter().try_for_each(|patch_link| {
                    apply_patches(&rt, git, vendor_package.name.clone(), patch_link).map(|_| ())
                })?;
                rt.block_on(git.add_all())?;
                patch_links.into_iter().try_for_each(|patch_link| {
                    let get_diff = diff(&rt, git, vendor_package, patch_link, dep_path.clone())
                        .map_err(|cause| Error::chain("Failed to get diff.", cause))?;
                    if !get_diff.is_empty() {
                        print!("{}", get_diff);
                        // With --err_on_diff, a non-empty diff is an error;
                        // an optional value overrides the default message.
                        if matches.contains_id("err_on_diff") {
                            let err_msg : Option<&String> = matches.get_one("err_on_diff");
                            let err_msg = match err_msg {
                                Some(err_msg) => err_msg.to_string(),
                                _ => "Found differences, please patch (e.g. using bender vendor patch).".to_string()
                            };
                            return Err(Error::new(err_msg))
                        }
                    }
                    Ok(())
                })
            }
            Some(("init", matches)) => {
                patch_links.clone().into_iter().try_for_each(|patch_link| {
                    stageln!("Copying", "{} files from upstream", vendor_package.name);
                    // Remove any previous checkout at the target before
                    // copying fresh files from upstream.
                    let target_path = patch_link
                        .clone()
                        .to_prefix
                        .prefix_paths(&vendor_package.target_dir);
                    if target_path.exists() {
                        if target_path.is_dir() {
                            std::fs::remove_dir_all(target_path.clone())
                        } else {
                            std::fs::remove_file(target_path.clone())
                        }
                        .map_err(|cause| {
                            Error::chain(format!("Failed to remove {:?}.", target_path), cause)
                        })?;
                    }
                    init(
                        &rt,
                        git,
                        vendor_package,
                        patch_link,
                        dep_path.clone(),
                        matches,
                    )
                })
            }
            Some(("patch", matches)) => {
                // Re-apply existing patches first so the new patch is
                // generated on top of them; when any were applied, commit
                // the patched state as a "pre-patch" baseline.
                let mut num_patches = 0;
                patch_links
                    .clone()
                    .into_iter()
                    .try_for_each(|patch_link| {
                        apply_patches(&rt, git, vendor_package.name.clone(), patch_link)
                            .map(|num| num_patches += num)
                    })
                    .map_err(|cause| Error::chain("Failed to apply patch.", cause))?;
                if num_patches > 0 {
                    rt.block_on(git.add_all())?;
                    rt.block_on(git.commit(Some(&"pre-patch".to_string())))?;
                }
                patch_links.clone().into_iter().try_for_each( |patch_link| {
                    match patch_link.patch_dir.clone() {
                        Some(patch_dir) => {
                            // --plain emits a raw diff of all local changes;
                            // otherwise generate a git format-patch from the
                            // staged changes only.
                            if matches.get_flag("plain") {
                                let get_diff = diff(&rt,
                                    git,
                                    vendor_package,
                                    patch_link,
                                    dep_path.clone())
                                    .map_err(|cause| Error::chain("Failed to get diff.", cause))?;
                                gen_plain_patch(get_diff, patch_dir, false)
                            } else {
                                gen_format_patch(&rt, sess, git, patch_link, vendor_package.target_dir.clone(), matches.get_one("message"))
                            }
                        },
                        None => {
                            warnln!("No patch directory specified for package {}, mapping {} => {}. Skipping patch generation.", vendor_package.name.clone(), patch_link.from_prefix.to_str().unwrap(), patch_link.to_prefix.to_str().unwrap());
                            Ok(())
                        },
                    }
                })
            }
            _ => Ok(()),
        }?;
    }
    Ok(())
}
/// Copy the upstream sources for a single mapping of a vendor package into
/// the local target directory, optionally applying the existing patches to
/// the upstream checkout first (skipped with `--no_patch`).
///
/// Directory mappings are copied recursively honoring the package's
/// include/exclude globs; single-file mappings are copied directly.
pub fn init(
    rt: &Runtime,
    git: Git,
    vendor_package: &config::VendorPackage,
    patch_link: PatchLink,
    dep_path: impl AsRef<Path>,
    matches: &ArgMatches,
) -> Result<()> {
    let dep_path = dep_path.as_ref();
    // Destination in the local tree and source in the upstream checkout.
    let link_to = patch_link
        .to_prefix
        .clone()
        .prefix_paths(&vendor_package.target_dir);
    let link_from = patch_link.from_prefix.clone().prefix_paths(dep_path);
    std::fs::create_dir_all(link_to.parent().unwrap()).map_err(|cause| {
        Error::chain(
            format!("Failed to create directory {:?}", link_to.parent()),
            cause,
        )
    })?;
    // Patch the upstream checkout before copying, unless suppressed.
    if !matches.get_flag("no_patch") {
        apply_patches(rt, git, vendor_package.name.clone(), patch_link.clone())?;
    }
    match &patch_link.from_prefix.prefix_paths(dep_path).is_dir() {
        true => copy_recursively(
            &link_from,
            &link_to,
            &extend_paths(&vendor_package.include_from_upstream, dep_path),
            &vendor_package
                .exclude_from_upstream
                .clone()
                .into_iter()
                .map(|excl| format!("{}/{}", &dep_path.to_str().unwrap(), &excl))
                .collect(),
        )?,
        false => {
            std::fs::copy(&link_from, &link_to).map_err(|cause| {
                Error::chain(
                    format!(
                        // BUGFIX: the message previously interpolated the
                        // destination and source in swapped order; the copy
                        // goes from `link_from` to `link_to`.
                        "Failed to copy {} to {}.",
                        link_from.to_str().unwrap(),
                        link_to.to_str().unwrap()
                    ),
                    cause,
                )
            })?;
        }
    };
    Ok(())
}
pub fn apply_patches(
rt: &Runtime,
git: Git,
package_name: String,
patch_link: PatchLink,
) -> Result<usize> {
if let Some(patch_dir) = patch_link.patch_dir.clone() {
std::fs::create_dir_all(patch_dir.clone()).map_err(|cause| {
Error::chain(
format!("Failed to create directory {:?}", patch_dir.clone()),
cause,
)
})?;
let mut patches = std::fs::read_dir(patch_dir)?
.map(move |f| f.unwrap().path())
.filter(|f| f.extension().is_some())
.filter(|f| f.extension().unwrap() == "patch")
.collect::<Vec<_>>();
patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
for patch in patches.clone() {
rt.block_on(async {
future::lazy(|_| {
stageln!(
"Patching",
"{} with {}",
package_name,
patch.file_name().unwrap().to_str().unwrap()
);
Ok(())
})
.and_then(|_| {
git.spawn_with(|c| {
let current_patch_target = if !patch_link
.from_prefix
.clone()
.prefix_paths(git.path)
.is_file()
{
patch_link.from_prefix.as_path()
} else {
patch_link.from_prefix.parent().unwrap()
}
.to_str()
.unwrap();
c.arg("apply")
.arg("--directory")
.arg(current_patch_target)
.arg("-p1")
.arg(&patch)
})
})
.await
.map_err(move |cause| {
Error::chain(format!("Failed to apply patch {:?}.", patch), cause)
})
.map(move |_| git)
})?;
}
Ok(patches.len())
} else {
Ok(0)
}
}
/// Overlay the local tree of a mapping onto the (patched) upstream checkout
/// and return the output of `git diff`, with paths made relative to the
/// mapping's source prefix.
pub fn diff(
    rt: &Runtime,
    git: Git,
    vendor_package: &config::VendorPackage,
    patch_link: PatchLink,
    dep_path: impl AsRef<Path>,
) -> Result<String> {
    // Location in the upstream checkout and in the local target tree.
    let link_from = patch_link
        .from_prefix
        .clone()
        .prefix_paths(dep_path.as_ref());
    let link_to = patch_link
        .to_prefix
        .clone()
        .prefix_paths(vendor_package.target_dir.as_ref());
    if !link_to.exists() {
        return Err(Error::new(format!(
            "Could not find {}. Did you run bender vendor init?",
            link_to.to_str().unwrap()
        )));
    }
    // Copy the local files over the upstream checkout so git can diff them.
    if link_to.is_dir() {
        copy_recursively(
            &link_to,
            &link_from,
            &extend_paths(
                &vendor_package.include_from_upstream,
                &vendor_package.target_dir,
            ),
            &vendor_package
                .exclude_from_upstream
                .clone()
                .into_iter()
                .map(|excl| format!("{}/{}", &vendor_package.target_dir.to_str().unwrap(), &excl))
                .collect(),
        )?;
    } else {
        std::fs::copy(&link_to, &link_from).map_err(|cause| {
            Error::chain(
                format!(
                    "Failed to copy {} to {}.",
                    link_to.to_str().unwrap(),
                    link_from.to_str().unwrap(),
                ),
                cause,
            )
        })?;
    }
    rt.block_on(async {
        git.spawn_with(|c| {
            c.arg("diff").arg(format!(
                "--relative={}",
                patch_link
                    .from_prefix
                    .to_str()
                    .expect("Failed to convert from_prefix to string.")
            ))
        })
        .await
    })
}
/// Write `diff` into `patch_dir` as a numbered plain `*.patch` file.
///
/// When `no_patch` is set or no patches exist yet, any existing patch files
/// are removed and the diff is written as `0001-bender-vendor.patch`;
/// otherwise the new file is numbered one past the highest existing patch.
/// An empty diff writes nothing.
pub fn gen_plain_patch(diff: String, patch_dir: impl AsRef<Path>, no_patch: bool) -> Result<()> {
    if !diff.is_empty() {
        std::fs::create_dir_all(patch_dir.as_ref()).map_err(|cause| {
            Error::chain(
                format!("Failed to create directory {:?}", patch_dir.as_ref()),
                cause,
            )
        })?;
        // BUGFIX: guard against files without an extension. The previous
        // unconditional `extension().unwrap()` panicked on any extensionless
        // entry in the patch directory (the sibling in apply_patches already
        // guards with `is_some()`).
        let mut patches = std::fs::read_dir(patch_dir.as_ref())?
            .map(move |f| f.unwrap().path())
            .filter(|f| f.extension().map_or(false, |ext| ext == "patch"))
            .collect::<Vec<_>>();
        patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
        let new_patch = if no_patch || patches.is_empty() {
            // Start a fresh patch series.
            for patch_file in patches {
                std::fs::remove_file(patch_file)?;
            }
            "0001-bender-vendor.patch".to_string()
        } else {
            // BUGFIX: use checked slicing so a patch name shorter than four
            // bytes (or with a multi-byte character at the boundary) yields
            // the explicit error below instead of a slice panic.
            let leading_numbers = patches
                .iter()
                .map(|file_path| file_path.file_name().unwrap().to_str().unwrap())
                .map(|s| s.get(..4).unwrap_or(""))
                .collect::<Vec<_>>();
            if !leading_numbers
                .iter()
                .all(|s| s.len() == 4 && s.chars().all(char::is_numeric))
            {
                Err(Error::new(format!(
                    "Please ensure all patches start with four numbers for proper ordering in {}",
                    patch_dir.as_ref().to_str().unwrap()
                )))?;
            }
            let max_number = leading_numbers
                .iter()
                .map(|s| s.parse::<i32>().unwrap())
                .max()
                .unwrap();
            format!("{:04}-bender-vendor.patch", max_number + 1)
        };
        std::fs::write(patch_dir.as_ref().join(new_patch), diff)?;
    }
    Ok(())
}
/// Generate a numbered git format-patch in the mapping's patch directory
/// from the changes currently staged in the local (vendored) repository.
///
/// The staged diff is captured from the local repo, replayed onto the
/// patched upstream checkout, committed there with `message`, and finally
/// emitted via `git format-patch` with paths rewritten relative to the
/// mapping's source prefix.
pub fn gen_format_patch(
    rt: &Runtime,
    sess: &Session,
    git: Git,
    patch_link: PatchLink,
    target_dir: impl AsRef<Path>,
    message: Option<&String>,
) -> Result<()> {
    // Local path this mapping was vendored to; must already exist.
    let to_path = patch_link
        .to_prefix
        .clone()
        .prefix_paths(target_dir.as_ref());
    if !&to_path.exists() {
        return Err(Error::new(format!(
            "Could not find {}. Did you run bender vendor init?",
            to_path.to_str().unwrap()
        )));
    }
    // Git handle on the repository containing the vendored files (use the
    // parent directory when the mapping points at a single file).
    let git_parent = Git::new(
        if to_path.is_dir() {
            &to_path
        } else {
            to_path.parent().unwrap()
        },
        &sess.config.git,
    );
    let from_path_relative = if to_path.is_dir() {
        patch_link.from_prefix.clone()
    } else {
        patch_link.from_prefix.parent().unwrap().to_path_buf()
    };
    // NOTE(review): callers are expected to have ensured `patch_dir` is
    // `Some` (see the dispatch in `run`); this unwrap panics otherwise.
    let patch_dir = patch_link.patch_dir.clone().unwrap();
    // Capture the staged local changes, restricted to this mapping.
    let get_diff_cached = rt
        .block_on(async {
            git_parent
                .spawn_with(|c| {
                    c.arg("diff")
                        .arg("--relative")
                        .arg("--cached")
                        .arg(if !to_path.is_dir() {
                            patch_link.to_prefix.file_name().unwrap().to_str().unwrap()
                        } else {
                            "."
                        })
                })
                .await
        })
        .map_err(|cause| Error::chain("Failed to generate diff", cause))?;
    if !get_diff_cached.is_empty() {
        // Replay the staged diff onto the patched upstream checkout and
        // commit it there so format-patch can export it as HEAD.
        let tmp_format_dir = TempDir::new(".bender.format.tmp")?;
        let tmp_format_path = tmp_format_dir.path();
        let diff_cached_path = tmp_format_path.join("staged.diff");
        std::fs::write(diff_cached_path.clone(), get_diff_cached)?;
        rt.block_on(async {
            git.spawn_with(|c| {
                c.arg("apply")
                    .arg("--directory")
                    .arg(&from_path_relative)
                    .arg("-p1")
                    .arg(&diff_cached_path)
            })
            .and_then(|_| git.spawn_with(|c| c.arg("add").arg("--all")))
            .await
        }).map_err(|cause| Error::chain("Could not apply staged changes on top of patched upstream repository. Did you commit all previously patched modifications?", cause))?;
        rt.block_on(git.commit(message))?;
        std::fs::create_dir_all(patch_dir.clone()).map_err(|cause| {
            Error::chain(
                format!("Failed to create directory {:?}", patch_dir.clone()),
                cause,
            )
        })?;
        // Determine the next patch number from the existing patch files.
        let mut patches = std::fs::read_dir(patch_dir.clone())?
            .map(move |f| f.unwrap().path())
            .filter(|f| f.extension().is_some())
            .filter(|f| f.extension().unwrap() == "patch")
            .collect::<Vec<_>>();
        patches.sort_by_key(|patch_path| patch_path.to_str().unwrap().to_lowercase());
        let max_number = if patches.is_empty() {
            0
        } else {
            // NOTE(review): `&s[..4]` panics if a patch file name is shorter
            // than four bytes or has a multi-byte character at the boundary —
            // consider checked slicing (`s.get(..4)`) with an explicit error.
            let leading_numbers = patches
                .iter()
                .map(|file_path| file_path.file_name().unwrap().to_str().unwrap())
                .map(|s| &s[..4])
                .collect::<Vec<_>>();
            if !leading_numbers
                .iter()
                .all(|s| s.chars().all(char::is_numeric))
            {
                Err(Error::new(format!(
                    "Please ensure all patches start with four numbers for proper ordering in {}",
                    patch_dir.to_str().unwrap()
                )))?;
            }
            leading_numbers
                .iter()
                .map(|s| s.parse::<i32>().unwrap())
                .max()
                .unwrap()
        };
        // Emit the new commit as a single numbered format-patch with paths
        // relative to the mapping's source prefix.
        rt.block_on(async {
            git.spawn_with(|c| {
                c.arg("format-patch")
                    .arg("-o")
                    .arg(patch_dir.to_str().unwrap())
                    .arg("-1")
                    .arg(format!("--start-number={}", max_number + 1))
                    .arg(format!(
                        "--relative={}",
                        from_path_relative.to_str().unwrap()
                    ))
                    .arg("HEAD")
            })
            .await
        })?;
    }
    Ok(())
}
/// Recursively copy `source` into `destination`, keeping only entries that
/// match an `includes` glob (or an ancestor of one, so parent directories
/// are traversed) and skipping entries matching an `ignore` glob. Symlinks
/// are re-created rather than followed (Unix only).
pub fn copy_recursively(
    source: impl AsRef<Path> + std::fmt::Debug,
    destination: impl AsRef<Path> + std::fmt::Debug,
    // Kept as `&Vec<String>` (not `&[String]`): call sites build these
    // arguments with a bare `.collect()` whose type is inferred from here.
    includes: &Vec<String>,
    ignore: &Vec<String>,
) -> Result<()> {
    std::fs::create_dir_all(&destination).map_err(|cause| {
        Error::chain(
            format!("Failed to create directory {:?}", &destination),
            cause,
        )
    })?;
    for entry in std::fs::read_dir(source)? {
        let entry = entry?;
        // Skip entries not covered by any include pattern or matched by an
        // ignore pattern. Checking every ancestor of the include pattern
        // lets intermediate directories pass so recursion can reach matches.
        if !includes.iter().any(|include| {
            PathBuf::from(include).ancestors().any(|include_path| {
                Pattern::new(include_path.to_str().unwrap())
                    .unwrap()
                    .matches_path(&entry.path())
            })
        }) || ignore.iter().any(|ignore_path| {
            Pattern::new(ignore_path)
                .unwrap()
                .matches_path(&entry.path())
        }) {
            continue;
        }
        let filetype = entry.file_type()?;
        if filetype.is_dir() {
            copy_recursively(
                entry.path(),
                destination.as_ref().join(entry.file_name()),
                includes,
                ignore,
            )?;
        } else if filetype.is_symlink() {
            // BUGFIX: propagate a read_link failure as a chained error
            // instead of panicking on `unwrap()`.
            let orig = std::fs::read_link(entry.path()).map_err(|cause| {
                Error::chain(
                    format!("Failed to read symlink {:?}.", entry.path()),
                    cause,
                )
            })?;
            std::os::unix::fs::symlink(orig, destination.as_ref().join(entry.file_name()))?;
        } else {
            std::fs::copy(entry.path(), destination.as_ref().join(entry.file_name())).map_err(
                |cause| {
                    Error::chain(
                        format!(
                            "Failed to copy {} to {}.",
                            entry.path().to_str().unwrap(),
                            destination
                                .as_ref()
                                .join(entry.file_name())
                                .to_str()
                                .unwrap()
                        ),
                        cause,
                    )
                },
            )?;
        }
    }
    Ok(())
}
/// Prefix every include pattern with `prefix`; patterns naming an existing
/// directory get a trailing `**` glob so their contents match recursively.
pub fn extend_paths(include_from_upstream: &[String], prefix: impl AsRef<Path>) -> Vec<String> {
    let prefix = prefix.as_ref();
    include_from_upstream
        .iter()
        .map(|pattern| {
            let full = PathBuf::from(pattern).prefix_paths(prefix);
            let extended = if full.is_dir() { full.join("**") } else { full };
            extended.to_str().unwrap().to_string()
        })
        .collect()
}