use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::fs;
use std::path::Path;
use xshell::Shell;
use crate::environment::{
get_workspace_packages, get_workspace_root, CmdExt, Package, PackageManifest, ProgressGuard,
};
use crate::lock::LockFile;
use crate::toolchain::{prepare_toolchain, Toolchain};
/// Per-package lint settings read from `[package.metadata.rbmt.lint]`.
#[derive(Debug, serde::Deserialize, Default)]
#[serde(default)]
struct LintConfig {
    // Crate names whose duplicated versions are tolerated by the
    // duplicate-dependency check.
    allowed_duplicates: Vec<String>,
}

impl LintConfig {
    /// Load the lint config from the package's `Cargo.toml`.
    ///
    /// A missing manifest yields the default (empty) config; a present but
    /// unparsable manifest is an error.
    fn load(package_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
        // Local shim for the `rbmt` metadata table; only the `lint` key is read.
        #[derive(serde::Deserialize, Default)]
        struct RbmtTable {
            #[serde(default)]
            lint: LintConfig,
        }
        let manifest_path = package_dir.join("Cargo.toml");
        if !manifest_path.exists() {
            return Ok(Self::default());
        }
        let raw = std::fs::read_to_string(&manifest_path)?;
        let manifest: PackageManifest<RbmtTable> = toml::from_str(&raw)?;
        Ok(manifest.package.metadata.rbmt.lint)
    }
}
/// Entry point of the lint task.
///
/// Resolves the requested workspace packages, then runs — in order — the
/// workspace-wide clippy passes, the per-package clippy pass, the per-package
/// and cross-package duplicate-dependency checks, and the clippy.toml MSRV
/// check. The first failing step aborts the task with its error.
///
/// # Errors
/// Propagates any failure from package resolution, lock acquisition,
/// toolchain preparation, or the individual lint steps.
pub fn run(
    sh: &Shell,
    lockfile: LockFile,
    packages: &[String],
) -> Result<(), Box<dyn std::error::Error>> {
    // Resolve the requested names into workspace `Package` entries.
    // NOTE(review): presumably an empty `packages` selects all members, as in
    // check_cross_package_duplicate_deps below — confirm in get_workspace_packages.
    let packages = get_workspace_packages(sh, packages)?;
    // RAII guards bound to locals: the lock and the progress display stay
    // active until `run` returns.
    let _lockfile_guard = lockfile.activate(sh)?;
    let _progress = ProgressGuard::new();
    prepare_toolchain(sh, Toolchain::Nightly)?;
    rbmt_eprintln!("Running lint task...");
    lint_workspace(sh)?;
    lint_packages(sh, &packages)?;
    check_duplicate_deps(sh, &packages)?;
    check_cross_package_duplicate_deps(sh)?;
    check_clippy_toml_msrv(sh, &packages)?;
    rbmt_eprintln!("Lint task completed successfully");
    Ok(())
}
/// Run clippy over the whole workspace, denying all warnings.
///
/// Two passes are made: one with every feature enabled and one with only the
/// default features, since feature-gated code can lint differently.
fn lint_workspace(sh: &Shell) -> Result<(), Box<dyn std::error::Error>> {
    // Trailing args passed to clippy itself (after `--`).
    const DENY_WARNINGS: &[&str] = &["--", "-D", "warnings"];
    rbmt_eprintln!("Linting workspace...");
    // Pass 1: all features on.
    rbmt_cmd!(sh, "cargo --locked clippy --workspace --all-targets --all-features --keep-going")
        .args(DENY_WARNINGS)
        .run_verbose()?;
    // Pass 2: default features only.
    rbmt_cmd!(sh, "cargo --locked clippy --workspace --all-targets --keep-going")
        .args(DENY_WARNINGS)
        .run_verbose()?;
    Ok(())
}
/// Run clippy on each package individually with default features disabled,
/// denying all warnings.
fn lint_packages(sh: &Shell, packages: &[Package]) -> Result<(), Box<dyn std::error::Error>> {
    rbmt_eprintln!("Running package-specific lints...");
    let names: Vec<&str> = packages.iter().map(|pkg| pkg.name.as_str()).collect();
    rbmt_eprintln!("Found crates: {}", names.join(", "));
    for pkg in packages {
        // Run from inside the package directory so clippy targets this crate;
        // the guard restores the previous directory on drop.
        let _restore_dir = sh.push_dir(&pkg.dir);
        rbmt_cmd!(sh, "cargo --locked clippy --all-targets --no-default-features --keep-going")
            .args(&["--", "-D", "warnings"])
            .run_verbose()?;
    }
    Ok(())
}
/// Check each package's own dependency tree (dev-deps excluded) for
/// duplicated crate versions.
///
/// Duplicates named in the package's `allowed_duplicates` lint metadata
/// (see `LintConfig`) are tolerated.
///
/// # Errors
/// Returns an error — after printing a report for every offending package —
/// if any non-allowed duplicate is found.
fn check_duplicate_deps(
    sh: &Shell,
    packages: &[Package],
) -> Result<(), Box<dyn std::error::Error>> {
    rbmt_eprintln!("Checking for duplicate dependencies...");
    let mut found_duplicates = false;
    for package in packages {
        let config = LintConfig::load(&package.dir)?;
        // Run `cargo tree` from within the package directory so only this
        // package's graph is inspected.
        let _old_dir = sh.push_dir(&package.dir);
        let output = rbmt_cmd!(
            sh,
            "cargo --locked tree --target=all --all-features --duplicates --edges no-dev --prefix depth"
        )
        .ignore_status()
        .read()?;
        let tree = DuplicateTree::parse(
            &output,
            &[package.name.as_str()].into(),
            &config.allowed_duplicates,
        );
        if !tree.duplicates().is_empty() {
            found_duplicates = true;
            println!("{}", output);
            println!("Error: Found duplicate dependencies in package '{}'!", package.name);
            for (name, versions) in tree.duplicates() {
                for version in versions.keys() {
                    // Fix: this detail line previously went to stderr while the
                    // rest of the report went to stdout, so the report could
                    // interleave wrongly under redirection; keep it all on stdout.
                    println!("  {} {}", name, version);
                }
            }
        }
    }
    if found_duplicates {
        return Err("Dependency tree contains duplicates".into());
    }
    rbmt_eprintln!("No duplicate dependencies found");
    Ok(())
}
/// Warn (without failing) about duplicate dependency versions that span
/// multiple workspace members, since those propagate to consumers that
/// depend on more than one member.
fn check_cross_package_duplicate_deps(sh: &Shell) -> Result<(), Box<dyn std::error::Error>> {
    let package_info = get_workspace_packages(sh, &[])?;
    if package_info.len() <= 1 {
        // A single member cannot have cross-package duplicates.
        return Ok(());
    }
    rbmt_eprintln!("Checking for cross-package duplicate dependencies...");
    let package_names: HashSet<&str> = package_info.iter().map(|pkg| pkg.name.as_str()).collect();
    let output = rbmt_cmd!(
        sh,
        "cargo --locked tree --target=all --all-features --duplicates --edges no-dev --prefix depth"
    )
    .ignore_status()
    .read()?;
    // No allow-list here: cross-package duplicates are always reported.
    let tree = DuplicateTree::parse(&output, &package_names, &[]);
    let cross_package_dupes = tree.cross_package_duplicates();
    if !cross_package_dupes.is_empty() {
        println!("Warning: found duplicate dependencies spanning multiple workspace members.");
        println!("  These may cause duplicates in consumers that depend on multiple packages from this workspace.");
        for (crate_name, versions) in &cross_package_dupes {
            for (version, members) in *versions {
                let members: Vec<&str> = members.iter().map(String::as_str).collect();
                println!("  {} {}: {}", crate_name, version, members.join(", "));
            }
        }
        println!("Consider aligning dependency versions across workspace members.");
    } else {
        // Fix: previously this success message was printed unconditionally,
        // even right after the warning above listed found duplicates.
        rbmt_eprintln!("No cross-package duplicate dependencies found");
    }
    Ok(())
}
/// One line of `cargo tree --prefix depth` output: a run of leading digits
/// (the depth) immediately followed by the crate name and version token.
struct Dependency {
    depth: u32,
    name: String,
    version: String,
}

impl Dependency {
    /// Parse a line such as `1serde v1.0.0`; extra trailing tokens like
    /// `(*)` are ignored.
    ///
    /// Returns `None` when the line lacks a leading depth number, a name,
    /// or a version token.
    fn parse(line: &str) -> Option<Self> {
        // Leading ASCII digits form the depth; byte count == char count here.
        let digit_count = line.chars().take_while(char::is_ascii_digit).count();
        let depth: u32 = line[..digit_count].parse().ok()?;
        let mut fields = line[digit_count..].split_whitespace();
        Some(Self {
            depth,
            name: fields.next()?.to_string(),
            version: fields.next()?.to_string(),
        })
    }
}

/// Duplicate-dependency report parsed from `cargo tree --duplicates` output:
/// crate name -> version -> workspace members that pull in that version.
struct DuplicateTree {
    inner: BTreeMap<String, BTreeMap<String, BTreeSet<String>>>,
}

impl DuplicateTree {
    /// Build the report from `--prefix depth` output.
    ///
    /// Depth-0 lines introduce a duplicated crate version; deeper lines are
    /// its dependents, of which only names in `member_packages` are recorded.
    /// Crates listed in `allowed_duplicates` are skipped entirely, including
    /// their dependent lines.
    fn parse(output: &str, member_packages: &HashSet<&str>, allowed_duplicates: &[String]) -> Self {
        let mut inner: BTreeMap<String, BTreeMap<String, BTreeSet<String>>> = BTreeMap::new();
        // The (crate, version) that subsequent depth>0 lines belong to, if any.
        let mut active: Option<(String, String)> = None;
        for dep in output.lines().filter_map(Dependency::parse) {
            if dep.depth == 0 {
                if allowed_duplicates.contains(&dep.name) {
                    active = None;
                } else {
                    inner.entry(dep.name.clone()).or_default().entry(dep.version.clone()).or_default();
                    active = Some((dep.name, dep.version));
                }
            } else if let Some((name, version)) = &active {
                if member_packages.contains(dep.name.as_str()) {
                    if let Some(members) =
                        inner.get_mut(name).and_then(|versions| versions.get_mut(version))
                    {
                        members.insert(dep.name.clone());
                    }
                }
            }
        }
        Self { inner }
    }

    /// All recorded duplicates (crate -> version -> dependent members).
    fn duplicates(&self) -> &BTreeMap<String, BTreeMap<String, BTreeSet<String>>> { &self.inner }

    /// The subset of duplicates that span multiple workspace members.
    ///
    /// A crate is kept only when no single member depends on every one of its
    /// duplicated versions — if one member pulls in all versions, that member
    /// alone explains the duplication and it is not a cross-package problem.
    fn cross_package_duplicates(&self) -> BTreeMap<&str, &BTreeMap<String, BTreeSet<String>>> {
        self.inner
            .iter()
            .filter(|(_, versions)| {
                let member_spans_all_versions = versions
                    .values()
                    .flatten()
                    .any(|member| versions.values().all(|set| set.contains(member)));
                !member_spans_all_versions
            })
            .map(|(crate_name, versions)| (crate_name.as_str(), versions))
            .collect()
    }
}
/// Fail if any clippy config file (at the workspace root or in a package
/// directory) sets `msrv`; the MSRV belongs in Cargo.toml's
/// `package.rust-version` instead.
fn check_clippy_toml_msrv(
    sh: &Shell,
    packages: &[Package],
) -> Result<(), Box<dyn std::error::Error>> {
    // Both file names clippy recognizes for its configuration.
    const CLIPPY_CONFIG_FILES: &[&str] = &["clippy.toml", ".clippy.toml"];
    rbmt_eprintln!("Checking for deprecated clippy.toml MSRV settings...");
    let workspace_root = get_workspace_root(sh)?;
    // Gather every existing clippy config: workspace root first, then each package.
    let mut clippy_files: Vec<_> = CLIPPY_CONFIG_FILES
        .iter()
        .map(|name| workspace_root.join(name))
        .filter(|path| path.exists())
        .collect();
    for package in packages {
        clippy_files.extend(
            CLIPPY_CONFIG_FILES
                .iter()
                .map(|name| package.dir.join(name))
                .filter(|path| path.exists()),
        );
    }
    let mut problematic_files = Vec::new();
    for path in clippy_files {
        let config: toml::Value = toml::from_str(&fs::read_to_string(&path)?)?;
        // Only the presence of the key matters, not its value.
        if config.get("msrv").is_some() {
            problematic_files.push(path.display().to_string());
        }
    }
    if !problematic_files.is_empty() {
        println!(
            "\nError: Found MSRV in clippy.toml, use Cargo.toml package.rust-version instead:"
        );
        for file in &problematic_files {
            println!("  {}", file);
        }
        return Err("MSRV should be specified in Cargo.toml, not clippy.toml".into());
    }
    rbmt_eprintln!("No deprecated clippy.toml MSRV settings found");
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Two duplicated crates, each version pulled in by a different workspace
    // member: both crates must appear in the cross-package report.
    #[test]
    fn cross_package_duplicate() {
        let output = "\
0bitcoin_hashes v0.13.0
1pkg1 v0.1.0
0bitcoin_hashes v0.14.1
1pkg2 v0.1.0
0hex-conservative v0.1.2
1bitcoin_hashes v0.13.0 (*)
2pkg1 v0.1.0
0hex-conservative v0.2.2
1bitcoin_hashes v0.14.1 (*)
2pkg2 v0.1.0
";
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &[]);
        let dupes = tree.cross_package_duplicates();
        assert!(dupes.contains_key("bitcoin_hashes"));
        assert!(dupes.contains_key("hex-conservative"));
        assert!(dupes["bitcoin_hashes"].contains_key("v0.13.0"));
        assert!(dupes["bitcoin_hashes"].contains_key("v0.14.1"));
        assert!(dupes["hex-conservative"].contains_key("v0.1.2"));
        assert!(dupes["hex-conservative"].contains_key("v0.2.2"));
    }

    // Members appearing only transitively (depth 2, via some-lib) must still
    // be attributed to the duplicated crate at depth 0.
    #[test]
    fn cross_package_transitive_duplicates() {
        let output = "\
0hex-conservative v0.1.2
1some-lib v1.0.0
2pkg1 v0.1.0
0hex-conservative v0.2.2
1some-lib v2.0.0
2pkg2 v0.1.0
";
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &[]);
        let dupes = tree.cross_package_duplicates();
        assert!(dupes.contains_key("hex-conservative"));
        assert!(dupes["hex-conservative"].contains_key("v0.1.2"));
        assert!(dupes["hex-conservative"].contains_key("v0.2.2"));
    }

    // One member depending on every duplicated version is that member's own
    // problem (caught by the per-package check), not a cross-package one.
    #[test]
    fn cross_package_single_package_not_reported() {
        let output = "\
0foo v0.1.0
1pkg1 v0.1.0
0foo v0.2.0
1pkg1 v0.1.0
";
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &[]);
        assert!(tree.cross_package_duplicates().is_empty());
    }

    // Repeated stanzas in the `cargo tree` output must collapse into a single
    // entry per (crate, version) with a deduplicated member set.
    #[test]
    fn cross_package_dedupe_output() {
        let output = "\
0bitcoin_hashes v0.13.0
1pkg1 v0.1.0
0bitcoin_hashes v0.14.1
1pkg2 v0.1.0
0bitcoin_hashes v0.13.0
1pkg1 v0.1.0
0bitcoin_hashes v0.14.1
1pkg2 v0.1.0
";
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &[]);
        let dupes = tree.cross_package_duplicates();
        assert_eq!(dupes.len(), 1);
        assert_eq!(dupes["bitcoin_hashes"]["v0.13.0"], BTreeSet::from(["pkg1".to_string()]));
        assert_eq!(dupes["bitcoin_hashes"]["v0.14.1"], BTreeSet::from(["pkg2".to_string()]));
    }

    // When some member depends on all duplicated versions (here pkg1 and pkg2
    // both do), the duplication is filtered out of the cross-package report.
    #[test]
    fn cross_package_shared_packages_across_all_dupes() {
        let output = "\
0foo v0.1.0
1pkg1 v0.1.0
1pkg2 v0.1.0
0foo v0.2.0
1pkg1 v0.1.0
1pkg2 v0.1.0
";
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &[]);
        assert!(tree.cross_package_duplicates().is_empty());
    }

    // Empty `cargo tree` output produces an empty report.
    #[test]
    fn cross_package_empty_output_no_dupes() {
        let tree = DuplicateTree::parse("", &["pkg1", "pkg2", "pkg3"].into(), &[]);
        assert!(tree.cross_package_duplicates().is_empty());
    }

    // Crates in the allow-list are excluded from duplicates(); others are
    // still reported.
    #[test]
    fn allowed_duplicates_not_reported() {
        let output = "\
0bitcoin_hashes v0.13.0
1pkg1 v0.1.0
0bitcoin_hashes v0.14.1
1pkg2 v0.1.0
0hex-conservative v0.1.2
1pkg1 v0.1.0
0hex-conservative v0.2.2
1pkg2 v0.1.0
";
        let allowed = vec!["bitcoin_hashes".to_string()];
        let tree = DuplicateTree::parse(output, &["pkg1", "pkg2", "pkg3"].into(), &allowed);
        let dupes = tree.duplicates();
        assert!(!dupes.contains_key("bitcoin_hashes"), "allowed duplicate should be filtered");
        assert!(dupes.contains_key("hex-conservative"), "non-allowed duplicate should be reported");
    }
}