use clap::Parser;
use crossbeam_channel::Sender;
use std::{
io::{stdin, stdout, Write},
path::{Path, PathBuf},
time::SystemTime,
};
// Command-line arguments for `cargo clean-all`.
// NOTE(review): plain `//` comments are used on purpose — `///` doc comments
// on clap-derive fields are turned into `--help` text and would change the
// program's runtime output.
#[derive(Debug, Parser)]
#[clap(author, version, about, bin_name = "cargo clean-all", long_about = None)]
struct AppArgs {
    // Root directory to scan for cargo projects (defaults to the CWD).
    #[clap(default_value_t = String::from("."), value_name = "DIR")]
    root_dir: String,
    // Skip the interactive confirmation prompt.
    #[clap(short = 'y', long = "yes")]
    yes: bool,
    // Only clean target dirs strictly larger than this size in bytes
    // (0 = no size threshold).
    #[clap(
        short = 's',
        long = "keep-size",
        value_name = "SIZE",
        default_value_t = 0
    )]
    keep_size: u64,
    // Only clean projects last modified at least this many days ago
    // (0 = no age threshold).
    #[clap(
        short = 'd',
        long = "keep-days",
        value_name = "DAYS",
        default_value_t = 0
    )]
    keep_last_modified: u32,
    // Report what would be cleaned without deleting anything.
    #[clap(long = "dry-run")]
    dry_run: bool,
    // Worker threads for the directory scan (0 = one per logical CPU).
    #[clap(
        short = 't',
        long = "threads",
        value_name = "THREADS",
        default_value_t = 0
    )]
    number_of_threads: usize,
}
/// Entry point: scan for cargo projects, show what would be cleaned, ask for
/// confirmation (unless `--yes`/`--dry-run`), then delete the `target` dirs.
fn main() {
    // When run as a cargo subcommand (`cargo clean-all`), cargo passes
    // "clean-all" as the first real argument. Drop our own binary name in
    // that case so clap sees "clean-all" as argv[0] and parsing behaves the
    // same for both invocation styles. (Fix: the original called
    // `std::env::args()` twice to peek at argv[1]; collect once instead.)
    let mut raw_args: Vec<String> = std::env::args().collect();
    if raw_args.get(1).map(String::as_str) == Some("clean-all") {
        raw_args.remove(0);
    }
    let args = AppArgs::parse_from(raw_args);

    let scan_path = Path::new(&args.root_dir);

    // Each ProjectDir carries a flag telling whether the project actually has
    // a `target` directory; only those are worth analyzing.
    let project_dirs = find_cargo_projects(scan_path, args.number_of_threads);

    // Partition into (selected for cleaning, kept): a project is cleaned only
    // if it was last modified at least `--keep-days` days ago AND its target
    // dir is strictly larger than `--keep-size` bytes.
    let (mut projects, mut ignored): (Vec<_>, Vec<_>) = project_dirs
        .into_iter()
        .filter_map(|it| it.1.then(|| ProjectTargetAnalysis::analyze(&it.0)))
        .partition(|tgt| {
            let secs_elapsed = tgt
                .last_modified
                .elapsed()
                .unwrap_or_default()
                .as_secs_f32();
            let days_elapsed = secs_elapsed / (60.0 * 60.0 * 24.0);
            days_elapsed >= args.keep_last_modified as f32 && tgt.size > args.keep_size
        });

    // Smallest-first so the biggest reclaimable dirs end up at the bottom of
    // the printed list, right above the summary.
    projects.sort_by_key(|it| it.size);
    ignored.sort_by_key(|it| it.size);

    let total_size: u64 = projects.iter().map(|it| it.size).sum();

    println!("Ignoring the following project directories:");
    ignored
        .iter()
        .for_each(ProjectTargetAnalysis::print_listformat);

    println!("\nSelected the following project directories for cleaning:");
    projects
        .iter()
        .for_each(ProjectTargetAnalysis::print_listformat);

    println!(
        "\nSelected {}/{} projects, total freeable size: {}",
        projects.len(),
        projects.len() + ignored.len(),
        bytefmt::format(total_size)
    );

    if args.dry_run {
        println!("Dry run. Not doing any cleanup");
        return;
    }

    // Interactive confirmation unless `--yes` was passed; anything other than
    // a literal "yes" (case-insensitive) aborts.
    if !args.yes {
        let mut inp = String::new();
        print!("Clean the project directories shown above? (yes/no): ");
        stdout().flush().unwrap();
        stdin().read_line(&mut inp).unwrap();
        if inp.trim().to_lowercase() != "yes" {
            println!("Cleanup cancelled");
            return;
        }
    }

    println!("Starting cleanup...");
    projects.iter().for_each(|p| {
        // NOTE(review): this is the third-party `remove_dir_all` crate, not
        // std::fs::remove_dir_all. A failure for one project does not abort
        // the rest of the cleanup.
        match remove_dir_all::remove_dir_all(p.project_path.join("target")) {
            Ok(_) => println!("- Successfully cleaned {}", p.project_path.display()),
            Err(err) => {
                println!("- Failed to clean {}", p.project_path.display());
                println!(" Error: {}", err);
            }
        }
    });
    println!("\nDone!");
}
// A directory to scan plus a sender for enqueueing follow-up jobs. Because
// every queued Job owns a Sender clone, the job channel disconnects on its
// own once the last in-flight Job has been processed and dropped.
struct Job(PathBuf, Sender<Job>);
// A discovered cargo project: its root path, and whether it contains a
// `target` directory.
struct ProjectDir(PathBuf, bool);
/// Recursively discover cargo projects below `path` using a pool of worker
/// threads (`num_threads`, or one per logical CPU when 0). Returns every
/// directory containing a `Cargo.toml`, flagged with whether it has a
/// `target` dir.
fn find_cargo_projects(path: &Path, mut num_threads: usize) -> Vec<ProjectDir> {
    if num_threads == 0 {
        num_threads = num_cpus::get();
    }

    let (job_sender, job_receiver) = crossbeam_channel::unbounded::<Job>();
    let (result_sender, result_receiver) = crossbeam_channel::unbounded::<ProjectDir>();

    // Spawn the workers. Each one drains the job queue until the job channel
    // disconnects, i.e. until every Sender<Job> (one lives inside each
    // queued Job) has been dropped.
    for _ in 0..num_threads {
        let jobs = job_receiver.clone();
        let results = result_sender.clone();
        std::thread::spawn(move || {
            for job in jobs {
                find_cargo_projects_task(&job.0, job.1, results.clone());
            }
        });
    }

    // Seed the queue with the root directory. The Job takes ownership of the
    // original sender, so once all directories are processed no senders
    // remain and the workers terminate.
    job_sender
        .clone()
        .send(Job(path.to_path_buf(), job_sender))
        .unwrap();

    // Drop our own result sender *before* collecting: the receiver below
    // only terminates once every sender clone (held by the workers) is gone.
    drop(result_sender);

    result_receiver.into_iter().collect()
}
/// Scan a single directory: report it as a project (via `results`) if it
/// contains a `Cargo.toml`, and enqueue every subdirectory — except `.git`
/// and the project's own `target` — as a new job for the worker pool.
fn find_cargo_projects_task(path: &Path, job_sender: Sender<Job>, results: Sender<ProjectDir>) {
    let read_dir = match path.read_dir() {
        Ok(it) => it,
        Err(e) => {
            // Unreadable directories (permissions, races) are skipped, not fatal.
            eprintln!("Error reading directory: '{}' {}", path.display(), e);
            return;
        }
    };

    // Split entries into subdirectories (to recurse into) and files (to
    // detect Cargo.toml); unreadable entries are silently dropped.
    let (dirs, files): (Vec<_>, Vec<_>) = read_dir
        .filter_map(|entry| entry.ok().map(|entry| entry.path()))
        .partition(|p| p.is_dir());

    // Fix: `.any(..)` instead of the clippy-flagged `.find(..).is_some()`.
    let has_cargo_toml = files
        .iter()
        .any(|f| f.file_name().unwrap_or_default().to_string_lossy() == "Cargo.toml");

    let mut has_target = false;
    for dir in dirs {
        // Evaluate the name checks in an inner scope so the borrow of `dir`
        // ends before we move it — fix: the original cloned the PathBuf
        // (`it.to_path_buf()`) just to work around this borrow.
        let (is_git, is_target) = {
            let name = dir.file_name().unwrap_or_default().to_string_lossy();
            (name == ".git", name == "target")
        };
        if is_git {
            // Never descend into VCS metadata.
            continue;
        }
        if is_target && has_cargo_toml {
            // This project's own build output; remember it but don't recurse.
            has_target = true;
            continue;
        }
        job_sender
            .send(Job(dir, job_sender.clone()))
            .unwrap();
    }

    if has_cargo_toml {
        results
            .send(ProjectDir(path.to_path_buf(), has_target))
            .unwrap();
    }
}
// Result of scanning one cargo project's `target` directory.
struct ProjectTargetAnalysis {
    // Root of the cargo project (the parent of `target`).
    project_path: PathBuf,
    // Total size in bytes of all files below `target`.
    size: u64,
    // Most recent modification time found below `target`
    // (UNIX_EPOCH when nothing was found).
    last_modified: SystemTime,
}
impl ProjectTargetAnalysis {
    /// Analyze the `target` directory of the cargo project rooted at `path`,
    /// recording its total size and the newest modification time found.
    pub fn analyze(path: &Path) -> Self {
        let (size, last_modified) = Self::recursive_scan_target(&path.join("target"));
        Self {
            project_path: path.to_owned(),
            size,
            last_modified,
        }
    }

    /// Recursively sum file sizes below `path` and track the most recent
    /// modification timestamp. Returns `(0, UNIX_EPOCH)` for missing paths.
    /// Fix: entries whose metadata or directory listing cannot be read are
    /// now counted as empty instead of panicking via `.unwrap()`.
    fn recursive_scan_target<T: AsRef<Path>>(path: T) -> (u64, SystemTime) {
        let path = path.as_ref();
        let default = (0, SystemTime::UNIX_EPOCH);
        if !path.exists() {
            return default;
        }
        match (path.is_file(), path.metadata()) {
            (true, Ok(md)) => (md.len(), md.modified().unwrap_or(default.1)),
            // A file whose metadata is unreadable contributes nothing.
            (true, Err(_)) => default,
            _ => match path.read_dir() {
                Ok(read_dir) => read_dir
                    .filter_map(|it| it.ok().map(|entry| entry.path()))
                    .map(Self::recursive_scan_target)
                    .fold(default, |a, b| (a.0 + b.0, a.1.max(b.1))),
                // Unreadable directory (e.g. permissions): treat as empty.
                Err(_) => default,
            },
        }
    }

    /// Print a two-line summary (name : path, mtime, size) for list output.
    fn print_listformat(&self) {
        // Fix: fall back to the stored path if canonicalization fails (e.g.
        // the directory vanished since scanning) instead of panicking.
        let path = std::fs::canonicalize(&self.project_path)
            .unwrap_or_else(|_| self.project_path.clone());
        let project_name = path.file_name().unwrap_or_default().to_string_lossy();
        let last_modified: chrono::DateTime<chrono::Local> = self.last_modified.into();
        println!(
            " {} : {}\n {}, {}",
            project_name,
            path.display(),
            last_modified.format("%Y-%m-%d %H:%M"),
            bytefmt::format(self.size)
        )
    }
}