use clap::Args;
use rc_core::{AliasManager, ListOptions, ObjectStore as _, RemotePath};
use rc_s3::S3Client;
use serde::Serialize;
use std::io::Write as _;
use std::process::{Command, Output};
use crate::exit_code::ExitCode;
use crate::output::{Formatter, OutputConfig};
// CLI arguments for the `find` subcommand.
//
// NOTE(review): plain `//` comments are used deliberately — `///` doc comments
// on clap-derive fields are harvested into --help text and would change the
// CLI's user-visible output.
#[derive(Args, Debug)]
pub struct FindArgs {
    // Search root in "alias/bucket[/prefix]" form (see parse_find_path).
    pub path: String,
    // Glob pattern matched against the object's filename only (last path segment).
    #[arg(long)]
    pub name: Option<String>,
    // Minimum size filter, e.g. "10M" (see parse_size for accepted suffixes).
    #[arg(long)]
    pub larger: Option<String>,
    // Maximum size filter, same format as --larger.
    #[arg(long)]
    pub smaller: Option<String>,
    // Only objects modified within this duration, e.g. "7d" (see parse_duration_ago).
    #[arg(long)]
    pub newer: Option<String>,
    // Only objects modified longer ago than this duration.
    #[arg(long)]
    pub older: Option<String>,
    // Maximum depth below the base prefix; 0 means unlimited.
    #[arg(long, default_value = "0")]
    pub maxdepth: usize,
    // Print only the match count and total size instead of listing matches.
    #[arg(long)]
    pub count: bool,
    // Command template run once per match; "{}" is replaced with the object path.
    #[arg(long)]
    pub exec: Option<String>,
    // Print full "alias/bucket/key" paths instead of bare keys.
    #[arg(long)]
    pub print: bool,
}
/// JSON payload emitted for the default (non --count) `--json` output mode.
#[derive(Debug, Serialize)]
struct FindOutput {
    /// All objects that passed the filters.
    matches: Vec<MatchInfo>,
    /// Number of entries in `matches`.
    total_count: usize,
    /// Sum of the known sizes of all matches, in bytes.
    total_size_bytes: i64,
    /// Human-readable rendering of `total_size_bytes` (binary units).
    total_size_human: String,
}
/// A single matched object; optional fields are omitted from JSON when absent.
#[derive(Debug, Serialize)]
struct MatchInfo {
    /// Object key; rewritten to a full "alias/bucket/key" path when --print is set.
    key: String,
    /// Size in bytes — None when the listing did not report one (TODO confirm upstream).
    #[serde(skip_serializing_if = "Option::is_none")]
    size_bytes: Option<i64>,
    /// Human-readable size, as provided by the listing.
    #[serde(skip_serializing_if = "Option::is_none")]
    size_human: Option<String>,
    /// Last-modified timestamp rendered to a string, when reported.
    #[serde(skip_serializing_if = "Option::is_none")]
    last_modified: Option<String>,
}
/// Entry point for the `find` command.
///
/// Resolves the alias, lists objects under the requested path, applies the
/// filters, then either runs `--exec` per match, prints a `--count` summary,
/// or lists the matches (text or JSON). Returns a process exit code directly
/// rather than a `Result` so errors map onto distinct shell exit statuses.
pub async fn execute(args: FindArgs, output_config: OutputConfig) -> ExitCode {
    let formatter = Formatter::new(output_config);
    // Validate the "alias/bucket[/prefix]" path before doing any I/O.
    let (alias_name, bucket, prefix) = match parse_find_path(&args.path) {
        Ok(p) => p,
        Err(e) => {
            formatter.error(&e);
            return ExitCode::UsageError;
        }
    };
    let alias_manager = match AliasManager::new() {
        Ok(am) => am,
        Err(e) => {
            formatter.error(&format!("Failed to load aliases: {e}"));
            return ExitCode::GeneralError;
        }
    };
    let alias = match alias_manager.get(&alias_name) {
        Ok(a) => a,
        Err(_) => {
            formatter.error(&format!("Alias '{alias_name}' not found"));
            return ExitCode::NotFound;
        }
    };
    let client = match S3Client::new(alias).await {
        Ok(c) => c,
        Err(e) => {
            formatter.error(&format!("Failed to create S3 client: {e}"));
            return ExitCode::NetworkError;
        }
    };
    // Parse all filter strings up front so bad input fails before the search.
    let filters = match build_filters(&args) {
        Ok(f) => f,
        Err(e) => {
            formatter.error(&e);
            return ExitCode::UsageError;
        }
    };
    let remote_path = RemotePath::new(&alias_name, &bucket, prefix.as_deref().unwrap_or(""));
    let matches = match find_objects(&client, &remote_path, &filters, args.maxdepth).await {
        Ok(m) => m,
        Err(e) => {
            formatter.error(&format!("Search failed: {e}"));
            return ExitCode::NetworkError;
        }
    };
    // --exec: run the command template once per match, streaming its output
    // through; the first failure (spawn error, write error, or non-zero exit)
    // aborts the whole run.
    if let Some(exec_template) = args.exec.as_deref() {
        if formatter.is_json() {
            // Raw child-process output would corrupt the JSON stream.
            formatter.error("--exec cannot be used with --json output");
            return ExitCode::UsageError;
        }
        let exec_argv_template = match parse_exec_template(exec_template) {
            Ok(template) => template,
            Err(e) => {
                formatter.error(&e);
                return ExitCode::UsageError;
            }
        };
        for m in &matches {
            let object_path = full_object_path(&alias_name, &bucket, &m.key);
            let (program, exec_args, command_text) =
                render_exec_command(&exec_argv_template, &object_path);
            let output = match run_exec_command(&program, &exec_args) {
                Ok(output) => output,
                Err(e) => {
                    formatter.error(&format!(
                        "Failed to run command for {}: {} ({})",
                        object_path, command_text, e
                    ));
                    return ExitCode::GeneralError;
                }
            };
            // Relay the child's captured stdout/stderr to our own streams.
            if std::io::stdout().write_all(&output.stdout).is_err() {
                formatter.error("Failed to write command stdout");
                return ExitCode::GeneralError;
            }
            if std::io::stderr().write_all(&output.stderr).is_err() {
                formatter.error("Failed to write command stderr");
                return ExitCode::GeneralError;
            }
            if !output.status.success() {
                formatter.error(&format!(
                    "Command failed for {} (status {}): {}",
                    object_path, output.status, command_text
                ));
                return ExitCode::GeneralError;
            }
        }
    }
    let mut display_matches = matches;
    // --print: display full "alias/bucket/key" paths instead of bare keys.
    if args.print {
        for m in &mut display_matches {
            m.key = full_object_path(&alias_name, &bucket, &m.key);
        }
    }
    let total_count = display_matches.len();
    // Objects with no reported size contribute 0 to the total.
    let total_size: i64 = display_matches.iter().filter_map(|m| m.size_bytes).sum();
    if args.count {
        if formatter.is_json() {
            let output = serde_json::json!({
                "count": total_count,
                "total_size_bytes": total_size,
                "total_size_human": humansize::format_size(total_size as u64, humansize::BINARY)
            });
            formatter.json(&output);
        } else {
            let total_size_human = humansize::format_size(total_size as u64, humansize::BINARY);
            formatter.println(&format!(
                "Found {} object(s), total size: {}",
                formatter.style_size(&total_count.to_string()),
                formatter.style_size(&total_size_human)
            ));
        }
    } else if formatter.is_json() {
        let output = FindOutput {
            matches: display_matches,
            total_count,
            total_size_bytes: total_size,
            total_size_human: humansize::format_size(total_size as u64, humansize::BINARY),
        };
        formatter.json(&output);
    } else if display_matches.is_empty() {
        formatter.println("No matches found.");
    } else {
        for m in &display_matches {
            let size = m.size_human.as_deref().unwrap_or("0B");
            // Right-align sizes into a 10-char column for readability.
            let styled_size = formatter.style_size(&format!("{:>10}", size));
            let styled_key = formatter.style_file(&m.key);
            formatter.println(&format!("{styled_size} {styled_key}"));
        }
        let total_size_human = humansize::format_size(total_size as u64, humansize::BINARY);
        formatter.println(&format!(
            "\nTotal: {} object(s), {}",
            formatter.style_size(&total_count.to_string()),
            formatter.style_size(&total_size_human)
        ));
    }
    ExitCode::Success
}
/// Joins alias, bucket, and key into the canonical "alias/bucket/key" display path.
fn full_object_path(alias: &str, bucket: &str, key: &str) -> String {
    let remote = RemotePath::new(alias, bucket, key);
    remote.to_full_path()
}
/// Splits the --exec template into an argv using shell-like quoting rules.
///
/// Fails on unbalanced quotes or an empty command.
fn parse_exec_template(exec_template: &str) -> Result<Vec<String>, String> {
    match shlex::split(exec_template) {
        None => Err("Invalid --exec template: unbalanced quotes".to_string()),
        Some(argv) if argv.is_empty() => {
            Err("Invalid --exec template: command cannot be empty".to_string())
        }
        Some(argv) => Ok(argv),
    }
}
/// Substitutes `object_path` for every "{}" in the argv template.
///
/// Returns the program, its arguments, and a joined display string of the
/// full command (used in error messages). The template must be non-empty,
/// which parse_exec_template guarantees.
fn render_exec_command(
    argv_template: &[String],
    object_path: &str,
) -> (String, Vec<String>, String) {
    let mut rendered = Vec::with_capacity(argv_template.len());
    for arg in argv_template {
        rendered.push(arg.replace("{}", object_path));
    }
    let command_text = rendered.join(" ");
    let mut parts = rendered.into_iter();
    let program = parts.next().expect("exec template is validated non-empty");
    let args: Vec<String> = parts.collect();
    (program, args, command_text)
}
/// Runs `program` with `args`, capturing stdout, stderr, and the exit status.
fn run_exec_command(program: &str, args: &[String]) -> std::io::Result<Output> {
    let mut command = Command::new(program);
    command.args(args);
    command.output()
}
/// Parsed, ready-to-apply filters built from the raw CLI strings.
struct FindFilters {
    // Glob matched against the filename (last path segment) only.
    name_pattern: Option<glob::Pattern>,
    // From --larger: objects strictly smaller than this are skipped.
    min_size: Option<i64>,
    // From --smaller: objects strictly larger than this are skipped.
    max_size: Option<i64>,
    // From --newer: objects modified before this instant are skipped.
    newer_than: Option<jiff::Timestamp>,
    // From --older: objects modified after this instant are skipped.
    older_than: Option<jiff::Timestamp>,
}
/// Translates the raw CLI filter strings in `args` into parsed filters.
///
/// Returns a user-facing error message for any string that fails to parse.
/// Both --newer and --older are anchored to the same "now" so they stay
/// mutually consistent.
fn build_filters(args: &FindArgs) -> Result<FindFilters, String> {
    let name_pattern = args
        .name
        .as_deref()
        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid name pattern: {e}")))
        .transpose()?;
    let min_size = match args.larger.as_deref() {
        Some(s) => Some(parse_size(s)?),
        None => None,
    };
    let max_size = match args.smaller.as_deref() {
        Some(s) => Some(parse_size(s)?),
        None => None,
    };
    let now = jiff::Timestamp::now();
    let newer_than = match args.newer.as_deref() {
        Some(d) => Some(parse_duration_ago(d, now)?),
        None => None,
    };
    let older_than = match args.older.as_deref() {
        Some(d) => Some(parse_duration_ago(d, now)?),
        None => None,
    };
    Ok(FindFilters {
        name_pattern,
        min_size,
        max_size,
        newer_than,
        older_than,
    })
}
/// Parses a human size string like "1024", "10K", or "2GB" into bytes.
///
/// Suffixes are case-insensitive binary units: B, K/KB, M/MB, G/GB, T/TB.
/// A bare number means bytes. Returns a user-facing error message for empty
/// input, a non-numeric value, an unknown suffix, or a value that overflows
/// i64 (previously `num * multiplier` could overflow — panicking in debug
/// builds and silently wrapping in release).
fn parse_size(s: &str) -> Result<i64, String> {
    let s = s.trim();
    if s.is_empty() {
        return Err("Size cannot be empty".to_string());
    }
    // The suffix starts at the first alphabetic character, if any.
    let suffix_start = s.find(|c: char| c.is_ascii_alphabetic()).unwrap_or(s.len());
    let num_str = &s[..suffix_start];
    let suffix = &s[suffix_start..];
    let num: i64 = num_str
        .parse()
        .map_err(|_| format!("Invalid size number: {num_str}"))?;
    let multiplier: i64 = match suffix.to_uppercase().as_str() {
        "" | "B" => 1,
        "K" | "KB" => 1024,
        "M" | "MB" => 1024 * 1024,
        "G" | "GB" => 1024 * 1024 * 1024,
        "T" | "TB" => 1024_i64 * 1024 * 1024 * 1024,
        _ => return Err(format!("Unknown size suffix: {suffix}")),
    };
    // checked_mul guards against overflow for absurdly large inputs.
    num.checked_mul(multiplier)
        .ok_or_else(|| format!("Size too large: {s}"))
}
/// Parses a duration like "30s", "5m", "2h", "7d", "1w" (a bare number means
/// seconds) and returns the timestamp that long before `now`.
///
/// Returns a user-facing error message for empty input, a non-numeric value,
/// an unknown suffix, or a duration that overflows — previously the
/// seconds multiplication was unchecked (panic in debug, wrap in release)
/// and `Span::new().seconds()` could panic on out-of-range values.
fn parse_duration_ago(s: &str, now: jiff::Timestamp) -> Result<jiff::Timestamp, String> {
    let s = s.trim();
    if s.is_empty() {
        return Err("Duration cannot be empty".to_string());
    }
    // Split a single trailing unit letter off the number; a bare number
    // defaults to seconds. The guard guarantees the last char is ASCII,
    // so the byte-index split is safe.
    let (num_str, suffix) = if s.ends_with(|c: char| c.is_ascii_alphabetic()) {
        s.split_at(s.len() - 1)
    } else {
        (s, "s")
    };
    let num: i64 = num_str
        .parse()
        .map_err(|_| format!("Invalid duration number: {num_str}"))?;
    let per_unit: i64 = match suffix.to_lowercase().as_str() {
        "s" => 1,
        "m" => 60,
        "h" => 3600,
        "d" => 86_400,
        "w" => 604_800,
        _ => return Err(format!("Unknown duration suffix: {suffix}")),
    };
    let seconds = num
        .checked_mul(per_unit)
        .ok_or_else(|| format!("Duration too large: {s}"))?;
    // try_seconds reports out-of-range spans as an error instead of panicking.
    let duration = jiff::Span::new()
        .try_seconds(seconds)
        .map_err(|e| format!("Duration too large: {e}"))?;
    now.checked_sub(duration)
        .map_err(|e| format!("Duration overflow: {e}"))
}
/// Lists every object under `path` (recursively, paging through results) and
/// returns those that pass all of `filters`.
///
/// `maxdepth == 0` means unlimited; otherwise objects more than `maxdepth`
/// path segments below the base prefix are skipped. Depth is measured by
/// counting '/' characters relative to the base prefix.
///
/// NOTE(review): objects with no reported size or mtime pass the size/time
/// filters unconditionally — presumably intentional; confirm with callers.
async fn find_objects(
    client: &S3Client,
    path: &RemotePath,
    filters: &FindFilters,
    maxdepth: usize,
) -> Result<Vec<MatchInfo>, rc_core::Error> {
    let mut matches = Vec::new();
    let mut continuation_token: Option<String> = None;
    let base_prefix = &path.key;
    let base_depth = base_prefix.matches('/').count();
    loop {
        let options = ListOptions {
            recursive: true,
            max_keys: Some(1000),
            continuation_token: continuation_token.clone(),
            ..Default::default()
        };
        let result = client.list_objects(path, options).await?;
        for item in result.items {
            if item.is_dir {
                continue;
            }
            if maxdepth > 0 {
                let item_depth = item.key.matches('/').count();
                // saturating_sub guards against a key shallower than the base
                // prefix, which would underflow the usize subtraction.
                if item_depth.saturating_sub(base_depth) > maxdepth {
                    continue;
                }
            }
            if let Some(ref pattern) = filters.name_pattern {
                // Match against the filename (last path segment) only.
                let filename = item.key.rsplit('/').next().unwrap_or(&item.key);
                if !pattern.matches(filename) {
                    continue;
                }
            }
            if let Some(size) = item.size_bytes {
                if let Some(min) = filters.min_size
                    && size < min
                {
                    continue;
                }
                if let Some(max) = filters.max_size
                    && size > max
                {
                    continue;
                }
            }
            if let Some(modified) = item.last_modified {
                if let Some(ref newer) = filters.newer_than
                    && modified < *newer
                {
                    continue;
                }
                if let Some(ref older) = filters.older_than
                    && modified > *older
                {
                    continue;
                }
            }
            matches.push(MatchInfo {
                key: item.key,
                size_bytes: item.size_bytes,
                size_human: item.size_human,
                last_modified: item.last_modified.map(|t| t.to_string()),
            });
        }
        if !result.truncated {
            break;
        }
        match result.continuation_token {
            Some(token) => continuation_token = Some(token),
            // Defensive: a truncated page without a continuation token would
            // otherwise make this loop re-fetch the same page forever.
            None => break,
        }
    }
    Ok(matches)
}
/// Splits a search path of the form "alias/bucket[/prefix]" into its parts.
///
/// Returns a user-facing error for an empty path, a missing bucket, or an
/// empty alias/bucket component. Previously "alias/" and "/bucket" slipped
/// through with empty components and only failed later at the S3 layer; the
/// old `_ =>` arm was also unreachable since `splitn(3, '/')` yields at most
/// three parts.
fn parse_find_path(path: &str) -> Result<(String, String, Option<String>), String> {
    if path.is_empty() {
        return Err("Path cannot be empty".to_string());
    }
    let mut pieces = path.splitn(3, '/');
    // splitn always yields at least one piece for a non-empty input.
    let alias = pieces.next().unwrap_or("");
    if alias.is_empty() {
        return Err(format!("Invalid path format: '{path}'"));
    }
    let bucket = match pieces.next() {
        Some(b) if !b.is_empty() => b,
        _ => return Err("Bucket name is required".to_string()),
    };
    let prefix = pieces.next().map(str::to_string);
    Ok((alias.to_string(), bucket.to_string(), prefix))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Size parsing: bare numbers are bytes; K/M/G and KB suffixes are binary units.
    #[test]
    fn test_parse_size() {
        assert_eq!(parse_size("1024").unwrap(), 1024);
        assert_eq!(parse_size("1K").unwrap(), 1024);
        assert_eq!(parse_size("1M").unwrap(), 1024 * 1024);
        assert_eq!(parse_size("1G").unwrap(), 1024 * 1024 * 1024);
        assert_eq!(parse_size("10KB").unwrap(), 10 * 1024);
    }
    // Empty input, non-numeric input, and unknown suffixes are all rejected.
    #[test]
    fn test_parse_size_invalid() {
        assert!(parse_size("").is_err());
        assert!(parse_size("abc").is_err());
        assert!(parse_size("1X").is_err());
    }
    // "alias/bucket" yields no prefix; anything after the second '/' is the prefix.
    #[test]
    fn test_parse_find_path() {
        let (alias, bucket, prefix) = parse_find_path("myalias/mybucket").unwrap();
        assert_eq!(alias, "myalias");
        assert_eq!(bucket, "mybucket");
        assert!(prefix.is_none());
        let (alias, bucket, prefix) = parse_find_path("myalias/mybucket/path/to").unwrap();
        assert_eq!(alias, "myalias");
        assert_eq!(bucket, "mybucket");
        assert_eq!(prefix, Some("path/to".to_string()));
    }
    // An empty path or one without a bucket component is rejected.
    #[test]
    fn test_parse_find_path_errors() {
        assert!(parse_find_path("").is_err());
        assert!(parse_find_path("myalias").is_err());
    }
    // Full paths are "alias/bucket/key"; an empty key leaves no trailing slash.
    #[test]
    fn test_full_object_path() {
        assert_eq!(
            full_object_path("test", "bucket", "a/b.txt"),
            "test/bucket/a/b.txt"
        );
        assert_eq!(full_object_path("test", "bucket", ""), "test/bucket");
    }
    // Templates are split with shell quoting; "{}" placeholders survive splitting.
    #[test]
    fn test_parse_exec_template() {
        assert_eq!(
            parse_exec_template("echo EXEC:{}").unwrap(),
            vec!["echo".to_string(), "EXEC:{}".to_string()]
        );
        assert_eq!(
            parse_exec_template(r#"printf '%s\n' "{}""#).unwrap(),
            vec!["printf".to_string(), "%s\\n".to_string(), "{}".to_string()]
        );
    }
    // Empty templates and unbalanced quotes are rejected.
    #[test]
    fn test_parse_exec_template_errors() {
        assert!(parse_exec_template("").is_err());
        assert!(parse_exec_template("'unterminated").is_err());
    }
    // Every "{}" occurrence in every argv element is replaced with the path.
    #[test]
    fn test_render_exec_command() {
        let template = vec![
            "echo".to_string(),
            "prefix:{}".to_string(),
            "{}".to_string(),
        ];
        let (program, args, text) = render_exec_command(&template, "test/bucket/a.txt");
        assert_eq!(program, "echo");
        assert_eq!(
            args,
            vec![
                "prefix:test/bucket/a.txt".to_string(),
                "test/bucket/a.txt".to_string()
            ]
        );
        assert_eq!(
            text,
            "echo prefix:test/bucket/a.txt test/bucket/a.txt".to_string()
        );
    }
}