#![cfg(feature = "integration")]
use std::process::{Command, Output};
use std::time::Duration;
use tempfile::TempDir;
fn rc_binary() -> std::path::PathBuf {
if let Ok(path) = std::env::var("CARGO_BIN_EXE_rc") {
return std::path::PathBuf::from(path);
}
let debug = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap()
.join("target/debug/rc");
if debug.exists() {
return debug;
}
std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap()
.join("target/release/rc")
}
/// Build the environment-variable set that points `rc` at an isolated,
/// per-test configuration directory.
fn setup_test_env(config_dir: &std::path::Path) -> Vec<(String, String)> {
    let dir = config_dir.to_string_lossy().into_owned();
    vec![("RC_CONFIG_DIR".to_owned(), dir)]
}
/// Run the `rc` binary with `args` against `config_dir`, capturing the
/// complete `Output` (status, stdout, stderr). Panics if spawning fails.
fn run_rc(args: &[&str], config_dir: &std::path::Path) -> Output {
    let mut cmd = Command::new(rc_binary());
    cmd.args(args).envs(setup_test_env(config_dir));
    cmd.output().expect("Failed to execute rc command")
}
/// Run the `rc` binary with `args`, writing `stdin` to the child's standard
/// input, and capture the complete `Output`.
///
/// All three standard streams are piped. The whole `stdin` payload is
/// written and the handle is closed (sending EOF) before waiting, so the
/// child never blocks waiting for more input.
fn run_rc_with_stdin(args: &[&str], config_dir: &std::path::Path, stdin: &str) -> Output {
    use std::io::Write;
    let mut cmd = Command::new(rc_binary());
    cmd.args(args);
    for (key, value) in setup_test_env(config_dir) {
        cmd.env(key, value);
    }
    cmd.stdin(std::process::Stdio::piped());
    cmd.stdout(std::process::Stdio::piped());
    cmd.stderr(std::process::Stdio::piped());
    let mut child = cmd.spawn().expect("Failed to spawn rc command");
    // Take ownership of the stdin handle so dropping it closes the pipe
    // immediately after the payload is written, instead of relying only on
    // wait_with_output() to close it later.
    let mut child_stdin = child.stdin.take().expect("Failed to open stdin");
    child_stdin
        .write_all(stdin.as_bytes())
        .expect("Failed to write to stdin");
    drop(child_stdin);
    child.wait_with_output().expect("Failed to wait for rc")
}
/// Poll `rc ls test/` until the S3 backend answers successfully, retrying
/// once per second for up to 30 attempts. Returns `true` when the service
/// is ready, `false` on timeout.
fn wait_for_s3_ready(config_dir: &std::path::Path) -> bool {
    (0..30).any(|_| {
        let probe = run_rc(&["ls", "test/", "--json"], config_dir);
        if probe.status.success() {
            true
        } else {
            std::thread::sleep(Duration::from_secs(1));
            false
        }
    })
}
/// Read the S3 endpoint and credentials from the `TEST_S3_*` environment
/// variables. Returns `None` (callers treat this as "skip the test") when
/// any of the three variables is unset.
fn get_test_config() -> Option<(String, String, String)> {
    let read = |name: &str| std::env::var(name).ok();
    let endpoint = read("TEST_S3_ENDPOINT")?;
    let access_key = read("TEST_S3_ACCESS_KEY")?;
    let secret_key = read("TEST_S3_SECRET_KEY")?;
    Some((endpoint, access_key, secret_key))
}
fn setup_with_alias(bucket: &str) -> Option<(TempDir, String)> {
let config = get_test_config()?;
let config_dir = tempfile::tempdir().ok()?;
let bucket_name = format!("test-{}-{}", bucket, uuid_suffix());
let output = run_rc(
&[
"alias",
"set",
"test",
&config.0,
&config.1,
&config.2,
"--bucket-lookup",
"path",
],
config_dir.path(),
);
if !output.status.success() {
eprintln!(
"Failed to set alias: {}",
String::from_utf8_lossy(&output.stderr)
);
return None;
}
if !wait_for_s3_ready(config_dir.path()) {
eprintln!("S3 service did not become ready in time");
return None;
}
let output = run_rc(&["mb", &format!("test/{}", bucket_name)], config_dir.path());
if !output.status.success() {
eprintln!(
"Failed to create bucket: {}",
String::from_utf8_lossy(&output.stderr)
);
return None;
}
Some((config_dir, bucket_name))
}
/// Create an isolated config dir and register the `test` alias against the
/// configured S3 backend, then wait for the service to respond.
/// Returns `None` (callers skip) when config is missing or setup fails.
fn setup_alias_only() -> Option<TempDir> {
    let (endpoint, access_key, secret_key) = get_test_config()?;
    let config_dir = tempfile::tempdir().ok()?;
    let alias_args = [
        "alias",
        "set",
        "test",
        endpoint.as_str(),
        access_key.as_str(),
        secret_key.as_str(),
        "--bucket-lookup",
        "path",
    ];
    let output = run_rc(&alias_args, config_dir.path());
    if !output.status.success() {
        eprintln!(
            "Failed to set alias: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        return None;
    }
    if !wait_for_s3_ready(config_dir.path()) {
        eprintln!("S3 service did not become ready in time");
        return None;
    }
    Some(config_dir)
}
/// Produce a short hex suffix derived from the current time, used to make
/// bucket names unique across test runs. Despite the name, this is a
/// timestamp hash, not a real UUID.
fn uuid_suffix() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|elapsed| elapsed.as_nanos())
        .unwrap_or(0);
    format!("{:x}", nanos % 0xFFFFFFFF)
}
/// Best-effort teardown: recursively empty the bucket, then remove it.
/// Results are deliberately ignored so cleanup never masks a test outcome.
fn cleanup_bucket(config_dir: &std::path::Path, bucket: &str) {
    let prefix = format!("test/{}/", bucket);
    let target = format!("test/{}", bucket);
    let _ = run_rc(&["rm", "--recursive", "--force", &prefix], config_dir);
    let _ = run_rc(&["rb", &target], config_dir);
}
/// Integration tests for bucket-level operations: mb, rb, ls, and CORS.
mod bucket_operations {
    use super::*;

    /// Full bucket lifecycle: create with --json, verify it appears in the
    /// alias-level listing, then delete it.
    #[test]
    fn test_create_and_delete_bucket() {
        let (endpoint, access_key, secret_key) = match get_test_config() {
            Some(c) => c,
            None => {
                // No TEST_S3_* env vars: treat as a skip, not a failure.
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let config_dir = tempfile::tempdir().expect("Failed to create temp dir");
        // Unique suffix so repeated runs don't collide on bucket names.
        let bucket_name = format!("test-bucket-{}", uuid_suffix());
        let output = run_rc(
            &[
                "alias",
                "set",
                "test",
                &endpoint,
                &access_key,
                &secret_key,
                "--bucket-lookup",
                "path",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to set alias");
        assert!(
            wait_for_s3_ready(config_dir.path()),
            "S3 service did not become ready in time"
        );
        let output = run_rc(
            &["mb", &format!("test/{}", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to create bucket: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("success"), "Expected success in output");
        assert!(
            stdout.contains(&bucket_name),
            "Expected bucket name in output"
        );
        // The new bucket must show up when listing the alias root.
        let output = run_rc(&["ls", "test/", "--json"], config_dir.path());
        assert!(output.status.success(), "Failed to list buckets");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains(&bucket_name), "Bucket not found in listing");
        let output = run_rc(
            &["rb", &format!("test/{}", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to delete bucket: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }

    /// `mb --ignore-existing` must succeed both when the bucket is new and
    /// when it already exists (idempotent create).
    #[test]
    fn test_mb_ignore_existing_first_and_second_run() {
        let (endpoint, access_key, secret_key) = match get_test_config() {
            Some(c) => c,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let config_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let bucket_name = format!("test-ignore-existing-{}", uuid_suffix());
        let output = run_rc(
            &[
                "alias",
                "set",
                "test",
                &endpoint,
                &access_key,
                &secret_key,
                "--bucket-lookup",
                "path",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to set alias");
        assert!(
            wait_for_s3_ready(config_dir.path()),
            "S3 service did not become ready in time"
        );
        // First run: bucket doesn't exist yet.
        let output = run_rc(
            &[
                "mb",
                &format!("test/{}", bucket_name),
                "--ignore-existing",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "First mb --ignore-existing failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(
            stdout.contains("success"),
            "Expected success in first run output"
        );
        // Second run: bucket exists; the flag must suppress the error.
        let output = run_rc(
            &[
                "mb",
                &format!("test/{}", bucket_name),
                "--ignore-existing",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Second mb --ignore-existing failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(
            stdout.contains("success"),
            "Expected success in second run output"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `bucket cors set` with "-" as the source must read the CORS JSON
    /// from stdin; the stored rules are then verified via `cors list`.
    #[test]
    fn test_bucket_cors_set_accepts_stdin_source() {
        let (config_dir, bucket_name) = match setup_with_alias("corsstdin") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let cors_config = r#"{
"rules": [
{
"id": "stdin-rule",
"allowedOrigins": ["https://app.example.com"],
"allowedMethods": ["get", "put"],
"allowedHeaders": ["Authorization"],
"exposeHeaders": ["ETag"],
"maxAgeSeconds": 600
}
]
}"#;
        let set_output = run_rc_with_stdin(
            &[
                "bucket",
                "cors",
                "set",
                &format!("test/{}", bucket_name),
                "-",
                "--json",
            ],
            config_dir.path(),
            cors_config,
        );
        assert!(
            set_output.status.success(),
            "Failed to set bucket CORS from stdin: {}",
            String::from_utf8_lossy(&set_output.stderr)
        );
        let list_output = run_rc(
            &[
                "bucket",
                "cors",
                "list",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            list_output.status.success(),
            "Failed to list bucket CORS after stdin set: {}",
            String::from_utf8_lossy(&list_output.stderr)
        );
        let stdout = String::from_utf8_lossy(&list_output.stdout);
        let listed: serde_json::Value =
            serde_json::from_str(&stdout).expect("Invalid JSON CORS list");
        assert_eq!(listed["bucket"].as_str(), Some(bucket_name.as_str()));
        let rules = listed["rules"]
            .as_array()
            .expect("CORS rules should be a JSON array");
        assert_eq!(rules.len(), 1, "Expected one CORS rule from stdin input");
        assert_eq!(rules[0]["id"].as_str(), Some("stdin-rule"));
        // Input methods were lowercase; the listing is expected to report
        // them uppercased — presumably normalized by rc/the server; verify.
        assert_eq!(
            rules[0]["allowed_methods"].as_array().map(|methods| methods
                .iter()
                .filter_map(|method| method.as_str())
                .collect::<Vec<_>>()),
            Some(vec!["GET", "PUT"])
        );
        // Best-effort CORS removal before the shared bucket teardown.
        let _ = run_rc(
            &["bucket", "cors", "remove", &format!("test/{}", bucket_name)],
            config_dir.path(),
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Integration tests for single-object operations: cp (upload/download),
/// stat, server-side copy, mv (single and recursive), and rm.
mod object_operations {
    use super::*;
    use std::io::Write;

    /// Round-trip a small text file: upload, stat, download, compare bytes.
    #[test]
    fn test_upload_and_download_small_file() {
        let (config_dir, bucket_name) = match setup_with_alias("small") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        let test_content = "Hello, S3 integration test!";
        std::fs::write(temp_file.path(), test_content).expect("Failed to write test file");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/test.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to upload: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &["stat", &format!("test/{}/test.txt", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to stat: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("test.txt"), "Expected filename in output");
        let download_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        let output = run_rc(
            &[
                "cp",
                &format!("test/{}/test.txt", bucket_name),
                download_file.path().to_str().unwrap(),
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to download: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let downloaded_content =
            std::fs::read_to_string(download_file.path()).expect("Failed to read downloaded file");
        assert_eq!(
            downloaded_content, test_content,
            "Downloaded content doesn't match"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// Upload and download a 15 MiB file — large enough to exercise the
    /// multipart path (assuming a part size below 15 MiB — TODO confirm) —
    /// and verify size plus the leading content pattern.
    #[test]
    fn test_upload_download_large_file_multipart() {
        let (config_dir, bucket_name) = match setup_with_alias("large") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let file_size = 15 * 1024 * 1024;
        let temp_file = tempfile::Builder::new()
            .suffix(".bin")
            .tempfile()
            .expect("Failed to create temp file");
        {
            // Fill the file with a repeating 1 KiB byte pattern (0..=255 x4).
            let mut file = std::fs::File::create(temp_file.path()).expect("Failed to create file");
            let pattern: Vec<u8> = (0..1024).map(|i| (i % 256) as u8).collect();
            for _ in 0..(file_size / 1024) {
                file.write_all(&pattern).expect("Failed to write");
            }
        }
        let start = std::time::Instant::now();
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/large.bin", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        let upload_time = start.elapsed();
        assert!(
            output.status.success(),
            "Failed to upload large file: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        println!("Uploaded {} bytes in {:?}", file_size, upload_time);
        let output = run_rc(
            &["stat", &format!("test/{}/large.bin", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to stat large file");
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        let size = json["size_bytes"].as_i64().unwrap_or(0);
        assert_eq!(
            size, file_size as i64,
            "File size mismatch: expected {}, got {}",
            file_size, size
        );
        let download_file = tempfile::Builder::new()
            .suffix(".bin")
            .tempfile()
            .expect("Failed to create download file");
        let output = run_rc(
            &[
                "cp",
                &format!("test/{}/large.bin", bucket_name),
                download_file.path().to_str().unwrap(),
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to download large file: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let downloaded_size = std::fs::metadata(download_file.path())
            .expect("Failed to get metadata")
            .len();
        assert_eq!(
            downloaded_size, file_size as u64,
            "Downloaded file size mismatch"
        );
        let downloaded_content =
            std::fs::read(download_file.path()).expect("Failed to read downloaded file");
        assert_eq!(
            downloaded_content.len(),
            file_size,
            "Content length mismatch"
        );
        // Spot-check only the first 1 KiB against the generator pattern.
        let pattern: Vec<u8> = (0..1024).map(|i| (i % 256) as u8).collect();
        assert_eq!(
            &downloaded_content[0..1024],
            &pattern[..],
            "Content pattern mismatch at start"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// S3-to-S3 copy within a bucket: both original and copy must be listed.
    #[test]
    fn test_copy_object_between_paths() {
        let (config_dir, bucket_name) = match setup_with_alias("copy") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(temp_file.path(), "copy test content").expect("Failed to write");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/original.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to upload original");
        let output = run_rc(
            &[
                "cp",
                &format!("test/{}/original.txt", bucket_name),
                &format!("test/{}/copied.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to copy: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &["ls", &format!("test/{}/", bucket_name), "--json"],
            config_dir.path(),
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("original.txt"), "Original file missing");
        assert!(stdout.contains("copied.txt"), "Copied file missing");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `mv` within a bucket: the source key disappears and the destination
    /// key exists afterwards.
    #[test]
    fn test_move_object() {
        let (config_dir, bucket_name) = match setup_with_alias("move") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(temp_file.path(), "move test content").expect("Failed to write");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/source.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to upload");
        let output = run_rc(
            &[
                "mv",
                &format!("test/{}/source.txt", bucket_name),
                &format!("test/{}/dest.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to move: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &["ls", &format!("test/{}/", bucket_name), "--json"],
            config_dir.path(),
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(!stdout.contains("source.txt"), "Source file should be gone");
        assert!(stdout.contains("dest.txt"), "Dest file should exist");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// Recursive S3-to-S3 `mv` of a prefix: the JSON summary must report
    /// two moved objects and zero errors, and the listing must show the
    /// keys relocated under the destination prefix.
    #[test]
    fn test_move_recursive_prefix_s3_to_s3() {
        let (config_dir, bucket_name) = match setup_with_alias("mvrec") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        // Two objects at different depths under src/.
        let source_files = ["src/dir/a.txt", "src/dir/sub/b.txt"];
        for key in &source_files {
            let tmp = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(tmp.path(), format!("content for {}", key)).expect("Failed to write");
            let output = run_rc(
                &[
                    "cp",
                    tmp.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(output.status.success(), "Failed to upload {}", key);
        }
        let output = run_rc(
            &[
                "mv",
                "--recursive",
                "--continue-on-error",
                &format!("test/{}/src/", bucket_name),
                &format!("test/{}/dst/", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to recursive move: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        assert_eq!(json["status"], "success");
        assert_eq!(json["errors"], 0);
        assert_eq!(json["moved"], 2);
        let output = run_rc(
            &[
                "ls",
                "--recursive",
                &format!("test/{}/", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to list bucket after recursive move"
        );
        let listing = String::from_utf8_lossy(&output.stdout);
        assert!(
            listing.contains("dst/dir/a.txt"),
            "Moved object dst/dir/a.txt not found"
        );
        assert!(
            listing.contains("dst/dir/sub/b.txt"),
            "Moved object dst/dir/sub/b.txt not found"
        );
        assert!(
            !listing.contains("src/dir/a.txt"),
            "Source object src/dir/a.txt should be removed"
        );
        assert!(
            !listing.contains("src/dir/sub/b.txt"),
            "Source object src/dir/sub/b.txt should be removed"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `rm` on a single object: a subsequent `stat` must fail.
    #[test]
    fn test_delete_object() {
        let (config_dir, bucket_name) = match setup_with_alias("delete") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(temp_file.path(), "delete test content").expect("Failed to write");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/to-delete.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to upload");
        let output = run_rc(
            &[
                "rm",
                &format!("test/{}/to-delete.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to delete: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &[
                "stat",
                &format!("test/{}/to-delete.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            !output.status.success(),
            "File should not exist after delete"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Integration tests for `ls`: directory-style prefix listing and
/// recursive listing.
mod listing_operations {
    use super::*;

    /// Root listing shows directory-style prefixes; listing one prefix
    /// shows only that prefix's objects.
    #[test]
    fn test_list_objects_with_prefix() {
        let (config_dir, bucket_name) = match setup_with_alias("list") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        // Two objects under dir1/, one under dir2/.
        let files = ["dir1/file1.txt", "dir1/file2.txt", "dir2/file3.txt"];
        for file in &files {
            let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(temp_file.path(), format!("content for {}", file))
                .expect("Failed to write");
            let output = run_rc(
                &[
                    "cp",
                    temp_file.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, file),
                ],
                config_dir.path(),
            );
            assert!(output.status.success(), "Failed to upload {}", file);
        }
        let output = run_rc(
            &["ls", &format!("test/{}/", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to list all");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("dir1/"), "dir1 prefix missing");
        assert!(stdout.contains("dir2/"), "dir2 prefix missing");
        let output = run_rc(
            &["ls", &format!("test/{}/dir1/", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to list with prefix");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("file1.txt"), "file1 missing");
        assert!(stdout.contains("file2.txt"), "file2 missing");
        // file3.txt lives under dir2/ and must not leak into the dir1/ view.
        assert!(!stdout.contains("file3.txt"), "file3 should not be in dir1");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `ls --recursive` must surface every object regardless of depth.
    #[test]
    fn test_recursive_listing() {
        let (config_dir, bucket_name) = match setup_with_alias("recursive") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        // Objects at four different nesting depths.
        let files = ["a/b/c/deep.txt", "a/b/mid.txt", "a/shallow.txt", "top.txt"];
        for file in &files {
            let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(temp_file.path(), format!("content for {}", file))
                .expect("Failed to write");
            let output = run_rc(
                &[
                    "cp",
                    temp_file.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, file),
                ],
                config_dir.path(),
            );
            assert!(output.status.success(), "Failed to upload {}", file);
        }
        let output = run_rc(
            &[
                "ls",
                &format!("test/{}/", bucket_name),
                "--recursive",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to recursive list");
        let stdout = String::from_utf8_lossy(&output.stdout);
        for file in &files {
            assert!(
                stdout.contains(file),
                "File {} missing in recursive listing",
                file
            );
        }
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Integration tests for `admin` subcommands. These only need a working
/// alias (no bucket), so they use setup_alias_only().
mod admin_operations {
    use super::*;

    /// `admin info cluster --json` returns an object with `mode` and
    /// `deploymentId` fields.
    #[test]
    fn test_admin_info_cluster() {
        let config_dir = match setup_alias_only() {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let output = run_rc(
            &["admin", "info", "cluster", "test", "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to run admin info cluster: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        assert!(json.get("mode").is_some(), "Expected mode in output");
        assert!(
            json.get("deploymentId").is_some(),
            "Expected deploymentId in output"
        );
    }

    /// `admin info server --json` returns an object with a `servers` field.
    #[test]
    fn test_admin_info_server() {
        let config_dir = match setup_alias_only() {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let output = run_rc(
            &["admin", "info", "server", "test", "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to run admin info server: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        assert!(json.get("servers").is_some(), "Expected servers in output");
    }

    /// `admin info disk --json` returns an object with a `disks` field.
    #[test]
    fn test_admin_info_disk() {
        let config_dir = match setup_alias_only() {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let output = run_rc(
            &["admin", "info", "disk", "test", "--json"],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to run admin info disk: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        assert!(json.get("disks").is_some(), "Expected disks in output");
    }

    /// `admin heal status --json` returns heal JSON with `healId` and
    /// `healing`, or the test is skipped when the backend reports the
    /// heal API as not implemented.
    #[test]
    fn test_admin_heal_status() {
        let config_dir = match setup_alias_only() {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let output = run_rc(
            &["admin", "heal", "status", "test", "--json"],
            config_dir.path(),
        );
        if !output.status.success() {
            // Some backends don't implement heal; detect that and skip
            // instead of failing.
            let stderr = String::from_utf8_lossy(&output.stderr);
            let stdout = String::from_utf8_lossy(&output.stdout);
            let combined = format!("{stdout}{stderr}");
            if combined.contains("NotImplemented") || combined.contains("Not Implemented") {
                eprintln!("Skipping: heal status not supported by backend");
                return;
            }
            panic!("Failed to run admin heal status: {stderr}");
        }
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
        assert!(json.get("healId").is_some(), "Expected healId in output");
        assert!(json.get("healing").is_some(), "Expected healing in output");
    }
}
/// Failure-path tests: missing objects and missing buckets must produce
/// non-zero exits with the documented error codes.
mod error_handling {
    use super::*;

    /// `stat` on a key that was never created must fail with the
    /// NOT_FOUND (5) or NETWORK_ERROR (3) exit code.
    #[test]
    fn test_not_found_error() {
        let Some((config_dir, bucket_name)) = setup_with_alias("notfound") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let stat_out = run_rc(
            &[
                "stat",
                &format!("test/{}/nonexistent.txt", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            !stat_out.status.success(),
            "Should fail for non-existent object"
        );
        let exit_code = stat_out.status.code().unwrap_or(-1);
        assert!(
            matches!(exit_code, 5 | 3),
            "Expected exit code 5 (NOT_FOUND) or 3 (NETWORK_ERROR), got {}",
            exit_code
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `rb` on a bucket that never existed must fail.
    #[test]
    fn test_bucket_not_found() {
        let Some((endpoint, access_key, secret_key)) = get_test_config() else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let config_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let alias_out = run_rc(
            &[
                "alias",
                "set",
                "test",
                &endpoint,
                &access_key,
                &secret_key,
                "--bucket-lookup",
                "path",
            ],
            config_dir.path(),
        );
        assert!(alias_out.status.success(), "Failed to set alias");
        let rb_out = run_rc(
            &["rb", "test/nonexistent-bucket-xyz123", "--json"],
            config_dir.path(),
        );
        assert!(
            !rb_out.status.success(),
            "Should fail for non-existent bucket"
        );
    }
}
/// Tests for `share` (presigned URL generation).
mod presigned_urls {
    use super::*;

    /// Upload an object, then verify `share --json` emits an http URL that
    /// references the object's filename.
    #[test]
    fn test_generate_presigned_url() {
        let Some((config_dir, bucket_name)) = setup_with_alias("presign") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let src = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(src.path(), "presign test content").expect("Failed to write");
        let remote = format!("test/{}/presign.txt", bucket_name);
        let upload = run_rc(
            &["cp", src.path().to_str().unwrap(), &remote],
            config_dir.path(),
        );
        assert!(upload.status.success(), "Failed to upload");
        let share = run_rc(&["share", &remote, "--json"], config_dir.path());
        assert!(
            share.status.success(),
            "Failed to generate presigned URL: {}",
            String::from_utf8_lossy(&share.stderr)
        );
        let stdout = String::from_utf8_lossy(&share.stdout);
        assert!(stdout.contains("http"), "URL should contain http");
        assert!(
            stdout.contains("presign.txt"),
            "URL should contain filename"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Multipart-upload edge cases around part-size boundaries.
mod multipart_operations {
    use super::*;
    use std::io::Write;

    /// Upload a file of exactly 10 MiB — assumed to land exactly on the
    /// client's part-size boundary (TODO confirm the configured part size)
    /// — and verify the stored size.
    #[test]
    fn test_multipart_upload_exact_boundary() {
        let (config_dir, bucket_name) = match setup_with_alias("mpboundary") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let file_size = 10 * 1024 * 1024;
        let temp_file = tempfile::Builder::new()
            .suffix(".bin")
            .tempfile()
            .expect("Failed to create temp file");
        {
            // Fill with a constant 4 KiB pattern of 0xAB bytes.
            let mut file = std::fs::File::create(temp_file.path()).expect("Failed to create file");
            let pattern: Vec<u8> = vec![0xAB; 4096];
            for _ in 0..(file_size / 4096) {
                file.write_all(&pattern).expect("Failed to write");
            }
        }
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/boundary.bin", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to upload boundary file: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &[
                "stat",
                &format!("test/{}/boundary.bin", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to stat");
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON");
        let size = json["size_bytes"].as_i64().unwrap_or(0);
        assert_eq!(size, file_size as i64, "File size mismatch");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// Upload 10 MiB + 1 byte so the final multipart part is a single byte,
    /// and verify the stored size is exact.
    #[test]
    fn test_multipart_upload_small_last_part() {
        let (config_dir, bucket_name) = match setup_with_alias("mplastpart") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let file_size = 10 * 1024 * 1024 + 1;
        let temp_file = tempfile::Builder::new()
            .suffix(".bin")
            .tempfile()
            .expect("Failed to create temp file");
        {
            let mut file = std::fs::File::create(temp_file.path()).expect("Failed to create file");
            let pattern: Vec<u8> = vec![0xCD; 4096];
            for _ in 0..(file_size / 4096) {
                file.write_all(&pattern).expect("Failed to write");
            }
            // Integer division above drops the +1; append the odd byte.
            file.write_all(&[0xCD]).expect("Failed to write last byte");
        }
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/lastpart.bin", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to upload: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &[
                "stat",
                &format!("test/{}/lastpart.bin", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to stat");
        let stdout = String::from_utf8_lossy(&output.stdout);
        let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON");
        let size = json["size_bytes"].as_i64().unwrap_or(0);
        assert_eq!(size, file_size as i64, "File size mismatch");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Tests for recursive rm and recursive S3-to-S3 cp over key prefixes.
mod recursive_operations {
    use super::*;

    /// `rm --recursive --force` removes everything under one prefix while
    /// leaving objects outside that prefix untouched.
    #[test]
    fn test_recursive_delete() {
        let (config_dir, bucket_name) = match setup_with_alias("recdel") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        // Three objects under to-delete/ plus one control object under keep/.
        let files = [
            "to-delete/a/1.txt",
            "to-delete/a/2.txt",
            "to-delete/b/3.txt",
            "keep/4.txt",
        ];
        for file in &files {
            let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(temp_file.path(), format!("content for {}", file))
                .expect("Failed to write");
            let output = run_rc(
                &[
                    "cp",
                    temp_file.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, file),
                ],
                config_dir.path(),
            );
            assert!(output.status.success(), "Failed to upload {}", file);
        }
        let output = run_rc(
            &[
                "rm",
                "--recursive",
                "--force",
                &format!("test/{}/to-delete/", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to recursive delete: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &[
                "ls",
                &format!("test/{}/", bucket_name),
                "--recursive",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to list");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(!stdout.contains("to-delete"), "to-delete should be gone");
        assert!(stdout.contains("keep/4.txt"), "keep/4.txt should remain");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `cp --recursive` between two prefixes of the same bucket. Skips
    /// (after cleanup) when the backend/CLI doesn't support recursive
    /// S3-to-S3 copy yet.
    #[test]
    fn test_recursive_copy() {
        let (config_dir, bucket_name) = match setup_with_alias("reccopy") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let files = ["src/a.txt", "src/b.txt", "src/sub/c.txt"];
        for file in &files {
            let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(temp_file.path(), format!("content for {}", file))
                .expect("Failed to write");
            let output = run_rc(
                &[
                    "cp",
                    temp_file.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, file),
                ],
                config_dir.path(),
            );
            assert!(output.status.success(), "Failed to upload {}", file);
        }
        let output = run_rc(
            &[
                "cp",
                "--recursive",
                &format!("test/{}/src/", bucket_name),
                &format!("test/{}/dst/", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        if !output.status.success() {
            // Deliberate soft-skip: clean up and bail instead of failing.
            eprintln!(
                "Recursive S3-to-S3 copy not fully implemented, skipping: {}",
                String::from_utf8_lossy(&output.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let output = run_rc(
            &[
                "ls",
                &format!("test/{}/", bucket_name),
                "--recursive",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to list");
        let stdout = String::from_utf8_lossy(&output.stdout);
        // Copy (not move): sources must remain alongside the new dst/ keys.
        assert!(stdout.contains("src/a.txt"), "src/a.txt should exist");
        assert!(stdout.contains("dst/a.txt"), "dst/a.txt should exist");
        assert!(
            stdout.contains("dst/sub/c.txt"),
            "dst/sub/c.txt should exist"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
/// Upload several small files back-to-back and verify they all appear in a
/// single listing. (Despite the module name, uploads are sequential.)
mod concurrent_operations {
    use super::*;

    /// Upload five files one after another, then check `ls` shows each one.
    #[test]
    fn test_concurrent_uploads() {
        let Some((config_dir, bucket_name)) = setup_with_alias("concurrent") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        // Build the fixtures first; keep the guards alive so the temp files
        // are not deleted until all uploads have finished.
        let temp_files: Vec<_> = (0..5)
            .map(|i| {
                let fixture = tempfile::Builder::new()
                    .suffix(".txt")
                    .tempfile()
                    .expect("Failed to create temp file");
                std::fs::write(fixture.path(), format!("File {} content with some data", i))
                    .expect("Failed to write");
                fixture
            })
            .collect();
        for (i, fixture) in temp_files.iter().enumerate() {
            let upload = run_rc(
                &[
                    "cp",
                    fixture.path().to_str().unwrap(),
                    &format!("test/{}/file{}.txt", bucket_name, i),
                ],
                config_dir.path(),
            );
            assert!(
                upload.status.success(),
                "Failed to upload file{}: {}",
                i,
                String::from_utf8_lossy(&upload.stderr)
            );
        }
        let listing = run_rc(
            &["ls", &format!("test/{}/", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(listing.status.success(), "Failed to list");
        let stdout = String::from_utf8_lossy(&listing.stdout);
        for i in 0..5 {
            assert!(
                stdout.contains(&format!("file{}.txt", i)),
                "file{}.txt missing",
                i
            );
        }
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod edge_cases {
    use super::*;

    /// An object key containing spaces must round-trip through `cp` and `stat`.
    #[test]
    fn test_special_characters_in_key() {
        let Some((config_dir, bucket_name)) = setup_with_alias("special") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let payload = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(payload.path(), "special character test").expect("Failed to write");
        let key = format!("test/{}/file with spaces.txt", bucket_name);
        let upload = run_rc(
            &["cp", payload.path().to_str().unwrap(), &key],
            config_dir.path(),
        );
        assert!(
            upload.status.success(),
            "Failed to upload file with spaces: {}",
            String::from_utf8_lossy(&upload.stderr)
        );
        let stat = run_rc(&["stat", &key, "--json"], config_dir.path());
        assert!(
            stat.status.success(),
            "Failed to stat file with spaces: {}",
            String::from_utf8_lossy(&stat.stderr)
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// A zero-byte upload must succeed and `stat` must report `size_bytes` 0.
    #[test]
    fn test_empty_file_upload() {
        let Some((config_dir, bucket_name)) = setup_with_alias("empty") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let payload = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(payload.path(), "").expect("Failed to write empty file");
        let key = format!("test/{}/empty.txt", bucket_name);
        let upload = run_rc(
            &["cp", payload.path().to_str().unwrap(), &key, "--json"],
            config_dir.path(),
        );
        assert!(
            upload.status.success(),
            "Failed to upload empty file: {}",
            String::from_utf8_lossy(&upload.stderr)
        );
        let stat = run_rc(&["stat", &key, "--json"], config_dir.path());
        assert!(stat.status.success(), "Failed to stat empty file");
        let parsed: serde_json::Value =
            serde_json::from_str(&String::from_utf8_lossy(&stat.stdout)).expect("Invalid JSON");
        // -1 sentinel makes a missing/non-integer field fail the equality.
        assert_eq!(
            parsed["size_bytes"].as_i64().unwrap_or(-1),
            0,
            "Empty file should have size 0"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// A deeply nested key (10 path segments) must be accepted by cp and stat.
    #[test]
    fn test_deep_nested_path() {
        let Some((config_dir, bucket_name)) = setup_with_alias("deep") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let payload = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(payload.path(), "deep nested content").expect("Failed to write");
        let key = format!("test/{}/a/b/c/d/e/f/g/h/i/j/deep.txt", bucket_name);
        let upload = run_rc(
            &["cp", payload.path().to_str().unwrap(), &key, "--json"],
            config_dir.path(),
        );
        assert!(
            upload.status.success(),
            "Failed to upload to deep path: {}",
            String::from_utf8_lossy(&upload.stderr)
        );
        let stat = run_rc(&["stat", &key, "--json"], config_dir.path());
        assert!(
            stat.status.success(),
            "Failed to stat deep path: {}",
            String::from_utf8_lossy(&stat.stderr)
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod content_operations {
    use super::*;

    /// `cat` must stream back exactly the bytes that were uploaded.
    #[test]
    fn test_cat_object() {
        let (config_dir, bucket_name) = match setup_with_alias("cat") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        let test_content = "Line 1\nLine 2\nLine 3\nLine 4\nLine 5\n";
        std::fs::write(temp_file.path(), test_content).expect("Failed to write");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/cat-test.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to upload");
        let output = run_rc(
            &["cat", &format!("test/{}/cat-test.txt", bucket_name)],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to cat: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        // Exact equality: cat must neither add nor drop trailing newlines.
        assert_eq!(stdout, test_content, "Cat output doesn't match");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `head` must print leading lines, and `-n` must cap the line count.
    #[test]
    fn test_head_object() {
        let (config_dir, bucket_name) = match setup_with_alias("head") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let temp_file = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        let test_content =
            "Line 1\nLine 2\nLine 3\nLine 4\nLine 5\nLine 6\nLine 7\nLine 8\nLine 9\nLine 10\n";
        std::fs::write(temp_file.path(), test_content).expect("Failed to write");
        let output = run_rc(
            &[
                "cp",
                temp_file.path().to_str().unwrap(),
                &format!("test/{}/head-test.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to upload");
        // Default head: with only 10 lines the whole file is expected.
        let output = run_rc(
            &["head", &format!("test/{}/head-test.txt", bucket_name)],
            config_dir.path(),
        );
        assert!(
            output.status.success(),
            "Failed to head: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("Line 1"), "Should contain Line 1");
        assert!(stdout.contains("Line 10"), "Should contain Line 10");
        // `-n 3` limits the output to the first three lines only.
        let output = run_rc(
            &[
                "head",
                "-n",
                "3",
                &format!("test/{}/head-test.txt", bucket_name),
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to head with -n");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert!(stdout.contains("Line 1"), "Should contain Line 1");
        assert!(stdout.contains("Line 3"), "Should contain Line 3");
        assert!(!stdout.contains("Line 4"), "Should not contain Line 4");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `pipe` must store the bytes read from stdin as the object's content.
    #[test]
    fn test_pipe_to_object() {
        let (config_dir, bucket_name) = match setup_with_alias("pipe") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let test_content = "Piped content from stdin";
        // Use the shared run_rc_with_stdin helper instead of duplicating the
        // Command/stdin plumbing it already implements.
        let output = run_rc_with_stdin(
            &["pipe", &format!("test/{}/piped.txt", bucket_name)],
            config_dir.path(),
            test_content,
        );
        assert!(
            output.status.success(),
            "Failed to pipe: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output = run_rc(
            &["cat", &format!("test/{}/piped.txt", bucket_name)],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to cat piped file");
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert_eq!(stdout, test_content, "Piped content doesn't match");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod find_operations {
    use super::*;

    /// `find --name` must honor glob patterns, and a prefix-only `find` must
    /// return everything under that prefix.
    #[test]
    fn test_find_by_name() {
        let Some((config_dir, bucket_name)) = setup_with_alias("find") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let seed_keys = [
            "documents/report.txt",
            "documents/summary.txt",
            "images/photo.jpg",
            "images/logo.png",
            "data/report.csv",
        ];
        for key in &seed_keys {
            let staged = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(staged.path(), format!("content for {}", key))
                .expect("Failed to write");
            let upload = run_rc(
                &[
                    "cp",
                    staged.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(upload.status.success(), "Failed to upload {}", key);
        }
        // Glob search: only the two .txt documents should match.
        let found = run_rc(
            &[
                "find",
                &format!("test/{}/", bucket_name),
                "--name",
                "*.txt",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            found.status.success(),
            "Failed to find: {}",
            String::from_utf8_lossy(&found.stderr)
        );
        let listing = String::from_utf8_lossy(&found.stdout);
        assert!(listing.contains("report.txt"), "Should find report.txt");
        assert!(listing.contains("summary.txt"), "Should find summary.txt");
        assert!(!listing.contains("photo.jpg"), "Should not find photo.jpg");
        // Prefix-only search under images/.
        let found = run_rc(
            &["find", &format!("test/{}/images/", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(
            found.status.success(),
            "Failed to find in images path: {}",
            String::from_utf8_lossy(&found.stderr)
        );
        let listing = String::from_utf8_lossy(&found.stdout);
        assert!(listing.contains("photo.jpg"), "Should find photo.jpg");
        assert!(listing.contains("logo.png"), "Should find logo.png");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }

    /// `find --larger` must filter objects by the given size threshold.
    #[test]
    fn test_find_by_size() {
        let Some((config_dir, bucket_name)) = setup_with_alias("findsize") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let small_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(small_file.path(), "small").expect("Failed to write");
        let large_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(large_file.path(), "x".repeat(10000)).expect("Failed to write");
        for (local, key, label) in [
            (&small_file, "small.txt", "small"),
            (&large_file, "large.txt", "large"),
        ] {
            let upload = run_rc(
                &[
                    "cp",
                    local.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(upload.status.success(), "Failed to upload {} file", label);
        }
        // Only the 10 KB object exceeds the 1K threshold.
        let found = run_rc(
            &[
                "find",
                &format!("test/{}/", bucket_name),
                "--larger",
                "1K",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(found.status.success(), "Failed to find by size");
        let listing = String::from_utf8_lossy(&found.stdout);
        assert!(listing.contains("large.txt"), "Should find large.txt");
        assert!(!listing.contains("small.txt"), "Should not find small.txt");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod diff_operations {
    use super::*;

    /// Seeds two buckets that differ by one object and expects `diff` to
    /// report that object.
    #[test]
    fn test_diff_buckets() {
        let (config_dir, bucket_name) = match setup_with_alias("diff") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let bucket_name2 = format!("{}-diff", bucket_name);
        let output = run_rc(
            &["mb", &format!("test/{}", bucket_name2)],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to create second bucket");
        let temp_file1 = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(temp_file1.path(), "content1").expect("Failed to write");
        let temp_file2 = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(temp_file2.path(), "content2").expect("Failed to write");
        // Seed: bucket 1 gets file1 + file2, bucket 2 only file1, so file2.txt
        // is the expected difference. Each upload's exit status is asserted so
        // a failed seed cannot masquerade as a diff bug (the previous version
        // discarded these results).
        for (local, target) in [
            (&temp_file1, format!("test/{}/file1.txt", bucket_name)),
            (&temp_file2, format!("test/{}/file2.txt", bucket_name)),
            (&temp_file1, format!("test/{}/file1.txt", bucket_name2)),
        ] {
            let upload = run_rc(
                &["cp", local.path().to_str().unwrap(), &target],
                config_dir.path(),
            );
            assert!(
                upload.status.success(),
                "Failed to seed {}: {}",
                target,
                String::from_utf8_lossy(&upload.stderr)
            );
        }
        let output = run_rc(
            &[
                "diff",
                &format!("test/{}/", bucket_name),
                &format!("test/{}/", bucket_name2),
                "--json",
            ],
            config_dir.path(),
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let stderr = String::from_utf8_lossy(&output.stderr);
        // Exit status deliberately not asserted — diff-style tools often exit
        // non-zero when differences exist (NOTE(review): confirm rc's
        // convention). Require output, or at least a clean stderr.
        assert!(
            !stdout.is_empty() || stderr.is_empty(),
            "Diff should produce output or succeed silently, stderr: {}",
            stderr
        );
        assert!(
            stdout.contains("file2.txt"),
            "Should show file2.txt as different, stdout: {}, stderr: {}",
            stdout,
            stderr
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
        cleanup_bucket(config_dir.path(), &bucket_name2);
    }
}
mod mirror_operations {
    use super::*;

    /// Mirrors a source prefix into a second bucket and checks that every
    /// object, including nested ones, arrives at the destination.
    #[test]
    fn test_mirror_between_buckets() {
        let Some((config_dir, bucket_name)) = setup_with_alias("mirror") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let bucket_name2 = format!("{}-dest", bucket_name);
        let made = run_rc(
            &["mb", &format!("test/{}", bucket_name2)],
            config_dir.path(),
        );
        assert!(
            made.status.success(),
            "Failed to create destination bucket: {}",
            String::from_utf8_lossy(&made.stderr)
        );
        for key in &["file1.txt", "file2.txt", "subdir/file3.txt"] {
            let staged = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(staged.path(), format!("content for {}", key))
                .expect("Failed to write");
            let upload = run_rc(
                &[
                    "cp",
                    staged.path().to_str().unwrap(),
                    &format!("test/{}/source/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(upload.status.success(), "Failed to upload {}", key);
        }
        let mirrored = run_rc(
            &[
                "mirror",
                &format!("test/{}/source/", bucket_name),
                &format!("test/{}/", bucket_name2),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            mirrored.status.success(),
            "Failed to mirror: {}",
            String::from_utf8_lossy(&mirrored.stderr)
        );
        let listed = run_rc(
            &[
                "ls",
                &format!("test/{}/", bucket_name2),
                "--recursive",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(listed.status.success(), "Failed to list mirrored files");
        let listing = String::from_utf8_lossy(&listed.stdout);
        assert!(listing.contains("file1.txt"), "file1.txt should exist");
        assert!(listing.contains("file2.txt"), "file2.txt should exist");
        // Accept either a full relative key or just the file name, since the
        // listing format for nested objects is backend-dependent here.
        assert!(
            listing.contains("subdir/file3.txt") || listing.contains("file3.txt"),
            "file3.txt should exist"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
        cleanup_bucket(config_dir.path(), &bucket_name2);
    }

    /// Mirrored objects must keep the Content-Type set on the source object.
    #[test]
    fn test_mirror_preserves_content_type() {
        let Some((config_dir, bucket_name)) = setup_with_alias("mirrorcontenttype") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let target_bucket = format!("{}-dest", bucket_name);
        let made = run_rc(
            &["mb", &format!("test/{}", target_bucket)],
            config_dir.path(),
        );
        assert!(
            made.status.success(),
            "Failed to create destination bucket: {}",
            String::from_utf8_lossy(&made.stderr)
        );
        let staged = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(staged.path(), b"not-a-real-jpeg").expect("Failed to write test file");
        // Upload with an explicit, non-default content type.
        let upload = run_rc(
            &[
                "cp",
                staged.path().to_str().unwrap(),
                &format!("test/{}/source/photo.bin", bucket_name),
                "--content-type",
                "image/jpeg",
            ],
            config_dir.path(),
        );
        assert!(
            upload.status.success(),
            "Failed to upload source object: {}",
            String::from_utf8_lossy(&upload.stderr)
        );
        let mirrored = run_rc(
            &[
                "mirror",
                &format!("test/{}/source/", bucket_name),
                &format!("test/{}/", target_bucket),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            mirrored.status.success(),
            "Failed to mirror objects: {}",
            String::from_utf8_lossy(&mirrored.stderr)
        );
        let stat = run_rc(
            &[
                "stat",
                &format!("test/{}/photo.bin", target_bucket),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            stat.status.success(),
            "Failed to stat mirrored object: {}",
            String::from_utf8_lossy(&stat.stderr)
        );
        let details: serde_json::Value =
            serde_json::from_str(&String::from_utf8_lossy(&stat.stdout))
                .expect("Invalid JSON output");
        assert_eq!(details["content_type"], "image/jpeg");
        cleanup_bucket(config_dir.path(), &bucket_name);
        cleanup_bucket(config_dir.path(), &target_bucket);
    }
}
mod tree_operations {
    use super::*;

    /// `tree` must render directories and root-level files of a bucket, in
    /// both plain and JSON output modes.
    #[test]
    fn test_tree_display() {
        let Some((config_dir, bucket_name)) = setup_with_alias("tree") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        for key in &[
            "root.txt",
            "dir1/file1.txt",
            "dir1/file2.txt",
            "dir1/subdir/deep.txt",
            "dir2/file3.txt",
        ] {
            let staged = tempfile::NamedTempFile::new().expect("Failed to create temp file");
            std::fs::write(staged.path(), format!("content for {}", key))
                .expect("Failed to write");
            let upload = run_rc(
                &[
                    "cp",
                    staged.path().to_str().unwrap(),
                    &format!("test/{}/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(upload.status.success(), "Failed to upload {}", key);
        }
        let tree_out = run_rc(
            &["tree", &format!("test/{}/", bucket_name)],
            config_dir.path(),
        );
        assert!(
            tree_out.status.success(),
            "Failed to tree: {}",
            String::from_utf8_lossy(&tree_out.stderr)
        );
        let rendered = String::from_utf8_lossy(&tree_out.stdout);
        assert!(rendered.contains("dir1"), "Should show dir1");
        assert!(rendered.contains("dir2"), "Should show dir2");
        assert!(rendered.contains("root.txt"), "Should show root.txt");
        // JSON mode: only the exit status is checked here.
        let json_out = run_rc(
            &["tree", &format!("test/{}/", bucket_name), "--json"],
            config_dir.path(),
        );
        assert!(json_out.status.success(), "Failed to tree with --json");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod version_operations {
    use super::*;
    /// Enables bucket versioning and confirms `version info` reports it as
    /// enabled. Skips (after cleanup) when the backend does not support
    /// versioning, or cannot enable it.
    #[test]
    fn test_bucket_versioning() {
        let (config_dir, bucket_name) = match setup_with_alias("version") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        // Probe support first: a failure here means the feature is absent and
        // the test is skipped rather than failed.
        let output = run_rc(
            &[
                "version",
                "info",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        if !output.status.success() {
            eprintln!(
                "Versioning not supported: {}",
                String::from_utf8_lossy(&output.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let output = run_rc(
            &[
                "version",
                "enable",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        if !output.status.success() {
            eprintln!(
                "Enable versioning not supported: {}",
                String::from_utf8_lossy(&output.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let output = run_rc(
            &[
                "version",
                "info",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(output.status.success(), "Failed to get versioning info");
        let stdout = String::from_utf8_lossy(&output.stdout);
        // Accept either casing — the exact JSON casing is backend-dependent.
        assert!(
            stdout.contains("Enabled") || stdout.contains("enabled"),
            "Versioning should be enabled"
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
    /// Exercises the three delete modes on a versioned bucket:
    /// - plain `rm` keeps the object version and adds a delete marker,
    /// - `rm --force` likewise keeps the version and adds a delete marker,
    /// - `object remove --purge` permanently deletes every version.
    /// Skips when versioning cannot be enabled. Each mode uses its own key so
    /// the version counts do not interfere with one another.
    #[test]
    fn test_rm_purge_permanently_deletes_versioned_object() {
        let (config_dir, bucket_name) = match setup_with_alias("rmpurge") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let enable_output = run_rc(
            &[
                "version",
                "enable",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        if !enable_output.status.success() {
            eprintln!(
                "Enable versioning not supported: {}",
                String::from_utf8_lossy(&enable_output.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(temp_file.path(), "versioned delete content").expect("Failed to write");
        let normal_key = "normal-delete.txt";
        let force_key = "force-delete.txt";
        let purge_key = "purge-delete.txt";
        // --- Mode 1: plain `rm` -------------------------------------------
        let upload_output = run_rc(
            &[
                "cp",
                temp_file
                    .path()
                    .to_str()
                    .expect("Temp file path should be UTF-8"),
                &format!("test/{}/{}", bucket_name, normal_key),
            ],
            config_dir.path(),
        );
        assert!(
            upload_output.status.success(),
            "Failed to upload normal delete object: {}",
            String::from_utf8_lossy(&upload_output.stderr)
        );
        let delete_output = run_rc(
            &[
                "rm",
                &format!("test/{}/{}", bucket_name, normal_key),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            delete_output.status.success(),
            "Failed to delete versioned object: {}",
            String::from_utf8_lossy(&delete_output.stderr)
        );
        let normal_versions_output = run_rc(
            &[
                "version",
                "list",
                &format!("test/{}/{}", bucket_name, normal_key),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            normal_versions_output.status.success(),
            "Failed to list versions after normal rm: {}",
            String::from_utf8_lossy(&normal_versions_output.stderr)
        );
        let normal_stdout = String::from_utf8_lossy(&normal_versions_output.stdout);
        let normal_versions: serde_json::Value =
            serde_json::from_str(&normal_stdout).expect("Invalid JSON version list");
        let normal_versions = normal_versions
            .as_array()
            .expect("Version list should be a JSON array");
        // Two entries expected: the original object version + a delete marker.
        assert_eq!(
            normal_versions.len(),
            2,
            "Expected one object version plus one delete marker after normal rm"
        );
        assert!(
            normal_versions.iter().any(|entry| {
                entry["is_delete_marker"].as_bool() == Some(true)
                    && entry["is_latest"].as_bool() == Some(true)
            }),
            "Expected latest version to be a delete marker after normal rm"
        );
        // --- Mode 2: `rm --force` -----------------------------------------
        let force_upload_output = run_rc(
            &[
                "cp",
                temp_file
                    .path()
                    .to_str()
                    .expect("Temp file path should be UTF-8"),
                &format!("test/{}/{}", bucket_name, force_key),
            ],
            config_dir.path(),
        );
        assert!(
            force_upload_output.status.success(),
            "Failed to upload force delete object: {}",
            String::from_utf8_lossy(&force_upload_output.stderr)
        );
        let force_delete_output = run_rc(
            &[
                "rm",
                &format!("test/{}/{}", bucket_name, force_key),
                "--force",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            force_delete_output.status.success(),
            "Failed to force delete versioned object: {}",
            String::from_utf8_lossy(&force_delete_output.stderr)
        );
        let force_versions_output = run_rc(
            &[
                "version",
                "list",
                &format!("test/{}/{}", bucket_name, force_key),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            force_versions_output.status.success(),
            "Failed to list versions after force rm: {}",
            String::from_utf8_lossy(&force_versions_output.stderr)
        );
        let force_stdout = String::from_utf8_lossy(&force_versions_output.stdout);
        let force_versions: serde_json::Value =
            serde_json::from_str(&force_stdout).expect("Invalid JSON version list");
        let force_versions = force_versions
            .as_array()
            .expect("Version list should be a JSON array");
        // --force must NOT purge: version retained, delete marker added.
        assert_eq!(
            force_versions.len(),
            2,
            "Expected --force rm to keep the object version and create a delete marker"
        );
        assert!(
            force_versions.iter().any(|entry| {
                entry["is_delete_marker"].as_bool() == Some(true)
                    && entry["is_latest"].as_bool() == Some(true)
            }),
            "Expected latest version to be a delete marker after force rm"
        );
        // --- Mode 3: `object remove --purge` ------------------------------
        let purge_upload_output = run_rc(
            &[
                "cp",
                temp_file
                    .path()
                    .to_str()
                    .expect("Temp file path should be UTF-8"),
                &format!("test/{}/{}", bucket_name, purge_key),
            ],
            config_dir.path(),
        );
        assert!(
            purge_upload_output.status.success(),
            "Failed to upload purge delete object: {}",
            String::from_utf8_lossy(&purge_upload_output.stderr)
        );
        let purge_delete_output = run_rc(
            &[
                "object",
                "remove",
                &format!("test/{}/{}", bucket_name, purge_key),
                "--purge",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            purge_delete_output.status.success(),
            "Failed to purge versioned object through object remove: {}",
            String::from_utf8_lossy(&purge_delete_output.stderr)
        );
        let purge_versions_output = run_rc(
            &[
                "version",
                "list",
                &format!("test/{}/{}", bucket_name, purge_key),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            purge_versions_output.status.success(),
            "Failed to list versions after purge rm: {}",
            String::from_utf8_lossy(&purge_versions_output.stderr)
        );
        let purge_stdout = String::from_utf8_lossy(&purge_versions_output.stdout);
        let purge_versions: serde_json::Value =
            serde_json::from_str(&purge_stdout).expect("Invalid JSON version list");
        let purge_versions = purge_versions
            .as_array()
            .expect("Version list should be a JSON array");
        // Purge must leave no versions at all.
        assert!(
            purge_versions.is_empty(),
            "Expected purge rm to permanently remove all versions"
        );
        // Purge the first two keys so cleanup_bucket can delete the bucket.
        let normal_cleanup_output = run_rc(
            &[
                "rm",
                &format!("test/{}/{}", bucket_name, normal_key),
                "--purge",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            normal_cleanup_output.status.success(),
            "Failed to purge cleanup object: {}",
            String::from_utf8_lossy(&normal_cleanup_output.stderr)
        );
        let force_cleanup_output = run_rc(
            &[
                "rm",
                &format!("test/{}/{}", bucket_name, force_key),
                "--purge",
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            force_cleanup_output.status.success(),
            "Failed to purge force cleanup object: {}",
            String::from_utf8_lossy(&force_cleanup_output.stderr)
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
    /// `rm --recursive --purge` on a versioned prefix must permanently remove
    /// every version of every object under that prefix, including nested keys.
    /// Skips when versioning cannot be enabled.
    #[test]
    fn test_rm_recursive_purge_permanently_deletes_versioned_prefix() {
        let (config_dir, bucket_name) = match setup_with_alias("rmpurgeprefix") {
            Some(v) => v,
            None => {
                eprintln!("Skipping: S3 test config not available");
                return;
            }
        };
        let enable_output = run_rc(
            &[
                "version",
                "enable",
                &format!("test/{}", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        if !enable_output.status.success() {
            eprintln!(
                "Enable versioning not supported: {}",
                String::from_utf8_lossy(&enable_output.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
        std::fs::write(temp_file.path(), "recursive purge content").expect("Failed to write");
        // One flat key and one nested key under the same prefix.
        let keys = ["purge-prefix/a.txt", "purge-prefix/nested/b.txt"];
        for key in keys {
            let upload_output = run_rc(
                &[
                    "cp",
                    temp_file
                        .path()
                        .to_str()
                        .expect("Temp file path should be UTF-8"),
                    &format!("test/{}/{}", bucket_name, key),
                ],
                config_dir.path(),
            );
            assert!(
                upload_output.status.success(),
                "Failed to upload recursive purge object {key}: {}",
                String::from_utf8_lossy(&upload_output.stderr)
            );
        }
        let purge_output = run_rc(
            &[
                "rm",
                "--recursive",
                "--purge",
                &format!("test/{}/purge-prefix/", bucket_name),
                "--json",
            ],
            config_dir.path(),
        );
        assert!(
            purge_output.status.success(),
            "Failed to purge recursive prefix: {}",
            String::from_utf8_lossy(&purge_output.stderr)
        );
        // After the purge, no key under the prefix may have any versions left.
        for key in keys {
            let versions_output = run_rc(
                &[
                    "version",
                    "list",
                    &format!("test/{}/{}", bucket_name, key),
                    "--json",
                ],
                config_dir.path(),
            );
            assert!(
                versions_output.status.success(),
                "Failed to list versions after recursive purge for {key}: {}",
                String::from_utf8_lossy(&versions_output.stderr)
            );
            let stdout = String::from_utf8_lossy(&versions_output.stdout);
            let versions: serde_json::Value =
                serde_json::from_str(&stdout).expect("Invalid JSON version list");
            let versions = versions
                .as_array()
                .expect("Version list should be a JSON array");
            assert!(
                versions.is_empty(),
                "Expected recursive purge to permanently remove all versions for {key}"
            );
        }
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod tag_operations {
    use super::*;

    /// Exercises the tag lifecycle: set, list, and remove tags on an object.
    /// Skips gracefully (after cleanup) when the backend rejects tagging.
    #[test]
    fn test_object_tags() {
        let Some((config_dir, bucket_name)) = setup_with_alias("tag") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let payload = tempfile::Builder::new()
            .suffix(".txt")
            .tempfile()
            .expect("Failed to create temp file");
        std::fs::write(payload.path(), "tag test content").expect("Failed to write");
        let object = format!("test/{}/tagged.txt", bucket_name);
        let upload = run_rc(
            &["cp", payload.path().to_str().unwrap(), &object],
            config_dir.path(),
        );
        assert!(upload.status.success(), "Failed to upload");
        let set = run_rc(
            &[
                "tag",
                "set",
                &object,
                "environment=test",
                "project=rc-cli",
                "--json",
            ],
            config_dir.path(),
        );
        if !set.status.success() {
            eprintln!(
                "Tags not supported: {}",
                String::from_utf8_lossy(&set.stderr)
            );
            cleanup_bucket(config_dir.path(), &bucket_name);
            return;
        }
        let listed = run_rc(&["tag", "ls", &object, "--json"], config_dir.path());
        assert!(
            listed.status.success(),
            "Failed to get tags: {}",
            String::from_utf8_lossy(&listed.stderr)
        );
        let tags = String::from_utf8_lossy(&listed.stdout);
        assert!(
            tags.contains("environment"),
            "Should have environment tag"
        );
        assert!(tags.contains("test"), "Should have test value");
        let removed = run_rc(&["tag", "rm", &object, "--json"], config_dir.path());
        assert!(
            removed.status.success(),
            "Failed to remove tags: {}",
            String::from_utf8_lossy(&removed.stderr)
        );
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod quota_operations {
    use super::*;

    /// Sets a hard quota, reads it back through the JSON output, then clears
    /// it and verifies the cleared state.
    #[test]
    fn test_bucket_quota_set_info_clear() {
        let Some((config_dir, bucket_name)) = setup_with_alias("quota") else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let bucket_path = format!("test/{}", bucket_name);
        let set = run_rc(
            &["quota", "set", &bucket_path, "64MiB", "--json"],
            config_dir.path(),
        );
        assert!(
            set.status.success(),
            "Failed to set quota: {}",
            String::from_utf8_lossy(&set.stderr)
        );
        let info = run_rc(
            &["quota", "info", &bucket_path, "--json"],
            config_dir.path(),
        );
        assert!(
            info.status.success(),
            "Failed to get quota info: {}",
            String::from_utf8_lossy(&info.stderr)
        );
        let report: serde_json::Value =
            serde_json::from_str(&String::from_utf8_lossy(&info.stdout))
                .expect("Invalid JSON output");
        assert_eq!(report["bucket"], bucket_name);
        // 64MiB expressed in bytes.
        assert_eq!(report["quota"], 64 * 1024 * 1024);
        assert_eq!(report["quotaType"], "HARD");
        let cleared = run_rc(
            &["quota", "clear", &bucket_path, "--json"],
            config_dir.path(),
        );
        assert!(
            cleared.status.success(),
            "Failed to clear quota: {}",
            String::from_utf8_lossy(&cleared.stderr)
        );
        let report: serde_json::Value =
            serde_json::from_str(&String::from_utf8_lossy(&cleared.stdout))
                .expect("Invalid JSON output");
        assert_eq!(report["bucket"], bucket_name);
        assert!(report["quota"].is_null());
        assert_eq!(report["quotaType"], "HARD");
        cleanup_bucket(config_dir.path(), &bucket_name);
    }
}
mod alias_operations {
    use super::*;

    /// Creates an alias, confirms it appears in the listing, removes it, and
    /// confirms it is gone. Uses a fresh temp config dir rather than a bucket.
    #[test]
    fn test_alias_lifecycle() {
        let Some((endpoint, access_key, secret_key)) = get_test_config() else {
            eprintln!("Skipping: S3 test config not available");
            return;
        };
        let config_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let created = run_rc(
            &[
                "alias",
                "set",
                "myalias",
                &endpoint,
                &access_key,
                &secret_key,
                "--bucket-lookup",
                "path",
            ],
            config_dir.path(),
        );
        assert!(
            created.status.success(),
            "Failed to set alias: {}",
            String::from_utf8_lossy(&created.stderr)
        );
        let listed = run_rc(&["alias", "list", "--json"], config_dir.path());
        assert!(
            listed.status.success(),
            "Failed to list aliases: {}",
            String::from_utf8_lossy(&listed.stderr)
        );
        let listing = String::from_utf8_lossy(&listed.stdout);
        assert!(listing.contains("myalias"), "Should contain myalias");
        assert!(listing.contains(&endpoint), "Should contain endpoint");
        let removed = run_rc(&["alias", "remove", "myalias"], config_dir.path());
        assert!(
            removed.status.success(),
            "Failed to remove alias: {}",
            String::from_utf8_lossy(&removed.stderr)
        );
        let relisted = run_rc(&["alias", "list", "--json"], config_dir.path());
        let listing = String::from_utf8_lossy(&relisted.stdout);
        assert!(!listing.contains("myalias"), "myalias should be removed");
    }
}
mod option_behavior_operations {
use super::*;
fn upload_text_object(config_dir: &std::path::Path, bucket: &str, key: &str, content: &str) {
let temp_file = tempfile::NamedTempFile::new().expect("Failed to create temp file");
std::fs::write(temp_file.path(), content).expect("Failed to write");
let output = run_rc(
&[
"cp",
temp_file.path().to_str().unwrap(),
&format!("test/{}/{}", bucket, key),
],
config_dir,
);
assert!(
output.status.success(),
"Failed to upload {}: {}",
key,
String::from_utf8_lossy(&output.stderr)
);
}
#[test]
fn test_cp_dry_run_does_not_create_target_object() {
let (config_dir, bucket_name) = match setup_with_alias("cpdryrun") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
upload_text_object(
config_dir.path(),
&bucket_name,
"alpha-source.txt",
"cp dry run source",
);
let output = run_rc(
&[
"cp",
&format!("test/{}/alpha-source.txt", bucket_name),
&format!("test/{}/beta-target.txt", bucket_name),
"--dry-run",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"cp --dry-run failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let output = run_rc(
&[
"ls",
"--recursive",
&format!("test/{}/", bucket_name),
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"Failed to list objects after cp --dry-run"
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(
stdout.contains("alpha-source.txt"),
"Source object should still exist"
);
assert!(
!stdout.contains("beta-target.txt"),
"Target object should not be created by --dry-run"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `mv --dry-run` must be a pure no-op: the source object survives and the
// target object is never created (no copy, no delete).
#[test]
fn test_mv_dry_run_keeps_source_and_skips_target() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("mvdryrun") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
upload_text_object(
config_dir.path(),
&bucket_name,
"move-source.txt",
"mv dry run source",
);
// The dry-run move: must succeed but leave the bucket untouched.
let output = run_rc(
&[
"mv",
&format!("test/{}/move-source.txt", bucket_name),
&format!("test/{}/move-target.txt", bucket_name),
"--dry-run",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"mv --dry-run failed: {}",
String::from_utf8_lossy(&output.stderr)
);
// Re-list the bucket to verify nothing changed server-side.
let output = run_rc(
&[
"ls",
"--recursive",
&format!("test/{}/", bucket_name),
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"Failed to list objects after mv --dry-run"
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(
stdout.contains("move-source.txt"),
"Source object should still exist"
);
assert!(
!stdout.contains("move-target.txt"),
"Target object should not be created by --dry-run"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `rm --dry-run` must report success while leaving the object in place.
#[test]
fn test_rm_dry_run_does_not_delete_object() {
    // Skip (not fail) when no live S3 endpoint is configured.
    let Some((config_dir, bucket_name)) = setup_with_alias("rmdryrun") else {
        eprintln!("Skipping: S3 test config not available");
        return;
    };
    upload_text_object(
        config_dir.path(),
        &bucket_name,
        "keep-me.txt",
        "rm dry run source",
    );
    // The dry-run removal: must succeed but delete nothing.
    let target = format!("test/{}/keep-me.txt", bucket_name);
    let rm_out = run_rc(&["rm", &target, "--dry-run", "--json"], config_dir.path());
    assert!(
        rm_out.status.success(),
        "rm --dry-run failed: {}",
        String::from_utf8_lossy(&rm_out.stderr)
    );
    // Re-list the bucket to verify the object survived.
    let prefix = format!("test/{}/", bucket_name);
    let ls_out = run_rc(&["ls", "--recursive", &prefix, "--json"], config_dir.path());
    assert!(
        ls_out.status.success(),
        "Failed to list objects after rm --dry-run"
    );
    let listing = String::from_utf8_lossy(&ls_out.stdout);
    assert!(
        listing.contains("keep-me.txt"),
        "Object should still exist after rm --dry-run"
    );
    cleanup_bucket(config_dir.path(), &bucket_name);
}
// `rm --purge --dry-run` on a versioned bucket must leave the version history
// intact: the single uploaded version remains and no delete marker appears.
#[test]
fn test_rm_purge_dry_run_does_not_delete_versioned_object() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("rmpurgedryrun") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// Versioning is a prerequisite; backends that don't support it turn this
// test into a skip (after cleaning up the bucket created by setup).
let enable_output = run_rc(
&[
"version",
"enable",
&format!("test/{}", bucket_name),
"--json",
],
config_dir.path(),
);
if !enable_output.status.success() {
eprintln!(
"Enable versioning not supported: {}",
String::from_utf8_lossy(&enable_output.stderr)
);
cleanup_bucket(config_dir.path(), &bucket_name);
return;
}
upload_text_object(
config_dir.path(),
&bucket_name,
"keep-version.txt",
"rm purge dry run source",
);
let dry_run_output = run_rc(
&[
"rm",
&format!("test/{}/keep-version.txt", bucket_name),
"--purge",
"--dry-run",
"--json",
],
config_dir.path(),
);
assert!(
dry_run_output.status.success(),
"rm --purge --dry-run failed: {}",
String::from_utf8_lossy(&dry_run_output.stderr)
);
// Inspect the full version history for the object after the dry run.
let versions_output = run_rc(
&[
"version",
"list",
&format!("test/{}/keep-version.txt", bucket_name),
"--json",
],
config_dir.path(),
);
assert!(
versions_output.status.success(),
"Failed to list versions after rm --purge --dry-run: {}",
String::from_utf8_lossy(&versions_output.stderr)
);
let versions_stdout = String::from_utf8_lossy(&versions_output.stdout);
let versions: serde_json::Value =
serde_json::from_str(&versions_stdout).expect("Invalid JSON version list");
let versions = versions
.as_array()
.expect("Version list should be a JSON array");
// Exactly one version: the original upload, and it must be a real object
// version, not a delete marker left by the (supposedly dry) purge.
assert_eq!(
versions.len(),
1,
"Object should keep its only version after rm --purge --dry-run"
);
assert_eq!(
versions[0]["key"].as_str(),
Some("keep-version.txt"),
"Version list should still include the uploaded object"
);
assert_eq!(
versions[0]["is_delete_marker"].as_bool(),
Some(false),
"rm --purge --dry-run must not create a delete marker"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// Same contract as the `rm --purge --dry-run` test, but via the long-form
// `object remove` subcommand: the version history must be untouched.
#[test]
fn test_object_remove_purge_dry_run_does_not_delete_versioned_object() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("objectremovepurgedryrun") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// Versioning is a prerequisite; backends that don't support it turn this
// test into a skip (after cleaning up the bucket created by setup).
let enable_output = run_rc(
&[
"version",
"enable",
&format!("test/{}", bucket_name),
"--json",
],
config_dir.path(),
);
if !enable_output.status.success() {
eprintln!(
"Enable versioning not supported: {}",
String::from_utf8_lossy(&enable_output.stderr)
);
cleanup_bucket(config_dir.path(), &bucket_name);
return;
}
upload_text_object(
config_dir.path(),
&bucket_name,
"keep-object-remove-version.txt",
"object remove purge dry run source",
);
let dry_run_output = run_rc(
&[
"object",
"remove",
&format!("test/{}/keep-object-remove-version.txt", bucket_name),
"--purge",
"--dry-run",
"--json",
],
config_dir.path(),
);
assert!(
dry_run_output.status.success(),
"object remove --purge --dry-run failed: {}",
String::from_utf8_lossy(&dry_run_output.stderr)
);
// Inspect the full version history for the object after the dry run.
let versions_output = run_rc(
&[
"version",
"list",
&format!("test/{}/keep-object-remove-version.txt", bucket_name),
"--json",
],
config_dir.path(),
);
assert!(
versions_output.status.success(),
"Failed to list versions after object remove --purge --dry-run: {}",
String::from_utf8_lossy(&versions_output.stderr)
);
let versions_stdout = String::from_utf8_lossy(&versions_output.stdout);
let versions: serde_json::Value =
serde_json::from_str(&versions_stdout).expect("Invalid JSON version list");
let versions = versions
.as_array()
.expect("Version list should be a JSON array");
// Exactly one real version must remain; no delete marker added.
assert_eq!(
versions.len(),
1,
"Object should keep its only version after object remove --purge --dry-run"
);
assert_eq!(
versions[0]["key"].as_str(),
Some("keep-object-remove-version.txt"),
"Version list should still include the uploaded object"
);
assert_eq!(
versions[0]["is_delete_marker"].as_bool(),
Some(false),
"object remove --purge --dry-run must not create a delete marker"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `head --bytes N` must emit exactly the first N bytes of the object on
// stdout — no padding, no trailing newline.
#[test]
fn test_head_bytes_returns_prefix_bytes() {
    // Skip (not fail) when no live S3 endpoint is configured.
    let Some((config_dir, bucket_name)) = setup_with_alias("headbytes") else {
        eprintln!("Skipping: S3 test config not available");
        return;
    };
    upload_text_object(
        config_dir.path(),
        &bucket_name,
        "bytes.txt",
        "ABCDEFGHIJ12345",
    );
    let object = format!("test/{}/bytes.txt", bucket_name);
    let out = run_rc(&["head", "--bytes", "5", &object], config_dir.path());
    assert!(
        out.status.success(),
        "head --bytes failed: {}",
        String::from_utf8_lossy(&out.stderr)
    );
    // Byte-exact comparison against the raw stdout buffer.
    assert_eq!(
        out.stdout, b"ABCDE",
        "head --bytes should output exact prefix"
    );
    cleanup_bucket(config_dir.path(), &bucket_name);
}
// `find --name '*.txt' --count --json` must report exactly the number of
// matching objects (the .log decoy must not be counted) plus a positive
// aggregate byte size.
#[test]
fn test_find_count_json_reports_match_count() {
    // Skip (not fail) when no live S3 endpoint is configured.
    let Some((config_dir, bucket_name)) = setup_with_alias("findcount") else {
        eprintln!("Skipping: S3 test config not available");
        return;
    };
    // Two matching .txt objects and one non-matching .log object.
    for (key, body) in [("a.txt", "aaaaa"), ("b.txt", "bbbbb"), ("c.log", "ccccc")] {
        upload_text_object(config_dir.path(), &bucket_name, key, body);
    }
    let prefix = format!("test/{}/", bucket_name);
    let out = run_rc(
        &["find", &prefix, "--name", "*.txt", "--count", "--json"],
        config_dir.path(),
    );
    assert!(
        out.status.success(),
        "find --count failed: {}",
        String::from_utf8_lossy(&out.stderr)
    );
    let body = String::from_utf8_lossy(&out.stdout);
    let json: serde_json::Value = serde_json::from_str(&body).expect("Invalid JSON output");
    assert_eq!(json["count"], 2, "Expected exactly 2 txt matches");
    assert!(json["total_size_bytes"].as_i64().unwrap_or(0) > 0);
    cleanup_bucket(config_dir.path(), &bucket_name);
}
// `share --upload` must mint a presigned upload URL without touching the
// object itself, echoing back the path, type, and expiry in JSON.
#[test]
fn test_share_upload_generates_upload_url_with_expiration() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("shareupload") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// The target object is never created; presigning needs no existing object.
let object_path = format!("test/{}/upload-target.bin", bucket_name);
let output = run_rc(
&[
"share",
&object_path,
"--upload",
"--content-type",
"application/octet-stream",
"--expire",
"1h",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"share --upload failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
assert_eq!(json["type"], "upload");
assert_eq!(json["path"], object_path);
// "1h" must be normalized to 3600 seconds in the JSON payload.
assert_eq!(json["expires_secs"], 3600);
assert!(
json["url"].as_str().unwrap_or_default().starts_with("http"),
"Expected a valid presigned URL"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// An expiration beyond seven days must be rejected with a usage error.
// NOTE(review): this test runs with an empty config dir, so it assumes the
// expiration is validated before any alias/network lookup — confirm.
#[test]
fn test_share_rejects_expiration_over_seven_days() {
    let config_dir = tempfile::tempdir().expect("Failed to create temp dir");
    let args = ["share", "test/demo-bucket/demo.txt", "--expire", "8d", "--json"];
    let out = run_rc(&args, config_dir.path());
    assert!(
        !out.status.success(),
        "share with expiration > 7 days should fail"
    );
    let err = String::from_utf8_lossy(&out.stderr);
    assert!(
        err.contains("Expiration cannot exceed 7 days"),
        "Unexpected error output: {}",
        err
    );
}
// `find --print` must emit the full alias-qualified remote path of each
// match, not just the key relative to the search prefix.
#[test]
fn test_find_print_outputs_full_remote_path() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("findprint") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
upload_text_object(
config_dir.path(),
&bucket_name,
"dir/sample.txt",
"print me",
);
let full_path = format!("test/{}/dir/sample.txt", bucket_name);
// --no-color keeps the output free of ANSI escapes so the substring
// match on the path is reliable.
let output = run_rc(
&[
"find",
&format!("test/{}/", bucket_name),
"--name",
"*.txt",
"--print",
"--no-color",
],
config_dir.path(),
);
assert!(
output.status.success(),
"find --print failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(
stdout.contains(&full_path),
"Expected full remote path in output when using --print, got: {}",
stdout
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `find --exec CMD` must run CMD once per match, substituting `{}` with the
// full alias-qualified remote path of the matched object.
#[test]
fn test_find_exec_runs_command_with_full_remote_path() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("findexec") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
upload_text_object(config_dir.path(), &bucket_name, "exec-a.txt", "a");
upload_text_object(config_dir.path(), &bucket_name, "exec-b.txt", "b");
let path_a = format!("test/{}/exec-a.txt", bucket_name);
let path_b = format!("test/{}/exec-b.txt", bucket_name);
// `echo EXEC:{}` makes the substituted path observable on stdout.
let output = run_rc(
&[
"find",
&format!("test/{}/", bucket_name),
"--name",
"*.txt",
"--exec",
"echo EXEC:{}",
"--no-color",
],
config_dir.path(),
);
assert!(
output.status.success(),
"find --exec failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
// Both matches must have produced an echo line with their full path.
assert!(
stdout.contains(&format!("EXEC:{}", path_a)),
"Expected command output for first match, got: {}",
stdout
);
assert!(
stdout.contains(&format!("EXEC:{}", path_b)),
"Expected command output for second match, got: {}",
stdout
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `--exec` and `--json` are mutually exclusive: exec output would corrupt the
// JSON stream, so the CLI must fail with a usage error.
#[test]
fn test_find_exec_rejects_json_output() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("findexecjson") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
upload_text_object(config_dir.path(), &bucket_name, "x.txt", "x");
let output = run_rc(
&[
"find",
&format!("test/{}/", bucket_name),
"--name",
"*.txt",
"--exec",
"echo EXEC:{}",
"--json",
],
config_dir.path(),
);
assert!(
!output.status.success(),
"find --exec --json should fail with usage error"
);
// The usage error must name the conflicting flags on stderr.
let stderr = String::from_utf8_lossy(&output.stderr);
assert!(
stderr.contains("--exec cannot be used with --json output"),
"Unexpected error output: {}",
stderr
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `--maxdepth 1` must include objects at the root and one directory level
// down, but exclude anything nested deeper.
#[test]
fn test_find_maxdepth_excludes_deeper_matches() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("finddepth") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// Objects at depth 0, 1, and 2 relative to the search prefix.
upload_text_object(config_dir.path(), &bucket_name, "top.txt", "top");
upload_text_object(config_dir.path(), &bucket_name, "one/file.txt", "one");
upload_text_object(config_dir.path(), &bucket_name, "one/two/deep.txt", "deep");
let output = run_rc(
&[
"find",
&format!("test/{}/", bucket_name),
"--name",
"*.txt",
"--maxdepth",
"1",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"find --maxdepth failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(stdout.contains("top.txt"), "top.txt should be matched");
assert!(
stdout.contains("one/file.txt"),
"one/file.txt should be matched"
);
assert!(
!stdout.contains("one/two/deep.txt"),
"one/two/deep.txt should be excluded by maxdepth=1"
);
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `mirror --remove --overwrite --parallel 2` must copy every source object to
// the destination and delete destination objects with no source counterpart,
// reporting counts (copied/removed/errors) in JSON.
#[test]
fn test_mirror_remove_with_parallel_synchronizes_destination() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("mirroropt") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// A second bucket acts as the mirror destination.
let target_bucket = format!("{}-dest", bucket_name);
let output = run_rc(
&["mb", &format!("test/{}", target_bucket)],
config_dir.path(),
);
assert!(
output.status.success(),
"Failed to create destination bucket: {}",
String::from_utf8_lossy(&output.stderr)
);
// Two source objects to copy, plus one stale object pre-seeded in the
// destination that --remove must delete.
upload_text_object(config_dir.path(), &bucket_name, "src/keep-1.txt", "keep-1");
upload_text_object(
config_dir.path(),
&bucket_name,
"src/nested/keep-2.txt",
"keep-2",
);
upload_text_object(
config_dir.path(),
&target_bucket,
"mirror-stale.txt",
"should be removed",
);
let output = run_rc(
&[
"mirror",
&format!("test/{}/src/", bucket_name),
&format!("test/{}/", target_bucket),
"--remove",
"--overwrite",
"--parallel",
"2",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"mirror --remove --parallel failed: {}",
String::from_utf8_lossy(&output.stderr)
);
// The JSON summary must show at least the two copies and one removal,
// with zero errors.
let stdout = String::from_utf8_lossy(&output.stdout);
let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
assert_eq!(json["errors"], 0);
assert!(json["copied"].as_u64().unwrap_or(0) >= 2);
assert!(json["removed"].as_u64().unwrap_or(0) >= 1);
// Independently verify the destination's final contents via ls.
let output = run_rc(
&[
"ls",
"--recursive",
&format!("test/{}/", target_bucket),
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"Failed to list destination after mirror: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(
stdout.contains("keep-1.txt"),
"Expected keep-1.txt in destination"
);
// Accept either a preserved nested prefix or a flattened key — TODO
// confirm which layout mirror actually produces for nested sources.
assert!(
stdout.contains("nested/keep-2.txt") || stdout.contains("keep-2.txt"),
"Expected keep-2.txt in destination"
);
assert!(
!stdout.contains("mirror-stale.txt"),
"Stale destination file should be removed"
);
cleanup_bucket(config_dir.path(), &bucket_name);
cleanup_bucket(config_dir.path(), &target_bucket);
}
// `mirror --parallel 0` is nonsensical and must be rejected as a usage error
// before any transfer starts.
#[test]
fn test_mirror_parallel_zero_returns_usage_error() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("mirrorparallelzero") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// A real destination bucket is created so the failure can only come from
// the --parallel validation, not from a missing target.
let target_bucket = format!("{}-dest", bucket_name);
let output = run_rc(
&["mb", &format!("test/{}", target_bucket)],
config_dir.path(),
);
assert!(
output.status.success(),
"Failed to create destination bucket"
);
let output = run_rc(
&[
"mirror",
&format!("test/{}/", bucket_name),
&format!("test/{}/", target_bucket),
"--parallel",
"0",
"--json",
],
config_dir.path(),
);
assert!(
!output.status.success(),
"mirror --parallel 0 should fail with usage error"
);
let stderr = String::from_utf8_lossy(&output.stderr);
assert!(
stderr.contains("--parallel must be greater than 0"),
"Unexpected error output: {}",
stderr
);
cleanup_bucket(config_dir.path(), &bucket_name);
cleanup_bucket(config_dir.path(), &target_bucket);
}
// `tree` option interplay: --pattern filters files by glob, --level caps the
// displayed depth, --full-path names the root, and --dirs-only hides files.
#[test]
fn test_tree_option_combination_filters_expected_nodes() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("treeopts") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
// a.txt matches the pattern, b.log does not, deep/c.txt is below --level 2.
upload_text_object(config_dir.path(), &bucket_name, "dir/a.txt", "A");
upload_text_object(config_dir.path(), &bucket_name, "dir/b.log", "B");
upload_text_object(config_dir.path(), &bucket_name, "dir/deep/c.txt", "C");
let output = run_rc(
&[
"tree",
&format!("test/{}/", bucket_name),
"--pattern",
"*.txt",
"--level",
"2",
"--full-path",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"tree option combination failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
let json: serde_json::Value = serde_json::from_str(&stdout).expect("Invalid JSON output");
// With --full-path, the root node is named after the full remote prefix.
assert_eq!(json["name"], format!("test/{}/", bucket_name));
assert!(stdout.contains("a.txt"), "Expected a.txt to match pattern");
assert!(
!stdout.contains("b.log"),
"b.log should be filtered by pattern"
);
assert!(
!stdout.contains("deep/c.txt"),
"deep/c.txt should be excluded by level=2"
);
// Second invocation: --dirs-only must suppress all file nodes.
let output = run_rc(
&[
"tree",
&format!("test/{}/", bucket_name),
"--dirs-only",
"--json",
],
config_dir.path(),
);
assert!(
output.status.success(),
"tree --dirs-only failed: {}",
String::from_utf8_lossy(&output.stderr)
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(!stdout.contains("a.txt"), "dirs-only should exclude files");
assert!(!stdout.contains("b.log"), "dirs-only should exclude files");
cleanup_bucket(config_dir.path(), &bucket_name);
}
// `diff --diff-only` must suppress entries whose status is "same" while still
// reporting real differences; a diff with differences exits non-zero.
#[test]
fn test_diff_diff_only_excludes_same_entries() {
// Skip (not fail) when no live S3 endpoint is configured for integration tests.
let (config_dir, bucket_name) = match setup_with_alias("diffonly") {
Some(v) => v,
None => {
eprintln!("Skipping: S3 test config not available");
return;
}
};
let second_bucket = format!("{}-second", bucket_name);
let output = run_rc(
&["mb", &format!("test/{}", second_bucket)],
config_dir.path(),
);
assert!(output.status.success(), "Failed to create second bucket");
// Identical object in both buckets ("same"), plus one object present only
// in the first bucket (a real difference).
upload_text_object(config_dir.path(), &bucket_name, "same.txt", "same-content");
upload_text_object(
config_dir.path(),
&second_bucket,
"same.txt",
"same-content",
);
upload_text_object(config_dir.path(), &bucket_name, "only-first.txt", "first");
let base_args = [
"diff",
&format!("test/{}/", bucket_name),
&format!("test/{}/", second_bucket),
];
// Baseline run without --diff-only: "same" entries are included.
let output = run_rc(
&[base_args[0], base_args[1], base_args[2], "--json"],
config_dir.path(),
);
assert!(
!output.status.success(),
"diff with differences should return non-zero"
);
let stdout = String::from_utf8_lossy(&output.stdout);
assert!(
stdout.contains("same.txt"),
"baseline diff should include same entries"
);
let output = run_rc(
&[
"diff",
&format!("test/{}/", bucket_name),
&format!("test/{}/", second_bucket),
"--diff-only",
"--json",
],
config_dir.path(),
);
assert!(
!output.status.success(),
"diff --diff-only with differences should return non-zero"
);
let stdout = String::from_utf8_lossy(&output.stdout);
// NOTE(review): this substring assumes compact JSON serialization
// ("status":"same" with no space after the colon) — confirm against the
// CLI's actual --json formatting.
assert!(
!stdout.contains("\"status\":\"same\""),
"diff-only output must exclude same entries"
);
assert!(
stdout.contains("only-first.txt"),
"diff-only output should include real differences"
);
cleanup_bucket(config_dir.path(), &bucket_name);
cleanup_bucket(config_dir.path(), &second_bucket);
}
}