#![allow(clippy::unwrap_used)]
use async_trait::async_trait;
use flate2::Compression;
use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use std::io::{Read, Write};
use std::path::Path;
use super::{Builtin, Context, resolve_path};
use crate::error::Result;
use crate::interpreter::ExecResult;
/// Maximum allowed decompressed-to-compressed size ratio (zip bomb protection).
const MAX_DECOMPRESSION_RATIO: usize = 100;
/// Reads all bytes from `reader`, enforcing zip-bomb protections.
///
/// Fails with an I/O error when the output exceeds `max_size` bytes, or when
/// it grows beyond `MAX_DECOMPRESSION_RATIO` times `compressed_size`. The
/// ratio check is skipped when `compressed_size` is 0 (unknown input size).
fn read_with_limit<R: Read>(
    mut reader: R,
    compressed_size: usize,
    max_size: u64,
) -> std::io::Result<Vec<u8>> {
    let mut output = Vec::new();
    let mut buffer = [0u8; 8192];
    // Saturate so a pathologically large `compressed_size` cannot overflow
    // the multiplication (debug panic / release wraparound); hoisted out of
    // the read loop since it is loop-invariant.
    let ratio_limit = compressed_size.saturating_mul(MAX_DECOMPRESSION_RATIO);
    loop {
        let n = reader.read(&mut buffer)?;
        if n == 0 {
            break;
        }
        output.extend_from_slice(&buffer[..n]);
        if output.len() as u64 > max_size {
            return Err(std::io::Error::other(format!(
                "decompressed size exceeds {} byte limit (zip bomb protection)",
                max_size
            )));
        }
        if compressed_size > 0 && output.len() > ratio_limit {
            return Err(std::io::Error::other(format!(
                "decompression ratio exceeds {}:1 (zip bomb protection)",
                MAX_DECOMPRESSION_RATIO
            )));
        }
    }
    Ok(output)
}
/// `tar` builtin: create (`-c`), extract (`-x`) or list (`-t`) an archive,
/// optionally gzip-compressed (`-z`). Also supports `-v` (verbose),
/// `-O` (extract to stdout), `-f FILE` (archive path, `-` for stdin/stdout)
/// and `-C DIR` (extraction target directory).
pub struct Tar;
#[async_trait]
impl Builtin for Tar {
    async fn execute(&self, ctx: Context<'_>) -> Result<ExecResult> {
        // Parsed option state; exactly one of create/extract/list must end up set.
        let mut create = false;
        let mut extract = false;
        let mut list = false;
        let mut verbose = false;
        let mut gzip = false;
        let mut to_stdout = false;
        let mut archive_file: Option<String> = None;
        let mut change_dir: Option<String> = None;
        let mut files: Vec<String> = Vec::new();
        let mut p = super::arg_parser::ArgParser::new(ctx.args);
        while !p.is_done() {
            // Standalone `-f FILE` / `-C DIR` forms first.
            if let Some(val) = p.flag_value_opt("-f") {
                archive_file = Some(val.to_string());
            } else if let Some(val) = p.flag_value_opt("-C") {
                change_dir = Some(val.to_string());
            } else if p.is_flag() {
                // Bundled short options, e.g. `-czf`; inside a bundle the
                // `f` and `C` flags consume the next argument as their value.
                let arg = p.current().unwrap();
                let chars: Vec<char> = arg[1..].chars().collect();
                p.advance();
                for c in &chars {
                    match c {
                        'c' => create = true,
                        'x' => extract = true,
                        't' => list = true,
                        'v' => verbose = true,
                        'z' => gzip = true,
                        'O' => to_stdout = true,
                        'f' => match p.positional() {
                            Some(val) => archive_file = Some(val.to_string()),
                            None => {
                                return Ok(ExecResult::err(
                                    "tar: option requires an argument -- 'f'\n".to_string(),
                                    2,
                                ));
                            }
                        },
                        'C' => match p.positional() {
                            Some(val) => change_dir = Some(val.to_string()),
                            None => {
                                return Ok(ExecResult::err(
                                    "tar: option requires an argument -- 'C'\n".to_string(),
                                    2,
                                ));
                            }
                        },
                        _ => {
                            return Ok(ExecResult::err(
                                format!("tar: invalid option -- '{}'\n", c),
                                2,
                            ));
                        }
                    }
                }
            } else if let Some(arg) = p.positional() {
                // Remaining positionals are member names (only used by -c).
                files.push(arg.to_string());
            }
        }
        // Enforce exactly one operating mode.
        let mode_count = [create, extract, list].iter().filter(|&&x| x).count();
        if mode_count == 0 {
            return Ok(ExecResult::err(
                "tar: You must specify one of -c, -x, or -t\n".to_string(),
                2,
            ));
        }
        if mode_count > 1 {
            return Ok(ExecResult::err(
                "tar: You may not specify more than one of -c, -x, -t\n".to_string(),
                2,
            ));
        }
        // Default archive is `-`: stdout for create, stdin for extract/list.
        let archive_name = archive_file.unwrap_or_else(|| "-".to_string());
        if create {
            if files.is_empty() {
                return Ok(ExecResult::err(
                    "tar: Cowardly refusing to create an empty archive\n".to_string(),
                    2,
                ));
            }
            create_tar(&ctx, &archive_name, &files, verbose, gzip).await
        } else if extract {
            extract_tar(
                &ctx,
                &archive_name,
                verbose,
                gzip,
                change_dir.as_deref(),
                to_stdout,
            )
            .await
        } else {
            list_tar(&ctx, &archive_name, verbose, gzip).await
        }
    }
}
/// Tar block granularity: headers are one block, file data is zero-padded
/// up to a block boundary.
const TAR_BLOCK_SIZE: usize = 512;
/// Builds an archive containing `files` (directories recurse).
///
/// When `archive_name` is `-` the archive bytes go to stdout; otherwise
/// they are written to the resolved path. With `gzip` the finished archive
/// is compressed. Verbose per-entry names are emitted on stderr.
async fn create_tar(
    ctx: &Context<'_>,
    archive_name: &str,
    files: &[String],
    verbose: bool,
    gzip: bool,
) -> Result<ExecResult> {
    let mut output_data: Vec<u8> = Vec::new();
    let mut verbose_output = String::new();
    for file in files {
        let path = resolve_path(ctx.cwd, file);
        if !ctx.fs.exists(&path).await.unwrap_or(false) {
            return Ok(ExecResult::err(
                format!("tar: {}: Cannot stat: No such file or directory\n", file),
                2,
            ));
        }
        let metadata = ctx.fs.stat(&path).await?;
        if metadata.file_type.is_dir() {
            add_directory_to_tar(
                ctx,
                &path,
                file,
                &mut output_data,
                &mut verbose_output,
                verbose,
            )
            .await?;
        } else {
            add_file_to_tar(
                ctx,
                &path,
                file,
                &mut output_data,
                &mut verbose_output,
                verbose,
            )
            .await?;
        }
    }
    // A tar archive is terminated by two consecutive zero-filled blocks.
    output_data.extend_from_slice(&[0u8; TAR_BLOCK_SIZE * 2]);
    let final_data = if gzip {
        let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
        encoder.write_all(&output_data).map_err(|e| {
            crate::error::Error::Execution(format!("tar: gzip compression failed: {}", e))
        })?;
        encoder.finish().map_err(|e| {
            crate::error::Error::Execution(format!("tar: gzip compression failed: {}", e))
        })?
    } else {
        output_data
    };
    if archive_name == "-" {
        // NOTE(review): stdout is a String, so non-UTF-8 archive bytes are
        // replaced with U+FFFD on this path (lossy conversion below).
        return Ok(ExecResult {
            stdout: String::from_utf8_lossy(&final_data).to_string(),
            stderr: verbose_output,
            exit_code: 0,
            control_flow: crate::interpreter::ControlFlow::None,
            ..Default::default()
        });
    }
    let archive_path = resolve_path(ctx.cwd, archive_name);
    ctx.fs.write_file(&archive_path, &final_data).await?;
    Ok(ExecResult {
        stdout: String::new(),
        stderr: verbose_output,
        exit_code: 0,
        control_flow: crate::interpreter::ControlFlow::None,
        ..Default::default()
    })
}
/// Appends one regular file to the in-memory archive: a ustar-style header
/// block followed by the file contents padded to a 512-byte boundary.
/// Names longer than 100 bytes are silently truncated (no PAX/GNU
/// long-name extension support).
async fn add_file_to_tar(
    ctx: &Context<'_>,
    path: &Path,
    name: &str,
    output: &mut Vec<u8>,
    verbose_output: &mut String,
    verbose: bool,
) -> Result<()> {
    let metadata = ctx.fs.stat(path).await?;
    let content = ctx.fs.read_file(path).await?;
    if verbose {
        verbose_output.push_str(name);
        verbose_output.push('\n');
    }
    let mut header = [0u8; TAR_BLOCK_SIZE];
    // Header layout: name[0..100], mode[100..108], uid[108..116],
    // gid[116..124], size[124..136], mtime[136..148], checksum[148..156],
    // typeflag[156], magic/version[257..265]. uid/gid are fixed at 1000.
    let name_bytes = name.as_bytes();
    let name_len = name_bytes.len().min(100);
    header[..name_len].copy_from_slice(&name_bytes[..name_len]);
    write_octal(&mut header[100..108], metadata.mode as u64, 7);
    write_octal(&mut header[108..116], 1000, 7);
    write_octal(&mut header[116..124], 1000, 7);
    write_octal(&mut header[124..136], content.len() as u64, 11);
    let mtime = metadata
        .modified
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    write_octal(&mut header[136..148], mtime, 11);
    // The checksum is computed over the header with the checksum field
    // filled with spaces, then written back over those spaces.
    header[148..156].copy_from_slice(b"        ");
    header[156] = b'0';
    header[257..263].copy_from_slice(b"ustar ");
    header[263..265].copy_from_slice(b" \0");
    let checksum: u32 = header.iter().map(|&b| b as u32).sum();
    write_octal(&mut header[148..156], checksum as u64, 7);
    output.extend_from_slice(&header);
    output.extend_from_slice(&content);
    // Zero-pad the data up to the next block boundary.
    let padding = (TAR_BLOCK_SIZE - (content.len() % TAR_BLOCK_SIZE)) % TAR_BLOCK_SIZE;
    output.extend(std::iter::repeat_n(0u8, padding));
    Ok(())
}
/// Appends a directory entry (typeflag '5', name suffixed with `/`, zero
/// size) and then recurses into its children. Returns a boxed future
/// because recursive `async fn`s need the indirection.
fn add_directory_to_tar<'a>(
    ctx: &'a Context<'_>,
    path: &'a Path,
    name: &'a str,
    output: &'a mut Vec<u8>,
    verbose_output: &'a mut String,
    verbose: bool,
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + Send + 'a>> {
    Box::pin(async move {
        if verbose {
            verbose_output.push_str(name);
            verbose_output.push_str("/\n");
        }
        let metadata = ctx.fs.stat(path).await?;
        let mut header = [0u8; TAR_BLOCK_SIZE];
        // Same header layout as regular files (see add_file_to_tar), but
        // with a trailing-slash name, zero size and typeflag '5'.
        let dir_name = format!("{}/", name);
        let name_bytes = dir_name.as_bytes();
        let name_len = name_bytes.len().min(100);
        header[..name_len].copy_from_slice(&name_bytes[..name_len]);
        write_octal(&mut header[100..108], metadata.mode as u64, 7);
        write_octal(&mut header[108..116], 1000, 7);
        write_octal(&mut header[116..124], 1000, 7);
        write_octal(&mut header[124..136], 0, 11);
        let mtime = metadata
            .modified
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);
        write_octal(&mut header[136..148], mtime, 11);
        // Checksum computed with the checksum field set to spaces first.
        header[148..156].copy_from_slice(b"        ");
        header[156] = b'5';
        header[257..263].copy_from_slice(b"ustar ");
        header[263..265].copy_from_slice(b" \0");
        let checksum: u32 = header.iter().map(|&b| b as u32).sum();
        write_octal(&mut header[148..156], checksum as u64, 7);
        output.extend_from_slice(&header);
        // Recurse, prefixing children with this directory's name.
        let entries = ctx.fs.read_dir(path).await?;
        for entry in entries {
            let child_path = path.join(&entry.name);
            let child_name = format!("{}/{}", name, entry.name);
            if entry.metadata.file_type.is_dir() {
                add_directory_to_tar(
                    ctx,
                    &child_path,
                    &child_name,
                    output,
                    verbose_output,
                    verbose,
                )
                .await?;
            } else {
                add_file_to_tar(
                    ctx,
                    &child_path,
                    &child_name,
                    output,
                    verbose_output,
                    verbose,
                )
                .await?;
            }
        }
        Ok(())
    })
}
/// Writes `value` into `buf` as a zero-padded ASCII octal string of
/// `width` digits followed by a NUL terminator (classic tar numeric field).
/// If the rendered digits do not fit, only the least-significant ones that
/// fit in `buf` (minus the terminator) are kept.
fn write_octal(buf: &mut [u8], value: u64, width: usize) {
    let rendered = format!("{value:0>width$o}");
    let digits = rendered.as_bytes();
    let copy_len = digits.len().min(buf.len() - 1);
    let tail = &digits[digits.len() - copy_len..];
    buf[..copy_len].copy_from_slice(tail);
    buf[copy_len] = 0;
}
/// Extracts entries from an archive (a path, or stdin when `archive_name`
/// is `-`), optionally gunzipping first under zip-bomb limits.
///
/// Entries land below `change_dir` (created on demand) or the current
/// directory; with `to_stdout` file contents are concatenated to stdout
/// and nothing touches the filesystem. Entry names containing `..`,
/// absolute names, or names resolving outside the extraction base are
/// rejected with exit code 2.
async fn extract_tar(
    ctx: &Context<'_>,
    archive_name: &str,
    verbose: bool,
    gzip: bool,
    change_dir: Option<&str>,
    to_stdout: bool,
) -> Result<ExecResult> {
    let extract_base = if let Some(dir) = change_dir {
        resolve_path(ctx.cwd, dir)
    } else {
        ctx.cwd.clone()
    };
    let data = if archive_name == "-" {
        ctx.stdin.map(|s| s.as_bytes().to_vec()).unwrap_or_default()
    } else {
        let archive_path = resolve_path(ctx.cwd, archive_name);
        if !ctx.fs.exists(&archive_path).await.unwrap_or(false) {
            return Ok(ExecResult::err(
                format!(
                    "tar: {}: Cannot open: No such file or directory\n",
                    archive_name
                ),
                2,
            ));
        }
        ctx.fs.read_file(&archive_path).await?
    };
    // Decompression is bounded by the filesystem's total-byte limit and the
    // global compression-ratio cap (see read_with_limit).
    let limits = ctx.fs.limits();
    let max_size = limits.max_total_bytes;
    let tar_data = if gzip {
        let compressed_size = data.len();
        let decoder = GzDecoder::new(data.as_slice());
        read_with_limit(decoder, compressed_size, max_size)
            .map_err(|e| crate::error::Error::Execution(format!("tar: {}", e)))?
    } else {
        data
    };
    let mut verbose_output = String::new();
    let mut stdout_output = String::new();
    let mut offset = 0;
    if change_dir.is_some() {
        ctx.fs.mkdir(&extract_base, true).await?;
    }
    while offset + TAR_BLOCK_SIZE <= tar_data.len() {
        let header = &tar_data[offset..offset + TAR_BLOCK_SIZE];
        // An all-zero block marks the end of the archive.
        if header.iter().all(|&b| b == 0) {
            break;
        }
        let name_end = header[..100].iter().position(|&b| b == 0).unwrap_or(100);
        let name = String::from_utf8_lossy(&header[..name_end]).to_string();
        if name.is_empty() {
            break;
        }
        let size = parse_octal(&header[124..136]);
        let type_flag = header[156];
        if verbose {
            verbose_output.push_str(&name);
            verbose_output.push('\n');
        }
        offset += TAR_BLOCK_SIZE;
        // Path traversal defence: reject `..`, absolute names, and anything
        // that resolves outside the extraction base. Skipped for -O, which
        // never writes to the filesystem.
        if !to_stdout
            && (name.contains("..")
                || name.starts_with('/')
                || !resolve_path(&extract_base, &name).starts_with(&extract_base))
        {
            return Ok(ExecResult::err(
                format!("tar: {}: path traversal blocked\n", name),
                2,
            ));
        }
        match type_flag {
            // Directory entry. NOTE(review): the trailing-slash guard also
            // applies to typeflag '5', so a '5' entry without a trailing
            // slash falls through to the catch-all arm and is skipped.
            b'5' | b'\0' if name.ends_with('/') => {
                if !to_stdout {
                    let dir_path = resolve_path(&extract_base, &name);
                    ctx.fs.mkdir(&dir_path, true).await?;
                }
            }
            // Regular file ('0' or legacy NUL typeflag).
            b'0' | b'\0' => {
                let content_blocks = size.div_ceil(TAR_BLOCK_SIZE);
                let content_end = offset + content_blocks * TAR_BLOCK_SIZE;
                if content_end > tar_data.len() {
                    return Ok(ExecResult::err(
                        format!("tar: {}: Unexpected end of archive\n", name),
                        2,
                    ));
                }
                let content = &tar_data[offset..offset + size];
                if to_stdout {
                    stdout_output.push_str(&String::from_utf8_lossy(content));
                } else {
                    let file_path = resolve_path(&extract_base, &name);
                    if let Some(parent) = file_path.parent() {
                        ctx.fs.mkdir(parent, true).await?;
                    }
                    ctx.fs.write_file(&file_path, content).await?;
                }
                offset = content_end;
            }
            // Unsupported entry types (symlinks, etc.): skip their data blocks.
            _ => {
                let content_blocks = size.div_ceil(TAR_BLOCK_SIZE);
                offset += content_blocks * TAR_BLOCK_SIZE;
            }
        }
    }
    Ok(ExecResult {
        stdout: stdout_output,
        stderr: verbose_output,
        exit_code: 0,
        control_flow: crate::interpreter::ControlFlow::None,
        ..Default::default()
    })
}
/// Lists archive entry names (one per line), or an `ls -l`-style line per
/// entry with `verbose`. Reads from stdin when `archive_name` is `-` and
/// gunzips first when `gzip` is set, under zip-bomb limits.
async fn list_tar(
    ctx: &Context<'_>,
    archive_name: &str,
    verbose: bool,
    gzip: bool,
) -> Result<ExecResult> {
    let data = if archive_name == "-" {
        ctx.stdin.map(|s| s.as_bytes().to_vec()).unwrap_or_default()
    } else {
        let archive_path = resolve_path(ctx.cwd, archive_name);
        if !ctx.fs.exists(&archive_path).await.unwrap_or(false) {
            return Ok(ExecResult::err(
                format!(
                    "tar: {}: Cannot open: No such file or directory\n",
                    archive_name
                ),
                2,
            ));
        }
        ctx.fs.read_file(&archive_path).await?
    };
    let limits = ctx.fs.limits();
    let max_size = limits.max_total_bytes;
    let tar_data = if gzip {
        let compressed_size = data.len();
        let decoder = GzDecoder::new(data.as_slice());
        read_with_limit(decoder, compressed_size, max_size)
            .map_err(|e| crate::error::Error::Execution(format!("tar: {}", e)))?
    } else {
        data
    };
    let mut output = String::new();
    let mut offset = 0;
    while offset + TAR_BLOCK_SIZE <= tar_data.len() {
        let header = &tar_data[offset..offset + TAR_BLOCK_SIZE];
        // An all-zero block marks the end of the archive.
        if header.iter().all(|&b| b == 0) {
            break;
        }
        let name_end = header[..100].iter().position(|&b| b == 0).unwrap_or(100);
        let name = String::from_utf8_lossy(&header[..name_end]).to_string();
        if name.is_empty() {
            break;
        }
        let size = parse_octal(&header[124..136]);
        if verbose {
            // Render a permission string from the mode bits, plus size/name.
            let mode = parse_octal(&header[100..108]) as u32;
            let size_val = parse_octal(&header[124..136]);
            let type_flag = header[156];
            let type_char = match type_flag {
                b'5' => 'd',
                b'2' => 'l',
                _ => '-',
            };
            output.push_str(&format!(
                "{}{}{}{}{}{}{}{}{}{} {:>8} {}\n",
                type_char,
                if mode & 0o400 != 0 { 'r' } else { '-' },
                if mode & 0o200 != 0 { 'w' } else { '-' },
                if mode & 0o100 != 0 { 'x' } else { '-' },
                if mode & 0o040 != 0 { 'r' } else { '-' },
                if mode & 0o020 != 0 { 'w' } else { '-' },
                if mode & 0o010 != 0 { 'x' } else { '-' },
                if mode & 0o004 != 0 { 'r' } else { '-' },
                if mode & 0o002 != 0 { 'w' } else { '-' },
                if mode & 0o001 != 0 { 'x' } else { '-' },
                size_val,
                name
            ));
        } else {
            output.push_str(&name);
            output.push('\n');
        }
        // Advance past the header and the entry's zero-padded data blocks.
        offset += TAR_BLOCK_SIZE;
        let content_blocks = size.div_ceil(TAR_BLOCK_SIZE);
        offset += content_blocks * TAR_BLOCK_SIZE;
    }
    Ok(ExecResult::ok(output))
}
/// Parses a NUL- or space-terminated ASCII octal field from a tar header.
/// Returns 0 when the field is empty or not valid octal.
fn parse_octal(buf: &[u8]) -> usize {
    let end = buf
        .iter()
        .position(|&b| b == 0 || b == b' ')
        .unwrap_or(buf.len());
    let text = String::from_utf8_lossy(&buf[..end]);
    usize::from_str_radix(text.trim(), 8).unwrap_or(0)
}
/// `gzip` builtin: compresses files to `NAME.gz` (or stdin to stdout), or
/// decompresses with `-d`. `-k` keeps the input file, `-f` overwrites an
/// existing output. Decompression is bounded by the filesystem's per-file
/// size limit plus the global ratio cap (zip bomb protection).
pub struct Gzip;
#[async_trait]
impl Builtin for Gzip {
    async fn execute(&self, ctx: Context<'_>) -> Result<ExecResult> {
        let mut decompress = false;
        let mut keep = false;
        let mut force = false;
        let mut files: Vec<String> = Vec::new();
        // Bundled short options (e.g. `-dk`); anything else is a file name.
        // A lone `-` is treated as a file name, not an option.
        for arg in ctx.args {
            if arg.starts_with('-') && arg.len() > 1 {
                for c in arg[1..].chars() {
                    match c {
                        'd' => decompress = true,
                        'k' => keep = true,
                        'f' => force = true,
                        _ => {
                            return Ok(ExecResult::err(
                                format!("gzip: invalid option -- '{}'\n", c),
                                1,
                            ));
                        }
                    }
                }
            } else {
                files.push(arg.clone());
            }
        }
        let limits = ctx.fs.limits();
        let max_size = limits.max_file_size;
        if files.is_empty() {
            // Filter mode: stdin -> stdout. With no stdin, succeed silently.
            if let Some(stdin) = ctx.stdin {
                if decompress {
                    let compressed_size = stdin.len();
                    let decoder = GzDecoder::new(stdin.as_bytes());
                    match read_with_limit(decoder, compressed_size, max_size) {
                        Ok(output) => {
                            return Ok(ExecResult::ok(
                                String::from_utf8_lossy(&output).to_string(),
                            ));
                        }
                        Err(e) => return Ok(ExecResult::err(format!("gzip: stdin: {}\n", e), 1)),
                    }
                } else {
                    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
                    encoder.write_all(stdin.as_bytes()).map_err(|e| {
                        crate::error::Error::Execution(format!("gzip: compression failed: {}", e))
                    })?;
                    let compressed = encoder.finish().map_err(|e| {
                        crate::error::Error::Execution(format!("gzip: compression failed: {}", e))
                    })?;
                    // NOTE(review): gzip output is binary; the String stdout
                    // path replaces non-UTF-8 bytes with U+FFFD.
                    return Ok(ExecResult::ok(
                        String::from_utf8_lossy(&compressed).to_string(),
                    ));
                }
            }
            return Ok(ExecResult::ok(String::new()));
        }
        for file in &files {
            let path = resolve_path(ctx.cwd, file);
            if !ctx.fs.exists(&path).await.unwrap_or(false) {
                return Ok(ExecResult::err(
                    format!("gzip: {}: No such file or directory\n", file),
                    1,
                ));
            }
            let metadata = ctx.fs.stat(&path).await?;
            if metadata.file_type.is_dir() {
                return Ok(ExecResult::err(
                    format!("gzip: {}: Is a directory\n", file),
                    1,
                ));
            }
            if decompress {
                // FILE.gz -> FILE: refuse unknown suffixes and existing
                // outputs (unless -f); remove the source unless -k.
                if !file.ends_with(".gz") {
                    return Ok(ExecResult::err(
                        format!("gzip: {}: unknown suffix -- ignored\n", file),
                        1,
                    ));
                }
                let output_name = file.strip_suffix(".gz").unwrap();
                let output_path = resolve_path(ctx.cwd, output_name);
                if ctx.fs.exists(&output_path).await.unwrap_or(false) && !force {
                    return Ok(ExecResult::err(
                        format!("gzip: {}: already exists\n", output_name),
                        1,
                    ));
                }
                let data = ctx.fs.read_file(&path).await?;
                let compressed_size = data.len();
                let decoder = GzDecoder::new(data.as_slice());
                let output = read_with_limit(decoder, compressed_size, max_size).map_err(|e| {
                    crate::error::Error::Execution(format!("gzip: {}: {}", file, e))
                })?;
                ctx.fs.write_file(&output_path, &output).await?;
                if !keep {
                    ctx.fs.remove(&path, false).await?;
                }
            } else {
                // FILE -> FILE.gz with the same suffix/overwrite rules.
                if file.ends_with(".gz") {
                    return Ok(ExecResult::err(
                        format!("gzip: {}: already has .gz suffix\n", file),
                        1,
                    ));
                }
                let output_name = format!("{}.gz", file);
                let output_path = resolve_path(ctx.cwd, &output_name);
                if ctx.fs.exists(&output_path).await.unwrap_or(false) && !force {
                    return Ok(ExecResult::err(
                        format!("gzip: {}: already exists\n", output_name),
                        1,
                    ));
                }
                let data = ctx.fs.read_file(&path).await?;
                let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
                encoder.write_all(&data).map_err(|e| {
                    crate::error::Error::Execution(format!("gzip: {}: {}", file, e))
                })?;
                let compressed = encoder.finish().map_err(|e| {
                    crate::error::Error::Execution(format!("gzip: {}: {}", file, e))
                })?;
                ctx.fs.write_file(&output_path, &compressed).await?;
                if !keep {
                    ctx.fs.remove(&path, false).await?;
                }
            }
        }
        Ok(ExecResult::ok(String::new()))
    }
}
/// `gunzip` builtin: a thin alias for `gzip -d`.
pub struct Gunzip;
#[async_trait]
impl Builtin for Gunzip {
    /// Prepends the decompress flag to the argument list and delegates to
    /// the `Gzip` builtin with an otherwise unchanged context.
    async fn execute(&self, ctx: Context<'_>) -> Result<ExecResult> {
        let mut forwarded_args: Vec<String> = Vec::with_capacity(ctx.args.len() + 1);
        forwarded_args.push("-d".to_string());
        forwarded_args.extend(ctx.args.iter().cloned());
        let delegated = Context {
            args: &forwarded_args,
            env: ctx.env,
            variables: ctx.variables,
            cwd: ctx.cwd,
            fs: ctx.fs,
            stdin: ctx.stdin,
            #[cfg(feature = "http_client")]
            http_client: ctx.http_client,
            #[cfg(feature = "git")]
            git_client: ctx.git_client,
            #[cfg(feature = "ssh")]
            ssh_client: ctx.ssh_client,
            shell: ctx.shell,
        };
        Gzip.execute(delegated).await
    }
}
#[cfg(test)]
mod tests {
    //! Tests driving the tar/gzip/gunzip builtins against an in-memory
    //! filesystem, including zip-bomb and path-traversal protections.
    use super::*;
    use std::collections::HashMap;
    use std::path::PathBuf;
    use std::sync::Arc;
    use crate::fs::{FileSystem, InMemoryFs};
    // Fresh in-memory fs with `/home/user` created and used as the cwd.
    async fn create_test_ctx() -> (Arc<InMemoryFs>, PathBuf, HashMap<String, String>) {
        let fs = Arc::new(InMemoryFs::new());
        let cwd = PathBuf::from("/home/user");
        let variables = HashMap::new();
        fs.mkdir(&cwd, true).await.unwrap();
        (fs, cwd, variables)
    }
    // `tar -cf` then `tar -tf` round trip: the member name is listed.
    #[tokio::test]
    async fn test_tar_create_and_list() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"Hello, world!")
            .await
            .unwrap();
        let args = vec![
            "-cf".to_string(),
            "archive.tar".to_string(),
            "test.txt".to_string(),
        ];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("archive.tar")).await.unwrap());
        let args = vec!["-tf".to_string(), "archive.tar".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(result.stdout.contains("test.txt"));
    }
    // Create, delete the original, extract, and verify contents round-trip.
    #[tokio::test]
    async fn test_tar_create_and_extract() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("original.txt"), b"Test content")
            .await
            .unwrap();
        let args = vec![
            "-cf".to_string(),
            "archive.tar".to_string(),
            "original.txt".to_string(),
        ];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        fs.remove(&cwd.join("original.txt"), false).await.unwrap();
        assert!(!fs.exists(&cwd.join("original.txt")).await.unwrap());
        let args = vec!["-xf".to_string(), "archive.tar".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("original.txt")).await.unwrap());
        let content = fs.read_file(&cwd.join("original.txt")).await.unwrap();
        assert_eq!(content, b"Test content");
    }
    // `-v` reports entry names on stderr.
    #[tokio::test]
    async fn test_tar_verbose() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"content")
            .await
            .unwrap();
        let args = vec![
            "-cvf".to_string(),
            "archive.tar".to_string(),
            "test.txt".to_string(),
        ];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(result.stderr.contains("test.txt"));
    }
    // Omitting all of -c/-x/-t is an error.
    #[tokio::test]
    async fn test_tar_missing_mode() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let args = vec!["-f".to_string(), "archive.tar".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 2);
        assert!(result.stderr.contains("must specify"));
    }
    // Creating an archive from a missing file fails with "Cannot stat".
    #[tokio::test]
    async fn test_tar_nonexistent_file() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let args = vec![
            "-cf".to_string(),
            "archive.tar".to_string(),
            "nonexistent".to_string(),
        ];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 2);
        assert!(result.stderr.contains("No such file"));
    }
    // Directories are archived recursively and listed with a trailing slash.
    #[tokio::test]
    async fn test_tar_directory() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.mkdir(&cwd.join("testdir"), false).await.unwrap();
        fs.write_file(&cwd.join("testdir/file.txt"), b"content")
            .await
            .unwrap();
        let args = vec![
            "-cf".to_string(),
            "archive.tar".to_string(),
            "testdir".to_string(),
        ];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        let args = vec!["-tf".to_string(), "archive.tar".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(result.stdout.contains("testdir/"));
        assert!(result.stdout.contains("testdir/file.txt"));
    }
    // Default gzip replaces FILE with FILE.gz.
    #[tokio::test]
    async fn test_gzip_compress() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"Hello, world!")
            .await
            .unwrap();
        let args = vec!["test.txt".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("test.txt.gz")).await.unwrap());
        assert!(!fs.exists(&cwd.join("test.txt")).await.unwrap());
    }
    // Compress then `-d` restores the original bytes.
    #[tokio::test]
    async fn test_gzip_decompress() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"Hello, world!")
            .await
            .unwrap();
        let args = vec!["test.txt".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        Gzip.execute(ctx).await.unwrap();
        let args = vec!["-d".to_string(), "test.txt.gz".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("test.txt")).await.unwrap());
        let content = fs.read_file(&cwd.join("test.txt")).await.unwrap();
        assert_eq!(content, b"Hello, world!");
    }
    // `-k` keeps the source file alongside the .gz output.
    #[tokio::test]
    async fn test_gzip_keep() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"content")
            .await
            .unwrap();
        let args = vec!["-k".to_string(), "test.txt".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("test.txt")).await.unwrap());
        assert!(fs.exists(&cwd.join("test.txt.gz")).await.unwrap());
    }
    // Missing input file is reported with exit code 1.
    #[tokio::test]
    async fn test_gzip_nonexistent() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let args = vec!["nonexistent".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 1);
        assert!(result.stderr.contains("No such file"));
    }
    // Refuses to compress a file that already ends in .gz.
    #[tokio::test]
    async fn test_gzip_already_gz() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.gz"), b"content")
            .await
            .unwrap();
        let args = vec!["test.gz".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 1);
        assert!(result.stderr.contains("already has .gz"));
    }
    // Directories cannot be gzipped.
    #[tokio::test]
    async fn test_gzip_directory() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.mkdir(&cwd.join("testdir"), false).await.unwrap();
        let args = vec!["testdir".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 1);
        assert!(result.stderr.contains("Is a directory"));
    }
    // `gunzip FILE.gz` behaves like `gzip -d FILE.gz`.
    #[tokio::test]
    async fn test_gunzip_basic() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        fs.write_file(&cwd.join("test.txt"), b"content")
            .await
            .unwrap();
        let args = vec!["test.txt".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        Gzip.execute(ctx).await.unwrap();
        let args = vec!["test.txt.gz".to_string()];
        let ctx = Context {
            args: &args,
            env: &env,
            variables: &mut variables,
            cwd: &mut cwd,
            fs: fs.clone(),
            stdin: None,
            #[cfg(feature = "http_client")]
            http_client: None,
            #[cfg(feature = "git")]
            git_client: None,
            #[cfg(feature = "ssh")]
            ssh_client: None,
            shell: None,
        };
        let result = Gunzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        assert!(fs.exists(&cwd.join("test.txt")).await.unwrap());
    }
    // read_with_limit passes data through untouched when under limits.
    #[test]
    fn test_read_with_limit_normal() {
        let data = b"hello world";
        let result = read_with_limit(data.as_slice(), data.len(), 1000);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), data);
    }
    // The absolute size cap is enforced.
    #[test]
    fn test_read_with_limit_exceeds_max() {
        let data = vec![0u8; 1000];
        let result = read_with_limit(data.as_slice(), data.len(), 500);
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("exceeds") || err.contains("limit"));
    }
    // The 100:1 expansion ratio cap is enforced even with no absolute cap.
    #[test]
    fn test_read_with_limit_high_ratio() {
        let data = vec![0u8; 10100];
        let result = read_with_limit(data.as_slice(), 100, u64::MAX);
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("ratio") || err.contains("bomb"));
    }
    // gzip works within a small-but-sufficient max_file_size limit.
    #[tokio::test]
    async fn test_gzip_respects_file_size_limit() {
        use crate::fs::FsLimits;
        let limits = FsLimits::new().max_file_size(100);
        let fs = Arc::new(crate::fs::InMemoryFs::with_limits(limits));
        let mut cwd = PathBuf::from("/tmp");
        let mut variables = HashMap::new();
        let env = HashMap::new();
        fs.write_file(&cwd.join("small.txt"), b"small content")
            .await
            .unwrap();
        let args = vec!["small.txt".to_string()];
        let ctx = Context::new_for_test(&args, &env, &mut variables, &mut cwd, fs.clone(), None);
        let result = Gzip.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
    }
    // The fs itself rejects writes that exceed the configured limits.
    #[tokio::test]
    async fn test_fs_limits_enforced_on_extract() {
        use crate::fs::FsLimits;
        let limits = FsLimits::new().max_total_bytes(1000).max_file_size(500);
        let fs = Arc::new(crate::fs::InMemoryFs::with_limits(limits));
        let cwd = PathBuf::from("/tmp");
        let large_content = vec![b'x'; 600];
        fs.write_file(&cwd.join("large.txt"), &large_content)
            .await
            .expect_err("Should fail due to file size limit");
    }
    // Builds a minimal single-entry tar archive by hand (regular file,
    // typeflag '0'), including a valid checksum and end-of-archive blocks.
    fn build_tar_with_entry(name: &str, content: &[u8]) -> Vec<u8> {
        let mut output = Vec::new();
        let mut header = [0u8; 512];
        let name_bytes = name.as_bytes();
        let name_len = name_bytes.len().min(100);
        header[..name_len].copy_from_slice(&name_bytes[..name_len]);
        let mode = b"0000644\0";
        header[100..108].copy_from_slice(mode);
        header[108..116].copy_from_slice(b"0001000\0");
        header[116..124].copy_from_slice(b"0001000\0");
        let size_str = format!("{:011o}\0", content.len());
        header[124..136].copy_from_slice(size_str.as_bytes());
        header[136..148].copy_from_slice(b"00000000000\0");
        header[148..156].copy_from_slice(b"        ");
        header[156] = b'0';
        header[257..263].copy_from_slice(b"ustar ");
        header[263..265].copy_from_slice(b" \0");
        let checksum: u32 = header.iter().map(|&b| b as u32).sum();
        let cksum_str = format!("{:06o}\0 ", checksum);
        header[148..156].copy_from_slice(cksum_str.as_bytes());
        output.extend_from_slice(&header);
        output.extend_from_slice(content);
        let padding = (512 - (content.len() % 512)) % 512;
        output.extend(std::iter::repeat_n(0u8, padding));
        output.extend_from_slice(&[0u8; 1024]);
        output
    }
    // `..` components in entry names are rejected before writing anything.
    #[tokio::test]
    async fn test_tar_extract_path_traversal_dotdot_blocked() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let malicious_tar = build_tar_with_entry("../../../etc/passwd", b"root:x:0:0");
        fs.write_file(&cwd.join("evil.tar"), &malicious_tar)
            .await
            .unwrap();
        let args = vec!["-xf".to_string(), "evil.tar".to_string()];
        let ctx = Context::new_for_test(&args, &env, &mut variables, &mut cwd, fs.clone(), None);
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 2);
        assert!(result.stderr.contains("path traversal blocked"));
        assert!(!fs.exists(&PathBuf::from("/etc/passwd")).await.unwrap());
    }
    // Absolute entry names are rejected.
    #[tokio::test]
    async fn test_tar_extract_path_traversal_absolute_blocked() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let malicious_tar = build_tar_with_entry("/etc/shadow", b"root:!:19000");
        fs.write_file(&cwd.join("evil.tar"), &malicious_tar)
            .await
            .unwrap();
        let args = vec!["-xf".to_string(), "evil.tar".to_string()];
        let ctx = Context::new_for_test(&args, &env, &mut variables, &mut cwd, fs.clone(), None);
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 2);
        assert!(result.stderr.contains("path traversal blocked"));
        assert!(!fs.exists(&PathBuf::from("/etc/shadow")).await.unwrap());
    }
    // `..` in directory entries (typeflag '5') is also rejected; the header
    // is built inline here because build_tar_with_entry emits regular files.
    #[tokio::test]
    async fn test_tar_extract_path_traversal_dir_dotdot_blocked() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let mut output = Vec::new();
        let mut header = [0u8; 512];
        let name = b"../../etc/";
        header[..name.len()].copy_from_slice(name);
        header[100..108].copy_from_slice(b"0000755\0");
        header[108..116].copy_from_slice(b"0001000\0");
        header[116..124].copy_from_slice(b"0001000\0");
        header[124..136].copy_from_slice(b"00000000000\0");
        header[136..148].copy_from_slice(b"00000000000\0");
        header[148..156].copy_from_slice(b"        ");
        header[156] = b'5';
        header[257..263].copy_from_slice(b"ustar ");
        header[263..265].copy_from_slice(b" \0");
        let checksum: u32 = header.iter().map(|&b| b as u32).sum();
        let cksum_str = format!("{:06o}\0 ", checksum);
        header[148..156].copy_from_slice(cksum_str.as_bytes());
        output.extend_from_slice(&header);
        output.extend_from_slice(&[0u8; 1024]);
        fs.write_file(&cwd.join("evil_dir.tar"), &output)
            .await
            .unwrap();
        let args = vec!["-xf".to_string(), "evil_dir.tar".to_string()];
        let ctx = Context::new_for_test(&args, &env, &mut variables, &mut cwd, fs.clone(), None);
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 2);
        assert!(result.stderr.contains("path traversal blocked"));
    }
    // Ordinary relative subdirectory paths still extract correctly.
    #[tokio::test]
    async fn test_tar_extract_safe_paths_still_work() {
        let (fs, mut cwd, mut variables) = create_test_ctx().await;
        let env = HashMap::new();
        let safe_tar = build_tar_with_entry("subdir/file.txt", b"safe content");
        fs.write_file(&cwd.join("safe.tar"), &safe_tar)
            .await
            .unwrap();
        let args = vec!["-xf".to_string(), "safe.tar".to_string()];
        let ctx = Context::new_for_test(&args, &env, &mut variables, &mut cwd, fs.clone(), None);
        let result = Tar.execute(ctx).await.unwrap();
        assert_eq!(result.exit_code, 0);
        let content = fs.read_file(&cwd.join("subdir/file.txt")).await.unwrap();
        assert_eq!(content, b"safe content");
    }
}