use std::path::Path;
use std::sync::Arc;
use serde::Serialize;
use void_core::cid::ToVoidCid;
use void_core::pipeline::unseal::{unseal_commit, UnsealOptions};
use void_core::transport::IpfsBackend;
use void_core::{
cid,
crypto::{CommitReader, EncryptedCommit},
store::{FsStore, ObjectStoreExt},
};
use crate::context::{open_repo, resolve_ref, void_err_to_cli};
use crate::observer::ProgressObserver;
use crate::output::{run_command, CliError, CliOptions};
/// One file recorded in a commit manifest, as reported by `unseal --list`.
#[derive(Debug, Clone, Serialize)]
pub struct FileEntry {
/// Repository-relative path of the file (manifest entry path).
pub path: String,
/// File size in bytes (taken from the manifest entry's `length`).
pub size: u64,
}
/// Shard/byte counters produced by an unseal run.
///
/// Serialized with camelCase keys (`shardsTotal`, `shardsRead`, ...).
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UnsealStats {
/// Total number of shards referenced by the commit.
pub shards_total: usize,
/// Shards read from the local object store.
pub shards_read: usize,
/// Shards fetched remotely (e.g. over IPFS).
pub shards_fetched: usize,
/// Total bytes written to the output directory.
pub bytes_written: usize,
}
/// JSON payload for a successful extract-mode unseal.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UnsealOutput {
/// CID string of the unsealed commit.
pub commit: String,
/// Commit message (may be empty).
pub message: String,
/// Number of files restored.
pub files: usize,
/// Shard/byte counters for the run.
pub stats: UnsealStats,
}
/// JSON payload for `unseal --list`: the commit plus its file entries.
///
/// No `rename_all` here: both field names are single lowercase words,
/// so the default serde naming already matches the camelCase outputs.
#[derive(Debug, Clone, Serialize)]
pub struct UnsealListOutput {
/// CID string of the listed commit.
pub commit: String,
/// Manifest file entries, sorted by path.
pub files: Vec<FileEntry>,
}
/// Parsed CLI arguments for the `unseal` command.
pub struct UnsealArgs {
/// Output directory; relative paths are resolved against the cwd,
/// and `None` means extract into the cwd itself.
pub output: Option<String>,
/// Commit ref to unseal; `None` means HEAD.
pub commit: Option<String>,
/// When true, list the commit's files instead of extracting them.
pub list: bool,
/// Skip remote fetching; operate on local objects only.
pub offline: bool,
/// Disable content-hash verification of restored files.
pub no_verify: bool,
/// IPFS backend selector ("gateway" or "kubo"); note that the current
/// code treats any other non-empty value like "kubo".
pub backend: Option<String>,
/// Kubo API URL; defaults to http://127.0.0.1:5001 when unset.
pub kubo: Option<String>,
/// Gateway base URL; required when `backend` is "gateway".
pub gateway: Option<String>,
/// Network timeout in milliseconds; defaults to 30000 when unset.
pub timeout: Option<u64>,
}
/// Read the commit's tree manifest and return one `FileEntry` per file,
/// sorted by path.
///
/// Returns an internal error when the commit has no `manifest_cid`, and
/// maps manifest/store failures into `CliError` via `void_err_to_cli`.
fn collect_all_files(
    store: &FsStore,
    commit: &void_core::metadata::Commit,
    reader: &CommitReader,
) -> Result<Vec<FileEntry>, CliError> {
    let manifest =
        void_core::metadata::manifest_tree::TreeManifest::from_commit(store, commit, reader)
            .map_err(void_err_to_cli)?
            .ok_or_else(|| CliError::internal("commit has no manifest_cid"))?;

    // Walk the manifest, bailing out on the first bad entry.
    let mut files = Vec::new();
    for item in manifest.iter() {
        let entry = item.map_err(void_err_to_cli)?;
        files.push(FileEntry {
            path: entry.path.clone(),
            size: entry.length,
        });
    }

    // Deterministic ordering for both human and JSON output.
    files.sort_by(|left, right| left.path.cmp(&right.path));
    Ok(files)
}
/// List the files in a commit without extracting anything.
///
/// Resolves the requested ref (HEAD by default), decrypts the commit
/// with the repo vault, and returns its sorted file entries. In
/// non-JSON mode a human-readable table is also printed via `ctx`.
fn run_list_mode(
    cwd: &Path,
    args: &UnsealArgs,
    ctx: &mut crate::output::CommandContext,
) -> Result<UnsealListOutput, CliError> {
    ctx.progress("Loading commit...");

    let repository = open_repo(cwd)?;
    let dot_void = repository.void_dir();
    let vault = repository.vault().clone();

    // Default to HEAD when no explicit ref was supplied.
    let reference = args.commit.as_deref().unwrap_or("HEAD");
    let typed_cid = resolve_ref(dot_void.as_std_path(), reference)?;
    let commit_cid =
        cid::from_bytes(typed_cid.as_bytes()).map_err(|e| CliError::internal(e.to_string()))?;
    let cid_text = commit_cid.to_string();
    ctx.verbose(format!(
        "Commit: {}",
        &cid_text[..12.min(cid_text.len())]
    ));

    let store = FsStore::new(&dot_void.join("objects"))
        .map_err(|e| CliError::internal(format!("failed to open object store: {e}")))?;
    let sealed: EncryptedCommit = store
        .get_blob(&commit_cid)
        .map_err(|e| CliError::not_found(format!("commit not found: {e}")))?;
    let (raw_commit, reader) = CommitReader::open_with_vault(&vault, &sealed)
        .map_err(|e| CliError::internal(format!("commit decryption failed: {e}")))?;
    let commit = raw_commit
        .parse()
        .map_err(|e| CliError::internal(format!("failed to parse commit: {e}")))?;

    ctx.progress("Collecting file entries...");
    let files = collect_all_files(&store, &commit, &reader)?;
    ctx.progress(format!("Found {} files", files.len()));

    // Human-readable table; JSON mode serializes the return value instead.
    if !ctx.use_json() {
        ctx.info(format!("commit {}", cid_text));
        ctx.info("");
        for entry in &files {
            ctx.info(format!("{:>10} {}", entry.size, entry.path));
        }
        ctx.info("");
        ctx.info(format!("{} file(s)", files.len()));
    }

    Ok(UnsealListOutput {
        commit: cid_text,
        files,
    })
}
/// Extract a commit's files into the output directory.
///
/// Resolves the target commit (HEAD when `args.commit` is `None`),
/// selects the IPFS backend from the CLI flags, and delegates the
/// actual restore to `unseal_commit`.
///
/// Fix vs. previous revision: an unrecognized `--backend` value used to
/// fall into a `Some("kubo") | Some(_)` arm and silently behave like
/// "kubo"; typos are now rejected with an `invalid_args` error.
fn run_extract_mode(
    cwd: &Path,
    args: &UnsealArgs,
    ctx: &mut crate::output::CommandContext,
) -> Result<UnsealOutput, CliError> {
    /// Default network timeout when `--timeout` is not supplied.
    const DEFAULT_TIMEOUT_MS: u64 = 30_000;

    ctx.progress("Preparing unseal...");
    let repo = open_repo(cwd)?;
    let void_dir = repo.void_dir().to_owned();

    // Relative --output paths are resolved against the working directory;
    // no --output means extract into the working directory itself.
    let output_dir = match &args.output {
        Some(dir) => {
            let path = Path::new(dir);
            if path.is_absolute() {
                path.to_path_buf()
            } else {
                cwd.join(path)
            }
        }
        None => cwd.to_path_buf(),
    };

    // `None` lets unseal_commit default to HEAD.
    let commit_cid = match &args.commit {
        Some(ref_str) => Some(resolve_ref(void_dir.as_std_path(), ref_str)?),
        None => None,
    };

    ctx.verbose(format!("Output directory: {}", output_dir.display()));
    if let Some(ref cid) = commit_cid {
        let cid_str = cid.to_cid_string();
        ctx.verbose(format!("Commit: {}", &cid_str[..12.min(cid_str.len())]));
    } else {
        ctx.verbose("Commit: HEAD");
    }

    // NOTE(review): the observer is created and finished but never passed
    // into unseal_commit -- presumably UnsealOptions grows an observer
    // hook; confirm progress reporting is actually wired up.
    let observer: Arc<ProgressObserver> = if ctx.use_json() {
        Arc::new(ProgressObserver::new_hidden())
    } else {
        Arc::new(ProgressObserver::new("Unsealing..."))
    };

    let ipfs_backend = match args.backend.as_deref() {
        Some("gateway") => {
            let gateway_url = args.gateway.as_ref().ok_or_else(|| {
                CliError::invalid_args("--gateway URL is required when using gateway backend")
            })?;
            Some(IpfsBackend::Gateway {
                base: gateway_url.clone(),
            })
        }
        Some("kubo") => {
            let kubo_url = args.kubo.as_deref().unwrap_or("http://127.0.0.1:5001");
            Some(IpfsBackend::Kubo {
                api: kubo_url.to_string(),
            })
        }
        // Previously any other value silently acted like "kubo".
        Some(other) => {
            return Err(CliError::invalid_args(format!(
                "unknown backend '{other}' (expected 'kubo' or 'gateway')"
            )));
        }
        None => None,
    };

    let timeout_ms = args.timeout.unwrap_or(DEFAULT_TIMEOUT_MS);
    let output_dir_utf8 = camino::Utf8PathBuf::try_from(output_dir.clone())
        .map_err(|e| CliError::internal(format!("output path is not valid UTF-8: {e}")))?;

    let unseal_opts = UnsealOptions {
        ctx: repo.context().clone(),
        output_dir: output_dir_utf8,
        commit_cid,
        backend: ipfs_backend,
        timeout: std::time::Duration::from_millis(timeout_ms),
        offline: args.offline,
        verify_content_hashes: !args.no_verify,
    };
    let result = unseal_commit(unseal_opts).map_err(void_err_to_cli)?;
    observer.finish();

    if !ctx.use_json() {
        ctx.info(format!("commit {}", result.commit_cid.to_cid_string()));
        if !result.message.is_empty() {
            ctx.info(format!("  {}", result.message));
        }
        ctx.info("");
        ctx.info(format!(
            "Unsealed {} file(s) to {}",
            result.stats.files_restored,
            output_dir.display()
        ));
        ctx.info(format!(
            "  {} shard(s) read, {} fetched, {} bytes written",
            result.stats.shards_read, result.stats.shards_fetched, result.stats.bytes_written
        ));
    }

    Ok(UnsealOutput {
        commit: result.commit_cid.to_cid_string(),
        message: result.message,
        files: result.stats.files_restored,
        stats: UnsealStats {
            shards_total: result.stats.shards_total,
            shards_read: result.stats.shards_read,
            shards_fetched: result.stats.shards_fetched,
            bytes_written: result.stats.bytes_written,
        },
    })
}
/// Result of a run: extract stats or a file listing.
///
/// `#[serde(untagged)]` serializes the inner value directly, with no
/// variant wrapper key in the JSON.
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub enum UnsealResultOutput {
/// Default mode: files were extracted.
Extract(UnsealOutput),
/// `--list` mode: files were only enumerated.
List(UnsealListOutput),
}
/// Entry point for the `unseal` command.
///
/// Dispatches to list mode when `--list` is set, otherwise to extract
/// mode, wrapping either result in the shared command runner.
pub fn run(cwd: &Path, args: UnsealArgs, opts: &CliOptions) -> Result<(), CliError> {
    run_command("unseal", opts, |ctx| {
        match args.list {
            true => run_list_mode(cwd, &args, ctx).map(UnsealResultOutput::List),
            false => run_extract_mode(cwd, &args, ctx).map(UnsealResultOutput::Extract),
        }
    })
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Serialize a value to its compact JSON string for containment checks.
    fn to_json<T: Serialize>(value: &T) -> String {
        serde_json::to_string(value).unwrap()
    }

    /// Assert that the JSON contains every expected fragment.
    fn assert_contains_all(json: &str, needles: &[&str]) {
        for needle in needles {
            assert!(json.contains(needle));
        }
    }

    #[test]
    fn test_unseal_output_serialization() {
        let json = to_json(&UnsealOutput {
            commit: "bafytest123".to_string(),
            message: "test commit".to_string(),
            files: 42,
            stats: UnsealStats {
                shards_total: 5,
                shards_read: 5,
                shards_fetched: 0,
                bytes_written: 500000,
            },
        });
        assert_contains_all(
            &json,
            &[
                "\"commit\":\"bafytest123\"",
                "\"message\":\"test commit\"",
                "\"files\":42",
                "\"shardsTotal\":5",
                "\"shardsRead\":5",
                "\"shardsFetched\":0",
                "\"bytesWritten\":500000",
            ],
        );
    }

    #[test]
    fn test_unseal_list_output_serialization() {
        let entries = vec![
            FileEntry {
                path: "src/main.rs".to_string(),
                size: 1234,
            },
            FileEntry {
                path: "README.md".to_string(),
                size: 567,
            },
        ];
        let json = to_json(&UnsealListOutput {
            commit: "bafytest456".to_string(),
            files: entries,
        });
        assert_contains_all(
            &json,
            &[
                "\"commit\":\"bafytest456\"",
                "\"path\":\"src/main.rs\"",
                "\"size\":1234",
                "\"path\":\"README.md\"",
                "\"size\":567",
            ],
        );
    }

    #[test]
    fn test_file_entry_serialization() {
        let json = to_json(&FileEntry {
            path: "test/file.txt".to_string(),
            size: 42,
        });
        assert_contains_all(&json, &["\"path\":\"test/file.txt\"", "\"size\":42"]);
    }

    #[test]
    fn test_unseal_result_output_extract_variant() {
        let json = to_json(&UnsealResultOutput::Extract(UnsealOutput {
            commit: "bafy123".to_string(),
            message: "msg".to_string(),
            files: 1,
            stats: UnsealStats {
                shards_total: 1,
                shards_read: 1,
                shards_fetched: 0,
                bytes_written: 100,
            },
        }));
        // Untagged enum: inner fields appear directly at the top level.
        assert_contains_all(&json, &["\"commit\":\"bafy123\"", "\"message\":\"msg\""]);
    }

    #[test]
    fn test_unseal_result_output_list_variant() {
        let json = to_json(&UnsealResultOutput::List(UnsealListOutput {
            commit: "bafy456".to_string(),
            files: vec![],
        }));
        assert_contains_all(&json, &["\"commit\":\"bafy456\"", "\"files\":[]"]);
    }
}