use bevy::winit::WinitPlugin;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
#[cfg(target_os = "windows")]
use winapi::um::fileapi::GetFileAttributesA;
#[cfg(target_os = "windows")]
use winapi::um::winnt::FILE_ATTRIBUTE_HIDDEN;
#[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))]
use libc::{UF_HIDDEN, stat};
#[cfg(unix)]
use std::os::unix::fs::MetadataExt;
/// Returns `true` if `path` should be treated as hidden.
///
/// A path is considered hidden when its final component starts with a dot
/// (all platforms), when the OS reports a hidden attribute (Windows
/// `FILE_ATTRIBUTE_HIDDEN`, macOS `UF_HIDDEN`), or when the path cannot be
/// inspected at all (non-UTF-8 path, interior NUL, failed OS query) —
/// erring on the side of skipping questionable files.
fn is_hidden(path: &Path) -> bool {
    // Dot-prefixed names are hidden by convention on every platform.
    let file_name = path.file_name().and_then(|s| s.to_str());
    if file_name.map_or(false, |s| s.starts_with('.')) {
        return true;
    }
    // The OS-level checks below need a C string; treat unconvertible paths
    // as hidden so they are skipped rather than mishandled.
    let path_str = match path.to_str() {
        Some(s) => s,
        None => return true,
    };
    let path_c = match std::ffi::CString::new(path_str) {
        Ok(c) => c,
        Err(_) => return true,
    };
    #[cfg(target_os = "windows")]
    {
        let attributes = unsafe { GetFileAttributesA(path_c.as_ptr()) };
        if attributes == u32::MAX {
            // INVALID_FILE_ATTRIBUTES: the file could not be queried.
            return true;
        }
        return (attributes & FILE_ATTRIBUTE_HIDDEN) != 0;
    }
    #[cfg(target_os = "macos")]
    {
        let mut file_stat: stat = unsafe { std::mem::zeroed() };
        let ret = unsafe { libc::stat(path_c.as_ptr(), &mut file_stat) };
        if ret != 0 {
            return true;
        }
        return (file_stat.st_flags & UF_HIDDEN as u32) != 0;
    }
    // Linux and any other platform without a native hidden-file attribute:
    // only the dot-prefix convention applies, so the file is visible.
    // BUG FIX: the previous `#[cfg(target_os = "linux")]` branch was empty,
    // so on Linux the function fell through without returning a `bool` and
    // failed to compile.
    #[cfg(not(any(target_os = "windows", target_os = "macos")))]
    {
        let _ = &path_c;
        false
    }
}
use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use bevy::prelude::*;
use bevy::{asset::AssetServer, log::LogPlugin};
use clap::{Parser, Subcommand};
use bevy_dlc::{
DLC_PACK_VERSION_LATEST, EncryptionKey, PackItem, extract_dlc_ids_from_license,
extract_encrypt_key_from_license, extract_product_from_license,
pack_encrypted_pack_with_metadata, parse_encrypted_pack, parse_encrypted_pack_info,
prelude::*,
};
use owo_colors::{AnsiColors, OwoColorize};
use secure_gate::RevealSecret;
mod repl;
mod watch;
// Top-level CLI definition parsed by clap.
// Plain `//` comments are used on purpose: `///` doc comments would be
// picked up by clap as user-visible help text and change CLI output.
#[derive(Parser)]
#[command(
    author,
    about = "bevy-dlc helper: pack and unpack .dlcpack containers",
    long_about = "Utility for creating, inspecting and extracting bevy-dlc encrypted containers."
)]
struct Cli {
    // Global flag: report what would be done without writing any files.
    // Being `global`, it may appear before or after the subcommand.
    #[arg(long, global = true)]
    dry_run: bool,
    // The subcommand to execute (see `Commands`).
    #[command(subcommand)]
    command: Commands,
}
// Subcommands of the bevy-dlc helper.
// Plain `//` comments are used throughout: clap turns `///` doc comments
// into help text, which would change the CLI's visible output.
#[derive(Subcommand)]
enum Commands {
    // `version` / `v`: print tool version, optionally a pack's version.
    #[command(
        about = "Print version information",
        long_about = "Display version information for bevy-dlc and the encrypted pack format. If a .dlcpack file is supplied, also display the embedded pack version.",
        alias = "v"
    )]
    Version {
        #[arg(value_name = "DLC", help = "Optional .dlcpack path")]
        dlc: Option<PathBuf>,
    },
    // `pack` / `p`: encrypt input files into a .dlcpack container.
    #[command(
        about = "Pack assets into a .dlcpack bundle",
        long_about = "Encrypts the provided input files into a single bevy-dlc .dlcpack bundle. Use --list to preview container metadata.",
        alias = "p"
    )]
    Pack {
        #[arg(value_name = "PRODUCT")]
        product: String,
        #[arg(
            help = "Identifier embedded into the container and private key (e.g. expansion_1)",
            value_name = "DLC_ID"
        )]
        dlc_id: String,
        // Trailing positional file/dir inputs (given after `--`).
        #[arg(value_name = "FILES...", last = true)]
        files: Vec<PathBuf>,
        #[arg(
            short,
            long,
            help = "Show the metadata the container would contain and exit; no file or private key will be produced."
        )]
        list: bool,
        #[arg(
            short,
            long,
            help = "Destination path for the generated .dlcpack (default: <dlc_id>.dlcpack). If the path has no extension it will be treated as a directory.",
            value_name = "OUT"
        )]
        out: Option<PathBuf>,
        // Manual extension -> TypePath overrides; beat auto-detection.
        #[arg(
            long = "types",
            help = "Override asset types: ext=TypePath (e.g., json=my_game::LevelData)",
            long_help = "Manually specify TypePath for extensions that Bevy doesn't recognize.\nFormat: --types json=my_game::LevelData --types csv=my_game::CsvData\nThese take precedence over auto-detected types from Bevy's loaders.",
            value_name = "EXT=TYPE",
            num_args = 1..
        )]
        types: Option<Vec<String>>,
        #[arg(
            long = "metadata",
            help = "Pack metadata entry as key=value. Values are parsed as JSON when possible.",
            value_name = "KEY=VALUE",
            num_args = 1..
        )]
        metadata: Option<Vec<String>>,
        #[arg(
            long = "pubkey",
            help = "Optional public key (base64url or file) used to verify a supplied signed license or to print alongside a supplied license",
            value_name = "PUBKEY"
        )]
        pubkey: Option<String>,
        #[arg(
            short,
            long = "signed-license",
            help = "Optional SignedLicense token to use instead of generating a new one",
            value_name = "SIGNED_LICENSE"
        )]
        signed_license: Option<String>,
    },
    // `list` / `ls`: inspect one pack, or all packs under a directory.
    #[command(
        about = "List contents of a .dlcpack (prints entries/metadata)",
        long_about = "Display detailed metadata for the entries inside a .dlcpack. If given a directory, lists all .dlcpack files inside.",
        alias = "ls"
    )]
    List {
        #[arg(
            value_name = "DLC",
            help = "Path to a .dlcpack file, or a directory containing .dlcpack files (recursive)"
        )]
        dlc: PathBuf,
    },
    // `check` / `validate` / `c`: verify pack against license + pubkey.
    #[command(
        about = "Validate a .dlcpack file against a signed license and public key",
        long_about = "Checks that the .dlcpack's embedded DLC id is covered by the signed license, and that the signature is valid for the given public key. If the license does not include the DLC id but is otherwise valid, the command will attempt to extend the license with the missing DLC id (if a private key is available) and print the extended token.",
        alias = "validate",
        alias = "c"
    )]
    Check {
        #[arg(value_name = "DLC")]
        dlc: PathBuf,
        #[arg(short, long, value_name = "PRODUCT")]
        product: Option<String>,
        #[arg(short, long = "signed-license", value_name = "SIGNED_LICENSE")]
        signed_license: Option<String>,
        #[arg(long = "pubkey", value_name = "PUBKEY")]
        pubkey: Option<String>,
    },
    // `generate` / `g`: create <product>.slicense and <product>.pubkey,
    // which other commands pick up as defaults when present on disk.
    #[command(
        about = "Generate a product .slicense and .pubkey.",
        long_about = "Create a signed-license token and write <product>.slicense and <product>.pubkey; these files are used as defaults by other commands when present.",
        alias = "g"
    )]
    Generate {
        #[arg(value_name = "PRODUCT")]
        product: String,
        #[arg(value_name = "DLCS", num_args = 1..)]
        dlcs: Vec<String>,
        #[arg(short, long, value_name = "OUT_DIR")]
        out_dir: Option<PathBuf>,
        #[arg(short, long)]
        force: bool,
    },
    // `aes-key` / `aes`: print a random 32-character AES-256 key.
    #[command(
        about = "Generate a random 32-character AES-256 key",
        long_about = "Generate a cryptographically random 32-character key for use with include_signed_license_aes!. The key is printed as exactly 32 printable ASCII characters, matching the requirement of bevy_dlc_macro.",
        alias = "aes"
    )]
    AesKey,
    // `edit`: interactive (or one-shot via trailing args) manifest editor.
    #[command(
        about = "Interactive REPL to edit an existing .dlcpack metadata (add/remove entries, merge another pack, etc.)",
        long_about = "Modify the manifest of an existing .dlcpack (change types, remove entries) without re-encrypting the content. If a key/license is provided, you can also add new files."
    )]
    Edit {
        #[arg(value_name = "DLC")]
        dlc: PathBuf,
        #[arg(short, long = "signed-license", value_name = "SIGNED_LICENSE")]
        signed_license: Option<String>,
        #[arg(long = "pubkey", value_name = "PUBKEY")]
        pubkey: Option<String>,
        #[arg(short, long, value_name = "PRODUCT")]
        product: Option<String>,
        // Optional REPL command to run non-interactively (after `--`).
        #[arg(value_name = "REPL_CMD", last = true)]
        command: Vec<String>,
    },
    // `find`: locate a pack by its embedded DLC id under a directory.
    #[command(
        about = "Find a .dlcpack file with specified DLC id in a directory",
        long_about = "Search for .dlcpack files in a directory (recursively) for a matching DLC id in their manifest. This is useful for locating files when you only have the DLC id and not the filename."
    )]
    Find {
        #[arg(value_name = "DLC_ID")]
        dlc_id: String,
        #[arg(value_name = "DIR")]
        dir: PathBuf,
        #[arg(short = 'd', long, default_value_t = 5)]
        max_depth: usize,
    },
    // `watch`: live repack of changed source files into their packs.
    #[command(
        about = "Watch real source files and repack changed entries back into their .dlcpack files",
        long_about = "Scans the current directory recursively for .dlcpack files, resolves their archived entry paths against real files on disk, and watches those real files for changes. When a tracked source file changes, the matching entry is re-packed into the originating .dlcpack."
    )]
    Watch,
}
/// Parses a raw metadata value: valid JSON is kept as-is, anything else
/// falls back to a plain JSON string of the raw text.
pub(crate) fn parse_metadata_value(raw: &str) -> serde_json::Value {
    match serde_json::from_str(raw) {
        Ok(value) => value,
        Err(_) => serde_json::Value::String(raw.to_string()),
    }
}
/// Parses `--metadata` assignments of the form `key=value` into a
/// [`bevy_dlc::PackMetadata`]. Keys are trimmed; values are trimmed and
/// parsed via [`parse_metadata_value`].
///
/// # Errors
/// Fails when an entry has no `=` or when the key is empty.
pub(crate) fn parse_metadata_assignments(
    assignments: &[String],
) -> Result<bevy_dlc::PackMetadata, Box<dyn std::error::Error>> {
    let mut metadata = bevy_dlc::PackMetadata::new();
    for assignment in assignments {
        let Some((raw_key, raw_value)) = assignment.split_once('=') else {
            return Err(format!(
                "invalid metadata entry '{}'; expected key=value",
                assignment
            )
            .into());
        };
        let key = raw_key.trim();
        if key.is_empty() {
            return Err(format!(
                "invalid metadata entry '{}'; key cannot be empty",
                assignment
            )
            .into());
        }
        metadata.insert(key.to_string(), parse_metadata_value(raw_value.trim()));
    }
    Ok(metadata)
}
/// Prints the pack metadata section: a "locked" notice when the metadata
/// is encrypted, "none" when empty, otherwise one `key = value` line per
/// entry (values rendered as JSON).
fn print_pack_metadata(metadata: &bevy_dlc::PackMetadata, metadata_locked: bool) {
    let label = "metadata:".blue();
    if metadata_locked {
        println!("{} encrypted (DLC key required to inspect)", label);
    } else if metadata.is_empty() {
        println!("{} none", label);
    } else {
        println!("{}", label);
        for (key, value) in metadata {
            let rendered = serde_json::to_string(value)
                .unwrap_or_else(|_| "<unprintable metadata>".to_string());
            println!(" - {} = {}", key, rendered);
        }
    }
}
/// Recursively gathers regular files under `dir` into `out`.
///
/// Dot-directories plus `target`/`node_modules` are never entered; hidden
/// files are skipped. With `ext_filter` only files with that extension
/// (case-insensitive) are collected; without it, a fixed set of
/// binary/artifact extensions is excluded. `max_depth` limits recursion:
/// subdirectories are descended into only while it is greater than zero.
fn collect_files_recursive(
    dir: &std::path::Path,
    out: &mut Vec<std::path::PathBuf>,
    ext_filter: Option<&str>,
    max_depth: usize,
) -> std::io::Result<()> {
    // Artifact/tooling extensions never packed when no filter is given.
    const EXCLUDED_EXTENSIONS: &[&str] = &[
        "dlcpack", "slicense", "pubkey", "exe", "dll", "so", "dylib", "pdb", "ilk",
        "exp", "lib", "a", "o", "rlib",
    ];
    for entry in std::fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        let name = entry.file_name();
        let name_str = name.to_string_lossy();
        if path.is_dir() {
            let skip_dir = name_str.starts_with('.')
                || name_str == "target"
                || name_str == "node_modules";
            if !skip_dir && max_depth > 0 {
                collect_files_recursive(&path, out, ext_filter, max_depth - 1)?;
            }
            continue;
        }
        if !path.is_file() || is_hidden(&path) {
            continue;
        }
        let file_ext = path
            .extension()
            .and_then(|s| s.to_str())
            .unwrap_or("")
            .to_ascii_lowercase();
        let keep = match ext_filter {
            Some(filter) => file_ext.eq_ignore_ascii_case(filter),
            None => !EXCLUDED_EXTENSIONS.contains(&file_ext.as_str()),
        };
        if keep {
            out.push(path);
        }
    }
    Ok(())
}
/// Parses `--types` overrides of the form `EXT=TypePath` into a map keyed
/// by the lowercased extension. Entries without an `=` are ignored; on
/// duplicate extensions the last entry wins.
fn parse_type_overrides(overrides: &[String]) -> HashMap<String, String> {
    overrides
        .iter()
        .filter_map(|entry| entry.split_once('='))
        .map(|(ext, type_path)| (ext.to_ascii_lowercase(), type_path.to_string()))
        .collect()
}
/// Prints or persists a signed license together with its public key.
///
/// With `write_files == false` both values are printed to stdout. With
/// `write_files == true` they are written to `<product>.slicense` and
/// `<product>.pubkey` under `out_dir` (defaulting to the current
/// directory, created on demand); when no product name is available a
/// warning is printed and nothing is written.
fn print_signed_license_and_pubkey(
    signedlicense: &str,
    dlc_key: &DlcKey,
    write_files: bool,
    product: Option<&str>,
    out_dir: Option<&std::path::Path>,
) {
    let pubkey_b64 = URL_SAFE_NO_PAD.encode(dlc_key.get_public_key().0);
    if !write_files {
        println!("{}:\n{}", "SIGNED LICENSE".green().bold(), signedlicense);
        println!("{}: {}", "PUB KEY".blue().bold(), pubkey_b64);
        return;
    }
    let Some(prod) = product else {
        print_warning("no product name supplied; skipping file write");
        return;
    };
    let dir = out_dir.unwrap_or_else(|| std::path::Path::new("."));
    if !dir.exists() {
        if let Err(e) = std::fs::create_dir_all(dir) {
            print_error(&format!(
                "failed to create output directory {}: {}",
                dir.display(),
                e
            ));
            return;
        }
    }
    let slicense_path = dir.join(format!("{}.slicense", prod));
    let pubkey_path = dir.join(format!("{}.pubkey", prod));
    if let Err(e) = std::fs::write(&slicense_path, signedlicense) {
        print_error(&format!(
            "failed to write {}: {}",
            slicense_path.display(),
            e
        ));
    }
    if let Err(e) = std::fs::write(&pubkey_path, pubkey_b64) {
        print_error(&format!("failed to write {}: {}", pubkey_path.display(), e));
    }
}
/// Maps each input file to a Bevy asset `TypePath` string.
///
/// CLI `overrides` (lowercased extension -> TypePath) take precedence;
/// every remaining distinct extension is resolved by asking the running
/// Bevy `App`'s `AssetServer` for its registered loader. Files whose
/// extension resolves neither way are silently omitted from the result.
///
/// # Errors
/// Fails when the `AssetServer` resource is missing, or when an extension
/// has no registered `AssetLoader` (the message suggests `--types`).
async fn resolve_type_paths_from_bevy(
    app: &mut App,
    paths: &[PathBuf],
    overrides: &HashMap<String, String>,
) -> Result<HashMap<PathBuf, String>, Box<dyn std::error::Error>> {
    // Collect the distinct lowercased extensions that still need a loader
    // lookup (i.e. not already covered by an override).
    let mut extensions_to_query: Vec<String> = Vec::new();
    for path in paths {
        if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
            let ext_lower = ext.to_ascii_lowercase();
            if !overrides.contains_key(&ext_lower) && !extensions_to_query.contains(&ext_lower) {
                extensions_to_query.push(ext_lower);
            }
        }
    }
    let mut ext_map: HashMap<String, String> = HashMap::new();
    {
        let world = app.world();
        let asset_server_ref = world
            .get_resource::<AssetServer>()
            .ok_or("AssetServer resource not found")?;
        // Clone the server handle so `app.update()` can be called below
        // without holding a borrow of the world.
        let asset_server = asset_server_ref.clone();
        for ext in &extensions_to_query {
            // Pump the app before each lookup — NOTE(review): presumably so
            // pending loader registrations get processed; confirm against
            // the Bevy version in use.
            app.update();
            match asset_server.get_asset_loader_with_extension(ext).await {
                Ok(loader) => {
                    let type_name = loader.asset_type_name();
                    ext_map.insert(ext.clone(), type_name.to_string());
                }
                Err(_) => {
                    return Err(format!(
                        "no AssetLoader registered for extension '{}'; either add the plugin that provides the loader or pass --types {}=TypePath",
                        ext, ext
                    ).into());
                }
            }
        }
    }
    // Build the final per-file map: overrides win over detected loaders.
    let mut result = HashMap::new();
    for path in paths {
        if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
            let ext_lower = ext.to_ascii_lowercase();
            if let Some(tp) = overrides.get(&ext_lower) {
                result.insert(path.clone(), tp.clone());
            } else if let Some(tp) = ext_map.get(&ext_lower) {
                result.insert(path.clone(), tp.clone());
            }
        }
    }
    Ok(result)
}
/// Prints one line per pack entry for current-format packs, or an
/// unsupported-version notice for anything older.
fn print_pack_entries(version: usize, ents: &[(String, bevy_dlc::EncryptedAsset)]) {
    if version as u8 != DLC_PACK_VERSION_LATEST {
        println!(
            "Version {} is not supported anymore. Repack your DLC assets using the 'pack' command.",
            version
        );
        return;
    }
    for (p, enc) in ents {
        let type_path = enc.type_path.clone().unwrap_or("None".to_string());
        println!(
            " - {} (ext={}) type={}",
            p, enc.original_extension, type_path
        );
    }
}
/// If `val` names an existing file, returns that file's trimmed contents;
/// otherwise (including on read failure) returns `val` unchanged. Lets
/// CLI options accept either an inline token or a path to a token file.
fn resolve_file_or_value(val: String) -> String {
    if !std::path::Path::new(&val).is_file() {
        return val;
    }
    match std::fs::read_to_string(&val) {
        Ok(contents) => contents.trim().to_string(),
        Err(_) => val,
    }
}
/// Convenience wrapper around
/// [`resolve_pubkey_and_license_with_search_roots`] that searches only the
/// current working directory.
fn resolve_pubkey_and_license(
    pubkey: Option<String>,
    signed_license: Option<String>,
    product: &str,
) -> (Option<String>, Option<String>) {
    let roots = [PathBuf::from(".")];
    resolve_pubkey_and_license_with_search_roots(pubkey, signed_license, product, &roots)
}
/// Resolves the pubkey and signed-license strings for `product`.
///
/// Explicit values are resolved via [`resolve_file_or_value`] (inline
/// token or file path); missing values fall back to `<product>.pubkey` /
/// `<product>.slicense` files found under `search_roots`.
fn resolve_pubkey_and_license_with_search_roots(
    pubkey: Option<String>,
    signed_license: Option<String>,
    product: &str,
    search_roots: &[PathBuf],
) -> (Option<String>, Option<String>) {
    let resolved_pubkey = pubkey
        .map(resolve_file_or_value)
        .or_else(|| find_product_file_in_search_roots(product, "pubkey", search_roots));
    let resolved_license = signed_license
        .map(resolve_file_or_value)
        .or_else(|| find_product_file_in_search_roots(product, "slicense", search_roots));
    (resolved_pubkey, resolved_license)
}
/// Returns the pack encryption key: the key embedded in the signed
/// license when one is present, otherwise a freshly generated random key.
fn derive_encrypt_key(
    signed_license: Option<&str>,
) -> Result<EncryptionKey, Box<dyn std::error::Error>> {
    let key = signed_license
        .and_then(|lic_str| {
            extract_encrypt_key_from_license(&bevy_dlc::SignedLicense::from(lic_str.to_string()))
        })
        .unwrap_or_else(|| EncryptionKey::new(rand::random()));
    Ok(key)
}
fn handle_license_output(
signed_license: Option<&str>,
pubkey: Option<&str>,
product: &str,
dlc_id_str: &str,
signer_key: Option<&DlcKey>,
write_files: bool,
) -> Result<(), Box<dyn std::error::Error>> {
if let Some(sup_license) = signed_license {
if let Some(pubkey_str) = pubkey {
let verifier = DlcKey::public(pubkey_str)
.map_err(|e| format!("invalid provided pubkey: {:?}", e))?;
if !verifier.verify_signed_license(&SignedLicense::from(sup_license.to_string())) {
return Err("supplied signed-license verification failed".into());
}
let final_license = SignedLicense::from(sup_license.to_string());
let verified_product = extract_product_from_license(&final_license).unwrap_or_default();
if verified_product != product {
return Err("supplied signed-license product does not match pack product".into());
}
let mut final_license = SignedLicense::from(sup_license.to_string());
if !extract_dlc_ids_from_license(&final_license)
.iter()
.any(|d| d == &dlc_id_str)
{
let extended = signer_key.and_then(|dlc_key| {
dlc_key
.extend_signed_license(
&final_license,
&[DlcId::from(dlc_id_str.to_string())],
Product::from(product.to_string()),
)
.ok()
});
match extended {
Some(ext) => {
println!("{}", "note: supplied license did not include requested DLC id, extending it now.".white().bold());
final_license = ext;
}
None => {
print_warning(&format!(
"license does not include DLC id '{}'; the pack was created but users need a license that covers this DLC id to unlock it. Re-run `generate` with this DLC id to update the license.",
dlc_id_str
));
}
}
}
final_license.with_secret(|s| {
println!("{}:\n{}", "SIGNED LICENSE".green().bold(), s);
println!("{}: {}", "PUB KEY".blue().bold(), pubkey_str);
});
} else {
print_warning("supplied signed-license not verified (no --pubkey supplied)");
let dlc_ids_in_existing =
extract_dlc_ids_from_license(&SignedLicense::from(sup_license.to_string()));
let final_license = if !dlc_ids_in_existing.iter().any(|d| d == dlc_id_str) {
if let Some(dlc_key) = signer_key {
let extended = dlc_key.extend_signed_license(
&SignedLicense::from(sup_license.to_string()),
&[DlcId::from(dlc_id_str.to_string())],
Product::from(product.to_string()),
)?;
println!(
"{}",
"note: existing license did not include requested DLC id, extended with it"
.white()
.bold()
);
extended
} else {
print_warning(&format!(
"existing license does not include DLC id '{}' (no private key available to extend)",
dlc_id_str
));
SignedLicense::from(sup_license.to_string())
}
} else {
SignedLicense::from(sup_license.to_string())
};
final_license.with_secret(|s| {
println!("{}:\n{}", "SIGNED LICENSE:".green().bold(), s);
});
}
} else {
if let Some(dlc_key) = signer_key {
let signedlicense = dlc_key.create_signed_license(
&[DlcId::from(dlc_id_str.to_string())],
Product::from(product.to_string()),
)?;
signedlicense.with_secret(|s| {
if write_files {
print_signed_license_and_pubkey(s.as_str(), dlc_key, false, Some(product), None)
} else {
println!("{}:\n{}", "SIGNED LICENSE".green().bold(), s);
}
});
} else {
let dlc_key = DlcKey::generate_random();
let signedlicense = dlc_key.create_signed_license(
&[DlcId::from(dlc_id_str.to_string())],
Product::from(product.to_string()),
)?;
signedlicense.with_secret(|s| {
if write_files {
print_signed_license_and_pubkey(s.as_str(), &dlc_key, true, Some(product), None)
} else {
println!("{}:\n{}", "SIGNED LICENSE".green().bold(), s);
}
});
}
}
Ok(())
}
/// Convenience wrapper around [`resolve_keys_with_search_roots`] that
/// searches only the current working directory.
fn resolve_keys(
    pubkey: Option<String>,
    signed_license: Option<String>,
    product: Option<Product>,
    embedded_product: Option<Product>,
) -> (Option<crate::DlcKey>, Option<crate::SignedLicense>) {
    let roots = [PathBuf::from(".")];
    resolve_keys_with_search_roots(pubkey, signed_license, product, embedded_product, &roots)
}
/// Resolves a typed public key and signed license.
///
/// The explicitly supplied `product` wins over `embedded_product` (read
/// from a pack) when deciding which `<product>.pubkey`/`.slicense` files
/// to search for under `search_roots`. An unparsable pubkey string is
/// dropped (`None`) rather than reported.
pub(crate) fn resolve_keys_with_search_roots(
    pubkey: Option<String>,
    signed_license: Option<String>,
    product: Option<Product>,
    embedded_product: Option<Product>,
    search_roots: &[PathBuf],
) -> (Option<crate::DlcKey>, Option<crate::SignedLicense>) {
    let product_name = product
        .as_ref()
        .or(embedded_product.as_ref())
        .map(|value| value.as_ref().to_string());
    let (resolved_pubkey_str, resolved_license_str) = if let Some(name) = product_name.as_deref() {
        resolve_pubkey_and_license_with_search_roots(pubkey, signed_license, name, search_roots)
    } else {
        (
            pubkey.map(resolve_file_or_value),
            signed_license.map(resolve_file_or_value),
        )
    };
    let resolved_pubkey = resolved_pubkey_str.and_then(|s| crate::DlcKey::public(&s).ok());
    let resolved_license = resolved_license_str.map(crate::SignedLicense::from);
    (resolved_pubkey, resolved_license)
}
/// Searches `search_roots` for a `<product>.<ext>` file: first directly in
/// each root, then recursively (depth 3) with a case-insensitive name
/// match. Returns the trimmed contents of the first match found.
fn find_product_file_in_search_roots(
    product: &str,
    ext: &str,
    search_roots: &[PathBuf],
) -> Option<String> {
    // Shared read helper: trims the contents, None on read failure.
    fn read_trimmed(path: &std::path::Path) -> Option<String> {
        std::fs::read_to_string(path)
            .ok()
            .map(|s| s.trim().to_string())
    }
    let file_name = format!("{}.{}", product, ext);
    for root in search_roots {
        let direct_path = root.join(&file_name);
        if direct_path.is_file() {
            return read_trimmed(&direct_path);
        }
        let mut matches = Vec::new();
        if collect_files_recursive(root, &mut matches, Some(ext), 3).is_ok() {
            for path in matches {
                let name_matches = path
                    .file_name()
                    .and_then(|s| s.to_str())
                    .map_or(false, |n| n.eq_ignore_ascii_case(&file_name));
                if name_matches {
                    return read_trimmed(&path);
                }
            }
        }
    }
    None
}
fn print_error(message: &str) {
eprintln!("{}: {}", "error".red().bold(), message.white());
}
/// Writes a colorized `warning: <message>` line to stderr.
fn print_warning(message: &str) {
    let label = "warning".yellow().bold();
    eprintln!("{}: {}", label, message.white());
}
/// Prints `message` via [`print_error`] and terminates the whole process
/// with exit code 1; never returns.
fn print_error_and_exit(message: &str) -> ! {
    print_error(message);
    std::process::exit(1);
}
/// Returns `true` when content sniffing (via the `infer` crate) classifies
/// the file as an application/executable; unreadable or unrecognized
/// files count as non-executable.
fn is_executable(path: &std::path::Path) -> bool {
    if let Ok(Some(kind)) = infer::get_from_path(path) {
        return matches!(kind.matcher_type(), infer::MatcherType::App);
    }
    false
}
/// Proves that `encrypt_key` actually decrypts the pack at `dlc_pack_file`.
///
/// Parses the container header from `reader`, re-reads the first encrypted
/// block's ciphertext directly from the file at its recorded offset,
/// decrypts it with AES-256-GCM, gunzips the result and walks the tar
/// entries. Prints "GOOD" when `signature_verified`, otherwise an "OKAY"
/// notice explaining the signature was not checked.
///
/// # Errors
/// Fails on parse/I-O errors, unsupported pack versions, a wrong key or
/// corrupted ciphertext, or an unreadable inner archive.
fn test_decrypt_archive_with_key_from_reader<R: std::io::Read>(
    dlc_pack_file: &str,
    mut reader: R,
    encrypt_key: &EncryptionKey,
    signature_verified: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let (_prod, _did, version, entries, blocks) = parse_encrypted_pack(&mut reader)?;
    if entries.is_empty() {
        // Nothing to decrypt; an empty container is trivially "valid".
        println!("container has no entries");
        return Ok(());
    }
    // Latest-format packs carry per-block offset metadata: re-open the file
    // and read the first block's ciphertext at its recorded offset.
    let (archive_nonce, archive_ciphertext) = if version == DLC_PACK_VERSION_LATEST as usize {
        let b = blocks
            .first()
            .ok_or("v5 pack missing block metadata for archive decrypt")?;
        let mut f = std::fs::File::open(dlc_pack_file)?;
        use std::io::Seek;
        f.seek(std::io::SeekFrom::Start(b.file_offset))?;
        let mut buf = vec![0u8; b.encrypted_size as usize];
        f.read_exact(&mut buf)?;
        (b.nonce, buf)
    } else {
        return Err(format!("unsupported pack version: {}", version).into());
    };
    use aes_gcm::aead::AeadInPlace;
    use aes_gcm::{Aes256Gcm, KeyInit, Nonce};
    use secure_gate::RevealSecret;
    // Decrypt in place; GCM authentication makes a wrong key fail loudly.
    let mut buf = archive_ciphertext.clone();
    let _ = encrypt_key.with_secret(|key_bytes| {
        let cipher = Aes256Gcm::new_from_slice(key_bytes).map_err(|e| e.to_string())?;
        let nonce = Nonce::from_slice(&archive_nonce);
        cipher
            .decrypt_in_place(nonce, &[], &mut buf)
            .map_err(|_| "decryption failed (incorrect key or corrupted ciphertext)".to_string())
    })?;
    let plain = buf;
    // The plaintext is a gzipped tar; enumerating its entries validates it.
    let dec = flate2::read::GzDecoder::new(std::io::Cursor::new(plain));
    let mut ar = tar::Archive::new(dec);
    ar.entries()
        .map_err(|e| Box::<dyn std::error::Error>::from(format!("(archive extract): {}", e)))?;
    if signature_verified {
        println!("{} -> {}", "GOOD".green().bold(), dlc_pack_file);
    } else {
        println!(
            "{} -> {}\n{}",
            "OKAY:".yellow().bold(),
            dlc_pack_file,
            ".dlcpack archive decrypts with embedded encrypt key (signature NOT verified).\nTry providing the corresponding public key and signed license to verify the signature."
        );
    }
    Ok(())
}
/// Implements `check` for a single `.dlcpack` file.
///
/// Resolves a pubkey/license pair (explicit args, or `<product>.pubkey` /
/// `<product>.slicense` on disk, using the pack's embedded product as the
/// fallback name), verifies signature/product/DLC-id coverage when a
/// pubkey is available, then test-decrypts the archive with the license's
/// embedded encrypt key.
///
/// # Errors
/// Fails on unreadable or unparsable packs, a missing license, failed
/// signature verification, product or DLC-id mismatch, or decryption
/// failure.
fn validate_dlc_file(
    path: &std::path::Path,
    product_arg: Option<&str>,
    signed_license_arg: Option<&str>,
    pubkey_arg: Option<&str>,
) -> Result<(), Box<dyn std::error::Error>> {
    use std::io::Seek;
    let file = std::fs::File::open(path)?;
    let mut reader = std::io::BufReader::new(file);
    // First pass: read the pack header to learn its product and DLC id.
    let (prod, dlc_id, _v, _ents, _blocks) = parse_encrypted_pack(&mut reader)?;
    let embedded_product = Some(prod.clone());
    let (supplied_pubkey, supplied_license) = resolve_keys(
        pubkey_arg.map(|s| s.to_string()),
        signed_license_arg.map(|s| s.to_string()),
        product_arg.map(|s| Product::from(s.to_string())),
        embedded_product,
    );
    if supplied_license.is_none() {
        return Err("no signed license supplied or found (use --signed-license or --product <name> to pick <product>.slicense)".into());
    }
    let supplied_license = supplied_license.unwrap();
    // Signature/product/coverage checks only run with a resolved pubkey;
    // without one, validation falls through to the decrypt test below.
    if let Some(pk) = supplied_pubkey.as_ref() {
        let verifier = pk;
        if !verifier.verify_signed_license(&supplied_license) {
            return Err("signed-license verification failed".into());
        }
        let verified_product = extract_product_from_license(&supplied_license).unwrap_or_default();
        if Product::from(verified_product) != prod {
            return Err("license product does not match pack".into());
        }
        let verified_dlcs = extract_dlc_ids_from_license(&supplied_license);
        if !verified_dlcs.iter().any(|d| d == &dlc_id.as_ref()) {
            return Err(format!("license does not include DLC id '{}'", dlc_id).into());
        }
    }
    // Rewind and test-decrypt using the key embedded in the license.
    if let Some(enc_key) = extract_encrypt_key_from_license(&supplied_license) {
        reader.seek(std::io::SeekFrom::Start(0))?;
        test_decrypt_archive_with_key_from_reader(
            path.to_str().unwrap(),
            &mut reader,
            &enc_key,
            supplied_pubkey.is_some(),
        )?;
    } else if supplied_pubkey.is_some() {
        print_warning(
            "License verified but does not carry an embedded encrypt key — cannot test decrypt",
        );
    }
    Ok(())
}
/// Locates the first `.dlcpack` under `root_path` (searched recursively up
/// to `depth`, default 5) whose embedded DLC id equals `dlc_id`, returning
/// its path, version and parsed [`DlcPack`].
///
/// # Errors
/// Fails on directory-walk errors, on any unparsable candidate pack, or
/// when no matching pack exists.
fn find_dlcpack(
    root_path: &Path,
    dlc_id: impl Into<DlcId>,
    depth: Option<usize>,
) -> Result<(PathBuf, usize, DlcPack), Box<dyn std::error::Error>> {
    let dlc_id = dlc_id.into();
    let mut candidates: Vec<PathBuf> = Vec::new();
    collect_files_recursive(
        root_path,
        &mut candidates,
        Some("dlcpack"),
        depth.unwrap_or(5),
    )?;
    for candidate in candidates {
        let file = std::fs::File::open(&candidate)?;
        let mut reader = std::io::BufReader::new(file);
        let (prod, did, version, ents, _blocks) = parse_encrypted_pack(&mut reader)?;
        let did = DlcId::from(did);
        if did != dlc_id {
            continue;
        }
        let entries = ents
            .into_iter()
            .map(|(path, encrypted)| DlcPackEntry::new(path, encrypted))
            .collect();
        let pack = DlcPack::new(did.clone(), prod, version as u8, entries);
        return Ok((candidate, version, pack));
    }
    Err(format!("no .dlcpack found with dlc_id '{}'", dlc_id).into())
}
/// Implements the `pack` subcommand: encrypts `files` into a `.dlcpack`.
///
/// Pipeline: resolve pubkey/license defaults, expand input dirs, resolve
/// per-extension asset TypePaths via the headless Bevy `app`, parse
/// metadata, build encrypted pack items, write the container, and print
/// (or extend) the covering signed license. `dry_run` skips writing the
/// container file.
///
/// # Errors
/// Fails on missing license, missing inputs, executable inputs, loader
/// resolution failures, metadata parse errors, or I/O errors.
async fn pack_command(
    app: &mut App,
    dlc_id_str: String,
    files: Vec<PathBuf>,
    list: bool,
    out: Option<PathBuf>,
    product: String,
    types: Option<Vec<String>>,
    metadata: Option<Vec<String>>,
    pubkey: Option<String>,
    signed_license: Option<String>,
    dry_run: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Resolve --pubkey/--signed-license values (inline token, file path, or
    // <product>.pubkey / <product>.slicense found in the current directory).
    let (pubkey, signed_license) = resolve_pubkey_and_license(pubkey, signed_license, &product);
    if signed_license.is_none() {
        return Err(format!(
            "no signed license found for product '{product}'. \
            Run `bevy-dlc generate {product} <dlc_id>` to create one first, \
            then use `--signed-license <path-or-token>` or place `{product}.slicense` in the current directory."
        ).into());
    }
    // Expand directory inputs recursively (depth 10); files are taken as-is.
    let mut selected_files: Vec<PathBuf> = Vec::new();
    for entry in &files {
        if entry.is_dir() {
            collect_files_recursive(entry, &mut selected_files, None, 10)?;
        } else if entry.is_file() {
            selected_files.push(entry.clone());
        } else {
            return Err(format!("input path not found: {}", entry.display()).into());
        }
    }
    if selected_files.is_empty() {
        return Err("no files selected for dlcpack".into());
    }
    // --types EXT=TypePath overrides beat loader auto-detection below.
    let type_overrides = types
        .as_ref()
        .map(|t| parse_type_overrides(t))
        .unwrap_or_default();
    // Ask the headless Bevy app which asset type each extension maps to.
    let type_path_map = bevy::tasks::block_on(async {
        resolve_type_paths_from_bevy(app, &selected_files, &type_overrides).await
    })?;
    let pack_metadata = metadata
        .as_ref()
        .map(|values| parse_metadata_assignments(values))
        .transpose()?
        .unwrap_or_default();
    let mut items: Vec<PackItem> = Vec::new();
    for file in &selected_files {
        // Executables are refused outright (content-sniffed, not by extension).
        if is_executable(file) {
            return Err(format!("refusing to pack executable file: {}", file.display()).into());
        }
        let mut f = File::open(file)?;
        let mut bytes = Vec::new();
        f.read_to_end(&mut bytes)?;
        // Archive path: default to the bare file name...
        let mut rel = file
            .file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("file")
            .to_string();
        // ...but preserve the relative layout for files found under an
        // input directory.
        for base in &files {
            if base.is_dir() && file.starts_with(base) {
                rel = file
                    .strip_prefix(base)
                    .unwrap()
                    .to_string_lossy()
                    .to_string();
                break;
            }
        }
        let ext = file
            .extension()
            .and_then(|s| s.to_str())
            .map(|s| s.to_string());
        let type_path = type_path_map.get(file).cloned();
        let mut item = PackItem::new(rel.clone(), bytes.clone())?;
        if let Some(e) = ext {
            item = item.with_extension(e)?;
        }
        if let Some(tp) = type_path {
            item = item.with_type_path(tp);
        }
        items.push(item);
    }
    let dlc_id = DlcId::from(dlc_id_str.clone());
    // Verification key: reuse the supplied pubkey when valid, otherwise a
    // fresh random key pair. NOTE(review): an invalid --pubkey silently
    // falls back to a random key — confirm that is intended.
    let dlc_key = if let Some(pk) = pubkey.as_deref() {
        match DlcKey::public(pk) {
            Ok(k) => k,
            Err(_) => DlcKey::generate_random(),
        }
    } else {
        DlcKey::generate_random()
    };
    // The AES key comes from the license when it embeds one, else random.
    let encrypt_key = derive_encrypt_key(signed_license.as_deref())?;
    let container = pack_encrypted_pack_with_metadata(
        &dlc_id,
        &items,
        &Product::from(product.clone()),
        &pack_metadata,
        &encrypt_key,
        bevy_dlc::DEFAULT_BLOCK_SIZE,
    )?;
    // Print/extend/write the signed license covering this pack.
    handle_license_output(
        signed_license.as_deref(),
        pubkey.as_deref(),
        &product,
        &dlc_id_str,
        Some(&dlc_key),
        !dry_run,
    )?;
    if list {
        // --list: show what was just packed (metadata needs the encrypt key).
        let parsed = parse_encrypted_pack_info(&container[..], Some(&encrypt_key))?;
        let did = parsed.dlc_id;
        let version = parsed.version;
        let ents = parsed.entries;
        println!("{} {} entries: {}", "dlc_id".blue(), did, ents.len());
        print_pack_metadata(&parsed.metadata, parsed.metadata_locked);
        print_pack_entries(version, &ents);
    }
    // Output path rules: existing dir -> <dir>/<dlc_id>.dlcpack; a path with
    // an extension is used verbatim; otherwise it is created as a directory.
    let out_path = if let Some(out_val) = out {
        let path = PathBuf::from(&out_val);
        if path.exists() {
            if path.is_dir() {
                path.join(format!("{}.dlcpack", dlc_id_str))
            } else {
                path
            }
        } else {
            if path.extension().is_some() {
                path
            } else {
                if !path.exists() {
                    std::fs::create_dir_all(&path)?;
                }
                path.join(format!("{}.dlcpack", dlc_id_str))
            }
        }
    } else {
        PathBuf::from(format!("{}.dlcpack", dlc_id_str))
    };
    if dry_run {
        print_warning(format!("dry-run: would create dlcpack: {}", out_path.display()).as_str());
    } else {
        std::fs::write(&out_path, &container)?;
        println!("created dlcpack: {}", out_path.display());
    }
    Ok(())
}
/// Constructs a headless Bevy `App` whose `AssetServer` is used to look up
/// registered asset loaders during `pack`.
///
/// `DefaultPlugins` is configured with no primary window, the winit plugin
/// disabled (no event loop / windowing), and logging limited to ERROR so
/// the CLI's own output stays readable. `finish`/`cleanup`/`update` run
/// the plugin lifecycle once so resources like the `AssetServer` exist.
fn build_pack_app() -> App {
    let mut app = App::new();
    app.add_plugins(
        DefaultPlugins
            .set(WindowPlugin {
                primary_window: None,
                ..default()
            })
            .disable::<WinitPlugin>()
            .set(LogPlugin {
                level: bevy::log::Level::ERROR,
                ..Default::default()
            }),
    );
    app.finish();
    app.cleanup();
    app.update();
    app
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
let cli = Cli::parse();
match cli.command {
Commands::Version { dlc } => {
println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"));
if let Some(path) = dlc {
let result = (|| -> Result<_, Box<dyn std::error::Error>> {
let file = std::fs::File::open(&path)?;
let mut reader = std::io::BufReader::new(file);
let (_prod, did, version, _ents, _blocks) = parse_encrypted_pack(&mut reader)?;
Ok((did, version))
})();
match result {
Ok((did, version)) => {
println!("{} -> {} (pack v{})", path.display(), did.as_str(), version);
}
Err(e) => {
print_error(&format!(
"error reading/parsing '{}': {}",
path.display(),
e
));
std::process::exit(1);
}
}
}
return Ok(());
}
Commands::Pack {
dlc_id: dlc_id_str,
files,
list,
out,
product,
types,
metadata,
pubkey,
signed_license,
} => {
let mut app = build_pack_app();
bevy::tasks::block_on(async {
pack_command(
&mut app,
dlc_id_str,
files,
list,
out,
product,
types,
metadata,
pubkey,
signed_license,
cli.dry_run,
)
.await
})?;
}
Commands::List { dlc } => {
if dlc.is_dir() {
let mut files = Vec::new();
collect_files_recursive(&dlc, &mut files, Some("dlcpack"), 10)?;
if files.is_empty() {
return Err("no .dlcpack files found in directory".into());
}
for file in &files {
let f = std::fs::File::open(file)?;
let mut reader = std::io::BufReader::new(f);
let parsed = parse_encrypted_pack_info(&mut reader, None)?;
let did = parsed.dlc_id;
let version = parsed.version;
let ents = parsed.entries;
println!(
"{} -> {} {} (v{}) entries: {}",
"dlcpack:".color(AnsiColors::Blue),
did.as_str().color(AnsiColors::Magenta).bold(),
file.display(),
version,
ents.len()
);
print_pack_metadata(&parsed.metadata, parsed.metadata_locked);
print_pack_entries(version, &ents);
}
return Ok(());
}
let file = std::fs::File::open(&dlc)?;
let mut reader = std::io::BufReader::new(file);
let parsed = parse_encrypted_pack_info(&mut reader, None)?;
let did = parsed.dlc_id;
let version = parsed.version;
let ents = parsed.entries;
println!(
"{} {} (v{}) entries: {}",
"dlcpack".color(AnsiColors::Blue),
did.as_str().color(AnsiColors::Magenta).bold(),
version,
ents.len()
);
print_pack_metadata(&parsed.metadata, parsed.metadata_locked);
print_pack_entries(version, &ents);
return Ok(());
}
Commands::Check {
dlc,
product,
signed_license,
pubkey,
} => {
if dlc.is_dir() {
let mut files = Vec::new();
collect_files_recursive(&dlc, &mut files, Some("dlcpack"), 10)?;
if files.is_empty() {
print_error_and_exit("no .dlcpack files found in directory");
}
let mut failures = 0usize;
for file in &files {
match validate_dlc_file(
file.as_path(),
product.as_deref(),
signed_license.as_deref(),
pubkey.as_deref(),
) {
Ok(()) => {}
Err(e) => {
print_error(&format!("{}: {}", file.display(), e));
failures += 1;
}
}
}
if failures > 0 {
print_error_and_exit(&format!("{} file(s) failed validation", failures));
}
return Ok(());
}
match validate_dlc_file(
&dlc,
product.as_deref(),
signed_license.as_deref(),
pubkey.as_deref(),
) {
Ok(()) => return Ok(()),
Err(e) => print_error_and_exit(&e.to_string()),
}
}
Commands::Generate {
product,
dlcs,
out_dir,
force,
} => {
let dlc_key = DlcKey::generate_random();
let signedlicense =
dlc_key.create_signed_license(&dlcs, Product::from(product.clone()))?;
let out_dir_path = out_dir
.clone()
.unwrap_or_else(|| std::path::PathBuf::from("."));
if !out_dir_path.exists() {
std::fs::create_dir_all(&out_dir_path)?;
}
let slicense_path = out_dir_path.join(format!("{}.slicense", product));
let pubkey_path = out_dir_path.join(format!("{}.pubkey", product));
if !force {
if slicense_path.exists() || pubkey_path.exists() {
if slicense_path.exists() {
let valid = std::fs::read_to_string(&slicense_path)
.ok()
.and_then(|s| {
let sl = bevy_dlc::SignedLicense::from(s.trim().to_string());
bevy_dlc::extract_encrypt_key_from_license(&sl).map(|_| ())
})
.is_some();
if !valid {
print_error_and_exit(
format!(
"existing {} is not a valid signed license; use --force to overwrite",
slicense_path.display()
)
.as_str(),
);
}
}
if pubkey_path.exists() {
let valid = std::fs::read_to_string(&pubkey_path)
.ok()
.map(|pk| DlcKey::public(pk.trim()).is_ok())
.unwrap_or(false);
if !valid {
print_error_and_exit(
format!(
"existing {} is not a valid public key; use --force to overwrite",
pubkey_path.display()
)
.as_str(),
);
}
}
print_error_and_exit(
format!(
"'{}' or '{}' already exists; use {} to overwrite",
slicense_path.display(),
pubkey_path.display(),
"--force".color(AnsiColors::Magenta).bold()
)
.as_str(),
);
}
}
let write_files = !cli.dry_run;
signedlicense.with_secret(|s| {
print_signed_license_and_pubkey(
s.as_str(),
&dlc_key,
write_files,
Some(product.as_str()),
Some(&out_dir_path),
)
});
if cli.dry_run {
print_warning(
format!(
"dry-run: would write {} and {}",
slicense_path.display(),
pubkey_path.display()
)
.as_str(),
);
} else {
println!(
"Wrote {} and {}.",
slicense_path.display(),
pubkey_path.display()
);
print_warning(
"Do NOT SHARE these files or the contents printed above with untrusted parties.",
);
}
return Ok(());
}
Commands::Edit {
dlc,
signed_license,
pubkey,
product,
command,
} => {
let file = std::fs::File::open(&dlc)?;
let mut reader = std::io::BufReader::new(file);
let (emb_prod, _emb_did, _v, _ents, _blocks) = parse_encrypted_pack(&mut reader)?;
let (_, sup_lic) = resolve_keys(
pubkey,
signed_license,
product.map(|p| Product::from(p)),
Some(emb_prod),
);
let encrypt_key = if let Some(lic) = sup_lic.as_ref() {
extract_encrypt_key_from_license(lic)
.map(|ek| ek.with_secret(|kb| EncryptionKey::new(*kb)))
} else {
None
};
let initial = if command.is_empty() {
None
} else {
Some(command.clone())
};
repl::run_edit_repl(dlc, encrypt_key, initial, cli.dry_run)?;
}
Commands::Find {
dlc_id,
dir,
max_depth,
} => match find_dlcpack(&dir, dlc_id.clone(), Some(max_depth)) {
Ok((path, _version, _pack)) => {
println!("Found .dlcpack at: {}", path.display().bold());
}
Err(e) => {
print_error(&e.to_string());
}
},
Commands::AesKey => {
if cli.dry_run {
print_warning("dry-run: would generate and print a random 32-character AES key");
} else {
let key = URL_SAFE_NO_PAD.encode(rand::random::<[u8; 24]>());
println!("{} {}", "AES KEY:".color(AnsiColors::Cyan).bold(), key);
}
}
Commands::Watch => {
watch::run_watch_command(cli.dry_run)?;
}
}
Ok(())
}
#[cfg(test)]
mod tests {
    use super::collect_files_recursive;
    use tempfile::tempdir;

    /// Extensions that `collect_files_recursive` must never pick up when no
    /// explicit extension filter is supplied. One fixture file per entry is
    /// created in `collect_files_skips_forbidden_extensions` so every entry
    /// in this list is actually exercised.
    const FORBIDDEN: &[&str] = &[
        "dlcpack", "slicense", "pubkey", "exe", "dll", "so", "dylib", "pdb", "ilk", "exp",
        "lib", "a", "o", "rlib",
    ];

    /// With no extension filter, files bearing any deny-listed extension are
    /// skipped while ordinary asset files are collected.
    #[test]
    fn collect_files_skips_forbidden_extensions() {
        let tmp = tempdir().unwrap();
        // Allowed asset files that must survive collection.
        let txt = tmp.path().join("sprite.txt");
        let json = tmp.path().join("level.json");
        let png = tmp.path().join("icon.png");
        std::fs::write(&txt, b"hello").unwrap();
        std::fs::write(&json, b"{}").unwrap();
        std::fs::write(&png, b"\x89PNG").unwrap();
        // One sample file per forbidden extension. NOTE: "incr.ilk" was
        // previously missing, so the "ilk" entry in FORBIDDEN went untested.
        let forbidden_cases = [
            "bundle.dlcpack",
            "game.slicense",
            "game.pubkey",
            "app.exe",
            "runtime.dll",
            "module.so",
            "lib.dylib",
            "debug.pdb",
            "incr.ilk",
            "link.exp",
            "link.lib",
            "object.o",
            "rustlib.rlib",
            "archive.a",
        ];
        for name in &forbidden_cases {
            std::fs::write(tmp.path().join(name), b"data").unwrap();
        }
        let mut collected = Vec::new();
        collect_files_recursive(tmp.path(), &mut collected, None, 5).unwrap();
        // No collected path may carry a deny-listed extension
        // (comparison is case-insensitive, matching the production filter).
        for path in &collected {
            let ext = path
                .extension()
                .and_then(|s| s.to_str())
                .unwrap_or("")
                .to_ascii_lowercase();
            assert!(
                !FORBIDDEN.contains(&ext.as_str()),
                "forbidden extension '.{}' was collected: {}",
                ext,
                path.display()
            );
        }
        let names: Vec<_> = collected
            .iter()
            .map(|p| p.file_name().unwrap().to_string_lossy().into_owned())
            .collect();
        assert!(
            names.contains(&"sprite.txt".to_string()),
            "sprite.txt missing"
        );
        assert!(
            names.contains(&"level.json".to_string()),
            "level.json missing"
        );
        assert!(names.contains(&"icon.png".to_string()), "icon.png missing");
        // Exactly the three allowed files — nothing forbidden slipped through.
        assert_eq!(
            collected.len(),
            3,
            "unexpected files collected: {:?}",
            names
        );
    }

    /// When an explicit extension filter is supplied, only matching files are
    /// returned — even for extensions that the deny-list would otherwise skip.
    #[test]
    fn collect_files_ext_filter_returns_matching_files() {
        let tmp = tempdir().unwrap();
        std::fs::write(tmp.path().join("bundle.dlcpack"), b"data").unwrap();
        std::fs::write(tmp.path().join("other.dlcpack"), b"data").unwrap();
        std::fs::write(tmp.path().join("good.txt"), b"data").unwrap();
        let mut collected = Vec::new();
        collect_files_recursive(tmp.path(), &mut collected, Some("dlcpack"), 5).unwrap();
        assert_eq!(
            collected.len(),
            2,
            "expected 2 .dlcpack files, got: {:?}",
            collected
        );
        assert!(
            collected
                .iter()
                .any(|p| p.file_name().unwrap() == "bundle.dlcpack")
        );
        assert!(
            collected
                .iter()
                .any(|p| p.file_name().unwrap() == "other.dlcpack")
        );
        // The non-matching .txt file must be excluded by the filter.
        assert!(
            !collected
                .iter()
                .any(|p| p.extension().and_then(|e| e.to_str()) == Some("txt"))
        );
    }

    /// Dot-prefixed (hidden) files are skipped; visible siblings are kept.
    #[test]
    fn collect_files_skips_hidden_files() {
        let tmp = tempdir().unwrap();
        std::fs::write(tmp.path().join(".hidden"), b"secret").unwrap();
        std::fs::write(tmp.path().join("visible.txt"), b"data").unwrap();
        let mut collected = Vec::new();
        collect_files_recursive(tmp.path(), &mut collected, None, 5).unwrap();
        let names: Vec<_> = collected
            .iter()
            .map(|p| p.file_name().unwrap().to_string_lossy().into_owned())
            .collect();
        assert!(
            !names.iter().any(|n| n.starts_with('.')),
            "hidden file collected"
        );
        assert!(names.contains(&"visible.txt".to_string()));
    }

    /// Build-output and VCS directories (`target/`, `node_modules/`,
    /// dot-directories like `.git/`) are never traversed.
    #[test]
    fn collect_files_skips_build_dirs() {
        let tmp = tempdir().unwrap();
        let target_dir = tmp.path().join("target");
        let nm_dir = tmp.path().join("node_modules");
        let hidden_dir = tmp.path().join(".git");
        std::fs::create_dir_all(&target_dir).unwrap();
        std::fs::create_dir_all(&nm_dir).unwrap();
        std::fs::create_dir_all(&hidden_dir).unwrap();
        std::fs::write(target_dir.join("artifact.txt"), b"build").unwrap();
        std::fs::write(nm_dir.join("dep.txt"), b"dep").unwrap();
        std::fs::write(hidden_dir.join("config"), b"cfg").unwrap();
        std::fs::write(tmp.path().join("asset.txt"), b"asset").unwrap();
        let mut collected = Vec::new();
        collect_files_recursive(tmp.path(), &mut collected, None, 5).unwrap();
        let names: Vec<_> = collected
            .iter()
            .map(|p| p.file_name().unwrap().to_string_lossy().into_owned())
            .collect();
        assert!(
            !names.contains(&"artifact.txt".to_string()),
            "target/ was traversed"
        );
        assert!(
            !names.contains(&"dep.txt".to_string()),
            "node_modules/ was traversed"
        );
        assert!(names.contains(&"asset.txt".to_string()));
    }
}