use std::fs::File;
use std::io::BufReader;
mod buffers;
pub mod enums;
pub mod json;
use brres_sys::ffi;
use enums::*;
use bytemuck;
use json::*;
use std::path::Path;
use anyhow::anyhow;
/// In-memory representation of a BRRES resource archive: the models and
/// textures it contains plus every animation list the format carries.
/// Built from JSON metadata + a binary buffer blob (see `Archive::from_json`).
#[derive(Debug, Clone, PartialEq)]
pub struct Archive {
/// 3D models contained in the archive.
pub models: Vec<Model>,
/// Texture images contained in the archive.
pub textures: Vec<Texture>,
/// CHR (bone/character) animations.
pub chrs: Vec<ChrData>,
/// SRT (texture scale/rotate/translate) animations — kept in JSON form.
pub srts: Vec<JSONSrtData>,
/// PAT (texture pattern/swap) animations — kept in JSON form.
pub pats: Vec<JSONPatAnim>,
/// CLR (color) animations — kept in JSON form.
pub clrs: Vec<JSONClrAnim>,
/// VIS (visibility) animations — kept in JSON form.
pub viss: Vec<JSONVisData>,
}
/// A single model: its skeleton, materials, draw calls (meshes), and the
/// vertex attribute buffers those meshes index into.
#[derive(Debug, Clone, PartialEq)]
pub struct Model {
/// Model name (used for lookup via `Archive::get_model`).
pub name: String,
/// Miscellaneous model-level metadata (JSON passthrough).
pub info: JSONModelInfo,
/// Skeleton bones (JSON passthrough).
pub bones: Vec<JSONBoneData>,
/// Materials (JSON passthrough).
pub materials: Vec<JSONMaterial>,
/// Draw calls; each mesh references the buffers below by name.
pub meshes: Vec<Mesh>,
/// Vertex position buffers.
pub positions: Vec<VertexPositionBuffer>,
/// Vertex normal buffers.
pub normals: Vec<VertexNormalBuffer>,
/// Vertex texture-coordinate (UV) buffers.
pub texcoords: Vec<VertexTextureCoordinateBuffer>,
/// Vertex color buffers.
pub colors: Vec<VertexColorBuffer>,
/// Draw (skinning) matrices (JSON passthrough).
pub matrices: Vec<JSONDrawMatrix>,
}
/// A draw call: which vertex buffers it reads (referenced by name) and the
/// matrix primitives holding the raw display-list vertex data.
#[derive(Debug, Clone, PartialEq)]
pub struct Mesh {
/// Mesh name.
pub name: String,
/// Whether the mesh is rendered.
pub visible: bool,
/// Names of the color buffers used (one per color channel).
pub clr_buffer: Vec<String>,
/// Name of the normal buffer used.
pub nrm_buffer: String,
/// Name of the position buffer used.
pub pos_buffer: String,
/// Names of the UV buffers used (one per texcoord channel).
pub uv_buffer: Vec<String>,
// Presumably the GX vertex descriptor (VCD) bitfield — TODO confirm.
pub vcd: i32,
/// Current matrix index used when drawing.
pub current_matrix: i32,
/// Matrix primitives (groups of primitives sharing a matrix set).
pub mprims: Vec<MatrixPrimitive>,
}
/// A group of primitives drawn with one set of matrices; the vertex stream
/// itself is kept as an opaque byte blob (round-tripped via buffer IDs).
#[derive(Debug, Clone, PartialEq)]
pub struct MatrixPrimitive {
/// Matrix indices active for this primitive group.
pub matrices: Vec<i32>,
/// Number of primitives encoded in `vertex_data_buffer`.
pub num_prims: i32,
/// Raw vertex/display-list data (opaque; format defined by the writer).
pub vertex_data_buffer: Vec<u8>,
}
/// A decoded vertex position buffer: `[x, y, z]` floats plus the
/// quantization parameters needed to re-encode it losslessly.
#[derive(Debug, Clone, PartialEq)]
pub struct VertexPositionBuffer {
/// Numeric buffer ID within the model.
pub id: i32,
/// Buffer name (referenced by `Mesh::pos_buffer`).
pub name: String,
// q_* fields describe the on-disk quantization; exact semantics are
// defined by the writer — TODO confirm against brres-sys.
pub q_comp: i32,
pub q_divisor: i32,
pub q_stride: i32,
pub q_type: i32,
/// Decoded positions, one `[x, y, z]` triple per vertex.
pub data: Vec<[f32; 3]>,
/// Cached (min, max) bounds, if they were present in the source file.
pub cached_minmax: Option<([f32; 3], [f32; 3])>,
}
/// A decoded vertex normal buffer; mirrors [`VertexPositionBuffer`] but is
/// referenced by `Mesh::nrm_buffer`.
#[derive(Debug, Clone, PartialEq)]
pub struct VertexNormalBuffer {
/// Numeric buffer ID within the model.
pub id: i32,
/// Buffer name (referenced by `Mesh::nrm_buffer`).
pub name: String,
// Quantization parameters — see note on VertexPositionBuffer.
pub q_comp: i32,
pub q_divisor: i32,
pub q_stride: i32,
pub q_type: i32,
/// Decoded normals, one `[x, y, z]` triple per vertex.
pub data: Vec<[f32; 3]>,
/// Cached (min, max) bounds, if present in the source file.
pub cached_minmax: Option<([f32; 3], [f32; 3])>,
}
/// A decoded vertex color buffer: one 4-byte color per vertex
/// (presumably RGBA byte order — TODO confirm against the writer).
#[derive(Debug, Clone, PartialEq)]
pub struct VertexColorBuffer {
/// Numeric buffer ID within the model.
pub id: i32,
/// Buffer name (referenced by `Mesh::clr_buffer`).
pub name: String,
// Quantization parameters — see note on VertexPositionBuffer.
pub q_comp: i32,
pub q_divisor: i32,
pub q_stride: i32,
pub q_type: i32,
/// Decoded colors, 4 bytes per vertex.
pub data: Vec<[u8; 4]>,
/// Cached (min, max) component bounds, if present in the source file.
pub cached_minmax: Option<([u8; 4], [u8; 4])>,
}
/// A decoded texture-coordinate buffer: `[u, v]` floats per vertex.
#[derive(Debug, Clone, PartialEq)]
pub struct VertexTextureCoordinateBuffer {
/// Numeric buffer ID within the model.
pub id: i32,
/// Buffer name (referenced by `Mesh::uv_buffer`).
pub name: String,
// Quantization parameters — see note on VertexPositionBuffer.
pub q_comp: i32,
pub q_divisor: i32,
pub q_stride: i32,
pub q_type: i32,
/// Decoded UVs, one `[u, v]` pair per vertex.
pub data: Vec<[f32; 2]>,
/// Cached (min, max) bounds, if present in the source file.
pub cached_minmax: Option<([f32; 2], [f32; 2])>,
}
/// A texture: dimensions, pixel format tag, LOD range, and the raw
/// (still-encoded) image bytes.
#[derive(Debug, Clone, PartialEq)]
pub struct Texture {
/// Texture name (used for lookup via `Archive::get_texture`).
pub name: String,
/// Width in pixels.
pub width: u32,
/// Height in pixels.
pub height: u32,
/// Pixel format tag (numeric; interpretation belongs to the writer).
pub format: u32,
// Presumably the mipmap count — TODO confirm against brres-sys.
pub number_of_images: u32,
/// Raw encoded image data (all images back-to-back).
pub data: Vec<u8>,
/// Minimum level of detail.
pub min_lod: f32,
/// Maximum level of detail.
pub max_lod: f32,
}
/// A CHR (bone) animation: per-bone nodes that reference shared tracks.
#[derive(Debug, Clone, PartialEq)]
pub struct ChrData {
/// Animation name.
pub name: String,
/// One node per animated bone; each holds indices into `tracks`.
pub nodes: Vec<ChrNode>,
/// Shared keyframe tracks referenced by the nodes.
pub tracks: Vec<ChrTrack>,
/// Original source path recorded in the file.
pub source_path: String,
/// Animation length in frames.
pub frame_duration: u16,
/// Wrap/loop mode tag (numeric; semantics defined by the writer).
pub wrap_mode: u32,
/// Scale-rule tag (numeric; semantics defined by the writer).
pub scale_rule: u32,
}
/// One animated bone in a CHR animation.
#[derive(Debug, Clone, PartialEq)]
pub struct ChrNode {
/// Name of the bone this node animates.
pub name: String,
/// Per-node flag bits (opaque; round-tripped as-is).
pub flags: u32,
/// Indices into `ChrData::tracks` for this bone's animated channels.
pub tracks: Vec<u32>,
}
/// A keyframe track: quantization info plus the decoded keyframes.
#[derive(Debug, Clone, PartialEq)]
pub struct ChrTrack {
/// Quantization mode used when the track was stored.
pub quant: ChrQuantization,
/// Dequantization scale factor.
pub scale: f32,
/// Dequantization offset.
pub offset: f32,
/// Decoded keyframes (packed/unpacked via `ChrFramePacker`).
pub frames_data: Vec<ChrFrame>,
}
/// A single keyframe sample.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct ChrFrame {
/// Frame time of this key.
pub frame: f64,
/// Value at this key.
pub value: f64,
// Presumably the Hermite tangent/slope at this key — TODO confirm.
pub slope: f64,
}
/// Accumulates the binary buffers produced while serializing an archive to
/// JSON. Each saved buffer's index becomes the `...BufferId` recorded in the
/// JSON, and the buffers are later collated into one side-band blob.
struct JsonWriteCtx {
    buffers: Vec<Vec<u8>>,
}

impl JsonWriteCtx {
    /// Creates an empty context with no registered buffers.
    fn new() -> Self {
        Self {
            buffers: Vec::new(),
        }
    }

    /// Takes ownership of `buf`, appends it, and returns its index.
    fn save_buffer_with_move(&mut self, buf: Vec<u8>) -> usize {
        let index = self.buffers.len();
        self.buffers.push(buf);
        index
    }

    /// Copies `buf` into an owned vector, stores it, and returns its index.
    fn save_buffer_with_copy<T: AsRef<[u8]>>(&mut self, buf: T) -> usize {
        self.save_buffer_with_move(buf.as_ref().to_vec())
    }
}
impl Archive {
    /// Reconstructs an [`Archive`] from parsed JSON metadata plus the binary
    /// buffers that the JSON references by index.
    fn from_json(archive: JsonArchive, buffers: Vec<Vec<u8>>) -> Self {
        let models = archive
            .models
            .into_iter()
            .map(|m| Model::from_json(m, &buffers))
            .collect();
        let textures = archive
            .textures
            .into_iter()
            .map(|t| Texture::from_json(t, &buffers))
            .collect();
        let chrs = archive
            .chrs
            .into_iter()
            .map(|c| ChrData::from_json(c, &buffers))
            .collect();
        Self {
            models,
            textures,
            chrs,
            srts: archive.srts,
            pats: archive.pats,
            clrs: archive.clrs,
            viss: archive.viss,
        }
    }

    /// Finds a model by name (linear scan).
    pub fn get_model(&self, name: &str) -> Option<&Model> {
        self.models.iter().find(|m| m.name == name)
    }

    /// Finds a texture by name (linear scan).
    pub fn get_texture(&self, name: &str) -> Option<&Texture> {
        self.textures.iter().find(|t| t.name == name)
    }

    /// Serializes this archive into its JSON form, registering every binary
    /// payload with `ctx`.
    /// NOTE: sub-archives are serialized in the order chrs → models →
    /// textures, matching the original field-evaluation order so buffer
    /// indices stay byte-stable across round-trips.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonArchive {
        let chrs = self.chrs.iter().map(|c| c.to_json(ctx)).collect();
        let models = self.models.iter().map(|m| m.to_json(ctx)).collect();
        let textures = self.textures.iter().map(|t| t.to_json(ctx)).collect();
        JsonArchive {
            chrs,
            clrs: self.clrs.clone(),
            models,
            pats: self.pats.clone(),
            srts: self.srts.clone(),
            textures,
            viss: self.viss.clone(),
        }
    }
}
impl Model {
    /// Builds a [`Model`] from its JSON form, resolving each referenced
    /// vertex/primitive buffer out of `buffers`.
    fn from_json(model: JsonModel, buffers: &[Vec<u8>]) -> Self {
        let meshes = model
            .meshes
            .into_iter()
            .map(|mesh| Mesh::from_json(mesh, buffers))
            .collect();
        let positions = model
            .positions
            .into_iter()
            .map(|buf| VertexPositionBuffer::from_json(buf, buffers))
            .collect();
        let normals = model
            .normals
            .into_iter()
            .map(|buf| VertexNormalBuffer::from_json(buf, buffers))
            .collect();
        let colors = model
            .colors
            .into_iter()
            .map(|buf| VertexColorBuffer::from_json(buf, buffers))
            .collect();
        let texcoords = model
            .texcoords
            .into_iter()
            .map(|buf| VertexTextureCoordinateBuffer::from_json(buf, buffers))
            .collect();
        Self {
            name: model.name,
            info: model.info,
            bones: model.bones,
            materials: model.materials,
            matrices: model.matrices,
            meshes,
            positions,
            normals,
            colors,
            texcoords,
        }
    }

    /// Serializes this model to JSON, registering binary payloads with `ctx`.
    /// NOTE: buffers are appended to `ctx` in the order meshes → positions →
    /// normals → colors → texcoords, matching the original evaluation order.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonModel {
        let meshes = self.meshes.iter().map(|m| m.to_json(ctx)).collect();
        let positions = self.positions.iter().map(|p| p.to_json(ctx)).collect();
        let normals = self.normals.iter().map(|n| n.to_json(ctx)).collect();
        let colors = self.colors.iter().map(|c| c.to_json(ctx)).collect();
        let texcoords = self.texcoords.iter().map(|t| t.to_json(ctx)).collect();
        JsonModel {
            name: self.name.clone(),
            info: self.info.clone(),
            bones: self.bones.clone(),
            materials: self.materials.clone(),
            matrices: self.matrices.clone(),
            meshes,
            positions,
            normals,
            colors,
            texcoords,
        }
    }
}
impl Mesh {
    /// Builds a [`Mesh`] from its JSON form, resolving each matrix
    /// primitive's vertex-data blob out of `buffers`.
    fn from_json(json_mesh: JsonMesh, buffers: &[Vec<u8>]) -> Self {
        let mprims = json_mesh
            .mprims
            .into_iter()
            .map(|prim| MatrixPrimitive::from_json(prim, buffers))
            .collect();
        Self {
            name: json_mesh.name,
            visible: json_mesh.visible,
            clr_buffer: json_mesh.clr_buffer,
            nrm_buffer: json_mesh.nrm_buffer,
            pos_buffer: json_mesh.pos_buffer,
            uv_buffer: json_mesh.uv_buffer,
            vcd: json_mesh.vcd,
            current_matrix: json_mesh.current_matrix,
            mprims,
        }
    }

    /// Serializes this mesh to JSON; each matrix primitive registers its
    /// vertex-data blob with `ctx`.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonMesh {
        let mprims = self.mprims.iter().map(|prim| prim.to_json(ctx)).collect();
        JsonMesh {
            clr_buffer: self.clr_buffer.clone(),
            current_matrix: self.current_matrix,
            mprims,
            name: self.name.clone(),
            nrm_buffer: self.nrm_buffer.clone(),
            pos_buffer: self.pos_buffer.clone(),
            uv_buffer: self.uv_buffer.clone(),
            vcd: self.vcd,
            visible: self.visible,
        }
    }
}
impl MatrixPrimitive {
    /// Builds a primitive from JSON, copying its vertex blob out of
    /// `buffers`. An out-of-range buffer ID yields an empty blob rather
    /// than a panic (deliberately lenient, unlike the vertex buffers).
    fn from_json(json_primitive: JsonPrimitive, buffers: &[Vec<u8>]) -> Self {
        let vertex_data_buffer = buffers
            .get(json_primitive.vertexDataBufferId as usize)
            .cloned()
            .unwrap_or_default();
        Self {
            matrices: json_primitive.matrices,
            num_prims: json_primitive.num_prims,
            vertex_data_buffer,
        }
    }

    /// Serializes the primitive, storing a copy of its vertex blob in `ctx`
    /// and recording the resulting buffer index.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonPrimitive {
        let id = ctx.save_buffer_with_copy(&self.vertex_data_buffer);
        JsonPrimitive {
            matrices: self.matrices.clone(),
            num_prims: self.num_prims,
            vertexDataBufferId: id as i32,
        }
    }
}
impl VertexPositionBuffer {
    /// Decodes a position buffer from JSON metadata plus its raw byte blob.
    ///
    /// The blob stores native-endian `[f32; 3]` triples back-to-back; any
    /// trailing bytes that do not form a complete triple are ignored
    /// (matching the previous pointer-cast behavior).
    ///
    /// # Panics
    /// Panics if `dataBufferId` is out of range for `buffers`.
    fn from_json(buffer_data: JsonBufferData<[f32; 3]>, buffers: &[Vec<u8>]) -> Self {
        let raw = buffers
            .get(buffer_data.dataBufferId as usize)
            .expect("Invalid buffer ID");
        // Bug fix: the old code cast `*const u8` to `*const [f32; 3]` for
        // `slice::from_raw_parts`, which is UB whenever the Vec<u8>
        // allocation is not 4-byte aligned (Vec<u8> makes no such
        // guarantee). `from_ne_bytes` reproduces the same native-endian
        // reinterpretation without any alignment requirement.
        let data: Vec<[f32; 3]> = raw
            .chunks_exact(std::mem::size_of::<[f32; 3]>())
            .map(|c| {
                [
                    f32::from_ne_bytes(c[0..4].try_into().unwrap()),
                    f32::from_ne_bytes(c[4..8].try_into().unwrap()),
                    f32::from_ne_bytes(c[8..12].try_into().unwrap()),
                ]
            })
            .collect();
        // Only present when both endpoints were serialized.
        let cached_minmax = buffer_data.cached_min.zip(buffer_data.cached_max);
        Self {
            id: buffer_data.id,
            name: buffer_data.name,
            q_comp: buffer_data.q_comp,
            q_divisor: buffer_data.q_divisor,
            q_stride: buffer_data.q_stride,
            q_type: buffer_data.q_type,
            data,
            cached_minmax,
        }
    }

    /// Serializes this buffer, storing the raw vertex bytes in `ctx` and
    /// recording the resulting buffer index in the JSON.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonBufferData<[f32; 3]> {
        let buffer_id = ctx.save_buffer_with_copy(bytemuck::cast_slice(&self.data));
        JsonBufferData {
            dataBufferId: buffer_id as i32,
            id: self.id,
            name: self.name.clone(),
            q_comp: self.q_comp,
            q_divisor: self.q_divisor,
            q_stride: self.q_stride,
            q_type: self.q_type,
            cached_min: self.cached_minmax.map(|(min, _)| min),
            cached_max: self.cached_minmax.map(|(_, max)| max),
        }
    }
}
impl VertexNormalBuffer {
    /// Decodes a normal buffer from JSON metadata plus its raw byte blob.
    ///
    /// The blob stores native-endian `[f32; 3]` triples back-to-back; any
    /// trailing bytes that do not form a complete triple are ignored
    /// (matching the previous pointer-cast behavior).
    ///
    /// # Panics
    /// Panics if `dataBufferId` is out of range for `buffers`.
    fn from_json(buffer_data: JsonBufferData<[f32; 3]>, buffers: &[Vec<u8>]) -> Self {
        let raw = buffers
            .get(buffer_data.dataBufferId as usize)
            .expect("Invalid buffer ID");
        // Bug fix: casting the u8 pointer to `*const [f32; 3]` for
        // `slice::from_raw_parts` was UB when the allocation is not 4-byte
        // aligned. Safe decode with identical native-endian semantics:
        let data: Vec<[f32; 3]> = raw
            .chunks_exact(std::mem::size_of::<[f32; 3]>())
            .map(|c| {
                [
                    f32::from_ne_bytes(c[0..4].try_into().unwrap()),
                    f32::from_ne_bytes(c[4..8].try_into().unwrap()),
                    f32::from_ne_bytes(c[8..12].try_into().unwrap()),
                ]
            })
            .collect();
        // Only present when both endpoints were serialized.
        let cached_minmax = buffer_data.cached_min.zip(buffer_data.cached_max);
        Self {
            id: buffer_data.id,
            name: buffer_data.name,
            q_comp: buffer_data.q_comp,
            q_divisor: buffer_data.q_divisor,
            q_stride: buffer_data.q_stride,
            q_type: buffer_data.q_type,
            data,
            cached_minmax,
        }
    }

    /// Serializes this buffer, storing the raw vertex bytes in `ctx` and
    /// recording the resulting buffer index in the JSON.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonBufferData<[f32; 3]> {
        let buffer_id = ctx.save_buffer_with_copy(bytemuck::cast_slice(&self.data));
        JsonBufferData {
            dataBufferId: buffer_id as i32,
            id: self.id,
            name: self.name.clone(),
            q_comp: self.q_comp,
            q_divisor: self.q_divisor,
            q_stride: self.q_stride,
            q_type: self.q_type,
            cached_min: self.cached_minmax.map(|(min, _)| min),
            cached_max: self.cached_minmax.map(|(_, max)| max),
        }
    }
}
impl VertexColorBuffer {
    /// Decodes a color buffer from JSON metadata plus its raw byte blob.
    ///
    /// The blob stores 4-byte colors back-to-back; trailing bytes that do
    /// not form a complete color are ignored (matching the previous
    /// behavior).
    ///
    /// # Panics
    /// Panics if `dataBufferId` is out of range for `buffers`.
    fn from_json(buffer_data: JsonBufferData<[u8; 4]>, buffers: &[Vec<u8>]) -> Self {
        let raw = buffers
            .get(buffer_data.dataBufferId as usize)
            .expect("Invalid buffer ID");
        // Unlike the f32 buffers, a `[u8; 4]` cast had no alignment hazard,
        // but the unsafe block was still unnecessary — decode safely for
        // consistency with the sibling buffer types.
        let data: Vec<[u8; 4]> = raw
            .chunks_exact(std::mem::size_of::<[u8; 4]>())
            .map(|c| <[u8; 4]>::try_from(c).unwrap())
            .collect();
        // Only present when both endpoints were serialized.
        let cached_minmax = buffer_data.cached_min.zip(buffer_data.cached_max);
        Self {
            id: buffer_data.id,
            name: buffer_data.name,
            q_comp: buffer_data.q_comp,
            q_divisor: buffer_data.q_divisor,
            q_stride: buffer_data.q_stride,
            q_type: buffer_data.q_type,
            data,
            cached_minmax,
        }
    }

    /// Serializes this buffer, storing the raw color bytes in `ctx` and
    /// recording the resulting buffer index in the JSON.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonBufferData<[u8; 4]> {
        let buffer_id = ctx.save_buffer_with_copy(bytemuck::cast_slice(&self.data));
        JsonBufferData {
            dataBufferId: buffer_id as i32,
            id: self.id,
            name: self.name.clone(),
            q_comp: self.q_comp,
            q_divisor: self.q_divisor,
            q_stride: self.q_stride,
            q_type: self.q_type,
            cached_min: self.cached_minmax.map(|(min, _)| min),
            cached_max: self.cached_minmax.map(|(_, max)| max),
        }
    }
}
impl VertexTextureCoordinateBuffer {
    /// Decodes a UV buffer from JSON metadata plus its raw byte blob.
    ///
    /// The blob stores native-endian `[f32; 2]` pairs back-to-back; any
    /// trailing bytes that do not form a complete pair are ignored
    /// (matching the previous pointer-cast behavior).
    ///
    /// # Panics
    /// Panics if `dataBufferId` is out of range for `buffers`.
    fn from_json(buffer_data: JsonBufferData<[f32; 2]>, buffers: &[Vec<u8>]) -> Self {
        let raw = buffers
            .get(buffer_data.dataBufferId as usize)
            .expect("Invalid buffer ID");
        // Bug fix: casting the u8 pointer to `*const [f32; 2]` for
        // `slice::from_raw_parts` was UB when the allocation is not 4-byte
        // aligned. Safe decode with identical native-endian semantics:
        let data: Vec<[f32; 2]> = raw
            .chunks_exact(std::mem::size_of::<[f32; 2]>())
            .map(|c| {
                [
                    f32::from_ne_bytes(c[0..4].try_into().unwrap()),
                    f32::from_ne_bytes(c[4..8].try_into().unwrap()),
                ]
            })
            .collect();
        // Only present when both endpoints were serialized.
        let cached_minmax = buffer_data.cached_min.zip(buffer_data.cached_max);
        Self {
            id: buffer_data.id,
            name: buffer_data.name,
            q_comp: buffer_data.q_comp,
            q_divisor: buffer_data.q_divisor,
            q_stride: buffer_data.q_stride,
            q_type: buffer_data.q_type,
            data,
            cached_minmax,
        }
    }

    /// Serializes this buffer, storing the raw UV bytes in `ctx` and
    /// recording the resulting buffer index in the JSON.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonBufferData<[f32; 2]> {
        let buffer_id = ctx.save_buffer_with_copy(bytemuck::cast_slice(&self.data));
        JsonBufferData {
            dataBufferId: buffer_id as i32,
            id: self.id,
            name: self.name.clone(),
            q_comp: self.q_comp,
            q_divisor: self.q_divisor,
            q_stride: self.q_stride,
            q_type: self.q_type,
            cached_min: self.cached_minmax.map(|(min, _)| min),
            cached_max: self.cached_minmax.map(|(_, max)| max),
        }
    }
}
impl Texture {
    /// Builds a [`Texture`] from JSON metadata plus its image-data blob.
    /// An out-of-range `dataBufferId` yields empty image data rather than
    /// a panic (deliberately lenient).
    fn from_json(texture: JsonTexture, buffers: &[Vec<u8>]) -> Self {
        let data = buffers
            .get(texture.dataBufferId as usize)
            .cloned()
            .unwrap_or_default();
        Self {
            name: texture.name,
            width: texture.width,
            height: texture.height,
            format: texture.format,
            number_of_images: texture.number_of_images,
            data,
            min_lod: texture.minLod,
            max_lod: texture.maxLod,
        }
    }

    /// Serializes the texture, storing a copy of its image data in `ctx`
    /// and recording the resulting buffer index. `sourcePath` is not kept
    /// in memory, so it is emitted empty.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JsonTexture {
        let id = ctx.save_buffer_with_copy(&self.data);
        JsonTexture {
            dataBufferId: id as i32,
            format: self.format,
            height: self.height,
            maxLod: self.max_lod,
            minLod: self.min_lod,
            name: self.name.clone(),
            number_of_images: self.number_of_images,
            width: self.width,
            sourcePath: "".to_string(),
        }
    }
}
impl ChrData {
    /// Rebuilds a CHR animation from JSON, decoding each track's keyframe
    /// blob out of `buffers`.
    fn from_json(json_data: JSONChrData, buffers: &[Vec<u8>]) -> Self {
        Self {
            name: json_data.name,
            nodes: json_data
                .nodes
                .into_iter()
                .map(ChrNode::from_json)
                .collect(),
            tracks: json_data
                .tracks
                .into_iter()
                .map(|track| ChrTrack::from_json(track, buffers))
                .collect(),
            source_path: json_data.sourcePath,
            frame_duration: json_data.frameDuration,
            wrap_mode: json_data.wrapMode,
            scale_rule: json_data.scaleRule,
        }
    }

    /// Serializes the animation to JSON; each track registers its packed
    /// keyframe blob with `ctx`.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JSONChrData {
        JSONChrData {
            nodes: self.nodes.iter().map(ChrNode::to_json).collect(),
            tracks: self.tracks.iter().map(|track| track.to_json(ctx)).collect(),
            name: self.name.clone(),
            sourcePath: self.source_path.clone(),
            frameDuration: self.frame_duration,
            wrapMode: self.wrap_mode,
            scaleRule: self.scale_rule,
        }
    }
}
impl ChrNode {
    /// Converts the JSON node representation into the in-memory form
    /// (a straight field-for-field move).
    fn from_json(json_node: JSONChrNode) -> Self {
        let JSONChrNode { name, flags, tracks } = json_node;
        Self { name, flags, tracks }
    }

    /// Converts back to the JSON representation (clones owned fields).
    fn to_json(&self) -> JSONChrNode {
        JSONChrNode {
            name: self.name.clone(),
            flags: self.flags,
            tracks: self.tracks.clone(),
        }
    }
}
impl ChrTrack {
    /// Rebuilds a track, decoding `numKeyFrames` keyframes from the
    /// referenced buffer.
    ///
    /// # Panics
    /// Panics if `framesDataBufferId` is out of range for `buffers`.
    fn from_json(json_track: JSONChrTrack, buffers: &[Vec<u8>]) -> Self {
        let raw = buffers
            .get(json_track.framesDataBufferId as usize)
            .expect("Invalid buffer ID");
        Self {
            quant: json_track.quant,
            scale: json_track.scale,
            offset: json_track.offset,
            frames_data: ChrFramePacker::unpack(raw, json_track.numKeyFrames as usize),
        }
    }

    /// Packs the keyframes into a fresh buffer owned by `ctx` and records
    /// its index plus the keyframe count in the JSON.
    fn to_json(&self, ctx: &mut JsonWriteCtx) -> JSONChrTrack {
        let packed = ChrFramePacker::pack(&self.frames_data);
        JSONChrTrack {
            quant: self.quant,
            scale: self.scale,
            offset: self.offset,
            framesDataBufferId: ctx.save_buffer_with_move(packed) as u32,
            numKeyFrames: self.frames_data.len() as u32,
        }
    }
}
/// Stateless (de)serializer for keyframe blobs: consecutive
/// (frame, value, slope) little-endian f64 triples.
struct ChrFramePacker;

impl ChrFramePacker {
    /// Bytes per serialized keyframe: three little-endian f64 fields.
    const CHR_FRAME_SIZE: usize = 8 * 3;

    /// Serializes keyframes into one contiguous byte buffer.
    fn pack(frames: &[ChrFrame]) -> Vec<u8> {
        let mut out = Vec::with_capacity(frames.len() * Self::CHR_FRAME_SIZE);
        for f in frames {
            for field in [f.frame, f.value, f.slope] {
                out.extend_from_slice(&field.to_le_bytes());
            }
        }
        out
    }

    /// Deserializes exactly `num_keyframes` keyframes from `bytes`.
    ///
    /// # Panics
    /// Panics (slice index out of range) if `bytes` is shorter than
    /// `num_keyframes * CHR_FRAME_SIZE` — same behavior as before.
    fn unpack(bytes: &[u8], num_keyframes: usize) -> Vec<ChrFrame> {
        (0..num_keyframes)
            .map(|i| {
                let base = i * Self::CHR_FRAME_SIZE;
                let read =
                    |o: usize| f64::from_le_bytes(bytes[base + o..base + o + 8].try_into().unwrap());
                ChrFrame {
                    frame: read(0),
                    value: read(8),
                    slope: read(16),
                }
            })
            .collect()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
// Smoke test for the JSON + buffer-blob loading path. Relies on local
// "dummy.json"/"dummy.bin" fixtures being present in the working directory.
#[test]
fn test_archive() {
let file_path = "dummy.json";
match read_json(file_path) {
Ok(json_archive) => {
// Decode the side-band binary blob and join it with the JSON metadata.
let buffers = buffers::read_buffer(&fs::read("dummy.bin").unwrap());
let archive = Archive::from_json(json_archive, buffers);
// The fixture is expected to contain these named resources.
assert!(archive.get_model("example").is_some());
assert!(archive.get_texture("human_A").is_some());
}
Err(e) => panic!("Error reading JSON file: {}", e),
}
}
}
/// Parses the JSON metadata string, decodes the side-band buffer blob, and
/// joins the two into an [`Archive`]. Fails if the JSON does not parse.
fn create_archive(json_str: &str, raw_buffer: &[u8]) -> anyhow::Result<Archive> {
    let parsed: JsonArchive = serde_json::from_str(json_str)?;
    Ok(Archive::from_json(parsed, buffers::read_buffer(raw_buffer)))
}
/// Serializes an [`Archive`] into pretty-printed JSON plus a single collated
/// blob containing every binary buffer the JSON references by index.
fn create_json(archive: &Archive) -> anyhow::Result<(String, Vec<u8>)> {
    let mut ctx = JsonWriteCtx::new();
    let json_archive = archive.to_json(&mut ctx);
    Ok((
        serde_json::to_string_pretty(&json_archive)?,
        buffers::collate_buffers(&ctx.buffers),
    ))
}
/// Parses raw `.brres` bytes by round-tripping through the C++ layer:
/// the FFI wrapper produces JSON metadata + a buffer blob, which are then
/// joined into an [`Archive`].
fn read_raw_brres(brres: &[u8]) -> anyhow::Result<Archive> {
    let wrapper = ffi::CBrresWrapper::from_bytes(brres)?;
    create_archive(&wrapper.json_metadata, &wrapper.buffer_data)
}
/// Serializes an [`Archive`] to raw `.brres` bytes via the C++ layer:
/// first to JSON + buffer blob, then through the FFI writer.
fn write_raw_brres(archive: &Archive) -> anyhow::Result<Vec<u8>> {
    let (json, blob) = create_json(archive)?;
    let ffi_obj = ffi::CBrresWrapper::write_bytes(&json, &blob)?;
    Ok(ffi_obj.buffer_data.to_vec())
}
/// Loads a MDL0MAT preset folder through the FFI layer and parses the
/// resulting `.brres` bytes into an [`Archive`].
///
/// # Errors
/// Fails if the path is not valid UTF-8, or if the FFI read/parse fails.
pub fn read_mdl0mat_preset_folder(folder: &std::path::Path) -> anyhow::Result<Archive> {
    // `ok_or_else` avoids constructing the anyhow error (an allocation)
    // on the success path, unlike the previous eager `ok_or(anyhow!(..))`.
    let folder_str = folder
        .to_str()
        .ok_or_else(|| anyhow!("Failed to stringify path"))?;
    let ffi_obj = ffi::CBrresWrapper::read_preset_folder(folder_str)?;
    // No need to wrap in Ok(..?): the result types already match.
    read_raw_brres(&ffi_obj.buffer_data)
}
impl Archive {
    /// Parses a `.brres` archive that is already loaded into memory.
    pub fn from_memory(buffer: &[u8]) -> anyhow::Result<Archive> {
        read_raw_brres(buffer)
    }

    /// Reads a `.brres` file from disk and parses it.
    pub fn from_path<P: AsRef<Path>>(path: P) -> anyhow::Result<Archive> {
        let bytes = std::fs::read(path)?;
        read_raw_brres(&bytes)
    }

    /// Serializes the archive back to `.brres` bytes.
    pub fn write_memory(&self) -> anyhow::Result<Vec<u8>> {
        write_raw_brres(self)
    }

    /// Serializes the archive and writes it to `path`.
    pub fn write_path<P: AsRef<Path>>(&self, path: P) -> anyhow::Result<()> {
        let bytes = write_raw_brres(self)?;
        std::fs::write(path, bytes)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests4 {
    use super::*;
    use std::fs;

    /// Smoke test: a known sample parses; dump a mesh/material for eyeballing.
    #[test]
    fn test_read_raw_brres() {
        let brres_data =
            fs::read("../../tests/samples/sea.brres").expect("Failed to read sea.brres file");
        match read_raw_brres(&brres_data) {
            Ok(archive) => {
                println!("{:#?}", archive.get_model("sea").unwrap().meshes[0]);
                println!("{:#?}", archive.get_model("sea").unwrap().materials[0]);
            }
            Err(e) => {
                panic!("Error reading brres file: {:#?}", e);
            }
        }
    }

    /// The Archive -> JSON -> Archive round-trip must be lossless.
    #[test]
    fn test_validate_json_is_lossless_sea() {
        let brres_data =
            fs::read("../../tests/samples/sea.brres").expect("Failed to read sea.brres file");
        let initial_archive = read_raw_brres(&brres_data).expect("Failed to read brres file");
        let (json_str, blob) =
            create_json(&initial_archive).expect("Failed to create JSON from Archive");
        let new_archive =
            create_archive(&json_str, &blob).expect("Failed to create Archive from JSON");
        assert_eq!(
            initial_archive, new_archive,
            "Archives do not match after JSON roundtrip"
        );
        println!("{:#?}", initial_archive.get_model("sea").unwrap().meshes[0]);
        println!(
            "{:#?}",
            initial_archive.get_model("sea").unwrap().materials[0]
        );
    }

    /// Compares two byte buffers; on mismatch, prints diagnostics (number of
    /// differing bytes, first ten differences, both lengths) then panics.
    fn assert_buffers_equal(encoded_image: &[u8], cached_image: &[u8], format: &str) {
        if encoded_image != cached_image {
            let diff_count = encoded_image
                .iter()
                .zip(cached_image.iter())
                .filter(|(a, b)| a != b)
                .count();
            println!("Mismatch for {:?} - {} bytes differ", format, diff_count);
            let mut diff_examples = vec![];
            for (i, (a, b)) in encoded_image.iter().zip(cached_image.iter()).enumerate() {
                if a != b {
                    diff_examples.push((i, *a, *b));
                    if diff_examples.len() >= 10 {
                        break;
                    }
                }
            }
            println!("Example differences (up to 10):");
            for (i, a, b) in diff_examples {
                println!("Byte {}: encoded = {}, cached = {}", i, a, b);
            }
            println!(
                "Size of left: {}, size of right: {}",
                encoded_image.len(),
                cached_image.len()
            );
            panic!("Buffers are not equal");
        }
    }

    /// Reads `path`, round-trips it through read + write, and asserts byte
    /// identity when `passes` (dumping good/bad copies for offline diffing on
    /// failure), or asserts a known difference when `!passes`.
    fn test_validate_binary_is_lossless(path: &str, passes: bool) {
        let name = std::path::Path::new(path)
            .file_name()
            .unwrap()
            .to_str()
            .unwrap();
        let brres_data = fs::read(path).expect("Failed to read brres file");
        let archive = read_raw_brres(&brres_data).expect("Error reading brres file");
        let written_data = write_raw_brres(&archive).expect("Error writing brres file");
        if passes {
            if brres_data != written_data {
                // Bug fix: these Results were silently discarded before, so a
                // failed diagnostic dump would go unnoticed.
                fs::write(format!("{name}_good.brres"), &brres_data)
                    .expect("Failed to dump reference brres");
                fs::write(format!("{name}_bad.brres"), &written_data)
                    .expect("Failed to dump rewritten brres");
            }
            assert_buffers_equal(&brres_data, &written_data, path);
        } else {
            assert_ne!(brres_data, written_data, "expected a lossy round-trip");
        }
    }

    #[test]
    fn test_validate_binary_is_lossless_walk() {
        let brres_data = fs::read("../../tests/samples/human_walk.brres")
            .expect("Failed to read human_walk.brres file");
        let archive = read_raw_brres(&brres_data).expect("Error reading brres file");
        let written_data = write_raw_brres(&archive).expect("Error writing brres file");
        fs::write("Invalid_human_walk.brres", &written_data).unwrap();
        assert_buffers_equal(&brres_data, &written_data, "human_walk.brres");
    }

    #[test]
    fn test_validate_binary_is_lossless_sea() {
        test_validate_binary_is_lossless("../../tests/samples/sea.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_srt() {
        test_validate_binary_is_lossless("../../tests/samples/srt.brres", true)
    }

    // Sample file names are kept verbatim in test names; silence case warnings.
    #[test]
    #[allow(non_snake_case)]
    fn test_validate_binary_is_lossless_rPB() {
        test_validate_binary_is_lossless("../../tests/samples/rPB.brres", true)
    }

    #[test]
    #[allow(non_snake_case)]
    fn test_validate_binary_is_lossless_MashBalloonGC() {
        test_validate_binary_is_lossless("../../tests/samples/MashBalloonGC.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_human_walk_env() {
        test_validate_binary_is_lossless("../../tests/samples/human_walk_env.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_map_model() {
        test_validate_binary_is_lossless("../../tests/samples/map_model.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_kuribo() {
        test_validate_binary_is_lossless("../../tests/samples/kuribo.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_human_walk_chr0() {
        test_validate_binary_is_lossless("../../tests/samples/human_walk_chr0.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_driver_model() {
        test_validate_binary_is_lossless("../../tests/samples/driver_model.brres", true)
    }

    #[test]
    #[allow(non_snake_case)]
    fn test_validate_binary_is_lossless_mariotreeGC() {
        test_validate_binary_is_lossless("../../tests/samples/mariotreeGC.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_fur_rabbits_chr0() {
        test_validate_binary_is_lossless("../../tests/samples/fur_rabbits-chr0.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_luigi_circuit() {
        test_validate_binary_is_lossless("../../tests/samples/luigi_circuit.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_pocha_nomodel() {
        test_validate_binary_is_lossless("../../tests/samples/pocha_nomodel.brres", true)
    }

    // Known-lossy sample: the round-trip is expected to differ.
    #[test]
    fn test_validate_binary_is_lossless_pocha() {
        test_validate_binary_is_lossless("../../tests/samples/pocha.brres", false)
    }

    #[test]
    fn test_validate_binary_is_lossless_smooth_rtpa() {
        test_validate_binary_is_lossless("../../tests/samples/smooth_rtpa.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_smooth_rcla() {
        test_validate_binary_is_lossless("../../tests/samples/smooth_rcla.brres", true)
    }

    #[test]
    fn test_validate_binary_is_lossless_brvia() {
        test_validate_binary_is_lossless("../../tests/samples/brvia.brres", true)
    }

    // NOTE(review): duplicates test_validate_binary_is_lossless_driver_model;
    // kept to avoid changing the test surface, but a candidate for removal.
    #[test]
    fn test_driver_model() {
        test_validate_binary_is_lossless("../../tests/samples/driver_model.brres", true)
    }
}