use std::{
any::type_name,
collections::HashMap,
io::{self, Cursor},
mem, str, vec,
};
use anyhow::{bail, ensure, Context, Result};
use bytemuck::{bytes_of, bytes_of_mut, cast_slice_mut, AnyBitPattern, NoUninit, Pod};
use half::f16;
use num_traits::{AsPrimitive, Float, PrimInt};
use crate::{
sdf::{self, Value},
usdc::coding,
};
use super::layout::*;
/// Highest crate-file format version this library knows how to read.
const SW_VERSION: Version = version(0, 12, 0);
/// In-memory index over a binary USD ("usdc") crate file.
///
/// [`CrateFile::open`] eagerly reads the structural sections listed below;
/// attribute values are decoded lazily through [`CrateFile::value`] using the
/// retained `reader`.
#[derive(Debug)]
pub struct CrateFile<R> {
reader: R,
/// Bootstrap header: magic bytes, file version and TOC offset.
pub bootstrap: Bootstrap,
/// Table-of-contents entries describing each section's span.
pub sections: Vec<Section>,
/// TOKENS section: the de-duplicated string table.
pub tokens: Vec<String>,
/// STRINGS section: indices into `tokens`.
pub strings: Vec<usize>,
/// FIELDS section: per-field token index and value representation.
pub fields: Vec<Field>,
/// FIELDSETS section: runs of indices into `fields`; a `None` entry marks
/// the end of a run (see `validate`).
pub fieldsets: Vec<Option<usize>>,
/// PATHS section: decoded scene paths.
pub paths: Vec<sdf::Path>,
/// SPECS section: (path index, fieldset index, spec type) records.
pub specs: Vec<Spec>,
}
impl<R> CrateFile<R> {
/// File-format version recorded in the bootstrap header.
#[inline]
pub fn version(&self) -> Version {
Version::from(self.bootstrap)
}
}
impl<R: io::Read + io::Seek> CrateFile<R> {
/// Opens a crate file: reads the bootstrap header, the table of contents,
/// and all structural sections (TOKENS, STRINGS, FIELDS, FIELDSETS, PATHS,
/// SPECS).
///
/// Attribute values are not decoded here; use [`CrateFile::value`]. Call
/// [`CrateFile::validate`] afterwards to cross-check the loaded indices.
pub fn open(mut reader: R) -> Result<Self> {
let bootstrap = Self::read_header(&mut reader)?;
let mut file = CrateFile {
reader,
bootstrap,
// Filled in below; a section missing from the TOC simply stays empty.
sections: Vec::new(),
tokens: Vec::new(),
strings: Vec::new(),
fields: Vec::new(),
fieldsets: Vec::new(),
paths: Vec::new(),
specs: Vec::new(),
};
file.read_sections().context("Unable to read sections")?;
file.read_tokens().context("Unable to read TOKENS section")?;
file.read_strings().context("Unable to read STRINGS section")?;
file.read_fields().context("Unable to read FIELDS section")?;
file.read_fieldsets().context("Unable to read FIELDSETS section")?;
file.read_paths().context("Unable to read PATHS section")?;
file.read_specs().context("Unable to read SPECS section")?;
Ok(file)
}
/// Cross-checks the indices loaded from the different sections against each
/// other: field → token, fieldset → field, spec → path and fieldset.
///
/// Also verifies that every spec's fieldset index points at the start of a
/// run (the preceding `fieldsets` entry must be the `None` terminator) and
/// that no spec has an `Unknown` type.
pub fn validate(&self) -> Result<()> {
self.fields.iter().enumerate().try_for_each(|(index, field)| {
self.tokens
.get(field.token_index)
.with_context(|| format!("Invalid field token index {}: {}", index, field.token_index))?;
anyhow::Ok(())
})?;
self.fieldsets
.iter()
.enumerate()
// `None` entries are run terminators; only real indices are checked.
.filter_map(|(i, index)| index.map(|index| (i, index)))
.try_for_each(|(index, fieldset)| {
self.fields
.get(fieldset)
.with_context(|| format!("Invalid fieldset index {index}: {fieldset}"))?;
anyhow::Ok(())
})?;
self.specs.iter().enumerate().try_for_each(|(index, spec)| {
self.paths
.get(spec.path_index)
.with_context(|| format!("Invalid spec {} path index: {}", index, spec.path_index))?;
self.fieldsets
.get(spec.fieldset_index)
.with_context(|| format!("Invalid spec {} fieldset index: {}", index, spec.fieldset_index))?;
// A fieldset run must begin right after a terminator entry.
if spec.fieldset_index > 0 {
ensure!(
self.fieldsets[spec.fieldset_index - 1].is_none(),
"Invalid spec {}, the element at the prior index {} must be a default-constructed field index",
index,
spec.fieldset_index
);
}
ensure!(spec.spec_type != sdf::SpecType::Unknown, "Invalid spec {index} type");
anyhow::Ok(())
})?;
Ok(())
}
/// Reads and checks the bootstrap header: magic bytes must match, the TOC
/// offset must be positive, and the file version must be readable by this
/// library (`SW_VERSION`).
fn read_header(mut reader: impl io::Read + io::Seek) -> Result<Bootstrap> {
let header = reader.read_pod::<Bootstrap>()?;
ensure!(header.ident.eq(super::MAGIC), "Usd crate bootstrap section corrupt");
ensure!(header.toc_offset > 0, "Invalid TOC offset");
let file_ver = version(header.version[0], header.version[1], header.version[2]);
ensure!(
SW_VERSION.can_read(file_ver),
"Usd crate version mismatch, file is {file_ver}, library supports {SW_VERSION}"
);
Ok(header)
}
/// Reads the table of contents at `bootstrap.toc_offset`.
fn read_sections(&mut self) -> Result<()> {
self.set_position(self.bootstrap.toc_offset)?;
let count = self.reader.read_count()?;
ensure!(count > 0, "Crate file has no sections");
// Defensive bound against corrupt files; real files have only a handful.
ensure!(count < 64, "Suspiciously large number of sections: {count}");
self.sections = self.reader.read_vec::<Section>(count)?;
Ok(())
}
/// Reads the TOKENS section: an LZ4-compressed blob of NUL-separated,
/// NUL-terminated strings. A missing section leaves `self.tokens` empty.
fn read_tokens(&mut self) -> Result<()> {
let Some(section) = self.find_section(Section::TOKENS) else {
return Ok(());
};
self.set_position(section.start)?;
let file_ver = self.version();
let count = self.reader.read_count()?;
self.tokens = if file_ver < version(0, 4, 0) {
todo!("Support TOKENS reader for < 0.4.0 files");
} else {
let uncompressed_size = self.reader.read_count()?;
let mut buffer = self.read_compressed(uncompressed_size)?;
ensure!(
buffer.len() == uncompressed_size,
"Decompressed size mismatch (expected {}, got {})",
uncompressed_size,
buffer.len(),
);
ensure!(
buffer.last() == Some(&b'\0'),
"Tokens section not null-terminated in crate file"
);
// Drop the trailing NUL so `split` doesn't yield a spurious empty token.
buffer.pop();
let strings = buffer
.split(|c| *c == b'\0')
.map(|buf| str::from_utf8(buf).map(|str| str.to_string()))
.collect::<Result<Vec<_>, str::Utf8Error>>()
.context("Failed to parse TOKENS section")?;
ensure!(
strings.len() == count,
"Crate file claims {} tokens, but found {}",
count,
strings.len(),
);
strings
};
Ok(())
}
/// Reads the STRINGS section: a list of `u32` indices into the token table,
/// widened to `usize`. A missing section leaves `self.strings` empty.
fn read_strings(&mut self) -> Result<()> {
    let section = match self.find_section(Section::STRINGS) {
        Some(section) => section,
        None => return Ok(()),
    };
    self.set_position(section.start)?;
    let count = self.reader.read_count()?;
    // Sanity bound to avoid huge allocations on corrupt input.
    ensure!(
        count < 128 * 1024 * 1024,
        "Suspiciously large number of strings: {count}"
    );
    let raw: Vec<u32> = self.reader.read_vec(count)?;
    self.strings = raw.iter().map(|&offset| offset as usize).collect();
    Ok(())
}
/// Reads the FIELDS section: token indices and value reps stored as two
/// parallel compressed arrays, zipped into `Field` records. A missing
/// section leaves `self.fields` empty.
fn read_fields(&mut self) -> Result<()> {
let Some(section) = self.find_section(Section::FIELDS) else {
return Ok(());
};
self.set_position(section.start)?;
let file_ver = self.version();
self.fields = if file_ver < version(0, 4, 0) {
todo!("Support FIELDS reader before < 0.4.0")
} else {
let field_count = self.reader.read_count()?;
// Token indices use the integer encoding; reps are a plain LZ4 block.
let indices = self.read_encoded_ints(field_count)?;
let reps = self.read_compressed(field_count)?;
let fields: Vec<_> = indices
.iter()
.zip(reps.iter())
.map(|(index, value)| Field::new(*index, *value))
.collect();
debug_assert_eq!(fields.len(), field_count);
fields
};
Ok(())
}
/// Reads the FIELDSETS section: compressed `u32` indices into `fields`,
/// where the sentinel `u32::MAX` separates runs and is decoded as `None`.
/// A missing section leaves `self.fieldsets` empty.
fn read_fieldsets(&mut self) -> Result<()> {
let Some(section) = self.find_section(Section::FIELDSETS) else {
return Ok(());
};
self.set_position(section.start)?;
let file_ver = self.version();
self.fieldsets = if file_ver < version(0, 4, 0) {
todo!("Support FIELDSETS reader for < 0.4.0 files");
} else {
let count = self.reader.read_count()?;
let decoded = self.read_encoded_ints::<u32>(count)?;
// Sentinel marking the end of one fieldset run.
const INVALID_INDEX: u32 = u32::MAX;
let sets = decoded
.into_iter()
.map(|i| if i == INVALID_INDEX { None } else { Some(i as usize) })
.collect::<Vec<_>>();
debug_assert_eq!(sets.len(), count);
sets
};
Ok(())
}
/// Reads the PATHS section. For >= 0.4.0 files the paths use the compressed
/// tree encoding handled by `read_compressed_paths`; `self.paths` is
/// pre-sized with default paths and populated in place. A missing section
/// leaves `self.paths` empty.
fn read_paths(&mut self) -> Result<()> {
let Some(section) = self.find_section(Section::PATHS) else {
return Ok(());
};
self.set_position(section.start)?;
let file_ver = self.version();
if file_ver == version(0, 0, 1) {
todo!("Support PATHS reader for == 0.0.1 files");
} else if file_ver < version(0, 4, 0) {
todo!("Support PATHS reader for < 0.4.0 files");
} else {
let path_count = self.reader.read_count()?;
self.paths = vec![sdf::Path::default(); path_count];
self.read_compressed_paths()?;
};
Ok(())
}
/// Reads the three parallel arrays of the compressed path encoding — target
/// path indices, element token indices, and tree jumps — then rebuilds the
/// path tree starting at entry 0 with an empty parent path.
fn read_compressed_paths(&mut self) -> Result<()> {
let count: usize = self.reader.read_count()?;
let path_indexes = self.read_encoded_ints::<u32>(count)?;
debug_assert_eq!(path_indexes.len(), count);
let element_token_indexes = self.read_encoded_ints::<i32>(count)?;
debug_assert_eq!(element_token_indexes.len(), count);
let jumps = self.read_encoded_ints::<i32>(count)?;
debug_assert_eq!(jumps.len(), count);
self.build_compressed_paths(&path_indexes, &element_token_indexes, &jumps, 0, sdf::Path::default())?;
Ok(())
}
/// Rebuilds `self.paths` from the compressed tree encoding.
///
/// Each entry carries an element token index (negative for prim-property
/// paths), a target slot in `paths` (via `path_indexes`), and a jump value.
/// As implemented below: jump > 0 → has a sibling at `this_index + jump` and
/// a child at the next entry; jump == -1 → child only; jump == 0 → sibling
/// only (the next entry); any other negative jump → leaf, subtree done.
fn build_compressed_paths(
&mut self,
path_indexes: &[u32],
element_token_indexes: &[i32],
jumps: &[i32],
mut current_index: usize,
mut parent_path: sdf::Path,
) -> Result<()> {
let mut has_child;
let mut has_sibling;
loop {
let this_index = current_index;
current_index += 1;
if parent_path.is_empty() {
// Only reachable on the first entry of the top-level call: the
// absolute root path.
parent_path = sdf::Path::new("/")?;
// NOTE(review): the non-root branch writes through
// `path_indexes[this_index]`; indexing by `this_index` directly
// here is only equivalent when the root's path index is 0 —
// confirm against the reference implementation.
self.paths[this_index] = parent_path.clone();
} else {
let token_index = element_token_indexes[this_index];
// A negative token index marks a prim property path.
let is_prim_property_path = token_index < 0;
let token_index = token_index.unsigned_abs() as usize;
let element_token = self.tokens[token_index].as_str();
self.paths[path_indexes[this_index] as usize] = if is_prim_property_path {
parent_path.append_property(element_token)?
} else if element_token.starts_with('{') {
// A leading '{' introduces a variant-selection segment.
parent_path.append_variant_segment(element_token)
} else {
parent_path.append_path(element_token)?
};
}
has_child = jumps[this_index] > 0 || jumps[this_index] == -1;
has_sibling = jumps[this_index] >= 0;
if has_child {
if has_sibling {
// Process the sibling subtree recursively first, then descend
// into this entry's children by making it the new parent.
let sibling_index = this_index + jumps[this_index] as usize;
self.build_compressed_paths(
path_indexes,
element_token_indexes,
jumps,
sibling_index,
parent_path,
)?;
}
parent_path = self.paths[path_indexes[this_index] as usize].clone();
}
if !has_child && !has_sibling {
// Leaf with no sibling: this subtree is complete.
break;
}
}
Ok(())
}
/// Reads the SPECS section: three parallel compressed `u32` arrays (path
/// index, fieldset index, spec type) zipped into `Spec` records. A missing
/// section leaves `self.specs` empty.
fn read_specs(&mut self) -> Result<()> {
let Some(section) = self.find_section(Section::SPECS) else {
return Ok(());
};
self.set_position(section.start)?;
let file_ver = self.version();
self.specs = if file_ver == version(0, 0, 1) {
todo!("Support SPECS reader for == 0.0.1 files");
} else if file_ver < version(0, 4, 0) {
todo!("Support SPECS reader for < 0.4.0 files");
} else {
let spec_count = self.reader.read_count()?;
let path_indexes = self.read_encoded_ints::<u32>(spec_count)?;
let fieldset_indexes = self.read_encoded_ints::<u32>(spec_count)?;
let spec_types = self.read_encoded_ints::<u32>(spec_count)?;
path_indexes
.into_iter()
.zip(fieldset_indexes)
.zip(spec_types)
.map(|((path, fieldset), spec_type)| {
Ok(Spec {
path_index: path as usize,
fieldset_index: fieldset as usize,
spec_type: sdf::SpecType::from_repr(spec_type)
.with_context(|| format!("Unable to parse SDF spec type: {spec_type}"))?,
})
})
.collect::<Result<Vec<_>>>()?
};
Ok(())
}
/// Returns the first TOC section whose name matches `name`, if any.
pub fn find_section(&self, name: &str) -> Option<&Section> {
    for section in &self.sections {
        if section.name() == name {
            return Some(section);
        }
    }
    None
}
/// Resolves a STRINGS-table index to its token text (cloned).
fn resolve_string(&self, string_index: u32) -> String {
    let token_index = self.strings[string_index as usize];
    let token = &self.tokens[token_index];
    token.clone()
}
/// Seeks the underlying reader to an absolute byte offset.
fn set_position(&mut self, position: u64) -> Result<()> {
self.reader.seek(io::SeekFrom::Start(position))?;
Ok(())
}
/// Reads a single non-array value of type `T`.
///
/// Inlined reps keep the value in the low 32 bits of the payload; those bits
/// are zero-extended into an 8-byte buffer and `T` is read from its front,
/// so `T` wider than 4 bytes sees zero padding. Non-inlined reps read `T`
/// from the stream at the payload offset.
/// NOTE(review): reading through `bytes_of` relies on host byte order
/// matching the file's layout — confirm for big-endian targets.
fn unpack_value<T: Default + Pod>(&mut self, value: ValueRep) -> Result<T> {
ensure!(!value.is_array(), "Can't unpack array {value:?} as inline value");
let ty = value.ty()?;
ensure!(ty != Type::Invalid, "Invalid value type");
let value = if value.is_inlined() {
// Keep only the low 32 bits of the payload.
let tmp = value.payload() & ((1_u64 << (mem::size_of::<u32>() * 8)) - 1);
let mut cursor = Cursor::new(bytes_of(&tmp));
cursor.read_pod::<T>()?
} else {
self.set_position(value.payload())?;
self.reader.read_pod::<T>()?
};
Ok(value)
}
/// Unpacks a token index from `value` and returns the token text (cloned).
fn read_token(&mut self, value: ValueRep) -> Result<String> {
    let index = self.unpack_value::<u64>(value)?;
    Ok(self.tokens[index as usize].clone())
}
/// Reads a size-prefixed LZ4 block and decompresses it into a `Vec<T>`
/// sized for at most `estimated_count` elements; the vec is truncated to
/// the number of elements actually decompressed.
fn read_compressed<T: Default + NoUninit + AnyBitPattern>(&mut self, estimated_count: usize) -> Result<Vec<T>> {
let compressed_size = self.reader.read_count()?;
let mut input = vec![0_u8; compressed_size];
self.reader.read_exact(&mut input)?;
let mut output = vec![T::default(); estimated_count];
let actual_size = decompress_lz4(&input, cast_slice_mut(&mut output))?;
// Convert decompressed byte size back to an element count.
let actual_count = actual_size / mem::size_of::<T>();
if actual_count < output.len() {
output.truncate(actual_count);
}
Ok(output)
}
/// Reads `count` integers stored with the crate file's integer encoding:
/// an LZ4-compressed buffer decoded by `coding::decode_ints`.
fn read_encoded_ints<T: PrimInt + 'static>(&mut self, count: usize) -> Result<Vec<T>>
where
i64: AsPrimitive<T>,
{
// Upper bound for the encoded form; the real payload may be smaller.
let estimated_size = coding::encoded_buffer_size::<u32>(count);
let buffer = self.read_compressed::<u8>(estimated_size)?;
let ints = coding::decode_ints(buffer.as_slice(), count)?;
debug_assert_eq!(ints.len(), count);
Ok(ints)
}
/// Arrays shorter than this are never stored compressed.
const MIN_COMPRESSED_ARRAY_SIZE: usize = 4;
/// Positions the reader at an array payload and reads its element count.
///
/// Returns `(count, compressed)`, where `compressed` combines the rep's
/// compression flag with whether this file version supports compression for
/// the given array kind. A zero payload offset means an empty array.
fn unpack_array_len(&mut self, value: ValueRep, kind: ArrayKind) -> Result<(usize, bool)> {
debug_assert!(!value.is_inlined());
if value.payload() == 0 {
return Ok((0, false));
}
self.set_position(value.payload())?;
if self.version() < version(0, 5, 0) {
// Pre-0.5.0 files store an extra (ignored) u32 before the count.
let _ = self.reader.read_pod::<u32>()?;
}
let mut compressed = true;
match kind {
ArrayKind::Ints => {
// Integer array compression appeared in 0.5.0.
if self.version() < version(0, 5, 0) || !value.is_compressed() {
compressed = false;
}
}
ArrayKind::Floats => {
// Float array compression appeared in 0.6.0.
if self.version() < version(0, 6, 0) || !value.is_compressed() {
compressed = false;
}
}
ArrayKind::Other => {
debug_assert!(!value.is_compressed());
compressed = false;
}
}
// The count widened from u32 to u64 in 0.7.0.
let count = if self.version() < version(0, 7, 0) {
self.reader.read_pod::<u32>()? as usize
} else {
self.reader.read_pod::<u64>()? as usize
};
if count < Self::MIN_COMPRESSED_ARRAY_SIZE {
compressed = false;
}
Ok((count, compressed))
}
/// Reads an integer array value, using the compact integer encoding when
/// the rep is flagged (and the file version supports) compression.
fn read_ints<T: PrimInt + Pod + Default>(&mut self, value: ValueRep) -> Result<Vec<T>>
where
    i64: AsPrimitive<T>,
{
    let (count, compressed) = self.unpack_array_len(value, ArrayKind::Ints)?;
    match (count, compressed) {
        (0, _) => Ok(Vec::default()),
        (n, true) => self.read_encoded_ints(n),
        (n, false) => self.reader.read_vec(n),
    }
}
/// Reads a floating-point array value.
///
/// Compressed float arrays carry a one-byte code: `b'i'` means the values
/// fit in i32 and use the integer encoding; `b't'` means a lookup table of
/// distinct values followed by per-element indices. (The error messages say
/// "double" but this path is shared by all float widths.)
fn read_floats<T: Float + Default + Pod>(&mut self, value: ValueRep) -> Result<Vec<T>> {
use num_traits::cast;
ensure!(!value.is_inlined());
let (count, compressed) = self.unpack_array_len(value, ArrayKind::Floats)?;
let vec = if compressed {
let code = self.reader.read_pod::<u8>()?;
match code {
b'i' => {
// Values were integral; stored via the integer encoding.
let ints: Vec<i32> = self.read_compressed(count)?;
ints.into_iter().map(|i| cast(i).unwrap()).collect()
}
b't' => {
// Lookup table of distinct values + index per element.
let lut_size = self.reader.read_pod::<u32>()? as usize;
let lut: Vec<T> = self.reader.read_vec(lut_size)?;
let indexes: Vec<u32> = self.read_encoded_ints(count)?;
ensure!(
indexes.len() == count,
"Read invalid number of indexes to decompress doubles array"
);
let mut output = vec![T::zero(); count];
for (i, index) in indexes.into_iter().enumerate() {
output[i] = lut[index as usize];
}
output
}
_ => bail!("Invalid compressed double array code: {code}"),
}
} else {
self.reader.read_vec(count)?
};
Ok(vec)
}
/// Reads an [`sdf::ListOp`] at the rep's payload offset: a header bitmask
/// followed by one item list per set flag, each list decoded by `read`.
fn read_list_op<T: Default + Clone + PartialEq>(
&mut self,
value: ValueRep,
mut read: impl FnMut(&mut Self) -> Result<Vec<T>>,
) -> Result<sdf::ListOp<T>> {
self.set_position(value.payload())?;
let mut out = sdf::ListOp::<T>::default();
let header = self.reader.read_pod::<ListOpHeader>()?;
if header.is_explicit() {
out.explicit = true;
}
// The lists appear in the stream in exactly this order, so the flag
// checks below must stay in this sequence.
if header.has_explicit() {
out.explicit_items = read(self)?;
}
if header.has_added() {
out.added_items = read(self)?;
}
if header.has_prepend() {
out.prepended_items = read(self)?;
}
if header.has_appended() {
out.appended_items = read(self)?;
}
if header.has_deleted() {
out.deleted_items = read(self)?;
}
if header.has_ordered() {
out.ordered_items = read(self)?;
}
Ok(out)
}
/// Reads a count-prefixed list of string indices and resolves each one
/// through STRINGS → TOKENS into owned text.
fn read_string_vec(&mut self) -> Result<Vec<String>> {
    let count = self.reader.read_count()?;
    let indices: Vec<u32> = self.reader.read_vec(count)?;
    let mut strings = Vec::with_capacity(indices.len());
    for string_index in indices {
        let token_index = self.strings[string_index as usize];
        strings.push(self.tokens[token_index].clone());
    }
    Ok(strings)
}
/// Reads a count-prefixed list of token indices and resolves each one
/// directly against the token table.
fn read_token_vec(&mut self) -> Result<Vec<String>> {
    let count = self.reader.read_count()?;
    let indices: Vec<u32> = self.reader.read_vec(count)?;
    let mut tokens = Vec::with_capacity(indices.len());
    for index in indices {
        tokens.push(self.tokens[index as usize].clone());
    }
    Ok(tokens)
}
/// Reads a count-prefixed list of path indices and resolves each one
/// against the decoded PATHS table.
fn read_path_vec(&mut self) -> Result<Vec<sdf::Path>> {
    let count = self.reader.read_count()?;
    let indices: Vec<u32> = self.reader.read_vec(count)?;
    let mut paths = Vec::with_capacity(indices.len());
    for index in indices {
        paths.push(self.paths[index as usize].clone());
    }
    Ok(paths)
}
/// Reads a count-prefixed array of raw POD values from the stream.
fn read_pod_vec<T: Default + NoUninit + AnyBitPattern>(&mut self) -> Result<Vec<T>> {
let count = self.reader.read_count()?;
self.reader.read_vec(count)
}
/// Reads a `u32` string index from the stream and resolves it to text.
fn read_string(&mut self) -> Result<String> {
    let index = self.reader.read_pod::<u32>()?;
    Ok(self.resolve_string(index))
}
/// Reads a `u32` path index from the stream and resolves it to a path.
fn read_path(&mut self) -> Result<sdf::Path> {
    let index = self.reader.read_pod::<u32>()?;
    Ok(self.paths[index as usize].clone())
}
/// Reads one `sdf::Reference`: asset path string, prim path, layer offset,
/// and the reference's custom-data dictionary, in stream order.
fn read_reference(&mut self) -> Result<sdf::Reference> {
let asset_path = self.read_string()?;
let prim_path = self.read_path()?;
let layer_offset = self.reader.read_pod::<sdf::LayerOffset>()?;
let custom_data = self.read_custom_data()?;
Ok(sdf::Reference {
asset_path,
prim_path,
layer_offset,
custom_data,
})
}
/// Reads one `sdf::Payload`. The trailing layer offset exists only in
/// crate files of version 0.8.0 and newer; older files yield `None`.
fn read_payload(&mut self) -> Result<sdf::Payload> {
let asset_path = self.read_string()?;
let prim_path = self.read_path()?;
let mut payload = sdf::Payload {
asset_path,
prim_path,
layer_offset: None,
};
if self.version() >= version(0, 8, 0) {
let layer_offset = self.reader.read_pod::<sdf::LayerOffset>()?;
payload.layer_offset = Some(layer_offset);
}
Ok(payload)
}
/// Reads an `i64` offset and seeks relative to it; the `- 8` compensates
/// for the eight bytes of the offset value itself that were just consumed.
fn apply_recursive_offset(&mut self) -> Result<()> {
let offset = self.reader.read_pod::<i64>()?;
self.reader.seek(io::SeekFrom::Current(offset - 8))?;
Ok(())
}
/// Reads a string-keyed dictionary of values.
///
/// Each entry stores a key, a relative offset (consumed via
/// `apply_recursive_offset`), and a value rep. The stream position is saved
/// before and restored after the recursive `value` read so the loop can
/// continue with the next entry. Nested dictionaries are rejected.
fn read_custom_data(&mut self) -> Result<HashMap<String, Value>> {
let mut count = self.reader.read_count()?;
let mut dict = HashMap::default();
while count > 0 {
let key = self.read_string()?;
let value = {
self.apply_recursive_offset()?;
let value = self.reader.read_pod::<ValueRep>()?;
ensure!(value.ty()? != Type::Invalid, "Can't parse dictionary value type");
ensure!(value.ty()? != Type::Dictionary, "Nested dictionaries are not supported");
// `value` may seek anywhere; remember where this entry ends.
let saved_position = self.reader.stream_position()?;
let value = self.value(value)?;
self.set_position(saved_position)?;
value
};
dict.insert(key, value);
count -= 1;
}
Ok(dict)
}
/// Reads an uncompressed array of `N`-component elements (vectors, matrices,
/// quaternions): the payload is a flat buffer of `count * N` scalars which
/// is regrouped into fixed-size arrays.
fn read_vec_array<T: Default + NoUninit + AnyBitPattern, const N: usize>(
    &mut self,
    value: ValueRep,
) -> Result<Vec<[T; N]>> {
    debug_assert!(value.is_array());
    debug_assert!(!value.is_compressed());
    let (count, _) = self.unpack_array_len(value, ArrayKind::Other)?;
    if count == 0 {
        return Ok(Vec::default());
    }
    let flat: Vec<T> = self.reader.read_vec(count * N)?;
    let grouped = flat
        .chunks_exact(N)
        // `chunks_exact` guarantees every chunk has exactly N elements.
        .map(|chunk| <[T; N]>::try_from(chunk).expect("chunk length is N"))
        .collect();
    Ok(grouped)
}
pub fn value(&mut self, value: ValueRep) -> Result<sdf::Value> {
let ty = value.ty()?;
ensure!(ty != Type::Invalid, "Invalid value type");
let variant = match ty {
Type::Bool if value.is_array() => {
let vec = self
.read_vec_array::<u8, 1>(value)?
.into_iter()
.map(|[v]| v != 0)
.collect();
sdf::Value::BoolVec(vec)
}
Type::Bool => {
let value: i32 = self.unpack_value(value)?;
sdf::Value::Bool(value != 0)
}
Type::Uchar if value.is_array() => {
let vec = self.read_vec_array::<u8, 1>(value)?.into_iter().map(|[v]| v).collect();
sdf::Value::UcharVec(vec)
}
Type::Uchar => {
let value = self.unpack_value::<u8>(value)?;
sdf::Value::Uchar(value)
}
Type::Int if value.is_array() => sdf::Value::IntVec(self.read_ints(value)?),
Type::Int => sdf::Value::Int(self.unpack_value(value)?),
Type::Uint if value.is_array() => sdf::Value::UintVec(self.read_ints(value)?),
Type::Uint => sdf::Value::Uint(self.unpack_value(value)?),
Type::Int64 if value.is_array() => sdf::Value::Int64Vec(self.read_ints(value)?),
Type::Int64 => sdf::Value::Int64(self.unpack_value(value)?),
Type::Uint64 if value.is_array() => sdf::Value::Uint64Vec(self.read_ints(value)?),
Type::Uint64 => sdf::Value::Uint64(self.unpack_value(value)?),
Type::Half if value.is_array() => sdf::Value::HalfVec(self.read_floats(value)?),
Type::Half => sdf::Value::Half(self.unpack_value(value)?),
Type::Float if value.is_array() => sdf::Value::FloatVec(self.read_floats(value)?),
Type::Float => sdf::Value::Float(self.unpack_value(value)?),
Type::Double if value.is_array() => sdf::Value::DoubleVec(self.read_floats(value)?),
Type::Double if value.is_inlined() => {
let value = self.unpack_value::<f32>(value)?;
sdf::Value::Double(value as f64)
}
Type::Double => sdf::Value::Double(self.unpack_value(value)?),
Type::DoubleVector => sdf::Value::DoubleVec(self.read_floats(value)?),
Type::StringVector => {
ensure!(!value.is_inlined());
self.set_position(value.payload())?;
sdf::Value::StringVec(self.read_string_vec()?)
}
Type::String if value.is_array() => {
self.set_position(value.payload())?;
sdf::Value::StringVec(self.read_string_vec()?)
}
Type::String => {
ensure!(!value.is_array());
let string_index = self.unpack_value::<u32>(value)?;
sdf::Value::String(self.resolve_string(string_index))
}
Type::AssetPath => sdf::Value::AssetPath(self.read_token(value)?),
Type::Token if value.is_array() => {
let (count, _) = self.unpack_array_len(value, ArrayKind::Other)?;
let indices = self.reader.read_vec::<u32>(count)?;
let tokens = indices.into_iter().map(|i| self.tokens[i as usize].clone()).collect();
sdf::Value::TokenVec(tokens)
}
Type::Token => sdf::Value::Token(self.read_token(value)?),
Type::Vec2h if value.is_array() => Value::Vec2hVec(self.read_vec_array::<f16, 2>(value)?),
Type::Vec2f if value.is_array() => Value::Vec2fVec(self.read_vec_array::<f32, 2>(value)?),
Type::Vec2d if value.is_array() => Value::Vec2dVec(self.read_vec_array::<f64, 2>(value)?),
Type::Vec2i if value.is_array() => Value::Vec2iVec(self.read_vec_array::<i32, 2>(value)?),
Type::Vec3h if value.is_array() => Value::Vec3hVec(self.read_vec_array::<f16, 3>(value)?),
Type::Vec3f if value.is_array() => Value::Vec3fVec(self.read_vec_array::<f32, 3>(value)?),
Type::Vec3d if value.is_array() => Value::Vec3dVec(self.read_vec_array::<f64, 3>(value)?),
Type::Vec3i if value.is_array() => Value::Vec3iVec(self.read_vec_array::<i32, 3>(value)?),
Type::Vec4h if value.is_array() => Value::Vec4hVec(self.read_vec_array::<f16, 4>(value)?),
Type::Vec4f if value.is_array() => Value::Vec4fVec(self.read_vec_array::<f32, 4>(value)?),
Type::Vec4d if value.is_array() => Value::Vec4dVec(self.read_vec_array::<f64, 4>(value)?),
Type::Vec4i if value.is_array() => Value::Vec4iVec(self.read_vec_array::<i32, 4>(value)?),
Type::Vec2h if value.is_inlined() => sdf::Value::Vec2h(self.unpack_value::<[f16; 2]>(value)?),
Type::Vec2f if value.is_inlined() => sdf::Value::Vec2f(to_vec::<f32, 2>(self.unpack_value(value)?)),
Type::Vec2d if value.is_inlined() => sdf::Value::Vec2d(to_vec::<f64, 2>(self.unpack_value(value)?)),
Type::Vec2i if value.is_inlined() => sdf::Value::Vec2i(to_vec::<i32, 2>(self.unpack_value(value)?)),
Type::Vec3h if value.is_inlined() => sdf::Value::Vec3h(self.unpack_value::<[f16; 3]>(value)?),
Type::Vec3f if value.is_inlined() => sdf::Value::Vec3f(to_vec::<f32, 3>(self.unpack_value(value)?)),
Type::Vec3d if value.is_inlined() => sdf::Value::Vec3d(to_vec::<f64, 3>(self.unpack_value(value)?)),
Type::Vec3i if value.is_inlined() => sdf::Value::Vec3i(to_vec::<i32, 3>(self.unpack_value(value)?)),
Type::Vec4h if value.is_inlined() => sdf::Value::Vec4h(self.unpack_value::<[f16; 4]>(value)?),
Type::Vec4f if value.is_inlined() => sdf::Value::Vec4f(to_vec::<f32, 4>(self.unpack_value(value)?)),
Type::Vec4d if value.is_inlined() => sdf::Value::Vec4d(to_vec::<f64, 4>(self.unpack_value(value)?)),
Type::Vec4i if value.is_inlined() => sdf::Value::Vec4i(to_vec::<i32, 4>(self.unpack_value(value)?)),
Type::Vec2h => sdf::Value::Vec2h(self.unpack_value::<[f16; 2]>(value)?),
Type::Vec2f => sdf::Value::Vec2f(self.unpack_value::<[f32; 2]>(value)?),
Type::Vec2d => sdf::Value::Vec2d(self.unpack_value::<[f64; 2]>(value)?),
Type::Vec2i => sdf::Value::Vec2i(self.unpack_value::<[i32; 2]>(value)?),
Type::Vec3h => sdf::Value::Vec3h(self.unpack_value::<[f16; 3]>(value)?),
Type::Vec3f => sdf::Value::Vec3f(self.unpack_value::<[f32; 3]>(value)?),
Type::Vec3d => sdf::Value::Vec3d(self.unpack_value::<[f64; 3]>(value)?),
Type::Vec3i => sdf::Value::Vec3i(self.unpack_value::<[i32; 3]>(value)?),
Type::Vec4h => sdf::Value::Vec4h(self.unpack_value::<[f16; 4]>(value)?),
Type::Vec4f => sdf::Value::Vec4f(self.unpack_value::<[f32; 4]>(value)?),
Type::Vec4d => sdf::Value::Vec4d(self.unpack_value::<[f64; 4]>(value)?),
Type::Vec4i => sdf::Value::Vec4i(self.unpack_value::<[i32; 4]>(value)?),
Type::Matrix2d if value.is_array() => Value::Matrix2dVec(self.read_vec_array::<f64, 4>(value)?),
Type::Matrix3d if value.is_array() => Value::Matrix3dVec(self.read_vec_array::<f64, 9>(value)?),
Type::Matrix4d if value.is_array() => Value::Matrix4dVec(self.read_vec_array::<f64, 16>(value)?),
Type::Matrix2d if value.is_inlined() => {
sdf::Value::Matrix2d(to_mat_diag::<2, 4>(self.unpack_value(value)?))
}
Type::Matrix3d if value.is_inlined() => {
sdf::Value::Matrix3d(to_mat_diag::<3, 9>(self.unpack_value(value)?))
}
Type::Matrix4d if value.is_inlined() => {
sdf::Value::Matrix4d(to_mat_diag::<4, 16>(self.unpack_value(value)?))
}
Type::Matrix2d => sdf::Value::Matrix2d(self.unpack_value::<[f64; 4]>(value)?),
Type::Matrix3d => sdf::Value::Matrix3d(self.unpack_value::<[f64; 9]>(value)?),
Type::Matrix4d => sdf::Value::Matrix4d(self.unpack_value::<[f64; 16]>(value)?),
Type::Quath if value.is_array() => Value::QuathVec(self.read_vec_array::<f16, 4>(value)?),
Type::Quath => sdf::Value::Quath(self.unpack_value::<[f16; 4]>(value)?),
Type::Quatf if value.is_array() => Value::QuatfVec(self.read_vec_array::<f32, 4>(value)?),
Type::Quatf => sdf::Value::Quatf(self.unpack_value::<[f32; 4]>(value)?),
Type::Quatd if value.is_array() => Value::QuatdVec(self.read_vec_array::<f64, 4>(value)?),
Type::Quatd => sdf::Value::Quatd(self.unpack_value::<[f64; 4]>(value)?),
Type::TokenListOp => {
ensure!(!value.is_inlined());
let list = self.read_list_op(value, |file: &mut Self| file.read_token_vec())?;
sdf::Value::TokenListOp(list)
}
Type::StringListOp => {
ensure!(!value.is_inlined());
let list = self.read_list_op(value, |file: &mut Self| file.read_string_vec())?;
sdf::Value::StringListOp(list)
}
Type::PathListOp => {
ensure!(!value.is_inlined());
let list = self.read_list_op(value, |file: &mut Self| file.read_path_vec())?;
sdf::Value::PathListOp(list)
}
Type::ReferenceListOp => {
ensure!(!value.is_inlined());
let list = self.read_list_op(value, |file: &mut Self| {
let count = file.reader.read_count()?;
let mut vec = Vec::with_capacity(count);
for _ in 0..count {
let reference = file.read_reference()?;
vec.push(reference);
}
Ok(vec)
})?;
sdf::Value::ReferenceListOp(list)
}
Type::IntListOp => {
ensure!(!value.is_inlined());
sdf::Value::IntListOp(self.read_list_op(value, |f: &mut Self| f.read_pod_vec())?)
}
Type::Int64ListOp => {
ensure!(!value.is_inlined());
sdf::Value::Int64ListOp(self.read_list_op(value, |f: &mut Self| f.read_pod_vec())?)
}
Type::UIntListOp => {
ensure!(!value.is_inlined());
sdf::Value::UIntListOp(self.read_list_op(value, |f: &mut Self| f.read_pod_vec())?)
}
Type::UInt64ListOp => {
ensure!(!value.is_inlined());
sdf::Value::UInt64ListOp(self.read_list_op(value, |f: &mut Self| f.read_pod_vec())?)
}
Type::TokenVector => {
ensure!(!value.is_inlined());
self.set_position(value.payload())?;
let tokens = self.read_token_vec()?;
sdf::Value::TokenVec(tokens)
}
Type::PathVector => {
ensure!(!value.is_inlined());
self.set_position(value.payload())?;
let paths = self.read_path_vec()?;
sdf::Value::PathVec(paths)
}
Type::Specifier => {
let tmp: i32 = self.unpack_value(value)?;
let specifier =
sdf::Specifier::from_repr(tmp).with_context(|| format!("Unable to parse SDF specifier: {tmp}"))?;
sdf::Value::Specifier(specifier)
}
Type::Permission => {
let tmp: i32 = self.unpack_value(value)?;
let permission =
sdf::Permission::from_repr(tmp).with_context(|| format!("Unable to parse permission: {tmp}"))?;
sdf::Value::Permission(permission)
}
Type::Variability => {
let tmp: i32 = self.unpack_value(value)?;
let variability =
sdf::Variability::from_repr(tmp).with_context(|| format!("Unable to parse variability: {tmp}"))?;
sdf::Value::Variability(variability)
}
Type::LayerOffsetVector => {
ensure!(!value.is_inlined());
ensure!(!value.is_array());
ensure!(!value.is_compressed());
self.set_position(value.payload())?;
let count = self.reader.read_count()?;
let vec = self.reader.read_vec(count)?;
sdf::Value::LayerOffsetVec(vec)
}
Type::Payload => {
ensure!(!value.is_inlined());
ensure!(!value.is_array());
ensure!(!value.is_compressed());
self.set_position(value.payload())?;
let payload = self.read_payload()?;
sdf::Value::Payload(payload)
}
Type::PayloadListOp => {
let list = self.read_list_op(value, |file: &mut Self| {
let count = file.reader.read_count()?;
let mut vec = Vec::with_capacity(count);
for _ in 0..count {
let payload = file.read_payload()?;
vec.push(payload);
}
Ok(vec)
})?;
sdf::Value::PayloadListOp(list)
}
Type::VariantSelectionMap => {
ensure!(!value.is_inlined());
ensure!(!value.is_array());
ensure!(!value.is_compressed());
self.set_position(value.payload())?;
let count = self.reader.read_count()?;
let mut map = HashMap::with_capacity(count);
for _ in 0..count {
let key = self.read_string()?;
let value = self.read_string()?;
map.insert(key, value);
}
sdf::Value::VariantSelectionMap(map)
}
Type::TimeSamples => {
ensure!(!value.is_inlined());
ensure!(!value.is_compressed());
self.set_position(value.payload())?;
self.apply_recursive_offset()?;
let times_rep = self.reader.read_pod::<ValueRep>()?;
let ty = times_rep.ty()?;
ensure!(
ty == Type::DoubleVector || (ty == Type::Double && times_rep.is_array()),
"Invalid time samples type: expected either double vector or double array"
);
let saved_position = self.reader.stream_position()?;
let times = self
.value(times_rep)?
.try_as_double_vec()
.context("Failed to read time samples")?;
self.set_position(saved_position)?;
self.apply_recursive_offset()?;
let count = self.reader.read_count()?;
ensure!(count == times.len(), "Invalid time samples count");
let value_reps = self.reader.read_vec::<ValueRep>(count)?;
debug_assert_eq!(value_reps.len(), count);
let values = value_reps
.into_iter()
.map(|value| self.value(value))
.collect::<Result<Vec<_>>>()?;
let samples = times.into_iter().zip(values).collect();
sdf::Value::TimeSamples(samples)
}
Type::Dictionary if value.is_inlined() => sdf::Value::Dictionary(HashMap::default()),
Type::Dictionary => {
ensure!(!value.is_compressed(), "Dictionary {ty} can't be compressed");
ensure!(!value.is_array(), "Dictionary {ty} can't be inlined");
self.set_position(value.payload())?;
sdf::Value::Dictionary(self.read_custom_data()?)
}
Type::ValueBlock => sdf::Value::ValueBlock,
Type::Value => sdf::Value::Value,
Type::TimeCode if value.is_array() => sdf::Value::TimeCodeVec(self.read_floats(value)?),
Type::TimeCode => sdf::Value::TimeCode(self.unpack_value::<f64>(value)?),
Type::PathExpression => {
let token = self.read_token(value)?;
sdf::Value::PathExpression(token)
}
Type::UnregisteredValue => {
let token = self.read_token(value)?;
sdf::Value::UnregisteredValue(token)
}
Type::UnregisteredValueListOp => {
ensure!(!value.is_inlined());
let list = self.read_list_op(value, |file: &mut Self| file.read_string_vec())?;
sdf::Value::UnregisteredValueListOp(list)
}
Type::Relocates => {
ensure!(!value.is_inlined());
self.set_position(value.payload())?;
let count = self.reader.read_count()?;
let mut pairs = Vec::with_capacity(count);
for _ in 0..count {
let src_idx: u32 = self.reader.read_pod()?;
let tgt_idx: u32 = self.reader.read_pod()?;
let src = self.paths[src_idx as usize].clone();
let tgt = self.paths[tgt_idx as usize].clone();
pairs.push((src, tgt));
}
sdf::Value::Relocates(pairs)
}
_ => bail!("Unsupported value type: {ty}"),
};
Ok(variant)
}
}
/// Array payload categories; `unpack_array_len` uses this to decide whether
/// a rep's compression flag is honored for the file's version.
enum ArrayKind {
    /// Integer arrays — may be compressed in files >= 0.5.0.
    Ints,
    /// Floating-point arrays — may be compressed in files >= 0.6.0.
    /// (The stale `#[allow(dead_code)]` was removed: this variant is
    /// constructed by `read_floats`.)
    Floats,
    /// Everything else — never compressed.
    Other,
}
/// Widens an inlined vector stored as packed `i8` components into the
/// target component type `T`.
fn to_vec<T: From<i8>, const N: usize>(data: [i8; N]) -> [T; N] {
    std::array::from_fn(|i| T::from(data[i]))
}
/// Expands an inlined matrix diagonal (`N` packed `i8` values) into a full
/// row-major `N`×`N` matrix of `M = N * N` doubles; off-diagonals are zero.
fn to_mat_diag<const N: usize, const M: usize>(data: [i8; N]) -> [f64; M] {
    let mut matrix = [0.0_f64; M];
    for (i, component) in data.into_iter().enumerate() {
        matrix[i * N + i] = f64::from(component);
    }
    matrix
}
/// Decompresses the LZ4 payload format used throughout the crate file.
///
/// The payload starts with a one-byte chunk count. Zero means the rest of
/// `input` is a single LZ4 block decompressed into `output`; chunked
/// payloads (non-zero count) are not implemented yet. Returns the number of
/// bytes written to `output`.
fn decompress_lz4(mut input: &[u8], output: &mut [u8]) -> Result<usize> {
// `read_pod` on the slice consumes the chunk-count byte, leaving `input`
// pointing at the compressed data.
let chunks = input.read_pod::<u8>().context("Unable to read lz4 chunk count")? as usize;
if chunks == 0 {
let size = lz4_flex::decompress_into(input, output).context("Failed to decompress data, possibly corrupt?")?;
Ok(size)
} else {
todo!("Support lz4 chunked decompression")
}
}
/// Extension trait for reading counts, single POD values, and POD arrays
/// from any byte stream via `bytemuck` byte views.
pub trait ReadExt {
/// Reads an 8-byte element count and narrows it to `usize`.
fn read_count(&mut self) -> Result<usize>;
/// Reads one `T` by filling its byte representation from the stream.
fn read_pod<T: Default + Pod>(&mut self) -> Result<T>;
/// Reads `count` values of `T`; `count == 0` yields an empty vec.
fn read_vec<T: Default + NoUninit + AnyBitPattern>(&mut self, count: usize) -> Result<Vec<T>>;
}
impl<R: io::Read> ReadExt for R {
fn read_count(&mut self) -> Result<usize> {
// Counts are stored as 8-byte integers.
// NOTE(review): `bytes_of_mut` fills the value in host byte order —
// assumes the host matches the file's layout; confirm for big-endian.
let mut count = 0_u64;
self.read_exact(bytes_of_mut(&mut count))
.context("Unable to read size from IO stream")?;
Ok(count as usize)
}
fn read_pod<T: Default + Pod>(&mut self) -> Result<T> {
// Reads `size_of::<T>()` bytes directly into a default-initialized T.
let mut object = T::default();
self.read_exact(bytes_of_mut(&mut object))
.with_context(|| format!("Unable to read pod: {}", type_name::<T>()))?;
Ok(object)
}
fn read_vec<T: Default + NoUninit + AnyBitPattern>(&mut self, count: usize) -> Result<Vec<T>> {
if count == 0 {
return Ok(Vec::new());
}
// Fill the whole element buffer in one read through a byte view.
let mut vec = vec![T::default(); count];
self.read_exact(cast_slice_mut(&mut vec))
.context("Unable to read vec")?;
Ok(vec)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
// Smoke test against a known asset from the usd-wg test suite; the exact
// counts asserted below are properties of that specific file.
#[test]
fn test_read_crate_struct() {
let path = "./vendor/usd-wg-assets/full_assets/ElephantWithMonochord/SoC-ElephantWithMonochord.usdc";
// Skip (rather than fail) when the fixture isn't checked out.
if fs::metadata(path).is_err() {
eprintln!("Skipping test_read_crate_struct: fixture not available at {path}");
return;
}
let mut f = fs::File::open(path).expect("Failed to read crate file");
let file = CrateFile::open(&mut f).expect("Failed to read crate file");
assert_eq!(file.sections.len(), 6);
file.sections.iter().for_each(|section| {
assert!(!section.name().is_empty());
assert_ne!(section.start, 0_u64);
assert_ne!(section.size, 0_u64);
});
assert_eq!(file.tokens.len(), 192);
assert_eq!(file.fields.len(), 158);
file.fields.iter().for_each(|field| {
// Every field must carry a decodable type and a valid token index.
let _ = field.value_rep.ty().unwrap();
let _ = file.tokens[field.token_index];
});
assert_eq!(file.fieldsets.len(), 577);
assert_eq!(file.paths.len(), 248);
assert_eq!(file.specs.len(), 248);
assert!(file.validate().is_ok());
}
}