use std::any::TypeId;
use std::convert::TryFrom;
use std::fmt;
use std::ops::RangeInclusive;
use std::path::{Path, PathBuf};
use bytemuck::{cast_slice, cast_vec};
use num_derive::FromPrimitive;
use num_traits::ToPrimitive;
/// The error type for piece loading and data conversion operations in this module.
#[derive(Debug)]
pub enum Error {
    /// A byte buffer could not be decoded as the requested scalar type.
    InvalidCast(std::io::Error),
    /// Piece data could not be loaded from its referenced source.
    FailedToLoadPieceData,
    /// A piece has no data attached and no source to load it from.
    MissingPieceData,
    /// The type of a loaded piece does not match the enclosing data set type.
    PieceDataMismatch,
    /// A low-level I/O failure.
    IO(std::io::Error),
    /// An error from the top-level VTK reader/writer, boxed to keep this enum small.
    VTKIO(Box<crate::Error>),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::InvalidCast(source) => write!(f, "Invalid cast error: {:?}", source),
Error::MissingPieceData => write!(f, "Missing piece data"),
Error::PieceDataMismatch => write!(f, "Piece type doesn't match data set type"),
Error::IO(source) => write!(f, "IO error: {:?}", source),
Error::VTKIO(source) => write!(f, "VTK IO error: {:?}", source),
Error::FailedToLoadPieceData => write!(f, "Failed to load piece data"),
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::InvalidCast(source) => Some(source),
Error::IO(source) => Some(source),
Error::VTKIO(source) => Some(source),
_ => None,
}
}
}
/// Enables `?` conversion from raw I/O errors.
impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Error {
        Error::IO(e)
    }
}
/// Enables `?` conversion from the top-level VTK error type.
impl From<crate::Error> for Error {
    fn from(e: crate::Error) -> Error {
        Error::VTKIO(Box::new(e))
    }
}
/// A complete VTK data file: version, title, byte order and the data set itself.
#[derive(Clone, PartialEq, Debug)]
pub struct Vtk {
    pub version: Version,
    pub title: String,
    pub byte_order: ByteOrder,
    pub data: DataSet,
    /// Path this file was loaded from, if any. Used to resolve relative paths
    /// to external pieces referenced by the data set.
    pub file_path: Option<PathBuf>,
}
impl Vtk {
pub fn load_all_pieces(&mut self) -> Result<(), Error> {
let Vtk {
data, file_path, ..
} = self;
fn flatten_pieces<P, F>(pieces: &mut Vec<Piece<P>>, mut pick_data_set_pieces: F)
where
F: FnMut(DataSet) -> Option<Vec<Piece<P>>>,
{
let owned_pieces = std::mem::take(pieces);
*pieces = owned_pieces
.into_iter()
.flat_map(|piece| {
let (loaded, rest) = match piece {
Piece::Loaded(data_set) => (pick_data_set_pieces(*data_set), None),
p => (None, Some(p)),
};
loaded.into_iter().flatten().chain(rest.into_iter())
})
.collect();
}
let file_path = file_path.as_ref().map(|p| p.as_ref());
match data {
DataSet::ImageData { pieces, meta, .. } => {
for p in pieces.iter_mut() {
p.load_piece_in_place_recursive(file_path)?;
}
flatten_pieces(pieces, |data_set| match data_set {
DataSet::ImageData { pieces, .. } => Some(pieces),
_ => None,
});
*meta = None;
}
DataSet::StructuredGrid { pieces, meta, .. } => {
for p in pieces.iter_mut() {
p.load_piece_in_place_recursive(file_path)?;
}
flatten_pieces(pieces, |data_set| match data_set {
DataSet::StructuredGrid { pieces, .. } => Some(pieces),
_ => None,
});
*meta = None;
}
DataSet::RectilinearGrid { pieces, meta, .. } => {
for p in pieces.iter_mut() {
p.load_piece_in_place_recursive(file_path)?;
}
flatten_pieces(pieces, |data_set| match data_set {
DataSet::RectilinearGrid { pieces, .. } => Some(pieces),
_ => None,
});
*meta = None;
}
DataSet::UnstructuredGrid { pieces, meta, .. } => {
for p in pieces.iter_mut() {
p.load_piece_in_place_recursive(file_path)?;
}
flatten_pieces(pieces, |data_set| match data_set {
DataSet::UnstructuredGrid { pieces, .. } => Some(pieces),
_ => None,
});
*meta = None;
}
DataSet::PolyData { pieces, meta, .. } => {
for p in pieces.iter_mut() {
p.load_piece_in_place_recursive(file_path)?;
}
flatten_pieces(pieces, |data_set| match data_set {
DataSet::PolyData { pieces, .. } => Some(pieces),
_ => None,
});
*meta = None;
}
_ => {}
}
Ok(())
}
}
/// The major/minor version of a VTK file.
///
/// Derives a total order (major first, then minor) so versions can be compared
/// and used as map keys.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct Version {
    pub major: u8,
    pub minor: u8,
}
impl Version {
    /// Constructs a version from a `(major, minor)` pair.
    pub fn new(pair: (u8, u8)) -> Self {
        Version {
            major: pair.0,
            minor: pair.1,
        }
    }
}
impl From<(u8, u8)> for Version {
    fn from(pair: (u8, u8)) -> Self {
        Version::new(pair)
    }
}
impl fmt::Display for Version {
    /// Formats the version as `major.minor`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}.{}", self.major, self.minor)
    }
}
/// Endianness used when encoding or decoding binary scalar data.
#[derive(Copy, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum ByteOrder {
    BigEndian,
    LittleEndian,
}
impl ByteOrder {
    /// Maps a `byteorder` endian type parameter to the corresponding variant.
    ///
    /// Any type other than `byteorder::BigEndian` maps to `LittleEndian`.
    pub fn new<BO: byteorder::ByteOrder + 'static>() -> ByteOrder {
        if TypeId::of::<BO>() == TypeId::of::<byteorder::BigEndian>() {
            ByteOrder::BigEndian
        } else {
            ByteOrder::LittleEndian
        }
    }
    /// The byte order native to the current target platform.
    pub fn native() -> ByteOrder {
        Self::new::<byteorder::NativeEndian>()
    }
}
/// A dynamically typed buffer of scalars, with one variant per supported VTK
/// scalar type. `Bit` data is backed by a `Vec<u8>`.
#[derive(Clone, PartialEq, Debug)]
pub enum IOBuffer {
    Bit(Vec<u8>),
    U8(Vec<u8>),
    I8(Vec<i8>),
    U16(Vec<u16>),
    I16(Vec<i16>),
    U32(Vec<u32>),
    I32(Vec<i32>),
    U64(Vec<u64>),
    I64(Vec<i64>),
    F32(Vec<f32>),
    F64(Vec<f64>),
}
impl Default for IOBuffer {
    /// An empty `F32` buffer.
    fn default() -> IOBuffer {
        IOBuffer::F32(Vec::new())
    }
}
impl IOBuffer {
    /// Constructs an `IOBuffer` from a `Vec` of primitives.
    ///
    /// Vectors of the directly supported primitive types are moved into the
    /// matching variant without copying. Any other `ToPrimitive` type is
    /// converted element-by-element into an `F64` buffer.
    ///
    /// # Panics
    ///
    /// Panics if, in the fallback path, an element cannot be represented as `f64`.
    pub fn new<T: ToPrimitive + 'static>(v: Vec<T>) -> Self {
        use std::mem::transmute;
        // SAFETY: each transmute below is guarded by a `TypeId` check proving
        // that `T` is exactly the element type of the target variant, so the
        // source and destination vector types are identical.
        unsafe {
            match TypeId::of::<T>() {
                x if x == TypeId::of::<u8>() => IOBuffer::U8(transmute(v)),
                x if x == TypeId::of::<i8>() => IOBuffer::I8(transmute(v)),
                x if x == TypeId::of::<u16>() => IOBuffer::U16(transmute(v)),
                x if x == TypeId::of::<i16>() => IOBuffer::I16(transmute(v)),
                x if x == TypeId::of::<u32>() => IOBuffer::U32(transmute(v)),
                x if x == TypeId::of::<i32>() => IOBuffer::I32(transmute(v)),
                x if x == TypeId::of::<u64>() => IOBuffer::U64(transmute(v)),
                x if x == TypeId::of::<i64>() => IOBuffer::I64(transmute(v)),
                x if x == TypeId::of::<f32>() => IOBuffer::F32(transmute(v)),
                x if x == TypeId::of::<f64>() => IOBuffer::F64(transmute(v)),
                // Fallback: convert unrecognized types to f64 (collects into F64).
                _ => v.into_iter().map(|x| x.to_f64().unwrap()).collect(),
            }
        }
    }
}
/// Converts a vector of primitives into the matching buffer variant; see [`IOBuffer::new`].
impl<T: ToPrimitive + 'static> From<Vec<T>> for IOBuffer {
    fn from(v: Vec<T>) -> IOBuffer {
        IOBuffer::new(v)
    }
}
/// Collects an iterator of primitives into a buffer; see [`IOBuffer::new`].
impl<T: ToPrimitive + 'static> std::iter::FromIterator<T> for IOBuffer {
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = T>,
    {
        IOBuffer::new(iter.into_iter().collect::<Vec<T>>())
    }
}
/// Moves the underlying vector out of the buffer if `T` matches the stored
/// scalar type exactly; yields `None` on any mismatch (including `Bit`).
///
/// NOTE(review): `Into` rather than `From` appears deliberate here — the orphan
/// rules forbid `impl From<IOBuffer> for Option<Vec<T>>` with an uncovered `T`.
impl<T: 'static> Into<Option<Vec<T>>> for IOBuffer {
    fn into(self) -> Option<Vec<T>> {
        use std::mem::transmute;
        // SAFETY: every transmute below is guarded by a `TypeId` check proving
        // that `T` is exactly the stored element type, so the source and
        // destination vector types are identical.
        unsafe {
            Some(match self {
                IOBuffer::U8(v) if TypeId::of::<T>() == TypeId::of::<u8>() => transmute(v),
                IOBuffer::I8(v) if TypeId::of::<T>() == TypeId::of::<i8>() => transmute(v),
                IOBuffer::U16(v) if TypeId::of::<T>() == TypeId::of::<u16>() => transmute(v),
                IOBuffer::I16(v) if TypeId::of::<T>() == TypeId::of::<i16>() => transmute(v),
                IOBuffer::U32(v) if TypeId::of::<T>() == TypeId::of::<u32>() => transmute(v),
                IOBuffer::I32(v) if TypeId::of::<T>() == TypeId::of::<i32>() => transmute(v),
                IOBuffer::U64(v) if TypeId::of::<T>() == TypeId::of::<u64>() => transmute(v),
                IOBuffer::I64(v) if TypeId::of::<T>() == TypeId::of::<i64>() => transmute(v),
                IOBuffer::F32(v) if TypeId::of::<T>() == TypeId::of::<f32>() => transmute(v),
                IOBuffer::F64(v) if TypeId::of::<T>() == TypeId::of::<f64>() => transmute(v),
                _ => return None,
            })
        }
    }
}
/// Applies expression `$e` to the inner vector `$v` of whichever `IOBuffer`
/// variant is matched. Useful for operations (like `len`) that are uniform
/// across element types.
#[macro_export]
macro_rules! match_buf {
    ($buf:expr, $v:pat => $e:expr) => {
        match $buf {
            IOBuffer::Bit($v) => $e,
            IOBuffer::U8($v) => $e,
            IOBuffer::I8($v) => $e,
            IOBuffer::U16($v) => $e,
            IOBuffer::I16($v) => $e,
            IOBuffer::U32($v) => $e,
            IOBuffer::I32($v) => $e,
            IOBuffer::U64($v) => $e,
            IOBuffer::I64($v) => $e,
            IOBuffer::F32($v) => $e,
            IOBuffer::F64($v) => $e,
        }
    };
}
// Generates the body of an `IOBuffer` constructor that decodes a byte source
// `$bytes` into a `Vec<$t>` with the byteorder method `$read`, honoring the
// requested byte order `$bo`, and wraps the result in variant `$variant`.
// The output length is `bytes.len() / size_of::<$t>()`, so any trailing bytes
// that do not form a whole element are left unread. Read failures map to
// `Error::InvalidCast`.
macro_rules! impl_bytes_constructor {
    ($bytes:ident, $bo:ident, $read:ident, $t:ident, $variant:ident) => {{
        use byteorder::ReadBytesExt;
        let mut out = vec![num_traits::Zero::zero(); $bytes.len() / std::mem::size_of::<$t>()];
        let mut reader = std::io::Cursor::new($bytes);
        match $bo {
            ByteOrder::BigEndian => reader
                .$read::<byteorder::BE>(out.as_mut_slice())
                .map_err(|e| Error::InvalidCast(e))?,
            ByteOrder::LittleEndian => reader
                .$read::<byteorder::LE>(out.as_mut_slice())
                .map_err(|e| Error::InvalidCast(e))?,
        }
        Ok(IOBuffer::$variant(out))
    }};
}
impl IOBuffer {
    /// Returns the scalar type of the elements stored in this buffer.
    pub fn scalar_type(&self) -> ScalarType {
        match self {
            IOBuffer::Bit(_) => ScalarType::Bit,
            IOBuffer::U8(_) => ScalarType::U8,
            IOBuffer::I8(_) => ScalarType::I8,
            IOBuffer::U16(_) => ScalarType::U16,
            IOBuffer::I16(_) => ScalarType::I16,
            IOBuffer::U32(_) => ScalarType::U32,
            IOBuffer::I32(_) => ScalarType::I32,
            IOBuffer::U64(_) => ScalarType::U64,
            IOBuffer::I64(_) => ScalarType::I64,
            IOBuffer::F32(_) => ScalarType::F32,
            IOBuffer::F64(_) => ScalarType::F64,
        }
    }
    /// Returns the size in bytes of a single stored scalar.
    pub fn scalar_size(&self) -> usize {
        self.scalar_type().size()
    }
    /// Returns the number of scalars stored.
    pub fn len(&self) -> usize {
        match_buf!(self, v => v.len())
    }
    /// Returns the total size in bytes of all stored scalars.
    pub fn num_bytes(&self) -> usize {
        self.len() * self.scalar_size()
    }
    /// Returns `true` if this buffer holds no scalars.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Encodes this buffer as bytes prefixed by a 64-bit size header written
    /// in byte order `bo`. See `into_bytes_with_size_impl` for details.
    #[cfg(feature = "xml")]
    pub fn into_bytes_with_size(
        self,
        bo: ByteOrder,
        compressor: crate::xml::Compressor,
        compression_level: u32,
    ) -> Vec<u8> {
        use byteorder::WriteBytesExt;
        use byteorder::{BE, LE};
        self.into_bytes_with_size_impl(bo, compressor, compression_level, 8, |mut out, size| {
            match bo {
                ByteOrder::BigEndian => out.write_u64::<BE>(size as u64).unwrap(),
                ByteOrder::LittleEndian => out.write_u64::<LE>(size as u64).unwrap(),
            }
        })
    }
    /// Encodes this buffer as bytes prefixed by a 32-bit size header written
    /// in byte order `bo`. See `into_bytes_with_size_impl` for details.
    #[cfg(feature = "xml")]
    pub fn into_bytes_with_size32(
        self,
        bo: ByteOrder,
        compressor: crate::xml::Compressor,
        compression_level: u32,
    ) -> Vec<u8> {
        use byteorder::WriteBytesExt;
        use byteorder::{BE, LE};
        self.into_bytes_with_size_impl(bo, compressor, compression_level, 4, |mut out, size| {
            match bo {
                ByteOrder::BigEndian => out.write_u32::<BE>(size as u32).unwrap(),
                ByteOrder::LittleEndian => out.write_u32::<LE>(size as u32).unwrap(),
            }
        })
    }
    /// Shared implementation of the `into_bytes_with_size*` functions.
    ///
    /// Reserves `prefix_size` bytes at the front of the output for a size
    /// header, serializes (and optionally compresses) the buffer after it, and
    /// finally calls `write_size` with the number of payload bytes so the
    /// header can be filled in.
    #[rustfmt::skip]
    #[cfg(feature = "xml")]
    fn into_bytes_with_size_impl(
        self,
        bo: ByteOrder,
        compressor: crate::xml::Compressor,
        compression_level: u32,
        prefix_size: usize,
        write_size: impl Fn(&mut [u8], usize),
    ) -> Vec<u8> {
        use crate::xml::Compressor;
        let mut out = vec![0u8; prefix_size];
        let num_uncompressed_bytes = self.num_bytes();
        out.reserve(num_uncompressed_bytes);
        if compressor == Compressor::None || compression_level == 0 {
            // Fast path: byte-sized variants can be moved directly into the
            // output without any byte order handling.
            match self {
                IOBuffer::Bit(mut v) | IOBuffer::U8(mut v) => {
                    out.append(&mut v);
                    write_size(out.as_mut_slice(), num_uncompressed_bytes);
                    return out;
                }
                IOBuffer::I8(v) => {
                    out.append(&mut cast_vec(v));
                    write_size(out.as_mut_slice(), num_uncompressed_bytes);
                    return out;
                }
                _ => {}
            }
        }
        {
            // Compressed paths. If the matching feature is disabled, the arm
            // falls through to the uncompressed encoding below.
            match compressor {
                Compressor::ZLib => {
                    #[cfg(feature = "flate2")]
                    {
                        use flate2::{write::ZlibEncoder, Compression};
                        let mut e = ZlibEncoder::new(out, Compression::new(compression_level));
                        self.write_bytes(&mut e, bo);
                        let mut out = e.finish().unwrap();
                        let num_compressed_bytes = out.len() - prefix_size;
                        write_size(out.as_mut_slice(), num_compressed_bytes);
                        return out;
                    }
                }
                Compressor::LZMA => {
                    #[cfg(feature = "xz2")]
                    {
                        let mut e = xz2::write::XzEncoder::new(out, compression_level);
                        self.write_bytes(&mut e, bo);
                        let mut out = e.finish().unwrap();
                        let num_compressed_bytes = out.len() - prefix_size;
                        write_size(out.as_mut_slice(), num_compressed_bytes);
                        return out;
                    }
                }
                Compressor::LZ4 => {
                    #[cfg(feature = "lz4")]
                    {
                        self.write_bytes(&mut out, bo);
                        let mut out = lz4::compress(&out);
                        let num_compressed_bytes = out.len() - prefix_size;
                        write_size(out.as_mut_slice(), num_compressed_bytes);
                        return out;
                    }
                }
                Compressor::None => {}
            }
        }
        // Uncompressed encoding for all remaining cases.
        self.write_bytes(&mut out, bo);
        write_size(out.as_mut_slice(), num_uncompressed_bytes);
        out.shrink_to_fit();
        out
    }
    /// Writes the contents of this buffer to `out` in binary, honoring byte
    /// order `bo` for all multi-byte scalar types.
    #[cfg(feature = "xml")]
    fn write_bytes<W: byteorder::WriteBytesExt>(self, out: &mut W, bo: ByteOrder) {
        use byteorder::{BE, LE};
        // All multi-byte variants follow the same pattern; the macro selects
        // the endian type parameter from `bo` to avoid repeating each arm.
        macro_rules! write_elems {
            ($v:ident, $write:ident) => {
                match bo {
                    ByteOrder::BigEndian => $v.into_iter().for_each(|x| out.$write::<BE>(x).unwrap()),
                    ByteOrder::LittleEndian => $v.into_iter().for_each(|x| out.$write::<LE>(x).unwrap()),
                }
            };
        }
        match self {
            // Single-byte elements are unaffected by byte order.
            IOBuffer::Bit(v) | IOBuffer::U8(v) => v.into_iter().for_each(|x| out.write_u8(x).unwrap()),
            IOBuffer::I8(v) => v.into_iter().for_each(|x| out.write_i8(x).unwrap()),
            IOBuffer::U16(v) => write_elems!(v, write_u16),
            IOBuffer::I16(v) => write_elems!(v, write_i16),
            IOBuffer::U32(v) => write_elems!(v, write_u32),
            IOBuffer::I32(v) => write_elems!(v, write_i32),
            IOBuffer::U64(v) => write_elems!(v, write_u64),
            IOBuffer::I64(v) => write_elems!(v, write_i64),
            IOBuffer::F32(v) => write_elems!(v, write_f32),
            IOBuffer::F64(v) => write_elems!(v, write_f64),
        }
    }
    /// Decodes `bytes` into a buffer of the given `scalar_type` using byte
    /// order `bo`. `Bit` data is decoded as `U8`.
    pub fn from_bytes(bytes: &[u8], scalar_type: ScalarType, bo: ByteOrder) -> Result<Self, Error> {
        match scalar_type {
            ScalarType::Bit => Ok(IOBuffer::u8_from_bytes(bytes)),
            ScalarType::I8 => Ok(IOBuffer::i8_from_bytes(bytes)),
            ScalarType::U8 => Ok(IOBuffer::u8_from_bytes(bytes)),
            ScalarType::I16 => IOBuffer::i16_from_bytes(bytes, bo),
            ScalarType::U16 => IOBuffer::u16_from_bytes(bytes, bo),
            ScalarType::I32 => IOBuffer::i32_from_bytes(bytes, bo),
            ScalarType::U32 => IOBuffer::u32_from_bytes(bytes, bo),
            ScalarType::I64 => IOBuffer::i64_from_bytes(bytes, bo),
            ScalarType::U64 => IOBuffer::u64_from_bytes(bytes, bo),
            ScalarType::F32 => IOBuffer::f32_from_bytes(bytes, bo),
            ScalarType::F64 => IOBuffer::f64_from_bytes(bytes, bo),
        }
    }
    /// Like [`Self::from_bytes`] but takes ownership of the byte vector, which
    /// avoids a copy for the byte-sized scalar types.
    pub fn from_byte_vec(
        bytes: Vec<u8>,
        scalar_type: ScalarType,
        bo: ByteOrder,
    ) -> Result<Self, Error> {
        match scalar_type {
            ScalarType::Bit => Ok(IOBuffer::u8_from_byte_vec(bytes)),
            ScalarType::I8 => Ok(IOBuffer::i8_from_byte_vec(bytes)),
            ScalarType::U8 => Ok(IOBuffer::u8_from_byte_vec(bytes)),
            ScalarType::I16 => IOBuffer::i16_from_byte_vec(bytes, bo),
            ScalarType::U16 => IOBuffer::u16_from_byte_vec(bytes, bo),
            ScalarType::I32 => IOBuffer::i32_from_byte_vec(bytes, bo),
            ScalarType::U32 => IOBuffer::u32_from_byte_vec(bytes, bo),
            ScalarType::I64 => IOBuffer::i64_from_byte_vec(bytes, bo),
            ScalarType::U64 => IOBuffer::u64_from_byte_vec(bytes, bo),
            ScalarType::F32 => IOBuffer::f32_from_byte_vec(bytes, bo),
            ScalarType::F64 => IOBuffer::f64_from_byte_vec(bytes, bo),
        }
    }
    /// Copies `bytes` into a `U8` buffer.
    pub fn u8_from_bytes(bytes: &[u8]) -> Self {
        IOBuffer::U8(bytes.to_vec())
    }
    /// Copies `bytes`, reinterpreted as `i8`, into an `I8` buffer.
    pub fn i8_from_bytes(bytes: &[u8]) -> Self {
        IOBuffer::I8(cast_slice(bytes).to_vec())
    }
    /// Decodes `bytes` into a `U16` buffer using byte order `bo`.
    pub fn u16_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u16_into, u16, U16)
    }
    /// Decodes `bytes` into an `I16` buffer using byte order `bo`.
    pub fn i16_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i16_into, i16, I16)
    }
    /// Decodes `bytes` into a `U32` buffer using byte order `bo`.
    pub fn u32_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u32_into, u32, U32)
    }
    /// Decodes `bytes` into an `I32` buffer using byte order `bo`.
    pub fn i32_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i32_into, i32, I32)
    }
    /// Decodes `bytes` into a `U64` buffer using byte order `bo`.
    pub fn u64_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u64_into, u64, U64)
    }
    /// Decodes `bytes` into an `I64` buffer using byte order `bo`.
    pub fn i64_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i64_into, i64, I64)
    }
    /// Decodes `bytes` into an `F32` buffer using byte order `bo`.
    pub fn f32_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_f32_into, f32, F32)
    }
    /// Decodes `bytes` into an `F64` buffer using byte order `bo`.
    pub fn f64_from_bytes(bytes: &[u8], bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_f64_into, f64, F64)
    }
    /// Moves `bytes` into a `U8` buffer without copying.
    pub fn u8_from_byte_vec(bytes: Vec<u8>) -> Self {
        IOBuffer::U8(bytes)
    }
    /// Moves `bytes`, reinterpreted as `i8`, into an `I8` buffer without copying.
    pub fn i8_from_byte_vec(bytes: Vec<u8>) -> Self {
        IOBuffer::I8(cast_vec(bytes))
    }
    /// Decodes a byte vector into a `U16` buffer using byte order `bo`.
    pub fn u16_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u16_into, u16, U16)
    }
    /// Decodes a byte vector into an `I16` buffer using byte order `bo`.
    pub fn i16_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i16_into, i16, I16)
    }
    /// Decodes a byte vector into a `U32` buffer using byte order `bo`.
    pub fn u32_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u32_into, u32, U32)
    }
    /// Decodes a byte vector into an `I32` buffer using byte order `bo`.
    pub fn i32_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i32_into, i32, I32)
    }
    /// Decodes a byte vector into a `U64` buffer using byte order `bo`.
    pub fn u64_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_u64_into, u64, U64)
    }
    /// Decodes a byte vector into an `I64` buffer using byte order `bo`.
    pub fn i64_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_i64_into, i64, I64)
    }
    /// Decodes a byte vector into an `F32` buffer using byte order `bo`.
    pub fn f32_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_f32_into, f32, F32)
    }
    /// Decodes a byte vector into an `F64` buffer using byte order `bo`.
    pub fn f64_from_byte_vec(bytes: Vec<u8>, bo: ByteOrder) -> Result<Self, Error> {
        impl_bytes_constructor!(bytes, bo, read_f64_into, f64, F64)
    }
    /// Returns a borrowing iterator over the stored scalars if `T` matches the
    /// stored scalar type exactly; `None` otherwise.
    pub fn iter<T: Scalar>(&self) -> Option<std::slice::Iter<T>> {
        T::io_buf_vec_ref(self).map(|v| v.iter())
    }
    /// Moves the underlying vector out if `T` matches the stored scalar type
    /// exactly; `None` otherwise.
    pub fn into_vec<T: Scalar>(self) -> Option<Vec<T>> {
        T::io_buf_into_vec(self)
    }
    /// Converts the stored scalars element-by-element into `T`.
    ///
    /// Returns `None` for `Bit` buffers or when any element fails to convert.
    pub fn cast_into<T: Scalar>(self) -> Option<Vec<T>> {
        use IOBuffer::*;
        match self {
            Bit(_) => None,
            U8(v) => v.into_iter().map(|x| T::from_u8(x)).collect(),
            I8(v) => v.into_iter().map(|x| T::from_i8(x)).collect(),
            U16(v) => v.into_iter().map(|x| T::from_u16(x)).collect(),
            I16(v) => v.into_iter().map(|x| T::from_i16(x)).collect(),
            U32(v) => v.into_iter().map(|x| T::from_u32(x)).collect(),
            I32(v) => v.into_iter().map(|x| T::from_i32(x)).collect(),
            U64(v) => v.into_iter().map(|x| T::from_u64(x)).collect(),
            I64(v) => v.into_iter().map(|x| T::from_i64(x)).collect(),
            F32(v) => v.into_iter().map(|x| T::from_f32(x)).collect(),
            F64(v) => v.into_iter().map(|x| T::from_f64(x)).collect(),
        }
    }
}
// Generates the body of a `Scalar::from_bytes` implementation: reads a single
// value from the front of `$bytes` with the byteorder method `$read` using
// byte order `$bo`, mapping read failures to `Error::InvalidCast`.
macro_rules! impl_from_bytes {
    ($bytes:ident, $bo:ident, $read:ident) => {{
        use byteorder::ReadBytesExt;
        let mut reader = std::io::Cursor::new($bytes);
        Ok(match $bo {
            ByteOrder::BigEndian => reader
                .$read::<byteorder::BE>()
                .map_err(|e| Error::InvalidCast(e))?,
            ByteOrder::LittleEndian => reader
                .$read::<byteorder::LE>()
                .map_err(|e| Error::InvalidCast(e))?,
        })
    }};
}
/// A scalar type that can be stored in an [`IOBuffer`].
///
/// Provides typed access to the matching buffer variant, and decoding of a
/// single value from raw bytes.
pub trait Scalar: num_traits::FromPrimitive
where
    Self: Sized,
{
    /// Borrows the underlying vector if `io_buf` stores exactly this type.
    fn io_buf_vec_ref(io_buf: &IOBuffer) -> Option<&Vec<Self>>;
    /// Takes the underlying vector if `io_buf` stores exactly this type.
    fn io_buf_into_vec(io_buf: IOBuffer) -> Option<Vec<Self>>;
    /// Decodes a single value from the front of `bytes` using `byte_order`.
    fn from_bytes(bytes: &[u8], byte_order: ByteOrder) -> Result<Self, Error>;
}
// Implements the `Scalar` trait for primitive type `$t` stored in `IOBuffer`
// variant `$v`. Dedicated rules exist for `read_u8`/`read_i8`, which take no
// byte order; all other reads go through `impl_from_bytes!`.
macro_rules! impl_scalar {
    // Shared accessors mapping buffer variant `$v` to `Vec<$t>`.
    (@iobuf $t:ident, $v:ident) => {
        fn io_buf_vec_ref(io_buf: &IOBuffer) -> Option<&Vec<Self>> {
            match io_buf {
                IOBuffer::$v(v) => Some(v),
                _ => None,
            }
        }
        fn io_buf_into_vec(io_buf: IOBuffer) -> Option<Vec<Self>> {
            match io_buf {
                IOBuffer::$v(v) => Some(v),
                _ => None,
            }
        }
    };
    // Single-byte unsigned read; byte order is irrelevant.
    ($t:ident, $v:ident, read_u8) => {
        impl Scalar for $t {
            impl_scalar! { @iobuf $t, $v }
            fn from_bytes(bytes: &[u8], _: ByteOrder) -> Result<Self, Error> {
                use byteorder::ReadBytesExt;
                std::io::Cursor::new(bytes)
                    .read_u8()
                    .map_err(|e| Error::InvalidCast(e))
            }
        }
    };
    // Single-byte signed read; byte order is irrelevant.
    ($t:ident, $v:ident, read_i8) => {
        impl Scalar for $t {
            impl_scalar! { @iobuf $t, $v }
            fn from_bytes(bytes: &[u8], _: ByteOrder) -> Result<Self, Error> {
                use byteorder::ReadBytesExt;
                std::io::Cursor::new(bytes)
                    .read_i8()
                    .map_err(|e| Error::InvalidCast(e))
            }
        }
    };
    // Multi-byte reads honoring the requested byte order.
    ($t:ident, $v:ident, $read:ident) => {
        impl Scalar for $t {
            impl_scalar! { @iobuf $t, $v }
            fn from_bytes(bytes: &[u8], byte_order: ByteOrder) -> Result<Self, Error> {
                impl_from_bytes!(bytes, byte_order, $read)
            }
        }
    };
}
// Implement `Scalar` for all supported primitive types, mapping each to its
// `IOBuffer` variant and its byteorder read method.
impl_scalar!(u8, U8, read_u8);
impl_scalar!(i8, I8, read_i8);
impl_scalar!(u16, U16, read_u16);
impl_scalar!(i16, I16, read_i16);
impl_scalar!(u32, U32, read_u32);
impl_scalar!(i32, I32, read_i32);
impl_scalar!(u64, U64, read_u64);
impl_scalar!(i64, I64, read_i64);
impl_scalar!(f32, F32, read_f32);
impl_scalar!(f64, F64, read_f64);
impl std::fmt::Display for IOBuffer {
    /// Writes the buffer contents as space separated values.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match_buf!(self, v => {
            for (k, elem) in v.iter().enumerate() {
                if k > 0 {
                    write!(f, " ")?;
                }
                write!(f, "{}", elem)?;
            }
        });
        Ok(())
    }
}
/// A named array of scalars with an element descriptor of type `E`.
///
/// `E` describes how the flat scalar buffer is grouped into elements.
#[derive(Clone, PartialEq, Debug)]
pub struct DataArrayBase<E> {
    /// The name of this array.
    pub name: String,
    /// Describes the elements stored in `data` (e.g. component count).
    pub elem: E,
    /// The flat buffer of scalars backing this array.
    pub data: IOBuffer,
}
/// A field array: elements are described by a plain component count.
pub type FieldArray = DataArrayBase<u32>;
/// An attribute data array: elements are described by an [`ElementType`].
pub type DataArray = DataArrayBase<ElementType>;
impl Default for DataArray {
    /// An unnamed, empty, generic single-component array.
    fn default() -> DataArray {
        DataArray {
            name: String::new(),
            elem: ElementType::default(),
            data: IOBuffer::default(),
        }
    }
}
impl Default for FieldArray {
    /// An unnamed, empty, single-component field array.
    fn default() -> FieldArray {
        FieldArray {
            name: String::new(),
            elem: 1,
            data: IOBuffer::default(),
        }
    }
}
impl From<IOBuffer> for DataArray {
    /// Wraps the buffer in an unnamed, generic, single-component data array.
    fn from(buf: IOBuffer) -> DataArray {
        DataArray {
            data: buf,
            ..Default::default()
        }
    }
}
impl<E> DataArrayBase<E> {
    /// Returns the scalar type of the underlying buffer.
    pub fn scalar_type(&self) -> ScalarType {
        self.data.scalar_type()
    }
    /// Returns the number of scalars stored (same as [`Self::len`]).
    pub fn num_elem(&self) -> usize {
        self.data.len()
    }
    /// Returns the number of scalars stored in the underlying buffer.
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// Returns `true` if the underlying buffer is empty.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Builder: replaces the stored data with the given vector of primitives.
    pub fn with_vec<T: ToPrimitive + 'static>(self, data: Vec<T>) -> Self {
        self.with_buf(IOBuffer::new(data))
    }
    /// Builder: replaces the stored data with the given buffer.
    pub fn with_buf(mut self, data: IOBuffer) -> Self {
        self.data = data;
        self
    }
    /// Builder: replaces the stored data with anything convertible to a buffer.
    pub fn with_data(self, new_data: impl Into<IOBuffer>) -> Self {
        self.with_buf(new_data.into())
    }
}
impl DataArray {
    /// Creates an empty scalars array with the given lookup table name.
    pub fn scalars_with_lookup(
        name: impl Into<String>,
        num_comp: u32,
        lookup_table: impl Into<String>,
    ) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Scalars {
                num_comp,
                lookup_table: Some(lookup_table.into()),
            },
            ..Default::default()
        }
    }
    /// Creates an empty scalars array without a lookup table.
    pub fn scalars(name: impl Into<String>, num_comp: u32) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Scalars {
                num_comp,
                lookup_table: None,
            },
            ..Default::default()
        }
    }
    /// Creates an empty color scalars array with `num_comp` components.
    pub fn color_scalars(name: impl Into<String>, num_comp: u32) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::ColorScalars(num_comp),
            ..Default::default()
        }
    }
    /// Creates an empty lookup table array.
    pub fn lookup_table(name: impl Into<String>) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::LookupTable,
            ..Default::default()
        }
    }
    /// Creates an empty vectors array.
    pub fn vectors(name: impl Into<String>) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Vectors,
            ..Default::default()
        }
    }
    /// Creates an empty normals array.
    pub fn normals(name: impl Into<String>) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Normals,
            ..Default::default()
        }
    }
    /// Creates an empty tensors array.
    pub fn tensors(name: impl Into<String>) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Tensors,
            ..Default::default()
        }
    }
    /// Creates an empty texture coordinates array with `num_comp` dimensions.
    pub fn tcoords(name: impl Into<String>, num_comp: u32) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::TCoords(num_comp),
            ..Default::default()
        }
    }
    /// Creates an empty generic array with `num_comp` components per element.
    pub fn new(name: impl Into<String>, num_comp: u32) -> Self {
        DataArray {
            name: name.into(),
            elem: ElementType::Generic(num_comp),
            ..Default::default()
        }
    }
    /// Returns the number of components per element, as given by the element type.
    pub fn num_comp(&self) -> usize {
        self.elem.num_comp() as usize
    }
}
impl FieldArray {
    /// Creates an empty field array with the given name and component count.
    pub fn new(name: impl Into<String>, num_comp: u32) -> FieldArray {
        FieldArray {
            name: name.into(),
            elem: num_comp,
            ..Default::default()
        }
    }
    /// Returns the number of components per element in this field array.
    pub fn num_comp(&self) -> usize {
        self.elem as usize
    }
}
/// Describes how the scalars of a [`DataArray`] are grouped into elements.
#[derive(Clone, PartialEq, Debug)]
pub enum ElementType {
    /// Color scalars with the given number of components per element.
    ColorScalars(u32),
    /// A lookup table of RGBA entries.
    LookupTable,
    /// Plain scalars with an optional named lookup table.
    Scalars {
        num_comp: u32,
        lookup_table: Option<String>,
    },
    /// 3D vectors.
    Vectors,
    /// 3D normals.
    Normals,
    /// Texture coordinates with the given dimension.
    TCoords(u32),
    /// 3x3 tensors.
    Tensors,
    /// Generic elements with the given number of components.
    Generic(u32),
}
impl Default for ElementType {
    /// Generic single-component elements.
    fn default() -> ElementType {
        ElementType::Generic(1)
    }
}
impl ElementType {
    /// Returns the number of components per element.
    pub fn num_comp(&self) -> u32 {
        match self {
            ElementType::ColorScalars(n) => *n,
            // Lookup table entries are RGBA quadruples.
            ElementType::LookupTable => 4,
            ElementType::Scalars { num_comp, .. } => *num_comp,
            ElementType::Vectors | ElementType::Normals => 3,
            ElementType::TCoords(n) => *n,
            // Tensors are 3x3 matrices.
            ElementType::Tensors => 9,
            ElementType::Generic(n) => *n,
        }
    }
}
/// An attribute attached to the points or cells of a data set.
#[derive(Clone, PartialEq, Debug)]
pub enum Attribute {
    /// A single typed data array.
    DataArray(DataArray),
    /// A named collection of field arrays.
    Field {
        name: String,
        data_array: Vec<FieldArray>,
    },
}
impl Attribute {
    /// Returns the name of this attribute.
    pub fn name(&self) -> &str {
        match self {
            Attribute::Field { name, .. } => name.as_str(),
            Attribute::DataArray(data_array) => data_array.name.as_str(),
        }
    }
    /// Creates an empty scalars attribute with a named lookup table.
    pub fn scalars_with_lookup(
        name: impl Into<String>,
        num_comp: u32,
        lookup_table: impl Into<String>,
    ) -> Attribute {
        Attribute::DataArray(DataArray::scalars_with_lookup(name, num_comp, lookup_table))
    }
    /// Creates an empty scalars attribute without a lookup table.
    pub fn scalars(name: impl Into<String>, num_comp: u32) -> Attribute {
        Attribute::DataArray(DataArray::scalars(name, num_comp))
    }
    /// Creates an empty color scalars attribute.
    pub fn color_scalars(name: impl Into<String>, num_comp: u32) -> Attribute {
        Attribute::DataArray(DataArray::color_scalars(name, num_comp))
    }
    /// Creates an empty lookup table attribute.
    pub fn lookup_table(name: impl Into<String>) -> Attribute {
        Attribute::DataArray(DataArray::lookup_table(name))
    }
    /// Creates an empty vectors attribute.
    pub fn vectors(name: impl Into<String>) -> Attribute {
        Attribute::DataArray(DataArray::vectors(name))
    }
    /// Creates an empty normals attribute.
    pub fn normals(name: impl Into<String>) -> Attribute {
        Attribute::DataArray(DataArray::normals(name))
    }
    /// Creates an empty tensors attribute.
    pub fn tensors(name: impl Into<String>) -> Attribute {
        Attribute::DataArray(DataArray::tensors(name))
    }
    /// Creates an empty texture coordinates attribute.
    pub fn tcoords(name: impl Into<String>, num_comp: u32) -> Attribute {
        Attribute::DataArray(DataArray::tcoords(name, num_comp))
    }
    /// Creates an empty generic attribute with `num_comp` components per element.
    pub fn generic(name: impl Into<String>, num_comp: u32) -> Attribute {
        Attribute::DataArray(DataArray::new(name, num_comp))
    }
    /// Creates an empty field attribute.
    pub fn field(name: impl Into<String>) -> Attribute {
        Attribute::Field {
            name: name.into(),
            data_array: Vec::new(),
        }
    }
    /// Builder: sets the data of a `DataArray` attribute.
    /// Has no effect on `Field` attributes.
    pub fn with_data(mut self, new_data: impl Into<IOBuffer>) -> Self {
        if let Attribute::DataArray(DataArray { data, .. }) = &mut self {
            *data = new_data.into();
        }
        self
    }
    /// Builder: appends the given field arrays to a `Field` attribute.
    /// Has no effect on `DataArray` attributes.
    pub fn with_field_data(mut self, arrays: impl IntoIterator<Item = FieldArray>) -> Self {
        if let Attribute::Field { data_array, .. } = &mut self {
            data_array.extend(arrays.into_iter());
        }
        self
    }
    /// Builder: appends a single field array to a `Field` attribute.
    /// Has no effect on `DataArray` attributes.
    pub fn add_field_data(mut self, data: impl Into<FieldArray>) -> Self {
        if let Attribute::Field { data_array, .. } = &mut self {
            data_array.push(data.into());
        }
        self
    }
}
/// Point and cell attributes of a data set piece.
#[derive(Clone, PartialEq, Debug, Default)]
pub struct Attributes {
    /// Attributes associated with points.
    pub point: Vec<Attribute>,
    /// Attributes associated with cells.
    pub cell: Vec<Attribute>,
}
impl Attributes {
    /// Creates an empty set of point and cell attributes.
    pub fn new() -> Self {
        Default::default()
    }
}
/// Cell vertex connectivity in either of the two layouts used by VTK files.
#[derive(Clone, PartialEq, Debug)]
pub enum VertexNumbers {
    /// Legacy layout: a flat list where each cell is a vertex count followed
    /// by that many vertex indices.
    Legacy {
        num_cells: u32,
        vertices: Vec<u32>,
    },
    /// XML layout: a flat connectivity list plus a cumulative offset (one past
    /// the end of each cell's vertices).
    XML {
        connectivity: Vec<u64>,
        offsets: Vec<u64>,
    },
}
impl Default for VertexNumbers {
    /// An empty XML-style connectivity.
    fn default() -> VertexNumbers {
        VertexNumbers::XML {
            connectivity: Vec::new(),
            offsets: Vec::new(),
        }
    }
}
impl VertexNumbers {
    /// Returns the total number of vertex indices (excluding per-cell counts).
    #[inline]
    pub fn num_verts(&self) -> usize {
        match self {
            Self::Legacy {
                vertices,
                num_cells,
            } => vertices.len() - *num_cells as usize,
            Self::XML { connectivity, .. } => connectivity.len(),
        }
    }
    /// Returns the number of cells represented.
    #[inline]
    pub fn num_cells(&self) -> usize {
        match self {
            Self::Legacy { num_cells, .. } => *num_cells as usize,
            Self::XML { offsets, .. } => offsets.len(),
        }
    }
    /// Converts this connectivity into the legacy `(num_cells, vertices)` layout.
    ///
    /// # Panics
    ///
    /// Panics if any count or index does not fit in `u32`.
    pub fn into_legacy(self) -> (u32, Vec<u32>) {
        match self {
            Self::Legacy {
                num_cells,
                vertices,
            } => (num_cells, vertices),
            Self::XML {
                connectivity,
                offsets,
            } => {
                let num_cells = offsets.len();
                let num_verts = connectivity.len();
                let mut vertices = Vec::with_capacity(num_verts + num_cells);
                let mut begin = 0usize;
                for off in offsets {
                    let end = usize::try_from(off).unwrap();
                    // Each cell contributes its vertex count followed by its
                    // vertex indices.
                    vertices.push(u32::try_from(end - begin).unwrap());
                    vertices.extend(
                        connectivity[begin..end]
                            .iter()
                            .map(|&c| u32::try_from(c).unwrap()),
                    );
                    begin = end;
                }
                (u32::try_from(num_cells).unwrap(), vertices)
            }
        }
    }
    /// Converts this connectivity into the XML `(connectivity, offsets)` layout.
    pub fn into_xml(self) -> (Vec<u64>, Vec<u64>) {
        match self {
            Self::Legacy {
                num_cells,
                vertices,
            } => {
                let num_cells = usize::try_from(num_cells).unwrap();
                let num_verts = vertices.len();
                let mut connectivity = Vec::with_capacity(num_verts - num_cells);
                let mut offsets = Vec::with_capacity(num_cells);
                let mut iter = vertices.into_iter().map(u64::from);
                let mut last_off = 0u64;
                // The first entry of each cell is its vertex count; the rest
                // are its vertex indices.
                while let Some(count) = iter.next() {
                    last_off += count;
                    offsets.push(last_off);
                    for _ in 0..count {
                        connectivity.push(iter.next().unwrap());
                    }
                }
                assert_eq!(connectivity.len(), num_verts - num_cells);
                assert_eq!(offsets.len(), num_cells);
                (connectivity, offsets)
            }
            Self::XML {
                connectivity,
                offsets,
            } => (connectivity, offsets),
        }
    }
}
/// Cell topology: vertex connectivity plus a type for each cell.
#[derive(Clone, PartialEq, Debug, Default)]
pub struct Cells {
    /// Vertex indices making up each cell.
    pub cell_verts: VertexNumbers,
    /// The type of each cell; expected to have one entry per cell.
    pub types: Vec<CellType>,
}
impl Cells {
    /// Returns the total number of vertex indices referenced by all cells.
    #[inline]
    pub fn num_verts(&self) -> usize {
        self.cell_verts.num_verts()
    }
    /// Returns the number of cells, as given by the length of `types`.
    #[inline]
    pub fn num_cells(&self) -> usize {
        self.types.len()
    }
}
/// The type of a cell, with discriminants matching the integer identifiers
/// used by VTK files (hence `FromPrimitive` for parsing numeric values).
#[derive(Copy, Clone, PartialEq, Debug, FromPrimitive)]
pub enum CellType {
    Vertex = 1,
    PolyVertex = 2,
    Line = 3,
    PolyLine = 4,
    Triangle = 5,
    TriangleStrip = 6,
    Polygon = 7,
    Pixel = 8,
    Quad = 9,
    Tetra = 10,
    Voxel = 11,
    Hexahedron = 12,
    Wedge = 13,
    Pyramid = 14,
    QuadraticEdge = 21,
    QuadraticTriangle = 22,
    QuadraticQuad = 23,
    QuadraticTetra = 24,
    QuadraticHexahedron = 25,
    QuadraticWedge = 26,
    QuadraticPyramid = 27,
    BiquadraticQuad = 28,
    TriquadraticHexahedron = 29,
    QuadraticLinearQuad = 30,
    QuadraticLinearWedge = 31,
    BiquadraticQuadraticWedge = 32,
    BiquadraticQuadraticHexahedron = 33,
    BiquadraticTriangle = 34,
    CubicLine = 35,
    ConvexPointSet = 41,
    Polyhedron = 42,
    ParametricCurve = 51,
    ParametricSurface = 52,
    ParametricTriSurface = 53,
    ParametricQuadSurface = 54,
    ParametricTetraRegion = 55,
    ParametricHexRegion = 56,
    HigherOrderEdge = 60,
    HigherOrderTriangle = 61,
    HigherOrderQuad = 62,
    HigherOrderPolygon = 63,
    HigherOrderTetrahedron = 64,
    HigherOrderWedge = 65,
    HigherOrderPyramid = 66,
    HigherOrderHexahedron = 67,
    LagrangeCurve = 68,
    LagrangeTriangle = 69,
    LagrangeQuadrilateral = 70,
    LagrangeTetrahedron = 71,
    LagrangeHexahedron = 72,
    LagrangeWedge = 73,
    LagrangePyramid = 74,
    BezierCurve = 75,
    BezierTriangle = 76,
    BezierQuadrilateral = 77,
    BezierTetrahedron = 78,
    BezierHexahedron = 79,
    BezierWedge = 80,
    BezierPyramid = 81,
}
/// Per-axis point coordinate arrays, as used by rectilinear grid pieces.
#[derive(Clone, Debug, PartialEq, Default)]
pub struct Coordinates {
    pub x: IOBuffer,
    pub y: IOBuffer,
    pub z: IOBuffer,
}
/// The extent of a structured data set, given either as per-axis point counts
/// or as per-axis inclusive point index ranges.
#[derive(Clone, PartialEq, Debug)]
pub enum Extent {
    /// Number of points along each axis.
    Dims([u32; 3]),
    /// Inclusive point index ranges along each axis.
    Ranges(RangeExtent),
}
/// A triple of inclusive point index ranges, one per axis.
pub type RangeExtent = [RangeInclusive<i32>; 3];
impl Extent {
    /// Converts this extent into per-axis point counts.
    pub fn into_dims(self) -> [u32; 3] {
        match self {
            Extent::Dims(dims) => dims,
            Extent::Ranges([x, y, z]) => {
                // An inclusive range a..=b contains b - a + 1 points; reversed
                // ranges count as zero.
                let dist = |x: RangeInclusive<i32>| (x.end() - x.start() + 1).max(0) as u32;
                [dist(x), dist(y), dist(z)]
            }
        }
    }
    /// Converts this extent into per-axis inclusive index ranges starting at 0.
    pub fn into_ranges(self) -> [RangeInclusive<i32>; 3] {
        match self {
            // A dimension of n points spans indices 0..=n-1, so that
            // Dims -> Ranges -> Dims round-trips to the same point counts.
            // NOTE: dims larger than i32::MAX wrap in the `as` cast.
            Extent::Dims([nx, ny, nz]) => [
                0..=(nx as i32 - 1),
                0..=(ny as i32 - 1),
                0..=(nz as i32 - 1),
            ],
            Extent::Ranges(rng) => rng,
        }
    }
    /// Returns the total number of points contained in this extent.
    pub fn num_points(&self) -> u64 {
        let [nx, ny, nz] = self.clone().into_dims();
        nx as u64 * ny as u64 * nz as u64
    }
    /// Returns the total number of cells contained in this extent.
    ///
    /// Each axis with n points contributes n - 1 cells; axes with zero points
    /// contribute zero (the subtraction saturates instead of underflowing).
    pub fn num_cells(&self) -> u64 {
        let [nx, ny, nz] = self.clone().into_dims();
        (nx as u64).saturating_sub(1)
            * (ny as u64).saturating_sub(1)
            * (nz as u64).saturating_sub(1)
    }
}
impl Default for Extent {
    /// A single-point extent at the origin.
    fn default() -> Extent {
        Extent::Ranges([0..=0, 0..=0, 0..=0])
    }
}
/// A piece of a data set, which may reside in another file, be fully loaded,
/// or be stored inline.
#[derive(Clone, Debug, PartialEq)]
pub enum Piece<P> {
    /// A reference to a piece in an external file, with an optional extent
    /// describing the region it covers.
    Source(String, Option<Extent>),
    /// A piece loaded as a complete data set.
    Loaded(Box<DataSet>),
    /// Piece data stored directly inline.
    Inline(Box<P>),
}
/// Conversion from a generic `DataSet` into a concrete piece data type.
pub trait PieceData: Sized {
    /// Extracts piece data from `data_set`, resolving external references
    /// relative to `source_path`.
    fn from_data_set(data_set: DataSet, source_path: Option<&Path>) -> Result<Self, Error>;
}
/// Resolves the path to an external piece file.
///
/// Relative paths are interpreted relative to the directory containing the
/// referencing file (`source_path`); rooted paths are returned unchanged.
fn build_piece_path(path: impl AsRef<Path>, source_path: Option<&Path>) -> PathBuf {
    let path = path.as_ref();
    match source_path.and_then(Path::parent) {
        Some(root) if !path.has_root() => root.join(path),
        _ => path.to_path_buf(),
    }
}
impl<P: PieceData> Piece<P> {
    /// Loads an external (`Source`) piece in place, recursively loading any
    /// pieces the imported file references, and replaces `self` with the
    /// resulting `Loaded` variant. `Loaded` and `Inline` pieces are untouched.
    pub fn load_piece_in_place_recursive(
        &mut self,
        source_path: Option<&Path>,
    ) -> Result<(), Error> {
        match self {
            Piece::Source(path, _) => {
                // Resolve the referenced file relative to the referencing file.
                let piece_path = build_piece_path(path, source_path);
                let mut piece_vtk = Vtk::import(&piece_path)?;
                piece_vtk.load_all_pieces()?;
                let piece = Box::new(piece_vtk.data);
                *self = Piece::Loaded(piece);
            }
            _ => {}
        }
        Ok(())
    }
    /// Returns this piece's data without modifying `self`: external sources
    /// are imported on the fly, loaded/inline data is cloned.
    pub fn load_piece_data(&self, source_path: Option<&Path>) -> Result<P, Error>
    where
        P: Clone,
    {
        match self {
            Piece::Source(path, _) => {
                let piece_path = build_piece_path(path, source_path);
                let piece_vtk = Vtk::import(&piece_path)?;
                P::from_data_set(piece_vtk.data, Some(piece_path.as_ref()))
            }
            Piece::Loaded(data_set) => P::from_data_set(*data_set.clone(), source_path),
            Piece::Inline(piece_data) => Ok(*piece_data.clone()),
        }
    }
    /// Consumes this piece and returns its data, importing external sources
    /// when necessary.
    pub fn into_loaded_piece_data(self, source_path: Option<&Path>) -> Result<P, Error> {
        match self {
            Piece::Source(path, _) => {
                let piece_path = build_piece_path(path, source_path);
                let piece_vtk = Vtk::import(&piece_path)?;
                P::from_data_set(piece_vtk.data, Some(piece_path.as_ref()))
            }
            Piece::Loaded(data_set) => P::from_data_set(*data_set, source_path),
            Piece::Inline(piece_data) => Ok(*piece_data),
        }
    }
    /// Asynchronous counterpart of [`Self::into_loaded_piece_data`].
    #[cfg(feature = "async_blocked")]
    pub async fn into_loaded_piece_data_async(
        mut self,
        source_path: Option<&Path>,
    ) -> Result<P, Error> {
        match self {
            Piece::Source(path, _) => {
                let piece_path = build_piece_path(path, source_path);
                let piece_vtk = crate::import_async(&piece_path).await?;
                P::from_data_set(piece_vtk.data, Some(piece_path.as_ref()))
            }
            Piece::Loaded(data_set) => P::from_data_set(*data_set, source_path),
            Piece::Inline(piece_data) => Ok(*piece_data),
        }
    }
}
/// A single piece of an image data (uniform grid) set.
#[derive(Clone, Debug, PartialEq)]
pub struct ImageDataPiece {
    /// The portion of the overall grid covered by this piece.
    pub extent: Extent,
    /// Point and cell attributes attached to this piece.
    pub data: Attributes,
}
/// A single piece of a rectilinear grid data set.
#[derive(Clone, Debug, PartialEq)]
pub struct RectilinearGridPiece {
    /// The portion of the overall grid covered by this piece.
    pub extent: Extent,
    /// Per-axis point coordinate arrays defining the grid.
    pub coords: Coordinates,
    /// Point and cell attributes attached to this piece.
    pub data: Attributes,
}
/// A single piece of a structured (curvilinear) grid data set.
#[derive(Clone, Debug, PartialEq)]
pub struct StructuredGridPiece {
    /// The portion of the overall grid covered by this piece.
    pub extent: Extent,
    /// Flat buffer of point coordinates, three components per point
    /// (see `num_points`).
    pub points: IOBuffer,
    /// Point and cell attributes attached to this piece.
    pub data: Attributes,
}
impl StructuredGridPiece {
    /// Number of points in this piece.
    ///
    /// The `points` buffer stores three coordinates per point, hence the division.
    pub fn num_points(&self) -> usize {
        let num_coords = self.points.len();
        num_coords / 3
    }
}
/// A single piece of a polygonal data set.
///
/// Each topology section is optional; an absent section contributes no cells.
#[derive(Clone, Debug, PartialEq, Default)]
pub struct PolyDataPiece {
    /// Flat buffer of point coordinates, three components per point.
    pub points: IOBuffer,
    /// Vertex cell connectivity, if any.
    pub verts: Option<VertexNumbers>,
    /// Line cell connectivity, if any.
    pub lines: Option<VertexNumbers>,
    /// Polygon cell connectivity, if any.
    pub polys: Option<VertexNumbers>,
    /// Triangle strip connectivity, if any.
    pub strips: Option<VertexNumbers>,
    /// Point and cell attributes attached to this piece.
    pub data: Attributes,
}
impl PolyDataPiece {
pub fn num_points(&self) -> usize {
self.points.len() / 3
}
pub fn num_verts(&self) -> usize {
self.verts
.as_ref()
.map(|verts| verts.num_cells())
.unwrap_or(0)
}
pub fn num_lines(&self) -> usize {
self.lines
.as_ref()
.map(|lines| lines.num_cells())
.unwrap_or(0)
}
pub fn num_polys(&self) -> usize {
self.polys
.as_ref()
.map(|polys| polys.num_cells())
.unwrap_or(0)
}
pub fn num_strips(&self) -> usize {
self.strips
.as_ref()
.map(|strips| strips.num_cells())
.unwrap_or(0)
}
pub fn num_cells(&self) -> usize {
self.num_verts() + self.num_lines() + self.num_polys() + self.num_strips()
}
}
/// A single piece of an unstructured grid data set.
#[derive(Clone, Debug, PartialEq)]
pub struct UnstructuredGridPiece {
    /// Flat buffer of point coordinates, three components per point.
    pub points: IOBuffer,
    /// Cell topology of this piece.
    pub cells: Cells,
    /// Point and cell attributes attached to this piece.
    pub data: Attributes,
}
impl UnstructuredGridPiece {
    /// Number of points in this piece.
    ///
    /// Each point occupies three consecutive entries of the `points` buffer.
    pub fn num_points(&self) -> usize {
        let coord_count = self.points.len();
        coord_count / 3
    }
}
// Implements `TryFrom<DataSet>` and `PieceData` for a concrete piece type by
// extracting the first piece of the matching `DataSet` variant.
macro_rules! impl_piece_data {
    ($data_set:ident, $piece:ident) => {
        impl TryFrom<DataSet> for $piece {
            type Error = Error;
            // No source path is available through this conversion, so nested
            // `Piece::Source` references with relative paths cannot be resolved
            // against a containing file.
            fn try_from(data_set: DataSet) -> Result<Self, Error> {
                Self::from_data_set(data_set, None)
            }
        }
        impl PieceData for $piece {
            // Takes the first piece of the matching variant and loads it.
            // Errors: `MissingPieceData` when there are no pieces at all,
            // `PieceDataMismatch` when the data set is a different variant.
            fn from_data_set(data_set: DataSet, source_path: Option<&Path>) -> Result<Self, Error> {
                match data_set {
                    DataSet::$data_set { pieces, .. } => pieces
                        .into_iter()
                        .next()
                        .ok_or(Error::MissingPieceData)?
                        .into_loaded_piece_data(source_path),
                    _ => Err(Error::PieceDataMismatch),
                }
            }
        }
    };
}
// Wire up the `TryFrom<DataSet>`/`PieceData` conversions for every piece type.
impl_piece_data!(ImageData, ImageDataPiece);
impl_piece_data!(RectilinearGrid, RectilinearGridPiece);
impl_piece_data!(StructuredGrid, StructuredGridPiece);
impl_piece_data!(PolyData, PolyDataPiece);
impl_piece_data!(UnstructuredGrid, UnstructuredGridPiece);
/// A VTK data set: the top-level payload of a [`Vtk`] file.
///
/// Structured variants store the overall extent; their pieces describe
/// subregions. `meta` optionally summarizes attribute arrays without the data.
#[derive(Clone, PartialEq, Debug)]
pub enum DataSet {
    /// Uniform grid given by an extent, an origin and per-axis spacing.
    ImageData {
        extent: Extent,
        origin: [f32; 3],
        spacing: [f32; 3],
        meta: Option<Box<MetaData>>,
        pieces: Vec<Piece<ImageDataPiece>>,
    },
    /// Structured (curvilinear) grid with explicitly stored point coordinates.
    StructuredGrid {
        extent: Extent,
        meta: Option<Box<MetaData>>,
        pieces: Vec<Piece<StructuredGridPiece>>,
    },
    /// Grid with independent coordinate arrays along each axis.
    RectilinearGrid {
        extent: Extent,
        meta: Option<Box<MetaData>>,
        pieces: Vec<Piece<RectilinearGridPiece>>,
    },
    /// Cells with explicitly stored connectivity.
    UnstructuredGrid {
        meta: Option<Box<MetaData>>,
        pieces: Vec<Piece<UnstructuredGridPiece>>,
    },
    /// Polygonal data: vertices, lines, polygons and triangle strips.
    PolyData {
        meta: Option<Box<MetaData>>,
        pieces: Vec<Piece<PolyDataPiece>>,
    },
    /// A named collection of free-form data arrays.
    Field {
        name: String,
        data_array: Vec<FieldArray>,
    },
}
impl DataSet {
    /// Construct a data set containing the given piece stored inline.
    ///
    /// Shorthand over the `From` conversions implemented for each piece type.
    pub fn inline(p: impl Into<DataSet>) -> DataSet {
        p.into()
    }
}
impl From<ImageDataPiece> for DataSet {
    /// Wrap a single image data piece into a data set with a zero origin and
    /// unit spacing, and no metadata.
    fn from(piece: ImageDataPiece) -> DataSet {
        // The extent is needed both on the data set and inside the moved piece.
        let extent = piece.extent.clone();
        DataSet::ImageData {
            extent,
            origin: [0.0; 3],
            spacing: [1.0; 3],
            meta: None,
            pieces: vec![Piece::Inline(Box::new(piece))],
        }
    }
}
impl From<StructuredGridPiece> for DataSet {
    /// Wrap a single structured grid piece into a data set with no metadata.
    fn from(piece: StructuredGridPiece) -> DataSet {
        // Copy the extent before the piece is moved into the data set.
        let extent = piece.extent.clone();
        DataSet::StructuredGrid {
            extent,
            meta: None,
            pieces: vec![Piece::Inline(Box::new(piece))],
        }
    }
}
impl From<RectilinearGridPiece> for DataSet {
    /// Wrap a single rectilinear grid piece into a data set with no metadata.
    fn from(piece: RectilinearGridPiece) -> DataSet {
        // Copy the extent before the piece is moved into the data set.
        let extent = piece.extent.clone();
        DataSet::RectilinearGrid {
            extent,
            meta: None,
            pieces: vec![Piece::Inline(Box::new(piece))],
        }
    }
}
impl From<UnstructuredGridPiece> for DataSet {
    /// Wrap a single unstructured grid piece into a data set with no metadata.
    fn from(piece: UnstructuredGridPiece) -> DataSet {
        let pieces = vec![Piece::Inline(Box::new(piece))];
        DataSet::UnstructuredGrid { meta: None, pieces }
    }
}
impl From<PolyDataPiece> for DataSet {
    /// Wrap a single polygonal data piece into a data set with no metadata.
    fn from(piece: PolyDataPiece) -> DataSet {
        let pieces = vec![Piece::Inline(Box::new(piece))];
        DataSet::PolyData { meta: None, pieces }
    }
}
/// Lightweight description of a data set: attribute array metadata and scalar
/// types, without the array data itself.
#[derive(Clone, PartialEq, Debug)]
pub enum MetaData {
    ImageData {
        ghost_level: u32,
        attributes: AttributesMetaData,
    },
    RectilinearGrid {
        ghost_level: u32,
        /// Scalar type of each of the three coordinate arrays.
        coords: [ScalarType; 3],
        attributes: AttributesMetaData,
    },
    StructuredGrid {
        ghost_level: u32,
        /// Scalar type of the point coordinate buffer.
        points_type: ScalarType,
        attributes: AttributesMetaData,
    },
    UnstructuredGrid {
        ghost_level: u32,
        /// Scalar type of the point coordinate buffer.
        points_type: ScalarType,
        attributes: AttributesMetaData,
    },
    PolyData {
        ghost_level: u32,
        /// Scalar type of the point coordinate buffer.
        points_type: ScalarType,
        attributes: AttributesMetaData,
    },
}
/// Metadata for the point and cell attribute arrays of a data set.
#[derive(Clone, PartialEq, Debug)]
pub struct AttributesMetaData {
    /// Metadata for each point-data array.
    pub point_data: Vec<ArrayMetaData>,
    /// Metadata for each cell-data array.
    pub cell_data: Vec<ArrayMetaData>,
}
/// Metadata describing a single attribute array.
#[derive(Clone, PartialEq, Debug)]
pub struct ArrayMetaData {
    /// Name of the array.
    pub name: String,
    /// Kind of element stored in the array (see `ElementType`).
    pub elem: ElementType,
    /// Scalar type of the stored values.
    pub scalar_type: ScalarType,
}
/// The scalar type of values stored in a data buffer.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ScalarType {
    Bit,
    U8,
    I8,
    U16,
    I16,
    U32,
    I32,
    U64,
    I64,
    F32,
    F64,
}

impl ScalarType {
    /// Size in bytes of a single element of this type.
    ///
    /// `Bit` is reported as one byte, the unit in which bit data is buffered here.
    pub fn size(self) -> usize {
        use std::mem::size_of;
        // Arms grouped by width; every member of a group has the same size.
        match self {
            ScalarType::Bit | ScalarType::I8 | ScalarType::U8 => size_of::<u8>(),
            ScalarType::I16 | ScalarType::U16 => size_of::<u16>(),
            ScalarType::I32 | ScalarType::U32 => size_of::<u32>(),
            ScalarType::I64 | ScalarType::U64 => size_of::<u64>(),
            ScalarType::F32 => size_of::<f32>(),
            ScalarType::F64 => size_of::<f64>(),
        }
    }
}

impl fmt::Display for ScalarType {
    /// Write the type name as it appears in legacy VTK files (e.g. `unsigned_char`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            ScalarType::Bit => "bit",
            ScalarType::U8 => "unsigned_char",
            ScalarType::I8 => "char",
            ScalarType::U16 => "unsigned_short",
            ScalarType::I16 => "short",
            ScalarType::U32 => "unsigned_int",
            ScalarType::I32 => "int",
            ScalarType::U64 => "unsigned_long",
            ScalarType::I64 => "long",
            ScalarType::F32 => "float",
            ScalarType::F64 => "double",
        };
        f.write_str(name)
    }
}

impl From<TypeId> for ScalarType {
    /// Map a Rust primitive's `TypeId` to the corresponding scalar type.
    ///
    /// # Panics
    ///
    /// Panics if the type is not one of the supported numeric primitives.
    fn from(dt: TypeId) -> Self {
        let table: [(TypeId, ScalarType); 10] = [
            (TypeId::of::<u8>(), ScalarType::U8),
            (TypeId::of::<i8>(), ScalarType::I8),
            (TypeId::of::<u16>(), ScalarType::U16),
            (TypeId::of::<i16>(), ScalarType::I16),
            (TypeId::of::<u32>(), ScalarType::U32),
            (TypeId::of::<i32>(), ScalarType::I32),
            (TypeId::of::<u64>(), ScalarType::U64),
            (TypeId::of::<i64>(), ScalarType::I64),
            (TypeId::of::<f32>(), ScalarType::F32),
            (TypeId::of::<f64>(), ScalarType::F64),
        ];
        table
            .iter()
            .find(|(id, _)| *id == dt)
            .map(|(_, scalar_type)| *scalar_type)
            .expect("Specified type is unsupported by VTK.")
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Typed iteration over a buffer succeeds only for the stored element type.
    #[test]
    fn io_buffer_iter() {
        let values = vec![1, 2, 3, 4];
        let buffer = IOBuffer::U32(values);
        assert!(buffer.iter::<f32>().is_none());
        assert!(buffer.iter::<u32>().is_some());
    }
    // A Vec round-trips through an IOBuffer; extraction with the wrong type yields None.
    #[test]
    fn io_buffer_from_into_vec() {
        let original = vec![1_u32, 2, 3, 4];
        let buffer = IOBuffer::from(original.clone());
        assert!(buffer.clone().into_vec::<f32>().is_none());
        assert_eq!(buffer.into_vec::<u32>(), Some(original));
    }
}