mod array_builder;
mod array_bytes;
mod array_bytes_fixed_disjoint_view;
mod array_errors;
mod array_metadata_options;
mod array_representation;
mod bytes_representation;
mod chunk_cache;
mod element;
pub mod chunk_grid;
pub mod chunk_key_encoding;
pub mod codec;
pub mod concurrency;
pub mod data_type;
pub mod storage_transformer;
#[cfg(feature = "dlpack")]
mod array_dlpack_ext;
#[cfg(feature = "sharding")]
mod array_sharded_ext;
#[cfg(feature = "sharding")]
mod array_sync_sharded_readable_ext;
use std::sync::Arc;
pub use self::{
array_builder::{
ArrayBuilder, ArrayBuilderChunkGrid, ArrayBuilderChunkGridMetadata, ArrayBuilderDataType,
ArrayBuilderFillValue,
},
array_bytes::{
copy_fill_value_into, update_array_bytes, ArrayBytes, ArrayBytesError, RawBytes,
RawBytesOffsets, RawBytesOffsetsCreateError, RawBytesOffsetsOutOfBoundsError,
},
array_bytes_fixed_disjoint_view::{
ArrayBytesFixedDisjointView, ArrayBytesFixedDisjointViewCreateError,
},
array_errors::{AdditionalFieldUnsupportedError, ArrayCreateError, ArrayError},
array_metadata_options::ArrayMetadataOptions,
array_representation::{
ArrayRepresentation, ArrayRepresentationBase, ArraySize, ChunkRepresentation,
},
bytes_representation::BytesRepresentation,
chunk_grid::ChunkGrid,
chunk_key_encoding::{ChunkKeyEncoding, ChunkKeySeparator},
    codec::{ArrayCodecTraits, CodecChain},
concurrency::RecommendedConcurrency,
element::{Element, ElementFixedLength, ElementOwned},
storage_transformer::StorageTransformerChain,
};
pub use data_type::{DataType, FillValue, NamedDataType};
use crate::config::global_config;
pub use crate::metadata::v2::{ArrayMetadataV2, FillValueMetadataV2};
pub use crate::metadata::v3::{
ArrayMetadataV3, FillValueMetadataV3, ZARR_NAN_BF16, ZARR_NAN_F16, ZARR_NAN_F32, ZARR_NAN_F64,
};
pub use crate::metadata::{
ArrayMetadata, ArrayShape, ChunkShape, DataTypeSize, DimensionName, Endianness,
};
pub use chunk_cache::{
chunk_cache_lru::*, ChunkCache, ChunkCacheType, ChunkCacheTypeDecoded, ChunkCacheTypeEncoded,
ChunkCacheTypePartialDecoder,
};
#[cfg(all(feature = "sharding", feature = "async"))]
pub use array_async_sharded_readable_ext::{
AsyncArrayShardedReadableExt, AsyncArrayShardedReadableExtCache,
};
#[cfg(feature = "dlpack")]
pub use array_dlpack_ext::{
ArrayDlPackExt, ArrayDlPackExtError, AsyncArrayDlPackExt, RawBytesDlPack,
};
#[cfg(feature = "sharding")]
pub use array_sharded_ext::ArrayShardedExt;
#[cfg(feature = "sharding")]
pub use array_sync_sharded_readable_ext::{ArrayShardedReadableExt, ArrayShardedReadableExtCache};
use zarrs_metadata::v2::DataTypeMetadataV2;
use crate::{
array_subset::{ArraySubset, IncompatibleDimensionalityError},
config::MetadataConvertVersion,
node::{data_key, NodePath},
storage::StoreKey,
};
use zarrs_metadata_ext::v2_to_v3::{array_metadata_v2_to_v3, ArrayMetadataV2ToV3Error};
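/// The indices of an element or chunk of an array: one `u64` per dimension.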
pub type ArrayIndices = Vec<u64>;
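/// Convert a chunk shape (non-zero extents) to an [`ArrayShape`].
///
/// A doctest sketch, assuming this module is re-exported at `zarrs::array`:
/// ```
/// # use std::num::NonZeroU64;
/// use zarrs::array::chunk_shape_to_array_shape;
/// let chunk_shape = [NonZeroU64::new(2).unwrap(), NonZeroU64::new(4).unwrap()];
/// assert_eq!(chunk_shape_to_array_shape(&chunk_shape), vec![2, 4]);
/// ```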
#[must_use]
pub fn chunk_shape_to_array_shape(chunk_shape: &[std::num::NonZeroU64]) -> ArrayShape {
chunk_shape.iter().map(|i| i.get()).collect()
}
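/// A Zarr array.
///
/// Bundles a store handle with the interpreted array metadata: the data type,
/// chunk grid, chunk key encoding, fill value, codec chain, storage
/// transformers, and optional dimension names.
///
/// A usage sketch (`no_run`), assuming this crate is `zarrs` and the companion
/// `zarrs_filesystem` crate provides the store:
/// ```no_run
/// # use std::sync::Arc;
/// use zarrs::array::Array;
/// use zarrs_filesystem::FilesystemStore;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let store = Arc::new(FilesystemStore::new("/path/to/store")?);
/// // Open an existing array by reading its metadata from the store.
/// let array = Array::open(store, "/group/array")?;
/// println!("shape: {:?}", array.shape());
/// # Ok(())
/// # }
/// ```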
#[derive(Debug)]
pub struct Array<TStorage: ?Sized> {
storage: Arc<TStorage>,
path: NodePath,
data_type: NamedDataType,
chunk_grid: ChunkGrid,
chunk_key_encoding: ChunkKeyEncoding,
fill_value: FillValue,
codecs: Arc<CodecChain>,
storage_transformers: StorageTransformerChain,
dimension_names: Option<Vec<DimensionName>>,
metadata: ArrayMetadata,
}
impl<TStorage: ?Sized> Array<TStorage> {
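    /// Create a new [`Array`] with the same configuration, backed by `storage`.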
pub fn with_storage<TStorage2: ?Sized>(&self, storage: Arc<TStorage2>) -> Array<TStorage2> {
Array {
storage,
path: self.path.clone(),
data_type: self.data_type.clone(),
chunk_grid: self.chunk_grid.clone(),
chunk_key_encoding: self.chunk_key_encoding.clone(),
fill_value: self.fill_value.clone(),
codecs: self.codecs.clone(),
storage_transformers: self.storage_transformers.clone(),
dimension_names: self.dimension_names.clone(),
metadata: self.metadata.clone(),
}
}
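    /// Create an array in `storage` at `path` with the given `metadata`.
    ///
    /// Zarr V2 metadata is converted to V3 internally for interpretation; the
    /// supplied metadata is retained as-is for later serialization.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if the metadata is invalid or uses
    /// unsupported extensions.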
pub fn new_with_metadata(
storage: Arc<TStorage>,
path: &str,
metadata: ArrayMetadata,
) -> Result<Self, ArrayCreateError> {
let path = NodePath::new(path)?;
let metadata_v3 = {
let config = global_config();
match &metadata {
ArrayMetadata::V3(v3) => Ok(v3.clone()),
ArrayMetadata::V2(v2) => array_metadata_v2_to_v3(
v2,
config.codec_aliases_v2(),
config.codec_aliases_v3(),
config.data_type_aliases_v2(),
config.data_type_aliases_v3(),
)
.map_err(|err| ArrayCreateError::UnsupportedZarrV2Array(err.to_string())),
}?
};
let data_type = DataType::from_metadata(
&metadata_v3.data_type,
global_config().data_type_aliases_v3(),
)
.map_err(ArrayCreateError::DataTypeCreateError)?;
let data_type = NamedDataType::new(metadata_v3.data_type.name().to_string(), data_type);
let chunk_grid = ChunkGrid::from_metadata(&metadata_v3.chunk_grid, &metadata_v3.shape)
.map_err(ArrayCreateError::ChunkGridCreateError)?;
if chunk_grid.dimensionality() != metadata_v3.shape.len() {
return Err(ArrayCreateError::InvalidChunkGridDimensionality(
chunk_grid.dimensionality(),
metadata_v3.shape.len(),
));
}
let fill_value = data_type
.fill_value_from_metadata(&metadata_v3.fill_value)
.map_err(ArrayCreateError::InvalidFillValueMetadata)?;
let codecs = Arc::new(
CodecChain::from_metadata(&metadata_v3.codecs)
.map_err(ArrayCreateError::CodecsCreateError)?,
);
let storage_transformers =
StorageTransformerChain::from_metadata(&metadata_v3.storage_transformers, &path)
.map_err(ArrayCreateError::StorageTransformersCreateError)?;
let chunk_key_encoding = ChunkKeyEncoding::from_metadata(&metadata_v3.chunk_key_encoding)
.map_err(ArrayCreateError::ChunkKeyEncodingCreateError)?;
if let Some(dimension_names) = &metadata_v3.dimension_names {
if dimension_names.len() != metadata_v3.shape.len() {
return Err(ArrayCreateError::InvalidDimensionNames(
dimension_names.len(),
metadata_v3.shape.len(),
));
}
}
Ok(Self {
storage,
path,
data_type,
chunk_grid,
chunk_key_encoding,
fill_value,
codecs,
storage_transformers,
dimension_names: metadata_v3.dimension_names,
metadata,
})
}
#[must_use]
pub fn storage(&self) -> Arc<TStorage> {
self.storage.clone()
}
#[must_use]
pub const fn path(&self) -> &NodePath {
&self.path
}
#[must_use]
pub const fn data_type(&self) -> &DataType {
self.data_type.data_type()
}
#[must_use]
pub const fn fill_value(&self) -> &FillValue {
&self.fill_value
}
#[must_use]
pub fn shape(&self) -> &[u64] {
self.chunk_grid().array_shape()
}
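    /// Set the array shape, recreating the chunk grid for the new shape.
    ///
    /// This only updates the in-memory metadata; persist it afterwards with a
    /// `store_metadata` variant.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if the chunk grid cannot be created for
    /// `array_shape`.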
pub fn set_shape(&mut self, array_shape: ArrayShape) -> Result<&mut Self, ArrayCreateError> {
self.chunk_grid =
ChunkGrid::from_metadata(&self.chunk_grid.create_metadata(), &array_shape)
.map_err(ArrayCreateError::ChunkGridCreateError)?;
match &mut self.metadata {
ArrayMetadata::V3(metadata) => {
metadata.shape = array_shape;
}
ArrayMetadata::V2(metadata) => {
metadata.shape = array_shape;
}
}
Ok(self)
}
#[must_use]
pub fn dimensionality(&self) -> usize {
self.shape().len()
}
#[must_use]
pub fn codecs(&self) -> Arc<CodecChain> {
self.codecs.clone()
}
#[must_use]
pub const fn chunk_grid(&self) -> &ChunkGrid {
&self.chunk_grid
}
#[must_use]
pub const fn chunk_key_encoding(&self) -> &ChunkKeyEncoding {
&self.chunk_key_encoding
}
#[must_use]
pub const fn storage_transformers(&self) -> &StorageTransformerChain {
&self.storage_transformers
}
#[must_use]
pub const fn dimension_names(&self) -> &Option<Vec<DimensionName>> {
&self.dimension_names
}
pub fn set_dimension_names(
&mut self,
dimension_names: Option<Vec<DimensionName>>,
) -> &mut Self {
self.dimension_names = dimension_names;
self
}
#[must_use]
pub const fn attributes(&self) -> &serde_json::Map<String, serde_json::Value> {
match &self.metadata {
ArrayMetadata::V3(metadata) => &metadata.attributes,
ArrayMetadata::V2(metadata) => &metadata.attributes,
}
}
#[must_use]
pub fn attributes_mut(&mut self) -> &mut serde_json::Map<String, serde_json::Value> {
match &mut self.metadata {
ArrayMetadata::V3(metadata) => &mut metadata.attributes,
ArrayMetadata::V2(metadata) => &mut metadata.attributes,
}
}
#[must_use]
pub fn metadata(&self) -> &ArrayMetadata {
&self.metadata
}
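    /// Create array metadata with `options` applied.
    ///
    /// Depending on `options`, this attaches a `_zarrs` provenance attribute,
    /// regenerates the codec metadata, converts V2 metadata to V3, and rewrites
    /// aliased codec and data type names to their default names.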
#[allow(clippy::missing_panics_doc)]
#[must_use]
pub fn metadata_opt(&self, options: &ArrayMetadataOptions) -> ArrayMetadata {
use ArrayMetadata as AM;
use MetadataConvertVersion as V;
let mut metadata = self.metadata.clone();
if options.include_zarrs_metadata() {
#[derive(serde::Serialize)]
struct ZarrsMetadata {
description: String,
repository: String,
version: String,
}
let zarrs_metadata = ZarrsMetadata {
description: "This array was created with zarrs".to_string(),
repository: env!("CARGO_PKG_REPOSITORY").to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
};
let attributes = match &mut metadata {
AM::V3(metadata) => &mut metadata.attributes,
AM::V2(metadata) => &mut metadata.attributes,
};
attributes.insert("_zarrs".to_string(), unsafe {
serde_json::to_value(zarrs_metadata).unwrap_unchecked()
});
}
match &mut metadata {
ArrayMetadata::V3(metadata) => {
metadata.codecs = self.codecs().create_metadatas_opt(options.codec_options());
}
ArrayMetadata::V2(_metadata) => {
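                // Zarr V2 metadata keeps its original codec metadata as-is.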
}
}
let mut metadata = match (metadata, options.metadata_convert_version()) {
(AM::V3(metadata), V::Default | V::V3) => ArrayMetadata::V3(metadata),
(AM::V2(metadata), V::Default) => ArrayMetadata::V2(metadata),
(AM::V2(metadata), V::V3) => {
let metadata = {
let config = global_config();
array_metadata_v2_to_v3(
&metadata,
config.codec_aliases_v2(),
config.codec_aliases_v3(),
config.data_type_aliases_v2(),
config.data_type_aliases_v3(),
)
.expect("conversion succeeded on array creation")
};
AM::V3(metadata)
}
};
if options.convert_aliased_extension_names() {
let config = global_config();
match &mut metadata {
AM::V3(metadata) => {
let codec_aliases = config.codec_aliases_v3();
metadata.codecs.iter_mut().for_each(|codec| {
let identifier = codec_aliases.identifier(codec.name());
codec.set_name(codec_aliases.default_name(identifier).to_string());
});
let data_type_aliases = config.data_type_aliases_v3();
{
let name = metadata.data_type.name();
let identifier = data_type_aliases.identifier(name);
metadata
.data_type
.set_name(data_type_aliases.default_name(identifier).to_string());
}
}
AM::V2(metadata) => {
let codec_aliases = config.codec_aliases_v2();
{
if let Some(filters) = &mut metadata.filters {
for filter in filters.iter_mut() {
let identifier = codec_aliases.identifier(filter.id());
filter.set_id(codec_aliases.default_name(identifier).to_string());
}
}
if let Some(compressor) = &mut metadata.compressor {
let identifier = codec_aliases.identifier(compressor.id());
compressor.set_id(codec_aliases.default_name(identifier).to_string());
}
}
let data_type_aliases = config.data_type_aliases_v2();
{
match &mut metadata.dtype {
DataTypeMetadataV2::Simple(dtype) => {
let identifier = data_type_aliases.identifier(dtype);
*dtype = data_type_aliases.default_name(identifier).to_string();
}
DataTypeMetadataV2::Structured(_) => {
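                            // Structured data types have no aliases to rewrite.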
}
}
}
}
}
}
metadata
}
pub(crate) fn fill_value_metadata_v3(&self) -> FillValueMetadataV3 {
self.data_type
.metadata_fill_value(&self.fill_value)
.expect("data type and fill value are compatible")
}
#[must_use]
pub fn builder(&self) -> ArrayBuilder {
ArrayBuilder::from_array(self)
}
#[must_use]
pub fn chunk_grid_shape(&self) -> &ArrayShape {
self.chunk_grid().grid_shape()
}
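    /// Return the store key for the chunk at `chunk_indices`, encoded with this
    /// array's chunk key encoding under the array path.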
#[must_use]
pub fn chunk_key(&self, chunk_indices: &[u64]) -> StoreKey {
data_key(self.path(), &self.chunk_key_encoding.encode(chunk_indices))
}
pub fn chunk_origin(&self, chunk_indices: &[u64]) -> Result<ArrayIndices, ArrayError> {
self.chunk_grid()
.chunk_origin(chunk_indices)
.map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
.ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
}
pub fn chunk_shape(&self, chunk_indices: &[u64]) -> Result<ChunkShape, ArrayError> {
self.chunk_grid()
.chunk_shape(chunk_indices)
.map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
.ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
}
#[must_use]
pub fn subset_all(&self) -> ArraySubset {
ArraySubset::new_with_shape(self.shape().to_vec())
}
pub fn chunk_shape_usize(&self, chunk_indices: &[u64]) -> Result<Vec<usize>, ArrayError> {
Ok(self
.chunk_shape(chunk_indices)?
.iter()
.map(|d| usize::try_from(d.get()).unwrap())
.collect())
}
pub fn chunk_subset(&self, chunk_indices: &[u64]) -> Result<ArraySubset, ArrayError> {
self.chunk_grid()
.subset(chunk_indices)
.map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
.ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
}
pub fn chunk_subset_bounded(&self, chunk_indices: &[u64]) -> Result<ArraySubset, ArrayError> {
let chunk_subset = self.chunk_subset(chunk_indices)?;
Ok(chunk_subset.bound(self.shape())?)
}
#[allow(clippy::similar_names)]
pub fn chunks_subset(&self, chunks: &ArraySubset) -> Result<ArraySubset, ArrayError> {
match chunks.end_inc() {
Some(end) => {
let chunk0 = self.chunk_subset(chunks.start())?;
let chunk1 = self.chunk_subset(&end)?;
let start = chunk0.start().to_vec();
let end = chunk1.end_exc();
ArraySubset::new_with_start_end_exc(start, end).map_err(std::convert::Into::into)
}
None => Ok(ArraySubset::new_empty(chunks.dimensionality())),
}
}
pub fn chunks_subset_bounded(&self, chunks: &ArraySubset) -> Result<ArraySubset, ArrayError> {
let chunks_subset = self.chunks_subset(chunks)?;
Ok(chunks_subset.bound(self.shape())?)
}
pub fn chunk_array_representation(
&self,
chunk_indices: &[u64],
) -> Result<ChunkRepresentation, ArrayError> {
self.chunk_grid().chunk_shape(chunk_indices)?.map_or_else(
|| {
Err(ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
},
|chunk_shape| {
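                // SAFETY: the chunk shape comes from the chunk grid, and the
                // data type / fill value pair was validated at array creation.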
Ok(unsafe {
ChunkRepresentation::new_unchecked(
chunk_shape.to_vec(),
self.data_type().clone(),
self.fill_value().clone(),
)
})
},
)
}
pub fn chunks_in_array_subset(
&self,
array_subset: &ArraySubset,
) -> Result<Option<ArraySubset>, IncompatibleDimensionalityError> {
self.chunk_grid.chunks_in_array_subset(array_subset)
}
fn recommended_codec_concurrency(
&self,
chunk_representation: &ChunkRepresentation,
) -> Result<RecommendedConcurrency, ArrayError> {
Ok(self
.codecs()
.recommended_concurrency(chunk_representation)?)
}
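    /// Convert the array to Zarr V3 metadata, or return it unchanged if it is
    /// already V3.
    ///
    /// # Errors
    /// Returns an [`ArrayMetadataV2ToV3Error`] if the V2 metadata cannot be
    /// converted.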
pub fn to_v3(self) -> Result<Self, ArrayMetadataV2ToV3Error> {
match self.metadata {
ArrayMetadata::V2(metadata) => {
let metadata: ArrayMetadata = {
let config = global_config();
array_metadata_v2_to_v3(
&metadata,
config.codec_aliases_v2(),
config.codec_aliases_v3(),
config.data_type_aliases_v2(),
config.data_type_aliases_v3(),
)?
.into()
};
Ok(Self {
storage: self.storage,
path: self.path,
data_type: self.data_type,
chunk_grid: self.chunk_grid,
chunk_key_encoding: self.chunk_key_encoding,
fill_value: self.fill_value,
codecs: self.codecs,
storage_transformers: self.storage_transformers,
dimension_names: self.dimension_names,
metadata,
})
}
ArrayMetadata::V3(_) => Ok(self),
}
}
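    /// Check that `metadata` contains no additional fields flagged `must_understand`.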
fn validate_metadata(metadata: &ArrayMetadata) -> Result<(), ArrayCreateError> {
match metadata {
ArrayMetadata::V2(_metadata) => {}
ArrayMetadata::V3(metadata) => {
let additional_fields = &metadata.additional_fields;
for (name, field) in additional_fields {
if field.must_understand() {
return Err(ArrayCreateError::AdditionalFieldUnsupportedError(
AdditionalFieldUnsupportedError::new(
name.clone(),
field.as_value().clone(),
),
));
}
}
}
}
Ok(())
}
}
#[cfg(feature = "ndarray")]
fn ndarray_into_vec<T: Clone, D: ndarray::Dimension>(array: ndarray::Array<T, D>) -> Vec<T> {
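    // Return the elements in row-major (C) order, copying only if the array is
    // not already in standard layout.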
#[allow(deprecated)]
if array.is_standard_layout() {
array
} else {
array.as_standard_layout().into_owned()
}
.into_raw_vec()
}
mod array_sync_readable;
mod array_sync_writable;
mod array_sync_readable_writable;
#[cfg(feature = "async")]
mod array_async_readable;
#[cfg(feature = "async")]
mod array_async_writable;
#[cfg(feature = "async")]
mod array_async_readable_writable;
#[cfg(feature = "async")]
mod array_async_sharded_readable_ext;
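/// Convert a byte slice to a `Vec` of elements, copying.
///
/// The reinterpretation uses native endianness. A doctest sketch (little-endian
/// shown), assuming this module is re-exported at `zarrs::array`:
/// ```
/// use zarrs::array::convert_from_bytes_slice;
/// let elements: Vec<u16> = convert_from_bytes_slice(&[1, 0, 2, 0]);
/// # #[cfg(target_endian = "little")]
/// assert_eq!(elements, vec![1, 2]);
/// ```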
#[must_use]
pub fn convert_from_bytes_slice<T: bytemuck::Pod>(from: &[u8]) -> Vec<T> {
bytemuck::allocation::pod_collect_to_vec(from)
}
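/// Convert a byte `Vec` to a `Vec` of elements, reusing the allocation if the
/// size and alignment permit, and copying otherwise.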
#[must_use]
pub fn transmute_from_bytes_vec<T: bytemuck::Pod>(from: Vec<u8>) -> Vec<T> {
bytemuck::allocation::try_cast_vec(from)
.unwrap_or_else(|(_err, from)| convert_from_bytes_slice(&from))
}
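/// Convert a slice of elements to a byte `Vec`, copying.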
#[must_use]
pub fn convert_to_bytes_vec<T: bytemuck::NoUninit>(from: &[T]) -> Vec<u8> {
bytemuck::allocation::pod_collect_to_vec(from)
}
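/// Convert a `Vec` of elements to a byte `Vec`, reusing the allocation if
/// possible and copying otherwise.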
#[must_use]
pub fn transmute_to_bytes_vec<T: bytemuck::NoUninit>(from: Vec<T>) -> Vec<u8> {
bytemuck::allocation::try_cast_vec(from)
.unwrap_or_else(|(_err, from)| convert_to_bytes_vec(&from))
}
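/// View a slice of elements as a slice of bytes, without copying.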
#[must_use]
pub fn transmute_to_bytes<T: bytemuck::NoUninit>(from: &[T]) -> &[u8] {
bytemuck::must_cast_slice(from)
}
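/// Convert a linear element `index` into per-dimension indices for a row-major
/// array of the given `shape`.
///
/// Returns [`None`] if `index` is out-of-bounds of `shape`.
///
/// A doctest sketch, assuming this module is re-exported at `zarrs::array`:
/// ```
/// use zarrs::array::unravel_index;
/// assert_eq!(unravel_index(5, &[2, 3]), Some(vec![1, 2]));
/// assert_eq!(unravel_index(6, &[2, 3]), None);
/// ```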
#[must_use]
pub fn unravel_index(mut index: u64, shape: &[u64]) -> Option<ArrayIndices> {
    let mut indices: ArrayIndices = vec![0; shape.len()];
    // Peel off one dimension at a time, fastest-varying (last) dimension first.
    for (index_i, &dim) in std::iter::zip(indices.iter_mut().rev(), shape.iter().rev()) {
        *index_i = index % dim;
        index /= dim;
    }
    // A nonzero remainder means `index` lies beyond the extent of `shape`.
    if index == 0 {
        Some(indices)
    } else {
        None
    }
}
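/// Convert per-dimension `indices` into a linear index for a row-major array
/// of the given `shape`.
///
/// Returns [`None`] if any index is out-of-bounds of `shape`.
///
/// A doctest sketch, assuming this module is re-exported at `zarrs::array`:
/// ```
/// use zarrs::array::ravel_indices;
/// assert_eq!(ravel_indices(&[1, 2], &[2, 3]), Some(5));
/// assert_eq!(ravel_indices(&[0, 3], &[2, 3]), None);
/// ```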
#[must_use]
pub fn ravel_indices(indices: &[u64], shape: &[u64]) -> Option<u64> {
let mut index: u64 = 0;
let mut count = 1;
for (i, s) in std::iter::zip(indices, shape).rev() {
if i >= s {
return None;
}
index += i * count;
count *= s;
}
Some(index)
}
#[cfg(feature = "ndarray")]
fn iter_u64_to_usize<'a, I: Iterator<Item = &'a u64>>(iter: I) -> Vec<usize> {
iter.map(|v| usize::try_from(*v).unwrap())
.collect::<Vec<_>>()
}
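/// Convert `elements` into an [`ndarray::ArrayD`] with the given `shape`.
///
/// # Errors
/// Returns an [`ArrayError`] if the element count does not match `shape`.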
#[cfg(feature = "ndarray")]
pub fn elements_to_ndarray<T>(
shape: &[u64],
elements: Vec<T>,
) -> Result<ndarray::ArrayD<T>, ArrayError> {
let length = elements.len();
ndarray::ArrayD::<T>::from_shape_vec(iter_u64_to_usize(shape.iter()), elements).map_err(|_| {
ArrayError::CodecError(codec::InvalidArrayShapeError::new(shape.to_vec(), length).into())
})
}
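/// Convert `bytes` into an [`ndarray::ArrayD`] of `T` with the given `shape`.
///
/// # Errors
/// Returns an [`ArrayError`] if the byte length does not equal the product of
/// `shape` and the size of `T`.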
#[cfg(feature = "ndarray")]
pub fn bytes_to_ndarray<T: bytemuck::Pod>(
shape: &[u64],
bytes: Vec<u8>,
) -> Result<ndarray::ArrayD<T>, ArrayError> {
    let expected_len = shape.iter().product::<u64>() * std::mem::size_of::<T>() as u64;
if bytes.len() as u64 != expected_len {
return Err(ArrayError::InvalidBytesInputSize(bytes.len(), expected_len));
}
let elements = transmute_from_bytes_vec::<T>(bytes);
elements_to_ndarray(shape, elements)
}
#[cfg(test)]
mod tests {
use crate::storage::store::MemoryStore;
use zarrs_filesystem::FilesystemStore;
use zarrs_metadata::v3::{AdditionalFieldV3, AdditionalFieldsV3};
use super::*;
#[test]
fn test_array_metadata_write_read() {
let store = Arc::new(MemoryStore::new());
let array_path = "/array";
let array = ArrayBuilder::new(vec![8, 8], vec![4, 4], DataType::UInt8, 0u8)
.build(store.clone(), array_path)
.unwrap();
array.store_metadata().unwrap();
let stored_metadata = array.metadata_opt(&ArrayMetadataOptions::default());
let array_other = Array::open(store, array_path).unwrap();
assert_eq!(array_other.metadata(), &stored_metadata);
}
#[test]
fn array_set_shape_and_attributes() {
let store = MemoryStore::new();
let array_path = "/group/array";
        let mut array = ArrayBuilder::new(vec![8, 8], vec![4, 4], DataType::Float32, ZARR_NAN_F32)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(codec::GzipCodec::new(5).unwrap()),
])
.build(store.into(), array_path)
.unwrap();
array.set_shape(vec![16, 16]).unwrap();
array
.attributes_mut()
.insert("test".to_string(), "apple".into());
assert_eq!(array.shape(), &[16, 16]);
assert_eq!(
array.attributes().get_key_value("test"),
Some((
&"test".to_string(),
&serde_json::Value::String("apple".to_string())
))
);
}
#[test]
fn array_subset_round_trip() {
let store = Arc::new(MemoryStore::default());
let array_path = "/array";
        let array = ArrayBuilder::new(vec![8, 8], vec![4, 4], DataType::Float32, 1f32)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(codec::GzipCodec::new(5).unwrap()),
])
.build(store, array_path)
.unwrap();
array
.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
&[1.0, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
)
.unwrap();
let subset_all = array.subset_all();
let data_all = array
.retrieve_array_subset_elements::<f32>(&subset_all)
.unwrap();
assert_eq!(
data_all,
vec![
                1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, //
                1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, //
                1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, //
                1.0, 1.0, 1.0, 1.0, 0.2, 0.3, 1.0, 1.0, //
                1.0, 1.0, 1.0, 0.4, 0.5, 0.6, 1.0, 1.0, //
                1.0, 1.0, 1.0, 0.7, 0.8, 0.9, 1.0, 1.0, //
                1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, //
                1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, //
            ]
);
assert!(array
.retrieve_chunk_elements_if_exists::<f32>(&[0; 2])
.unwrap()
.is_none());
#[cfg(feature = "ndarray")]
assert!(array
.retrieve_chunk_ndarray_if_exists::<f32>(&[0; 2])
.unwrap()
.is_none());
}
#[allow(dead_code)]
fn array_v2_to_v3(path_in: &str, path_out: &str) {
let store = Arc::new(FilesystemStore::new(path_in).unwrap());
let array_in = Array::open(store, "/").unwrap();
println!("{array_in:?}");
let subset_all = ArraySubset::new_with_shape(array_in.shape().to_vec());
let elements = array_in
.retrieve_array_subset_elements::<f32>(&subset_all)
.unwrap();
assert_eq!(
&elements,
&[
                0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, //
                10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, //
                20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, //
                30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, //
                40.0, 41.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, //
                50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, //
                60.0, 61.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, //
                70.0, 71.0, 72.0, 73.0, 74.0, 75.0, 76.0, 77.0, 78.0, 79.0, //
                80.0, 81.0, 82.0, 83.0, 84.0, 85.0, 86.0, 87.0, 88.0, 89.0, //
                90.0, 91.0, 92.0, 93.0, 94.0, 95.0, 96.0, 97.0, 98.0, 99.0, //
            ],
);
let store = Arc::new(FilesystemStore::new(path_out).unwrap());
let array_out = Array::new_with_metadata(store, "/", array_in.metadata().clone()).unwrap();
array_out
.store_array_subset_elements::<f32>(&subset_all, &elements)
.unwrap();
for version in [MetadataConvertVersion::Default, MetadataConvertVersion::V3] {
array_out
.store_metadata_opt(
&ArrayMetadataOptions::default()
.with_metadata_convert_version(version)
.with_include_zarrs_metadata(false),
)
.unwrap();
}
}
#[test]
fn array_v2_none_c() {
array_v2_to_v3(
"tests/data/v2/array_none_C.zarr",
"tests/data/v3/array_none.zarr",
)
}
#[cfg(feature = "transpose")]
#[test]
fn array_v2_none_f() {
array_v2_to_v3(
"tests/data/v2/array_none_F.zarr",
"tests/data/v3/array_none_transpose.zarr",
)
}
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_blosc_c() {
array_v2_to_v3(
"tests/data/v2/array_blosc_C.zarr",
"tests/data/v3/array_blosc.zarr",
)
}
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_blosc_f() {
array_v2_to_v3(
"tests/data/v2/array_blosc_F.zarr",
"tests/data/v3/array_blosc_transpose.zarr",
)
}
#[cfg(feature = "gzip")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_gzip_c() {
array_v2_to_v3(
"tests/data/v2/array_gzip_C.zarr",
"tests/data/v3/array_gzip.zarr",
)
}
#[cfg(feature = "bz2")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_bz2_c() {
array_v2_to_v3(
"tests/data/v2/array_bz2_C.zarr",
"tests/data/v3/array_bz2.zarr",
)
}
#[cfg(feature = "zfp")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_zfpy_c() {
array_v2_to_v3(
"tests/data/v2/array_zfpy_C.zarr",
"tests/data/v3/array_zfpy.zarr",
)
}
#[cfg(feature = "zstd")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_zstd_c() {
array_v2_to_v3(
"tests/data/v2/array_zstd_C.zarr",
"tests/data/v3/array_zstd.zarr",
)
}
#[cfg(feature = "pcodec")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_pcodec_c() {
array_v2_to_v3(
"tests/data/v2/array_pcodec_C.zarr",
"tests/data/v3/array_pcodec.zarr",
)
}
#[allow(dead_code)]
fn array_v3_numcodecs(path_in: &str) {
let store = Arc::new(FilesystemStore::new(path_in).unwrap());
let array_in = Array::open(store, "/").unwrap();
println!(
"{:?}",
array_in.metadata_opt(
&ArrayMetadataOptions::default()
.with_metadata_convert_version(MetadataConvertVersion::V3)
)
);
println!("{array_in:?}");
let subset_all = ArraySubset::new_with_shape(array_in.shape().to_vec());
let elements = array_in
.retrieve_array_subset_elements::<f32>(&subset_all)
.unwrap();
assert_eq!(
&elements,
&[
                0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, //
                10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, //
                20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, //
                30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, //
                40.0, 41.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, //
                50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, //
                60.0, 61.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, //
                70.0, 71.0, 72.0, 73.0, 74.0, 75.0, 76.0, 77.0, 78.0, 79.0, //
                80.0, 81.0, 82.0, 83.0, 84.0, 85.0, 86.0, 87.0, 88.0, 89.0, //
                90.0, 91.0, 92.0, 93.0, 94.0, 95.0, 96.0, 97.0, 98.0, 99.0, //
            ],
);
}
#[test]
fn array_v3_none() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_none.zarr")
}
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_blosc() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_blosc.zarr")
}
#[cfg(feature = "bz2")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_bz2() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_bz2.zarr")
}
#[cfg(feature = "fletcher32")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_fletcher32() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_fletcher32.zarr")
}
#[cfg(feature = "adler32")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_adler32() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_adler32.zarr")
}
#[cfg(feature = "zlib")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zlib() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_zlib.zarr")
}
#[cfg(feature = "gzip")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_gzip() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_gzip.zarr")
}
#[cfg(feature = "pcodec")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_pcodec() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_pcodec.zarr")
}
#[cfg(feature = "zfp")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zfpy() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_zfpy.zarr")
}
#[cfg(feature = "zstd")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zstd() {
array_v3_numcodecs("tests/data/v3_zarr_python/array_zstd.zarr")
}
#[test]
fn array_additional_fields() {
let store = Arc::new(MemoryStore::new());
let array_path = "/group/array";
for must_understand in [true, false] {
let additional_field = serde_json::Map::new();
let additional_field = AdditionalFieldV3::new(additional_field, must_understand);
let mut additional_fields = AdditionalFieldsV3::new();
additional_fields.insert("key".to_string(), additional_field);
            let array = ArrayBuilder::new(vec![8, 8], vec![4, 4], DataType::Float32, ZARR_NAN_F32)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(codec::GzipCodec::new(5).unwrap()),
])
.additional_fields(additional_fields)
.build(store.clone(), array_path)
.unwrap();
array.store_metadata().unwrap();
let array = Array::open(store.clone(), array_path);
if must_understand {
assert!(array.is_err());
} else {
assert!(array.is_ok());
}
}
}
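    // A property check for the index helpers in this module: `ravel_indices`
    // inverts `unravel_index` for every in-bounds linear index, and both
    // reject out-of-bounds inputs.
    #[test]
    fn ravel_unravel_round_trip() {
        let shape = [2, 3, 4];
        for index in 0..shape.iter().product::<u64>() {
            let indices = unravel_index(index, &shape).unwrap();
            assert_eq!(ravel_indices(&indices, &shape), Some(index));
        }
        // Out-of-bounds inputs are rejected.
        assert_eq!(unravel_index(24, &shape), None);
        assert_eq!(ravel_indices(&[2, 0, 0], &shape), None);
    }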
}