mod array_bytes_internal;
mod array_errors;
mod array_metadata_options;
mod element;
mod from_array_bytes;
mod into_array_bytes;
mod tensor;
pub mod builder;
pub mod chunk_cache;
pub mod chunk_grid;
pub mod chunk_key_encoding;
pub mod codec;
pub mod concurrency;
pub mod data_type;
pub mod storage_transformer;
#[cfg(feature = "dlpack")]
mod array_dlpack_ext;
#[cfg(feature = "sharding")]
mod array_sharded_ext;
#[cfg(feature = "sharding")]
mod array_sync_sharded_readable_ext;
use std::borrow::Cow;
use std::num::NonZeroU64;
use std::sync::Arc;
pub use self::builder::ArrayBuilder;
use self::chunk_grid::RegularBoundedChunkGridConfiguration;
use self::chunk_key_encoding::V2ChunkKeyEncoding;
use crate::config::{MetadataConvertVersion, MetadataEraseVersion, global_config};
use crate::convert::{ArrayMetadataV2ToV3Error, array_metadata_v2_to_v3};
use crate::node::{NodePath, data_key};
pub use zarrs_chunk_grid::{
ArrayIndices, ArrayIndicesTinyVec, ArrayShape, ArraySubset, ArraySubsetError,
ArraySubsetTraits, ChunkGrid, ChunkGridTraits, ChunkGridTraitsIterators, ChunkShape,
ChunkShapeTraits, IncompatibleDimensionalityError, Indexer, IndexerError, iterators,
};
pub use zarrs_chunk_key_encoding::{ChunkKeyEncoding, ChunkKeyEncodingTraits};
pub use zarrs_codec::{
ArrayBytes, ArrayBytesDecodeIntoTarget, ArrayBytesError, ArrayBytesFixedDisjointView,
ArrayBytesFixedDisjointViewCreateError, ArrayBytesOffsets, ArrayBytesOptional, ArrayBytesRaw,
ArrayBytesRawOffsetsCreateError, ArrayBytesRawOffsetsOutOfBoundsError,
ArrayBytesVariableLength, ArrayCodecTraits, ArrayPartialDecoderTraits,
ArrayPartialEncoderTraits, ArrayToArrayCodecTraits, ArrayToBytesCodecTraits,
BytesPartialDecoderTraits, BytesPartialEncoderTraits, BytesRepresentation,
BytesToBytesCodecTraits, Codec, CodecError, CodecMetadataOptions, CodecOptions,
CodecSpecificOptions, CodecTraits, CodecTraitsV2, CodecTraitsV3, RecommendedConcurrency,
StoragePartialDecoder, copy_fill_value_into, update_array_bytes,
};
#[cfg(feature = "async")]
pub use zarrs_codec::{
AsyncArrayPartialDecoderTraits, AsyncArrayPartialEncoderTraits, AsyncBytesPartialDecoderTraits,
AsyncBytesPartialEncoderTraits,
};
pub use zarrs_data_type::{
DataType, DataTypeTraits, DataTypeTraitsV2, DataTypeTraitsV3, FillValue,
};
pub use zarrs_metadata::v2::ArrayMetadataV2;
use zarrs_metadata::v2::DataTypeMetadataV2;
use zarrs_metadata::v3::MetadataV3;
pub use zarrs_metadata::v3::{
ArrayMetadataV3, ZARR_NAN_BF16, ZARR_NAN_F16, ZARR_NAN_F32, ZARR_NAN_F64,
};
pub use zarrs_metadata::{
ArrayMetadata, ChunkKeySeparator, DataTypeSize, DimensionName, Endianness, FillValueMetadata,
};
use zarrs_plugin::{
ExtensionAliasesV2, ExtensionAliasesV3, ExtensionName, PluginCreateError, ZarrVersion,
};
use zarrs_storage::StoreKey;
pub use self::array_errors::{AdditionalFieldUnsupportedError, ArrayCreateError, ArrayError};
pub use self::array_metadata_options::ArrayMetadataOptions;
use self::chunk_grid::RegularChunkGrid;
pub use self::codec::CodecChain;
pub use self::element::{Element, ElementError, ElementOwned};
pub use self::from_array_bytes::FromArrayBytes;
pub use self::into_array_bytes::IntoArrayBytes;
pub use self::storage_transformer::{StorageTransformerChain, StorageTransformerTraits};
pub use self::tensor::{Tensor, TensorError};
#[cfg(all(feature = "sharding", feature = "async"))]
pub use array_async_sharded_readable_ext::{
AsyncArrayShardedReadableExt, AsyncArrayShardedReadableExtCache,
};
#[cfg(feature = "sharding")]
pub use array_sharded_ext::ArrayShardedExt;
#[cfg(feature = "sharding")]
pub use array_sync_sharded_readable_ext::{ArrayShardedReadableExt, ArrayShardedReadableExtCache};
#[must_use]
pub fn chunk_shape_to_array_shape(chunk_shape: &[std::num::NonZeroU64]) -> ArrayShape {
chunk_shape.iter().map(|i| i.get()).collect()
}
/// A Zarr array.
///
/// Generic over the storage type, which may be unsized (e.g. a trait object).
#[derive(Debug)]
pub struct Array<TStorage: ?Sized> {
    /// The backing storage handle.
    storage: Arc<TStorage>,
    /// The node path of the array within the Zarr hierarchy.
    path: NodePath,
    /// The data type of the array elements.
    data_type: DataType,
    /// The chunk grid (also holds the array shape, see [`Array::shape`]).
    chunk_grid: ChunkGrid,
    /// Maps chunk indices to store keys.
    chunk_key_encoding: ChunkKeyEncoding,
    /// The fill value used for unwritten regions.
    fill_value: FillValue,
    /// The codec chain applied to chunks (shared so clones are cheap).
    codecs: Arc<CodecChain>,
    /// Storage transformers applied between the array and the store.
    storage_transformers: StorageTransformerChain,
    /// Optional per-dimension names (Zarr V3 only at creation; `None` for V2).
    dimension_names: Option<Vec<DimensionName>>,
    /// The underlying array metadata (V2 or V3) this array was built from.
    metadata: ArrayMetadata,
    /// Codec options for this array, seeded from the global config.
    codec_options: CodecOptions,
    /// Metadata output options for this array, seeded from the global config.
    metadata_options: ArrayMetadataOptions,
    /// Which metadata version(s) to erase on metadata removal.
    metadata_erase_version: MetadataEraseVersion,
}
impl<TStorage: ?Sized> Array<TStorage> {
    /// Create a new array identical to `self` but backed by a different storage handle.
    ///
    /// All configuration (path, data type, chunk grid, codecs, metadata, options)
    /// is cloned; only the storage changes (and may change its type).
    pub fn with_storage<TStorage2: ?Sized>(&self, storage: Arc<TStorage2>) -> Array<TStorage2> {
        Array {
            storage,
            path: self.path.clone(),
            data_type: self.data_type.clone(),
            chunk_grid: self.chunk_grid.clone(),
            chunk_key_encoding: self.chunk_key_encoding.clone(),
            fill_value: self.fill_value.clone(),
            codecs: self.codecs.clone(),
            storage_transformers: self.storage_transformers.clone(),
            dimension_names: self.dimension_names.clone(),
            metadata: self.metadata.clone(),
            // The options types are `Copy`: moved out of `&self` by value.
            codec_options: self.codec_options,
            metadata_options: self.metadata_options,
            metadata_erase_version: self.metadata_erase_version,
        }
    }
    /// Create an array at `path` in `storage` from existing [`ArrayMetadata`] (V2 or V3).
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if `path` is not a valid node path or the
    /// metadata is invalid / unsupported (see the version-specific constructors).
    pub fn new_with_metadata(
        storage: Arc<TStorage>,
        path: &str,
        metadata: ArrayMetadata,
    ) -> Result<Self, ArrayCreateError> {
        let path = NodePath::new(path)?;
        // Dispatch on the metadata version.
        match metadata {
            ArrayMetadata::V3(v3) => Self::new_with_metadata_v3(storage, path, v3),
            ArrayMetadata::V2(v2) => Self::new_with_metadata_v2(storage, path, v2),
        }
    }
    /// Create an array from Zarr V3 metadata.
    ///
    /// Instantiates the data type, chunk grid, fill value, codecs, storage
    /// transformers and chunk key encoding from the metadata, validating
    /// dimensionality consistency along the way.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if any component cannot be created from
    /// the metadata, or the chunk grid / dimension names do not match the array
    /// dimensionality.
    fn new_with_metadata_v3(
        storage: Arc<TStorage>,
        path: NodePath,
        v3: ArrayMetadataV3,
    ) -> Result<Self, ArrayCreateError> {
        let data_type = DataType::from_metadata(&v3.data_type)
            .map_err(ArrayCreateError::DataTypeCreateError)?;
        let chunk_grid = ChunkGrid::from_metadata(&v3.chunk_grid, &v3.shape)
            .map_err(ArrayCreateError::ChunkGridCreateError)?;
        // The chunk grid must span the same number of dimensions as the array shape.
        if chunk_grid.dimensionality() != v3.shape.len() {
            return Err(ArrayCreateError::InvalidChunkGridDimensionality(
                chunk_grid.dimensionality(),
                v3.shape.len(),
            ));
        }
        // Parse the fill value in terms of the data type.
        let fill_value = data_type.fill_value_v3(&v3.fill_value).map_err(|_| {
            ArrayCreateError::InvalidFillValueMetadata {
                data_type_name: v3.data_type.name().to_string(),
                fill_value_metadata: v3.fill_value.clone(),
            }
        })?;
        let codecs = Arc::new(
            CodecChain::from_metadata(&v3.codecs).map_err(ArrayCreateError::CodecsCreateError)?,
        );
        let storage_transformers =
            StorageTransformerChain::from_metadata(&v3.storage_transformers, &path)
                .map_err(ArrayCreateError::StorageTransformersCreateError)?;
        let chunk_key_encoding = ChunkKeyEncoding::from_metadata(&v3.chunk_key_encoding)
            .map_err(ArrayCreateError::ChunkKeyEncodingCreateError)?;
        // If dimension names are given there must be one per dimension.
        if let Some(dimension_names) = &v3.dimension_names
            && dimension_names.len() != v3.shape.len()
        {
            return Err(ArrayCreateError::InvalidDimensionNames(
                dimension_names.len(),
                v3.shape.len(),
            ));
        }
        // Seed per-array options from the global configuration (one lock scope).
        let (codec_options, metadata_options, metadata_erase_version) = {
            let config = global_config();
            (
                config.codec_options(),
                config.array_metadata_options(),
                config.metadata_erase_version(),
            )
        };
        Ok(Self {
            storage,
            path,
            data_type,
            chunk_grid,
            chunk_key_encoding,
            fill_value,
            codecs,
            storage_transformers,
            dimension_names: v3.dimension_names.clone(),
            metadata: ArrayMetadata::V3(v3),
            codec_options,
            metadata_options,
            metadata_erase_version,
        })
    }
    /// Create an array from Zarr V2 metadata.
    ///
    /// V2 always uses a regular chunk grid and the V2 chunk key encoding; codecs
    /// are synthesised from the V2 `order`/`filters`/`compressor` fields.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if the data type, chunk grid, fill value,
    /// or codec configuration cannot be represented.
    fn new_with_metadata_v2(
        storage: Arc<TStorage>,
        path: NodePath,
        v2: ArrayMetadataV2,
    ) -> Result<Self, ArrayCreateError> {
        use zarrs_metadata::v2::data_type_metadata_v2_to_endianness;
        let data_type =
            DataType::from_metadata(&v2.dtype).map_err(ArrayCreateError::DataTypeCreateError)?;
        // V2 only supports regular chunk grids.
        let chunk_grid = ChunkGrid::new(
            RegularChunkGrid::new(v2.shape.clone(), v2.chunks.clone()).map_err(|err| {
                ArrayCreateError::ChunkGridCreateError(PluginCreateError::Other(err.to_string()))
            })?,
        );
        let fill_value = data_type.fill_value_v2(&v2.fill_value).map_err(|_| {
            // Build a human-readable data type name for the error message.
            let data_type_name = match &v2.dtype {
                DataTypeMetadataV2::Simple(s) => s.clone(),
                DataTypeMetadataV2::Structured(_) => data_type
                    .name_v3()
                    .map_or_else(String::new, Cow::into_owned),
            };
            ArrayCreateError::InvalidFillValueMetadata {
                data_type_name,
                fill_value_metadata: v2.fill_value.clone(),
            }
        })?;
        // The dtype string encodes the byte order in V2.
        let endianness = data_type_metadata_v2_to_endianness(&v2.dtype)
            .map_err(|e| ArrayCreateError::UnsupportedZarrV2Array(e.to_string()))?;
        let codecs = Arc::new(
            create_codec_chain_from_v2(
                v2.order,
                v2.shape.len(),
                &data_type,
                endianness,
                v2.filters.as_ref(),
                v2.compressor.as_ref(),
            )
            .map_err(|e| ArrayCreateError::UnsupportedZarrV2Array(e.to_string()))?,
        );
        let chunk_key_encoding =
            ChunkKeyEncoding::new(V2ChunkKeyEncoding::new(v2.dimension_separator));
        // V2 has no storage transformers.
        let storage_transformers = StorageTransformerChain::default();
        // Seed per-array options from the global configuration (one lock scope).
        let (codec_options, metadata_options, metadata_erase_version) = {
            let config = global_config();
            (
                config.codec_options(),
                config.array_metadata_options(),
                config.metadata_erase_version(),
            )
        };
        Ok(Self {
            storage,
            path,
            data_type,
            chunk_grid,
            chunk_key_encoding,
            fill_value,
            codecs,
            storage_transformers,
            dimension_names: None,
            codec_options,
            metadata: ArrayMetadata::V2(v2),
            metadata_options,
            metadata_erase_version,
        })
    }
    /// Set the codec options, consuming and returning the array (builder style).
    #[must_use]
    pub fn with_codec_options(mut self, codec_options: CodecOptions) -> Self {
        self.codec_options = codec_options;
        self
    }

    /// Set the codec options in place.
    pub fn set_codec_options(&mut self, codec_options: CodecOptions) -> &mut Self {
        self.codec_options = codec_options;
        self
    }

    /// Apply codec-specific options to the codec chain (builder style).
    #[must_use]
    pub fn with_codec_specific_options(mut self, opts: &CodecSpecificOptions) -> Self {
        // `unwrap_or_clone` avoids cloning the chain when this is the sole `Arc` owner.
        self.codecs = Arc::new(Arc::unwrap_or_clone(self.codecs).with_codec_specific_options(opts));
        self
    }

    /// Apply codec-specific options to the codec chain in place.
    pub fn set_codec_specific_options(&mut self, opts: &CodecSpecificOptions) -> &mut Self {
        // A clone is unavoidable here: the chain cannot be moved out of `&mut self`.
        self.codecs = Arc::new((*self.codecs).clone().with_codec_specific_options(opts));
        self
    }

    /// Set the metadata output options, consuming and returning the array (builder style).
    #[must_use]
    pub fn with_metadata_options(mut self, metadata_options: ArrayMetadataOptions) -> Self {
        self.metadata_options = metadata_options;
        self
    }

    /// Set the metadata output options in place.
    pub fn set_metadata_options(&mut self, metadata_options: ArrayMetadataOptions) -> &mut Self {
        self.metadata_options = metadata_options;
        self
    }
    /// Return a clone of the storage handle backing this array.
    #[must_use]
    pub fn storage(&self) -> Arc<TStorage> {
        self.storage.clone()
    }

    /// Return the node path of the array.
    #[must_use]
    pub const fn path(&self) -> &NodePath {
        &self.path
    }

    /// Return the data type of the array.
    #[must_use]
    pub const fn data_type(&self) -> &DataType {
        &self.data_type
    }

    /// Return the fill value of the array.
    #[must_use]
    pub const fn fill_value(&self) -> &FillValue {
        &self.fill_value
    }

    /// Return the array shape (one extent per dimension), held by the chunk grid.
    #[must_use]
    pub fn shape(&self) -> &[u64] {
        self.chunk_grid().array_shape()
    }
    /// Set the array shape, rebuilding the chunk grid for the new shape and
    /// updating the in-memory metadata (V2 or V3) to match.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if the existing chunk grid metadata is
    /// incompatible with `array_shape`.
    pub fn set_shape(&mut self, array_shape: ArrayShape) -> Result<&mut Self, ArrayCreateError> {
        // Recreate the chunk grid from its existing metadata with the new shape.
        self.chunk_grid = ChunkGrid::from_metadata(&self.chunk_grid.metadata(), &array_shape)
            .map_err(ArrayCreateError::ChunkGridCreateError)?;
        match &mut self.metadata {
            ArrayMetadata::V3(metadata) => {
                metadata.shape = array_shape;
            }
            ArrayMetadata::V2(metadata) => {
                metadata.shape = array_shape;
            }
        }
        Ok(self)
    }
    /// Set both the array shape and the chunk grid, updating the in-memory
    /// metadata (V2 or V3) to match.
    ///
    /// For V2 metadata, only regular chunk grids are accepted, and `chunks` is
    /// set to the regular grid's chunk shape.
    ///
    /// # Safety
    /// NOTE(review): marked `unsafe` without a visible contract here — presumably
    /// because changing the chunk grid of an existing stored array can make
    /// previously written chunks inconsistent with the new grid. Confirm the
    /// caller obligations against the crate documentation.
    ///
    /// # Errors
    /// Returns an [`ArrayCreateError`] if the chunk grid metadata is invalid,
    /// incompatible with `array_shape`, or (for V2) not a regular grid.
    pub unsafe fn set_shape_and_chunk_grid(
        &mut self,
        array_shape: ArrayShape,
        chunk_grid_metadata: impl Into<builder::ArrayBuilderChunkGridMetadata>,
    ) -> Result<&mut Self, ArrayCreateError> {
        let chunk_grid_metadata: builder::ArrayBuilderChunkGridMetadata =
            chunk_grid_metadata.into();
        let chunk_grid_metadata = chunk_grid_metadata.to_metadata()?;
        self.chunk_grid = ChunkGrid::from_metadata(&chunk_grid_metadata, &array_shape)
            .map_err(ArrayCreateError::ChunkGridCreateError)?;
        match &mut self.metadata {
            ArrayMetadata::V3(metadata) => {
                metadata.shape = array_shape;
                metadata.chunk_grid = chunk_grid_metadata;
            }
            ArrayMetadata::V2(metadata) => {
                // Shared error constructor for the V2 restrictions below.
                let err = || {
                    ArrayCreateError::ChunkGridCreateError(PluginCreateError::Other(
                        "Only regular chunk grids are supported in Zarr V2".to_string(),
                    ))
                };
                if !RegularChunkGrid::matches_name_v3(chunk_grid_metadata.name()) {
                    return Err(err());
                }
                let regular_chunk_grid_configuration = chunk_grid_metadata
                    .to_typed_configuration::<RegularBoundedChunkGridConfiguration>()
                    .map_err(|_| err())?;
                let regular_chunk_grid = RegularChunkGrid::new(
                    array_shape.clone(),
                    regular_chunk_grid_configuration.chunk_shape,
                )
                .map_err(|_| {
                    ArrayCreateError::ChunkGridCreateError(PluginCreateError::Other(
                        "Chunk grid is not compatible with array shape".to_string(),
                    ))
                })?;
                metadata.shape = array_shape;
                metadata.chunks = regular_chunk_grid.chunk_shape().to_vec();
            }
        }
        Ok(self)
    }
    /// Return the number of dimensions of the array.
    #[must_use]
    pub fn dimensionality(&self) -> usize {
        self.shape().len()
    }

    /// Return a clone of the codec chain handle.
    #[must_use]
    pub fn codecs(&self) -> Arc<CodecChain> {
        self.codecs.clone()
    }

    /// Return the chunk grid.
    #[must_use]
    pub const fn chunk_grid(&self) -> &ChunkGrid {
        &self.chunk_grid
    }

    /// Return the chunk key encoding.
    #[must_use]
    pub const fn chunk_key_encoding(&self) -> &ChunkKeyEncoding {
        &self.chunk_key_encoding
    }

    /// Return the storage transformer chain.
    #[must_use]
    pub const fn storage_transformers(&self) -> &StorageTransformerChain {
        &self.storage_transformers
    }

    /// Return the dimension names, if any.
    #[must_use]
    pub const fn dimension_names(&self) -> &Option<Vec<DimensionName>> {
        &self.dimension_names
    }

    /// Set (or clear, with `None`) the dimension names.
    pub fn set_dimension_names(
        &mut self,
        dimension_names: Option<Vec<DimensionName>>,
    ) -> &mut Self {
        self.dimension_names = dimension_names;
        self
    }

    /// Return the user attributes from the underlying metadata (V2 or V3).
    #[must_use]
    pub const fn attributes(&self) -> &serde_json::Map<String, serde_json::Value> {
        match &self.metadata {
            ArrayMetadata::V3(metadata) => &metadata.attributes,
            ArrayMetadata::V2(metadata) => &metadata.attributes,
        }
    }

    /// Return a mutable reference to the user attributes.
    #[must_use]
    pub fn attributes_mut(&mut self) -> &mut serde_json::Map<String, serde_json::Value> {
        match &mut self.metadata {
            ArrayMetadata::V3(metadata) => &mut metadata.attributes,
            ArrayMetadata::V2(metadata) => &mut metadata.attributes,
        }
    }

    /// Return the underlying array metadata.
    #[must_use]
    pub fn metadata(&self) -> &ArrayMetadata {
        &self.metadata
    }
    /// Build the array metadata with `options` applied.
    ///
    /// Depending on the options, this can: inject a `_zarrs` provenance
    /// attribute, convert V2 metadata to V3, and rewrite aliased extension
    /// names (codecs, data type, chunk grid, chunk key encoding, storage
    /// transformers / V2 filters and compressor) to their default names.
    #[allow(clippy::missing_panics_doc, clippy::too_many_lines)]
    #[must_use]
    pub fn metadata_opt(&self, options: &ArrayMetadataOptions) -> ArrayMetadata {
        use ArrayMetadata as AM;
        use MetadataConvertVersion as V;
        let mut metadata = self.metadata.clone();

        // Optionally record the zarrs version/repository under a `_zarrs` attribute.
        if options.include_zarrs_metadata() {
            #[derive(serde::Serialize)]
            struct ZarrsMetadata {
                description: String,
                repository: String,
                version: String,
            }
            let zarrs_metadata = ZarrsMetadata {
                description: "This array was created with zarrs".to_string(),
                repository: env!("CARGO_PKG_REPOSITORY").to_string(),
                version: env!("CARGO_PKG_VERSION").to_string(),
            };
            let attributes = match &mut metadata {
                AM::V3(metadata) => &mut metadata.attributes,
                AM::V2(metadata) => &mut metadata.attributes,
            };
            attributes.insert("_zarrs".to_string(), unsafe {
                // SAFETY: `ZarrsMetadata` is a plain struct of `String` fields,
                // which serializes to a JSON value infallibly.
                serde_json::to_value(zarrs_metadata).unwrap_unchecked()
            });
        }

        // Optionally convert V2 metadata to V3. V3 -> V2 is not supported.
        let mut metadata = match (metadata, options.metadata_convert_version()) {
            (AM::V3(metadata), V::Default | V::V3) => ArrayMetadata::V3(metadata),
            (AM::V2(metadata), V::Default) => ArrayMetadata::V2(metadata),
            (AM::V2(metadata), V::V3) => {
                // The same conversion already succeeded when the array was built.
                let metadata = array_metadata_v2_to_v3(&metadata)
                    .expect("conversion succeeded on array creation");
                AM::V3(metadata)
            }
        };

        // Optionally rewrite aliased extension names to their default names.
        if options.convert_aliased_extension_names() {
            match &mut metadata {
                AM::V3(metadata) => {
                    for codec in &mut metadata.codecs {
                        let name = codec_default_name(codec, ZarrVersion::V3).into_owned();
                        codec.set_name(name);
                    }
                    {
                        let name =
                            data_type::data_type_v3_default_name(&metadata.data_type).into_owned();
                        metadata.data_type.set_name(name);
                    }
                    {
                        // The chunk grid default name resolution needs the array shape.
                        let array_shape: ArrayShape = metadata.shape.clone();
                        let name = chunk_grid_default_name(
                            &metadata.chunk_grid,
                            &array_shape,
                            ZarrVersion::V3,
                        )
                        .into_owned();
                        metadata.chunk_grid.set_name(name);
                    }
                    {
                        let name = chunk_key_encoding_default_name(
                            &metadata.chunk_key_encoding,
                            ZarrVersion::V3,
                        )
                        .into_owned();
                        metadata.chunk_key_encoding.set_name(name);
                    }
                    for transformer in &mut metadata.storage_transformers {
                        let name = storage_transformer_default_name(
                            transformer,
                            &self.path,
                            ZarrVersion::V3,
                        )
                        .into_owned();
                        transformer.set_name(name);
                    }
                }
                AM::V2(metadata) => {
                    // V2 filters and compressor are rewritten through a temporary
                    // V3 metadata representation to reuse `codec_default_name`.
                    if let Some(filters) = &mut metadata.filters {
                        for filter in filters {
                            let filter_metadata = MetadataV3::new_with_serializable_configuration(
                                filter.id().to_string(),
                                filter.configuration(),
                            )
                            .unwrap_or_else(|_| MetadataV3::new(filter.id()));
                            let name =
                                codec_default_name(&filter_metadata, ZarrVersion::V2).into_owned();
                            filter.set_id(name);
                        }
                    }
                    if let Some(compressor) = &mut metadata.compressor {
                        let compressor_metadata = MetadataV3::new_with_serializable_configuration(
                            compressor.id().to_string(),
                            compressor.configuration(),
                        )
                        .unwrap_or_else(|_| MetadataV3::new(compressor.id()));
                        let name =
                            codec_default_name(&compressor_metadata, ZarrVersion::V2).into_owned();
                        compressor.set_id(name);
                    }
                    match &mut metadata.dtype {
                        DataTypeMetadataV2::Simple(dtype) => {
                            *dtype = data_type::data_type_v2_default_name(dtype).into_owned();
                        }
                        DataTypeMetadataV2::Structured(_) => {
                            // Structured dtypes are left as-is.
                        }
                    }
                }
            }
        }
        metadata
    }
    /// Return the fill value as metadata.
    pub(crate) fn fill_value_metadata(&self) -> FillValueMetadata {
        self.data_type
            .metadata_fill_value(&self.fill_value)
            // Invariant: the fill value was validated against the data type on creation.
            .expect("data type and fill value are compatible")
    }

    /// Return an [`ArrayBuilder`] seeded from this array's configuration.
    #[must_use]
    pub fn builder(&self) -> ArrayBuilder {
        ArrayBuilder::from_array(self)
    }

    /// Return the shape of the chunk grid (number of chunks per dimension).
    #[must_use]
    pub fn chunk_grid_shape(&self) -> &[u64] {
        self.chunk_grid().grid_shape()
    }

    /// Return the store key for the chunk at `chunk_indices`.
    #[must_use]
    pub fn chunk_key(&self, chunk_indices: &[u64]) -> StoreKey {
        data_key(self.path(), &self.chunk_key_encoding.encode(chunk_indices))
    }

    /// Return the origin (per-dimension start) of the chunk at `chunk_indices`.
    ///
    /// # Errors
    /// Returns [`ArrayError::InvalidChunkGridIndicesError`] if the indices are
    /// incompatible with or outside the chunk grid.
    pub fn chunk_origin(&self, chunk_indices: &[u64]) -> Result<ArrayIndices, ArrayError> {
        self.chunk_grid()
            .chunk_origin(chunk_indices)
            .map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
            .ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
    }

    /// Return the shape of the chunk at `chunk_indices`.
    ///
    /// # Errors
    /// Returns [`ArrayError::InvalidChunkGridIndicesError`] if the indices are
    /// incompatible with or outside the chunk grid.
    pub fn chunk_shape(&self, chunk_indices: &[u64]) -> Result<ChunkShape, ArrayError> {
        self.chunk_grid()
            .chunk_shape(chunk_indices)
            .map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
            .ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
    }

    /// Return an [`ArraySubset`] spanning the entire array.
    #[must_use]
    pub fn subset_all(&self) -> ArraySubset {
        ArraySubset::new_with_shape(self.shape().to_vec())
    }
    /// Return the shape of the chunk at `chunk_indices` as `usize` extents.
    ///
    /// Panics if an extent does not fit in `usize` (only possible on targets
    /// where `usize` is narrower than 64 bits).
    ///
    /// # Errors
    /// Returns [`ArrayError::InvalidChunkGridIndicesError`] if the indices are
    /// incompatible with or outside the chunk grid.
    pub fn chunk_shape_usize(&self, chunk_indices: &[u64]) -> Result<Vec<usize>, ArrayError> {
        Ok(self
            .chunk_shape(chunk_indices)?
            .iter()
            .map(|d| usize::try_from(d.get()).unwrap())
            .collect())
    }

    /// Return the array subset covered by the chunk at `chunk_indices`.
    ///
    /// # Errors
    /// Returns [`ArrayError::InvalidChunkGridIndicesError`] if the indices are
    /// incompatible with or outside the chunk grid.
    pub fn chunk_subset(&self, chunk_indices: &[u64]) -> Result<ArraySubset, ArrayError> {
        self.chunk_grid()
            .subset(chunk_indices)
            .map_err(|_| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))?
            .ok_or_else(|| ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec()))
    }

    /// Like [`Self::chunk_subset`], but clipped to the array shape.
    ///
    /// # Errors
    /// See [`Self::chunk_subset`]; also propagates bounding errors.
    pub fn chunk_subset_bounded(&self, chunk_indices: &[u64]) -> Result<ArraySubset, ArrayError> {
        let chunk_subset = self.chunk_subset(chunk_indices)?;
        Ok(chunk_subset.bound(self.shape())?)
    }

    /// Return the array subset covered by a subset of chunks.
    ///
    /// An empty chunk subset yields an empty array subset.
    ///
    /// # Errors
    /// Returns an [`ArrayError`] if a corner chunk is invalid for the grid.
    #[allow(clippy::similar_names)]
    pub fn chunks_subset(&self, chunks: &dyn ArraySubsetTraits) -> Result<ArraySubset, ArrayError> {
        match chunks.end_inc() {
            Some(end) => {
                // Span from the start of the first chunk to the end of the last chunk.
                let chunk0 = self.chunk_subset(&chunks.start())?;
                let chunk1 = self.chunk_subset(&end)?;
                let start = chunk0.start().to_vec();
                let end = chunk1.end_exc();
                ArraySubset::new_with_start_end_exc(start, end).map_err(std::convert::Into::into)
            }
            None => Ok(ArraySubset::new_empty(chunks.dimensionality())),
        }
    }

    /// Like [`Self::chunks_subset`], but clipped to the array shape.
    ///
    /// # Errors
    /// See [`Self::chunks_subset`]; also propagates bounding errors.
    pub fn chunks_subset_bounded(
        &self,
        chunks: &dyn ArraySubsetTraits,
    ) -> Result<ArraySubset, ArrayError> {
        let chunks_subset = self.chunks_subset(chunks)?;
        Ok(chunks_subset.bound(self.shape())?)
    }

    /// Return the chunks intersecting `array_subset`, if any.
    ///
    /// # Errors
    /// Returns an [`IncompatibleDimensionalityError`] on dimensionality mismatch.
    pub fn chunks_in_array_subset(
        &self,
        array_subset: &dyn ArraySubsetTraits,
    ) -> Result<Option<ArraySubset>, IncompatibleDimensionalityError> {
        self.chunk_grid.chunks_in_array_subset(array_subset)
    }

    /// Return the codec chain's recommended concurrency for a chunk.
    fn recommended_codec_concurrency(
        &self,
        chunk_shape: &[NonZeroU64],
        data_type: &DataType,
    ) -> Result<RecommendedConcurrency, ArrayError> {
        Ok(self
            .codecs()
            .recommended_concurrency(chunk_shape, data_type)?)
    }
pub fn to_v3(self) -> Result<Self, ArrayMetadataV2ToV3Error> {
match self.metadata {
ArrayMetadata::V2(metadata) => {
let metadata: ArrayMetadata = array_metadata_v2_to_v3(&metadata)?.into();
Ok(Self {
storage: self.storage,
path: self.path,
data_type: self.data_type,
chunk_grid: self.chunk_grid,
chunk_key_encoding: self.chunk_key_encoding,
fill_value: self.fill_value,
codecs: self.codecs,
storage_transformers: self.storage_transformers,
dimension_names: self.dimension_names,
metadata,
codec_options: self.codec_options,
metadata_options: self.metadata_options,
metadata_erase_version: self.metadata_erase_version,
})
}
ArrayMetadata::V3(_) => Ok(self),
}
}
fn validate_metadata(metadata: &ArrayMetadata) -> Result<(), ArrayCreateError> {
match &metadata {
ArrayMetadata::V2(_) => {}
ArrayMetadata::V3(_metadata) => {
}
}
match metadata {
ArrayMetadata::V2(_metadata) => {}
ArrayMetadata::V3(metadata) => {
let additional_fields = &metadata.additional_fields;
for (name, field) in additional_fields {
if field.must_understand() {
return Err(ArrayCreateError::AdditionalFieldUnsupportedError(
AdditionalFieldUnsupportedError::new(
name.clone(),
field.as_value().clone(),
),
));
}
}
}
}
Ok(())
}
}
/// Resolve the default name of a codec for `version`, falling back to the
/// name recorded in `metadata` when the codec is unknown or has no name for
/// that version.
#[must_use]
fn codec_default_name(metadata: &MetadataV3, version: impl Into<ZarrVersion>) -> Cow<'static, str> {
    let version: ZarrVersion = version.into();
    Codec::from_metadata(metadata)
        .ok()
        .and_then(|codec| codec.name(version))
        .unwrap_or_else(|| Cow::Owned(metadata.name().to_string()))
}
/// Resolve the default name of a chunk grid for `version`, falling back to the
/// name recorded in `metadata` when the grid cannot be created or has no name
/// for that version.
#[must_use]
fn chunk_grid_default_name(
    metadata: &MetadataV3,
    array_shape: &ArrayShape,
    version: impl Into<ZarrVersion>,
) -> Cow<'static, str> {
    let version = version.into();
    zarrs_chunk_grid::ChunkGrid::from_metadata(metadata, array_shape)
        .ok()
        .and_then(|chunk_grid| chunk_grid.name(version))
        .unwrap_or_else(|| Cow::Owned(metadata.name().to_string()))
}
/// Resolve the default name of a chunk key encoding for `version`, falling back
/// to the name recorded in `metadata` when the encoding cannot be created or
/// has no name for that version.
#[must_use]
fn chunk_key_encoding_default_name(
    metadata: &MetadataV3,
    version: impl Into<ZarrVersion>,
) -> Cow<'static, str> {
    let version = version.into();
    ChunkKeyEncoding::from_metadata(metadata)
        .ok()
        .and_then(|chunk_key_encoding| chunk_key_encoding.name(version))
        .unwrap_or_else(|| Cow::Owned(metadata.name().to_string()))
}
/// Resolve the default name of a storage transformer for `version`, falling
/// back to the name recorded in `metadata` when the transformer cannot be
/// created or has no name for that version.
#[must_use]
fn storage_transformer_default_name(
    metadata: &MetadataV3,
    path: &crate::node::NodePath,
    version: impl Into<ZarrVersion>,
) -> Cow<'static, str> {
    let version = version.into();
    storage_transformer::try_create_storage_transformer(metadata, path)
        .ok()
        .and_then(|transformer| transformer.name(version))
        .unwrap_or_else(|| Cow::Owned(metadata.name().to_string()))
}
mod array_sync_readable;
mod array_sync_writable;
mod array_sync_readable_writable;
#[cfg(feature = "async")]
mod array_async_readable;
#[cfg(feature = "async")]
mod array_async_writable;
#[cfg(feature = "async")]
mod array_async_readable_writable;
#[cfg(all(feature = "sharding", feature = "async"))]
mod array_async_sharded_readable_ext;
/// Copy a byte slice into a `Vec<T>` of `Pod` elements.
#[must_use]
pub fn convert_from_bytes_slice<T: bytemuck::Pod>(from: &[u8]) -> Vec<T> {
    bytemuck::allocation::pod_collect_to_vec(from)
}

/// Reinterpret a `Vec<u8>` as a `Vec<T>` of `Pod` elements, reusing the
/// allocation when alignment/length permit; otherwise falls back to a copy.
#[must_use]
pub fn transmute_from_bytes_vec<T: bytemuck::Pod>(from: Vec<u8>) -> Vec<T> {
    bytemuck::allocation::try_cast_vec(from)
        .unwrap_or_else(|(_err, from)| convert_from_bytes_slice(&from))
}

/// Copy a slice of elements into a `Vec<u8>` of their bytes.
#[must_use]
pub fn convert_to_bytes_vec<T: bytemuck::NoUninit>(from: &[T]) -> Vec<u8> {
    bytemuck::allocation::pod_collect_to_vec(from)
}

/// Reinterpret a `Vec<T>` as a `Vec<u8>`, reusing the allocation when
/// possible; otherwise falls back to a copy.
#[must_use]
pub fn transmute_to_bytes_vec<T: bytemuck::NoUninit>(from: Vec<T>) -> Vec<u8> {
    bytemuck::allocation::try_cast_vec(from)
        .unwrap_or_else(|(_err, from)| convert_to_bytes_vec(&from))
}

/// View a slice of elements as a byte slice (no copy).
#[must_use]
pub fn transmute_to_bytes<T: bytemuck::NoUninit>(from: &[T]) -> &[u8] {
    bytemuck::must_cast_slice(from)
}
/// Convert a linear (C-order) `index` into per-dimension indices for `shape`.
///
/// Returns `None` if `index` is out of bounds for `shape` (i.e. there is a
/// nonzero remainder after consuming all dimensions). Panics if any dimension
/// extent is zero (division/modulo by zero).
///
/// Note: the previous implementation wrote through `spare_capacity_mut()` in
/// reverse and then `set_len`; that is only sound if `Vec::with_capacity`
/// allocates *exactly* `len` elements, which the standard library does not
/// guarantee ("at least"). This safe version has the same behavior without
/// relying on that.
#[must_use]
pub fn unravel_index(mut index: u64, shape: &[u64]) -> Option<ArrayIndices> {
    let mut indices: ArrayIndices = vec![0; shape.len()];
    // Process dimensions from fastest-varying (last) to slowest (first).
    for (indices_i, &dim) in std::iter::zip(indices.iter_mut().rev(), shape.iter().rev()) {
        *indices_i = index % dim;
        index /= dim;
    }
    // Any remainder means `index` exceeded the number of elements in `shape`.
    if index == 0 { Some(indices) } else { None }
}
pub use zarrs_chunk_grid::ravel_indices;
#[cfg(feature = "ndarray")]
/// Convert an iterator of `&u64` into a `Vec<usize>`, panicking on overflow
/// (only possible where `usize` is narrower than 64 bits).
fn iter_u64_to_usize<'a, I: Iterator<Item = &'a u64>>(iter: I) -> Vec<usize> {
    iter.copied()
        .map(|value| usize::try_from(value).unwrap())
        .collect::<Vec<usize>>()
}
/// Build an [`ndarray::ArrayD`] with `shape` from a flat `Vec` of elements.
///
/// # Errors
/// Returns an [`ArrayError::CodecError`] (invalid array shape) if the number
/// of elements does not match `shape`.
#[cfg(feature = "ndarray")]
pub fn elements_to_ndarray<T>(
    shape: &[u64],
    elements: Vec<T>,
) -> Result<ndarray::ArrayD<T>, ArrayError> {
    let length = elements.len();
    ndarray::ArrayD::<T>::from_shape_vec(iter_u64_to_usize(shape.iter()), elements).map_err(|_| {
        ArrayError::CodecError(
            zarrs_codec::InvalidArrayShapeError::new(shape.to_vec(), length).into(),
        )
    })
}
/// Build an [`ndarray::ArrayD`] with `shape` from raw bytes.
///
/// NOTE(review): `product * size_of::<T>()` can overflow `u64` for
/// pathological shapes (panics in debug, wraps in release) — confirm callers
/// only pass validated shapes.
///
/// # Errors
/// Returns [`ArrayError::InvalidBytesInputSize`] if `bytes.len()` does not
/// equal the byte length implied by `shape` and `T`.
#[cfg(feature = "ndarray")]
pub fn bytes_to_ndarray<T: bytemuck::Pod>(
    shape: &[u64],
    bytes: Vec<u8>,
) -> Result<ndarray::ArrayD<T>, ArrayError> {
    let expected_len = shape.iter().product::<u64>() * size_of::<T>() as u64;
    if bytes.len() as u64 != expected_len {
        return Err(ArrayError::InvalidBytesInputSize(bytes.len(), expected_len));
    }
    let elements = transmute_from_bytes_vec::<T>(bytes);
    elements_to_ndarray(shape, elements)
}
/// Build a V3-style [`CodecChain`] from Zarr V2 array metadata fields.
///
/// - F-order arrays get a leading `transpose` codec (requires the `transpose`
///   feature; otherwise an error is returned).
/// - V2 `filters` and the `compressor` are mapped to codecs and sorted into
///   array-to-array / array-to-bytes / bytes-to-bytes stages. The numcodecs
///   `blosc` compressor gets special handling (shuffle mode depends on the
///   data type size) when the `blosc` feature is enabled.
/// - If no array-to-bytes codec is specified, the `bytes` codec with the
///   dtype-implied endianness is used.
///
/// # Errors
/// Returns an [`ArrayMetadataV2ToV3Error`] if a codec cannot be created or
/// more than one array-to-bytes codec is specified.
#[allow(clippy::too_many_lines)]
fn create_codec_chain_from_v2(
    order: zarrs_metadata::v2::ArrayMetadataV2Order,
    dimensionality: usize,
    data_type: &DataType,
    endianness: Option<zarrs_metadata::Endianness>,
    filters: Option<&Vec<zarrs_metadata::v2::MetadataV2>>,
    compressor: Option<&zarrs_metadata::v2::MetadataV2>,
) -> Result<CodecChain, crate::convert::ArrayMetadataV2ToV3Error> {
    use crate::convert::ArrayMetadataV2ToV3Error;
    let mut array_to_array: Vec<Arc<dyn ArrayToArrayCodecTraits>> = vec![];
    let mut array_to_bytes: Option<Arc<dyn ArrayToBytesCodecTraits>> = None;
    let mut bytes_to_bytes: Vec<Arc<dyn BytesToBytesCodecTraits>> = vec![];

    // F-order data is represented in V3 by a reversed-axes transpose codec.
    #[cfg(feature = "transpose")]
    if order == zarrs_metadata::v2::ArrayMetadataV2Order::F {
        use self::codec::TransposeCodec;
        use zarrs_metadata_ext::codec::transpose::TransposeOrder;
        let f_order: Vec<usize> = (0..dimensionality).rev().collect();
        let transpose_order = unsafe {
            // SAFETY: `f_order` is a permutation of `0..dimensionality`,
            // which is a valid transpose order.
            TransposeOrder::new(&f_order).unwrap_unchecked()
        };
        let transpose = Arc::new(TransposeCodec::new(transpose_order));
        array_to_array.push(transpose);
    }
    #[cfg(not(feature = "transpose"))]
    if order == zarrs_metadata::v2::ArrayMetadataV2Order::F {
        return Err(ArrayMetadataV2ToV3Error::Other(
            "transpose feature is required for F-order arrays".to_string(),
        ));
    }

    // Sort each filter into the appropriate codec stage.
    if let Some(filters) = filters {
        for filter in filters {
            let codec = Codec::from_metadata(filter)
                .map_err(|e: PluginCreateError| ArrayMetadataV2ToV3Error::Other(e.to_string()))?;
            match codec {
                Codec::ArrayToArray(c) => {
                    array_to_array.push(c);
                }
                Codec::ArrayToBytes(c) => {
                    // At most one array-to-bytes codec is permitted.
                    if array_to_bytes.is_some() {
                        return Err(ArrayMetadataV2ToV3Error::MultipleArrayToBytesCodecs);
                    }
                    array_to_bytes = Some(c);
                }
                Codec::BytesToBytes(c) => {
                    bytes_to_bytes.push(c);
                }
            }
        }
    }

    if let Some(compressor) = compressor {
        // numcodecs `blosc` needs translation: its shuffle mode depends on the
        // data type size, which the V2 configuration does not record.
        #[cfg(feature = "blosc")]
        if self::codec::BloscCodec::matches_name_v2(compressor.id()) {
            use self::codec::BloscCodec;
            use zarrs_metadata_ext::codec::blosc::{
                BloscCodecConfigurationNumcodecs, BloscShuffleModeNumcodecs,
                codec_blosc_v2_numcodecs_to_v3,
            };
            let blosc_config = serde_json::from_value::<BloscCodecConfigurationNumcodecs>(
                serde_json::to_value(compressor.configuration())?,
            )?;
            let data_type_size = if blosc_config.shuffle == BloscShuffleModeNumcodecs::NoShuffle {
                None
            } else {
                Some(data_type.size())
            };
            let v3_config = codec_blosc_v2_numcodecs_to_v3(&blosc_config, data_type_size);
            let blosc = BloscCodec::new_with_configuration(&v3_config)
                .map_err(|e| ArrayMetadataV2ToV3Error::Other(e.to_string()))?;
            bytes_to_bytes.push(Arc::new(blosc));
        } else {
            // Non-blosc compressors go through the generic codec registry.
            let codec = Codec::from_metadata(compressor)
                .map_err(|e: PluginCreateError| ArrayMetadataV2ToV3Error::Other(e.to_string()))?;
            match codec {
                Codec::ArrayToArray(c) => {
                    array_to_array.push(c);
                }
                Codec::ArrayToBytes(c) => {
                    if array_to_bytes.is_some() {
                        return Err(ArrayMetadataV2ToV3Error::MultipleArrayToBytesCodecs);
                    }
                    array_to_bytes = Some(c);
                }
                Codec::BytesToBytes(c) => {
                    bytes_to_bytes.push(c);
                }
            }
        }
        #[cfg(not(feature = "blosc"))]
        {
            let codec = Codec::from_metadata(compressor)
                .map_err(|e: PluginCreateError| ArrayMetadataV2ToV3Error::Other(e.to_string()))?;
            match codec {
                Codec::ArrayToArray(c) => {
                    array_to_array.push(c);
                }
                Codec::ArrayToBytes(c) => {
                    if array_to_bytes.is_some() {
                        return Err(ArrayMetadataV2ToV3Error::MultipleArrayToBytesCodecs);
                    }
                    array_to_bytes = Some(c);
                }
                Codec::BytesToBytes(c) => {
                    bytes_to_bytes.push(c);
                }
            }
        }
    }

    // Default to the `bytes` codec when no array-to-bytes codec was specified.
    // (A subsequent `ok_or_else` fallback here was unreachable — the option is
    // always `Some` after this — and has been removed.)
    let array_to_bytes = array_to_bytes.unwrap_or_else(|| {
        use self::codec::BytesCodec;
        Arc::new(BytesCodec::new(endianness)) as Arc<dyn ArrayToBytesCodecTraits>
    });

    Ok(CodecChain::new(
        array_to_array,
        array_to_bytes,
        bytes_to_bytes,
    ))
}
#[cfg(test)]
mod tests {
use zarrs_filesystem::FilesystemStore;
use super::*;
use zarrs_metadata::v3::{AdditionalFieldV3, AdditionalFieldsV3};
use zarrs_storage::store::MemoryStore;
#[test]
fn test_array_metadata_write_read() {
let store = Arc::new(MemoryStore::new());
let array_path = "/array";
let array = ArrayBuilder::new(vec![8, 8], vec![4, 4], data_type::uint8(), 0u8)
.build(store.clone(), array_path)
.unwrap();
array.store_metadata().unwrap();
let stored_metadata = array.metadata_opt(&ArrayMetadataOptions::default());
let array_other = Array::open(store, array_path).unwrap();
assert_eq!(array_other.metadata(), &stored_metadata);
}
#[test]
fn array_set_shape_and_attributes() {
let store = MemoryStore::new();
let array_path = "/group/array";
let mut array = ArrayBuilder::new(
vec![8, 8], vec![4, 4],
data_type::float32(),
ZARR_NAN_F32,
)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(codec::GzipCodec::new(5).unwrap()),
])
.build(store.into(), array_path)
.unwrap();
array.set_shape(vec![16, 16]).unwrap();
array
.attributes_mut()
.insert("test".to_string(), "apple".into());
assert_eq!(array.shape(), &[16, 16]);
assert_eq!(
array.attributes().get_key_value("test"),
Some((
&"test".to_string(),
&serde_json::Value::String("apple".to_string())
))
);
}
#[test]
fn array_set_shape_and_chunk_grid() {
use self::chunk_grid::RectangularChunkGridConfiguration;
use zarrs_metadata::v3::MetadataV3;
let store = MemoryStore::new();
let array_path = "/group/array";
let mut array = ArrayBuilder::new(
vec![8, 8], vec![4, 4], data_type::uint8(),
0u8,
)
.build(store.into(), array_path)
.unwrap();
let chunk_grid_metadata = MetadataV3::new_with_configuration(
"rectangular",
RectangularChunkGridConfiguration {
chunk_shape: vec![
vec![
NonZeroU64::new(4).unwrap(),
NonZeroU64::new(4).unwrap(),
NonZeroU64::new(6).unwrap(),
]
.into(), vec![
NonZeroU64::new(4).unwrap(),
NonZeroU64::new(4).unwrap(),
NonZeroU64::new(4).unwrap(),
NonZeroU64::new(3).unwrap(),
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(1).unwrap(),
]
.into(), ],
},
);
unsafe {
array
.set_shape_and_chunk_grid(vec![14, 18], chunk_grid_metadata)
.unwrap();
}
assert_eq!(array.shape(), &[14, 18]);
assert_eq!(
array.chunk_shape(&[0, 0]).unwrap().as_slice(),
&[
std::num::NonZeroU64::new(4).unwrap(),
std::num::NonZeroU64::new(4).unwrap()
]
);
assert_eq!(
array.chunk_shape(&[2, 3]).unwrap().as_slice(),
&[
std::num::NonZeroU64::new(6).unwrap(),
std::num::NonZeroU64::new(3).unwrap()
]
);
}
#[test]
fn array_subset_round_trip() {
let store = Arc::new(MemoryStore::default());
let array_path = "/array";
let array = ArrayBuilder::new(
vec![8, 8], vec![4, 4], data_type::float32(),
1f32,
)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(codec::GzipCodec::new(5).unwrap()),
])
.build(store, array_path)
.unwrap();
array
.store_array_subset(
&[3..6, 3..6],
&[1.0f32, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
)
.unwrap();
let subset_all = array.subset_all();
let data_all = array
.retrieve_array_subset::<Vec<f32>>(&subset_all)
.unwrap();
assert_eq!(
data_all,
vec![
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.3, 1.0, 1.0, 1.0, 1.0, 1.0, 0.4, 0.5, 0.6, 1.0, 1.0, 1.0, 1.0, 1.0, 0.7, 0.8, 0.9, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ]
);
assert!(
array
.retrieve_chunk_if_exists::<Vec<f32>>(&[0; 2])
.unwrap()
.is_none()
);
#[cfg(feature = "ndarray")]
assert!(
array
.retrieve_chunk_if_exists::<ndarray::ArrayD<f32>>(&[0; 2])
.unwrap()
.is_none()
);
}
    /// Shared driver for the V2 -> V3 conversion tests below: read a V2 test
    /// array from `path_in`, verify its contents, then write it to `path_out`
    /// with both default and V3-converted metadata.
    #[allow(dead_code)]
    fn array_v2_to_v3(path_in: &str, path_out: &str) {
        let store = Arc::new(FilesystemStore::new(path_in).unwrap());
        let array_in = Array::open(store, "/").unwrap();
        println!("{array_in:?}");
        let subset_all = ArraySubset::new_with_shape(array_in.shape().to_vec());
        let elements = array_in
            .retrieve_array_subset::<Vec<f32>>(&subset_all)
            .unwrap();
        // The test fixtures all contain the sequence 0..100.
        assert_eq!(
            &elements,
            &[
                0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,
                10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
                20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0,
                30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0,
                40.0, 41.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0,
                50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0,
                60.0, 61.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0,
                70.0, 71.0, 72.0, 73.0, 74.0, 75.0, 76.0, 77.0, 78.0, 79.0,
                80.0, 81.0, 82.0, 83.0, 84.0, 85.0, 86.0, 87.0, 88.0, 89.0,
                90.0, 91.0, 92.0, 93.0, 94.0, 95.0, 96.0, 97.0, 98.0, 99.0,
            ],
        );
        // Re-create the array at the output path with the same metadata and data.
        let store = Arc::new(FilesystemStore::new(path_out).unwrap());
        let array_out = Array::new_with_metadata(store, "/", array_in.metadata().clone()).unwrap();
        array_out
            .store_array_subset(&subset_all, &elements)
            .unwrap();
        // Store metadata both unconverted and converted to V3.
        for version in [MetadataConvertVersion::Default, MetadataConvertVersion::V3] {
            array_out
                .store_metadata_opt(
                    &ArrayMetadataOptions::default()
                        .with_metadata_convert_version(version)
                        .with_include_zarrs_metadata(false)
                        .with_convert_aliased_extension_names(true),
                )
                .unwrap();
        }
    }
/// V2 fixture, no compression, C order.
#[test]
fn array_v2_none_c() {
    let input = "tests/data/v2/array_none_C.zarr";
    let output = "tests/data/v3/array_none.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, no compression, F order (requires the transpose codec).
#[cfg(feature = "transpose")]
#[test]
fn array_v2_none_f() {
    let input = "tests/data/v2/array_none_F.zarr";
    let output = "tests/data/v3/array_none_transpose.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, blosc compression, C order.
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_blosc_c() {
    let input = "tests/data/v2/array_blosc_C.zarr";
    let output = "tests/data/v3/array_blosc.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, blosc compression, F order.
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_blosc_f() {
    let input = "tests/data/v2/array_blosc_F.zarr";
    let output = "tests/data/v3/array_blosc_transpose.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, gzip compression, C order.
#[cfg(feature = "gzip")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_gzip_c() {
    let input = "tests/data/v2/array_gzip_C.zarr";
    let output = "tests/data/v3/array_gzip.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, bz2 compression, C order.
#[cfg(feature = "bz2")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_bz2_c() {
    let input = "tests/data/v2/array_bz2_C.zarr";
    let output = "tests/data/v3/array_bz2.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, zfpy compression, C order.
#[cfg(feature = "zfp")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_zfpy_c() {
    let input = "tests/data/v2/array_zfpy_C.zarr";
    let output = "tests/data/v3/array_zfpy.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, zstd compression, C order.
#[cfg(feature = "zstd")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_zstd_c() {
    let input = "tests/data/v2/array_zstd_C.zarr";
    let output = "tests/data/v3/array_zstd.zarr";
    array_v2_to_v3(input, output);
}
/// V2 fixture, pcodec compression, C order.
#[cfg(feature = "pcodec")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v2_pcodec_c() {
    let input = "tests/data/v2/array_pcodec_C.zarr";
    let output = "tests/data/v3/array_pcodec.zarr";
    array_v2_to_v3(input, output);
}
/// Creating an array from V2 metadata whose fill value does not match the
/// data type must fail with a descriptive error.
#[test]
fn array_v2_invalid_fill_value() {
    use std::num::NonZeroU64;
    use zarrs_metadata::v2::{ArrayMetadataV2, DataTypeMetadataV2};

    // A string fill value is invalid for the little-endian int32 data type.
    let chunk_shape = vec![NonZeroU64::new(5).unwrap(); 2];
    let metadata = ArrayMetadataV2::new(
        vec![10, 10],
        chunk_shape,
        DataTypeMetadataV2::Simple("<i4".to_string()),
        FillValueMetadata::from("invalid"),
        None,
        None,
    );

    let store = Arc::new(MemoryStore::new());
    let err = Array::new_with_metadata(store, "/", ArrayMetadata::V2(metadata)).unwrap_err();
    assert_eq!(
        err.to_string(),
        "invalid fill value metadata for data type `<i4`: \"invalid\""
    );
}
/// Opens a Zarr V3 array fixture (written by `zarr-python`) at `path_in`,
/// prints its metadata converted to V3, and verifies that all elements form
/// the expected 0..100 ramp.
#[allow(dead_code)]
fn array_v3_numcodecs(path_in: &str) {
    let store = Arc::new(FilesystemStore::new(path_in).unwrap());
    let array_in = Array::open(store, "/").unwrap();
    println!(
        "{:?}",
        array_in.metadata_opt(
            &ArrayMetadataOptions::default()
                .with_metadata_convert_version(MetadataConvertVersion::V3)
        )
    );
    println!("{array_in:?}");

    // A subset covering the whole array (consistent with other tests in this file).
    let subset_all = array_in.subset_all();
    let elements = array_in
        .retrieve_array_subset::<Vec<f32>>(&subset_all)
        .unwrap();
    // The fixture stores the values 0.0..=99.0 in C order.
    let expected: Vec<f32> = (0..100).map(|i| i as f32).collect();
    assert_eq!(elements, expected);
}
/// V3 zarr-python fixture, no compression.
#[test]
fn array_v3_none() {
    let path = "tests/data/v3_zarr_python/array_none.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, blosc codec.
#[cfg(feature = "blosc")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_blosc() {
    let path = "tests/data/v3_zarr_python/array_blosc.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, bz2 codec.
#[cfg(feature = "bz2")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_bz2() {
    let path = "tests/data/v3_zarr_python/array_bz2.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, fletcher32 checksum codec.
#[cfg(feature = "fletcher32")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_fletcher32() {
    let path = "tests/data/v3_zarr_python/array_fletcher32.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, adler32 checksum codec.
#[cfg(feature = "adler32")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_adler32() {
    let path = "tests/data/v3_zarr_python/array_adler32.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, zlib codec.
#[cfg(feature = "zlib")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zlib() {
    let path = "tests/data/v3_zarr_python/array_zlib.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, gzip codec.
#[cfg(feature = "gzip")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_gzip() {
    let path = "tests/data/v3_zarr_python/array_gzip.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, pcodec codec.
#[cfg(feature = "pcodec")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_pcodec() {
    let path = "tests/data/v3_zarr_python/array_pcodec.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, zfpy codec.
#[cfg(feature = "zfp")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zfpy() {
    let path = "tests/data/v3_zarr_python/array_zfpy.zarr";
    array_v3_numcodecs(path);
}
/// V3 zarr-python fixture, zstd codec.
#[cfg(feature = "zstd")]
#[test]
#[cfg_attr(miri, ignore)]
fn array_v3_zstd() {
    let path = "tests/data/v3_zarr_python/array_zstd.zarr";
    array_v3_numcodecs(path);
}
/// Opening an array whose metadata carries an additional field must fail when
/// the field is flagged `must_understand`, and succeed when it is optional.
#[test]
fn array_additional_fields() {
    let store = Arc::new(MemoryStore::new());
    let array_path = "/group/array";

    for must_understand in [true, false] {
        // Attach one empty additional field named "key" with the given flag.
        let field = AdditionalFieldV3::new(serde_json::Map::new(), must_understand);
        let mut additional_fields = AdditionalFieldsV3::new();
        additional_fields.insert("key".to_string(), field);

        let array = ArrayBuilder::new(
            vec![8, 8],
            vec![4, 4],
            data_type::float32(),
            ZARR_NAN_F32,
        )
        .bytes_to_bytes_codecs(vec![
            #[cfg(feature = "gzip")]
            Arc::new(codec::GzipCodec::new(5).unwrap()),
        ])
        .additional_fields(additional_fields)
        .build(store.clone(), array_path)
        .unwrap();
        array.store_metadata().unwrap();

        // Reopening fails exactly when the field must be understood.
        let reopened = Array::open(store.clone(), array_path);
        assert_eq!(reopened.is_err(), must_understand);
    }
}
}