Struct zarrs::array_subset::ArraySubset
source · pub struct ArraySubset { /* private fields */ }
Expand description
An array subset.
The unsafe _unchecked methods
are mostly intended for internal use to avoid redundant input validation.
Implementations§
source§impl ArraySubset
impl ArraySubset
sourcepub fn new_with_ranges(ranges: &[Range<u64>]) -> Self
pub fn new_with_ranges(ranges: &[Range<u64>]) -> Self
Create a new array subset from a list of ranges.
Examples found in repository?
3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58
fn http_array_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::Array,
array_subset::ArraySubset,
storage::{
storage_transformer::{StorageTransformerExtension, UsageLogStorageTransformer},
store,
},
};
const HTTP_URL: &str =
"https://raw.githubusercontent.com/LDeakin/zarrs/main/tests/data/array_write_read.zarr";
const ARRAY_PATH: &str = "/group/array";
// Create a HTTP store
let mut store: ReadableStorage = Arc::new(store::HTTPStore::new(HTTP_URL)?);
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log.clone().create_readable_transformer(store);
}
}
// Init the existing array, reading metadata
let array = Array::new(store, ARRAY_PATH)?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a chunk back from the store
let chunk_indices = vec![1, 0];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
Ok(())
}
More examples
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150
fn rectangular_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use zarrs::array::ChunkGrid;
use zarrs::{
array::{chunk_grid::RectangularChunkGrid, codec, FillValue},
node::Node,
};
use zarrs::{
array::{DataType, ZARR_NAN_F32},
array_subset::ArraySubset,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
let mut store: ReadableWritableListableStorage = std::sync::Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
ChunkGrid::new(RectangularChunkGrid::new(&[
[1, 2, 3, 2].try_into()?,
4.try_into()?,
])),
FillValue::from(ZARR_NAN_F32),
)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Box::new(codec::GzipCodec::new(5)?),
])
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
// Write some chunks (in parallel)
(0..4).into_par_iter().try_for_each(|i| {
let chunk_grid = array.chunk_grid();
let chunk_indices = vec![i, 0];
if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
let chunk_array = ndarray::ArrayD::<f32>::from_elem(
chunk_shape
.iter()
.map(|u| u.get() as usize)
.collect::<Vec<_>>(),
i as f32,
);
array.store_chunk_ndarray(&chunk_indices, chunk_array)
} else {
Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
}
})?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write a subset spanning multiple chunks, including updating chunks already written
array.store_array_subset_ndarray(
&[3, 3], // start
ndarray::ArrayD::<f32>::from_shape_vec(
vec![3, 3],
vec![0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
)?,
)?;
// Store elements directly, in this case set the 7th column to 123.0
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![123.0; 8],
)?;
// Store elements directly in a chunk, in this case set the last row of the bottom right chunk
array.store_chunk_subset_elements::<f32>(
// chunk indices
&[3, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[1..2, 0..4]),
vec![-4.0; 4],
)?;
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a chunk back from the store
let chunk_indices = vec![1, 0];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("The zarr hierarchy tree is:\n{tree}");
Ok(())
}
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some chunks
(0..2).into_par_iter().try_for_each(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
let chunk_subset = array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})?;
array.store_chunk_elements(
&chunk_indices,
vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
})?;
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
array.store_chunks_elements::<f32>(
&ArraySubset::new_with_ranges(&[1..2, 0..2]),
vec![
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
array.store_chunk_subset_elements::<f32>(
// chunk indices
&[1, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[3..4, 0..4]),
vec![-7.4, -7.5, -7.6, -7.7],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.erase_chunk(&[0, 0])?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
fn sharded_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use zarrs::{
array::{
codec::{self, array_to_bytes::sharding::ShardingCodecBuilder},
DataType, FillValue,
},
array_subset::ArraySubset,
node::Node,
storage::store,
};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use std::sync::Arc;
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new("tests/data/sharded_array_write_read.zarr")?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
// Create an array
let array_path = "/group/array";
let shard_shape = vec![4, 8];
let inner_chunk_shape = vec![4, 4];
let mut sharding_codec_builder =
ShardingCodecBuilder::new(inner_chunk_shape.as_slice().try_into()?);
sharding_codec_builder.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Box::new(codec::GzipCodec::new(5)?),
]);
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::UInt16,
shard_shape.try_into()?,
FillValue::from(0u16),
)
.array_to_bytes_codec(Box::new(sharding_codec_builder.build()))
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
// The array metadata is
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some shards (in parallel)
(0..2).into_par_iter().try_for_each(|s| {
let chunk_grid = array.chunk_grid();
let chunk_indices = vec![s, 0];
if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
let chunk_array = ndarray::ArrayD::<u16>::from_shape_fn(
chunk_shape
.iter()
.map(|u| u.get() as usize)
.collect::<Vec<_>>(),
|ij| {
(s * chunk_shape[0].get() * chunk_shape[1].get()
+ ij[0] as u64 * chunk_shape[1].get()
+ ij[1] as u64) as u16
},
);
array.store_chunk_ndarray(&chunk_indices, chunk_array)
} else {
Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
}
})?;
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec()); // the center 4x2 region
let data_all = array.retrieve_array_subset_ndarray::<u16>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a shard back from the store
let shard_indices = vec![1, 0];
let data_shard = array.retrieve_chunk_ndarray::<u16>(&shard_indices)?;
println!("Shard [1,0] is:\n{data_shard}\n");
// Read an inner chunk from the store
let subset_chunk_1_0 = ArraySubset::new_with_ranges(&[4..8, 0..4]);
let data_chunk = array.retrieve_array_subset_ndarray::<u16>(&subset_chunk_1_0)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<u16>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
// Decode inner chunks
// In some cases, it might be preferable to decode inner chunks in a shard directly.
// If using the partial decoder, then the shard index will only be read once from the store.
let partial_decoder = array.partial_decoder(&[0, 0])?;
let inner_chunks_to_decode = vec![
ArraySubset::new_with_start_shape(vec![0, 0], inner_chunk_shape.clone())?,
ArraySubset::new_with_start_shape(vec![0, 4], inner_chunk_shape.clone())?,
];
let decoded_inner_chunks_bytes = partial_decoder.partial_decode(&inner_chunks_to_decode)?;
let decoded_inner_chunks_ndarray = decoded_inner_chunks_bytes
.into_iter()
.map(|bytes| bytes_to_ndarray::<u16>(&inner_chunk_shape, bytes))
.collect::<Result<Vec<_>, _>>()?;
println!("Decoded inner chunks:");
for (inner_chunk_subset, decoded_inner_chunk) in
std::iter::zip(inner_chunks_to_decode, decoded_inner_chunks_ndarray)
{
println!("{inner_chunk_subset}\n{decoded_inner_chunk}\n");
}
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("The zarr hierarchy tree is:\n{}", tree);
println!(
"The keys in the store are:\n[{}]",
store.list().unwrap_or_default().iter().format(", ")
);
Ok(())
}
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some chunks
(0..2).into_par_iter().try_for_each(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
let chunk_subset = array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})?;
array.store_chunk_ndarray(
&chunk_indices,
ArrayD::<f32>::from_shape_vec(
chunk_subset.shape_usize(),
vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
.unwrap(),
)
})?;
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
let ndarray_chunks: Array2<f32> = array![
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
];
array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
let ndarray_subset: Array2<f32> =
array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
array.store_array_subset_ndarray(
ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
ndarray_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
let ndarray_subset: Array2<f32> = array![
[-0.6],
[-1.6],
[-2.6],
[-3.6],
[-4.6],
[-5.6],
[-6.6],
[-7.6],
];
array.store_array_subset_ndarray(
ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
ndarray_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
array.store_chunk_subset_ndarray(
// chunk indices
&[1, 1],
// subset within chunk
ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
ndarray_chunk_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.erase_chunk(&[0, 0])?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199
async fn async_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use futures::{stream::FuturesUnordered, StreamExt};
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: AsyncReadableWritableListableStorage = Arc::new(store::AsyncObjectStore::new(
object_store::memory::InMemory::new(),
));
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_async_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.async_store_metadata().await?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata())?
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.async_store_metadata().await?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata())?
);
// Write some chunks
let subsets = (0..2)
.map(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})
.map(|chunk_subset| (i, chunk_indices, chunk_subset))
})
.collect::<Result<Vec<_>, _>>()?;
let mut futures = subsets
.iter()
.map(|(i, chunk_indices, chunk_subset)| {
array.async_store_chunk_elements(
&chunk_indices,
vec![*i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
})
.collect::<FuturesUnordered<_>>();
while let Some(item) = futures.next().await {
item?;
}
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
array
.async_store_chunks_elements::<f32>(
&ArraySubset::new_with_ranges(&[1..2, 0..2]),
vec![
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
array
.async_store_chunk_subset_elements::<f32>(
// chunk indices
&[1, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[3..4, 0..4]),
vec![-7.4, -7.5, -7.6, -7.7],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.async_erase_chunk(&[0, 0]).await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array
.async_retrieve_chunk_ndarray::<f32>(&chunk_indices)
.await?;
println!("async_retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.async_retrieve_chunks_ndarray::<f32>(&chunks).await?;
println!("async_retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array
.async_retrieve_array_subset_ndarray::<f32>(&subset)
.await?;
println!("async_retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::async_new(&*store, "/").await.unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
sourcepub fn new_with_shape(shape: ArrayShape) -> Self
pub fn new_with_shape(shape: ArrayShape) -> Self
Create a new array subset with the given shape, starting at the origin.
Examples found in repository?
3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58
fn http_array_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::Array,
array_subset::ArraySubset,
storage::{
storage_transformer::{StorageTransformerExtension, UsageLogStorageTransformer},
store,
},
};
const HTTP_URL: &str =
"https://raw.githubusercontent.com/LDeakin/zarrs/main/tests/data/array_write_read.zarr";
const ARRAY_PATH: &str = "/group/array";
// Create a HTTP store
let mut store: ReadableStorage = Arc::new(store::HTTPStore::new(HTTP_URL)?);
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log.clone().create_readable_transformer(store);
}
}
// Init the existing array, reading metadata
let array = Array::new(store, ARRAY_PATH)?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a chunk back from the store
let chunk_indices = vec![1, 0];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
Ok(())
}
More examples
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150
fn rectangular_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use zarrs::array::ChunkGrid;
use zarrs::{
array::{chunk_grid::RectangularChunkGrid, codec, FillValue},
node::Node,
};
use zarrs::{
array::{DataType, ZARR_NAN_F32},
array_subset::ArraySubset,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
let mut store: ReadableWritableListableStorage = std::sync::Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
ChunkGrid::new(RectangularChunkGrid::new(&[
[1, 2, 3, 2].try_into()?,
4.try_into()?,
])),
FillValue::from(ZARR_NAN_F32),
)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Box::new(codec::GzipCodec::new(5)?),
])
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
// Write some chunks (in parallel)
(0..4).into_par_iter().try_for_each(|i| {
let chunk_grid = array.chunk_grid();
let chunk_indices = vec![i, 0];
if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
let chunk_array = ndarray::ArrayD::<f32>::from_elem(
chunk_shape
.iter()
.map(|u| u.get() as usize)
.collect::<Vec<_>>(),
i as f32,
);
array.store_chunk_ndarray(&chunk_indices, chunk_array)
} else {
Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
}
})?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write a subset spanning multiple chunks, including updating chunks already written
array.store_array_subset_ndarray(
&[3, 3], // start
ndarray::ArrayD::<f32>::from_shape_vec(
vec![3, 3],
vec![0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
)?,
)?;
// Store elements directly, in this case set the 7th column to 123.0
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![123.0; 8],
)?;
// Store elements directly in a chunk, in this case set the last row of the bottom right chunk
array.store_chunk_subset_elements::<f32>(
// chunk indices
&[3, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[1..2, 0..4]),
vec![-4.0; 4],
)?;
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a chunk back from the store
let chunk_indices = vec![1, 0];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("The zarr hierarchy tree is:\n{tree}");
Ok(())
}
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some chunks
(0..2).into_par_iter().try_for_each(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
let chunk_subset = array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})?;
array.store_chunk_elements(
&chunk_indices,
vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
})?;
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
array.store_chunks_elements::<f32>(
&ArraySubset::new_with_ranges(&[1..2, 0..2]),
vec![
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
array.store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
array.store_chunk_subset_elements::<f32>(
// chunk indices
&[1, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[3..4, 0..4]),
vec![-7.4, -7.5, -7.6, -7.7],
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.erase_chunk(&[0, 0])?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
fn sharded_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use zarrs::{
array::{
codec::{self, array_to_bytes::sharding::ShardingCodecBuilder},
DataType, FillValue,
},
array_subset::ArraySubset,
node::Node,
storage::store,
};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use std::sync::Arc;
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new("tests/data/sharded_array_write_read.zarr")?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
// Create an array
let array_path = "/group/array";
let shard_shape = vec![4, 8];
let inner_chunk_shape = vec![4, 4];
let mut sharding_codec_builder =
ShardingCodecBuilder::new(inner_chunk_shape.as_slice().try_into()?);
sharding_codec_builder.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Box::new(codec::GzipCodec::new(5)?),
]);
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::UInt16,
shard_shape.try_into()?,
FillValue::from(0u16),
)
.array_to_bytes_codec(Box::new(sharding_codec_builder.build()))
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
// The array metadata is
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some shards (in parallel)
(0..2).into_par_iter().try_for_each(|s| {
let chunk_grid = array.chunk_grid();
let chunk_indices = vec![s, 0];
if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
let chunk_array = ndarray::ArrayD::<u16>::from_shape_fn(
chunk_shape
.iter()
.map(|u| u.get() as usize)
.collect::<Vec<_>>(),
|ij| {
(s * chunk_shape[0].get() * chunk_shape[1].get()
+ ij[0] as u64 * chunk_shape[1].get()
+ ij[1] as u64) as u16
},
);
array.store_chunk_ndarray(&chunk_indices, chunk_array)
} else {
Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
}
})?;
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec()); // the whole array
let data_all = array.retrieve_array_subset_ndarray::<u16>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a shard back from the store
let shard_indices = vec![1, 0];
let data_shard = array.retrieve_chunk_ndarray::<u16>(&shard_indices)?;
println!("Shard [1,0] is:\n{data_shard}\n");
// Read an inner chunk from the store
let subset_chunk_1_0 = ArraySubset::new_with_ranges(&[4..8, 0..4]);
let data_chunk = array.retrieve_array_subset_ndarray::<u16>(&subset_chunk_1_0)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<u16>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
// Decode inner chunks
// In some cases, it might be preferable to decode inner chunks in a shard directly.
// If using the partial decoder, then the shard index will only be read once from the store.
let partial_decoder = array.partial_decoder(&[0, 0])?;
let inner_chunks_to_decode = vec![
ArraySubset::new_with_start_shape(vec![0, 0], inner_chunk_shape.clone())?,
ArraySubset::new_with_start_shape(vec![0, 4], inner_chunk_shape.clone())?,
];
let decoded_inner_chunks_bytes = partial_decoder.partial_decode(&inner_chunks_to_decode)?;
let decoded_inner_chunks_ndarray = decoded_inner_chunks_bytes
.into_iter()
.map(|bytes| bytes_to_ndarray::<u16>(&inner_chunk_shape, bytes))
.collect::<Result<Vec<_>, _>>()?;
println!("Decoded inner chunks:");
for (inner_chunk_subset, decoded_inner_chunk) in
std::iter::zip(inner_chunks_to_decode, decoded_inner_chunks_ndarray)
{
println!("{inner_chunk_subset}\n{decoded_inner_chunk}\n");
}
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("The zarr hierarchy tree is:\n{}", tree);
println!(
"The keys in the store are:\n[{}]",
store.list().unwrap_or_default().iter().format(", ")
);
Ok(())
}
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata()).unwrap()
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some chunks
(0..2).into_par_iter().try_for_each(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
let chunk_subset = array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})?;
array.store_chunk_ndarray(
&chunk_indices,
ArrayD::<f32>::from_shape_vec(
chunk_subset.shape_usize(),
vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
.unwrap(),
)
})?;
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
let ndarray_chunks: Array2<f32> = array![
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
[1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
];
array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
let ndarray_subset: Array2<f32> =
array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
array.store_array_subset_ndarray(
ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
ndarray_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
let ndarray_subset: Array2<f32> = array![
[-0.6],
[-1.6],
[-2.6],
[-3.6],
[-4.6],
[-5.6],
[-6.6],
[-7.6],
];
array.store_array_subset_ndarray(
ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
ndarray_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
array.store_chunk_subset_ndarray(
// chunk indices
&[1, 1],
// subset within chunk
ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
ndarray_chunk_subset,
)?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.erase_chunk(&[0, 0])?;
let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199
async fn async_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use futures::{stream::FuturesUnordered, StreamExt};
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: AsyncReadableWritableListableStorage = Arc::new(store::AsyncObjectStore::new(
object_store::memory::InMemory::new(),
));
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_async_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.async_store_metadata().await?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata())?
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.async_store_metadata().await?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata())?
);
// Write some chunks
let subsets = (0..2)
.map(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})
.map(|chunk_subset| (i, chunk_indices, chunk_subset))
})
.collect::<Result<Vec<_>, _>>()?;
let mut futures = subsets
.iter()
.map(|(i, chunk_indices, chunk_subset)| {
array.async_store_chunk_elements(
&chunk_indices,
vec![*i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
})
.collect::<FuturesUnordered<_>>();
while let Some(item) = futures.next().await {
item?;
}
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
array
.async_store_chunks_elements::<f32>(
&ArraySubset::new_with_ranges(&[1..2, 0..2]),
vec![
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
array
.async_store_chunk_subset_elements::<f32>(
// chunk indices
&[1, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[3..4, 0..4]),
vec![-7.4, -7.5, -7.6, -7.7],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.async_erase_chunk(&[0, 0]).await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array
.async_retrieve_chunk_ndarray::<f32>(&chunk_indices)
.await?;
println!("async_retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.async_retrieve_chunks_ndarray::<f32>(&chunks).await?;
println!("async_retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array
.async_retrieve_array_subset_ndarray::<f32>(&subset)
.await?;
println!("async_retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::async_new(&*store, "/").await.unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
sourcepub fn new_with_start_shape(
start: ArrayIndices,
shape: ArrayShape
) -> Result<Self, IncompatibleDimensionalityError>
pub fn new_with_start_shape( start: ArrayIndices, shape: ArrayShape ) -> Result<Self, IncompatibleDimensionalityError>
Create a new array subset.
§Errors
Returns IncompatibleDimensionalityError
if the dimensionality (length) of `start`
and `shape`
do not match.
Examples found in repository?
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
fn sharded_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use zarrs::{
array::{
codec::{self, array_to_bytes::sharding::ShardingCodecBuilder},
DataType, FillValue,
},
array_subset::ArraySubset,
node::Node,
storage::store,
};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use std::sync::Arc;
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new("tests/data/sharded_array_write_read.zarr")?);
let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.store_metadata()?;
// Create an array
let array_path = "/group/array";
let shard_shape = vec![4, 8];
let inner_chunk_shape = vec![4, 4];
let mut sharding_codec_builder =
ShardingCodecBuilder::new(inner_chunk_shape.as_slice().try_into()?);
sharding_codec_builder.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Box::new(codec::GzipCodec::new(5)?),
]);
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::UInt16,
shard_shape.try_into()?,
FillValue::from(0u16),
)
.array_to_bytes_codec(Box::new(sharding_codec_builder.build()))
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.store_metadata()?;
// The array metadata is
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata()).unwrap()
);
// Write some shards (in parallel)
(0..2).into_par_iter().try_for_each(|s| {
let chunk_grid = array.chunk_grid();
let chunk_indices = vec![s, 0];
if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
let chunk_array = ndarray::ArrayD::<u16>::from_shape_fn(
chunk_shape
.iter()
.map(|u| u.get() as usize)
.collect::<Vec<_>>(),
|ij| {
(s * chunk_shape[0].get() * chunk_shape[1].get()
+ ij[0] as u64 * chunk_shape[1].get()
+ ij[1] as u64) as u16
},
);
array.store_chunk_ndarray(&chunk_indices, chunk_array)
} else {
Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
chunk_indices.to_vec(),
))
}
})?;
// Read the whole array
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec()); // the whole array
let data_all = array.retrieve_array_subset_ndarray::<u16>(&subset_all)?;
println!("The whole array is:\n{data_all}\n");
// Read a shard back from the store
let shard_indices = vec![1, 0];
let data_shard = array.retrieve_chunk_ndarray::<u16>(&shard_indices)?;
println!("Shard [1,0] is:\n{data_shard}\n");
// Read an inner chunk from the store
let subset_chunk_1_0 = ArraySubset::new_with_ranges(&[4..8, 0..4]);
let data_chunk = array.retrieve_array_subset_ndarray::<u16>(&subset_chunk_1_0)?;
println!("Chunk [1,0] is:\n{data_chunk}\n");
// Read the central 4x2 subset of the array
let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_4x2 = array.retrieve_array_subset_ndarray::<u16>(&subset_4x2)?;
println!("The middle 4x2 subset is:\n{data_4x2}\n");
// Decode inner chunks
// In some cases, it might be preferable to decode inner chunks in a shard directly.
// If using the partial decoder, then the shard index will only be read once from the store.
let partial_decoder = array.partial_decoder(&[0, 0])?;
let inner_chunks_to_decode = vec![
ArraySubset::new_with_start_shape(vec![0, 0], inner_chunk_shape.clone())?,
ArraySubset::new_with_start_shape(vec![0, 4], inner_chunk_shape.clone())?,
];
let decoded_inner_chunks_bytes = partial_decoder.partial_decode(&inner_chunks_to_decode)?;
let decoded_inner_chunks_ndarray = decoded_inner_chunks_bytes
.into_iter()
.map(|bytes| bytes_to_ndarray::<u16>(&inner_chunk_shape, bytes))
.collect::<Result<Vec<_>, _>>()?;
println!("Decoded inner chunks:");
for (inner_chunk_subset, decoded_inner_chunk) in
std::iter::zip(inner_chunks_to_decode, decoded_inner_chunks_ndarray)
{
println!("{inner_chunk_subset}\n{decoded_inner_chunk}\n");
}
// Show the hierarchy
let node = Node::new(&*store, "/").unwrap();
let tree = node.hierarchy_tree();
println!("The zarr hierarchy tree is:\n{}", tree);
println!(
"The keys in the store are:\n[{}]",
store.list().unwrap_or_default().iter().format(", ")
);
Ok(())
}
sourcepub unsafe fn new_with_start_shape_unchecked(
start: ArrayIndices,
shape: ArrayShape
) -> Self
pub unsafe fn new_with_start_shape_unchecked( start: ArrayIndices, shape: ArrayShape ) -> Self
Create a new array subset from a start and shape.
§Safety
The length of start
and shape
must match.
sourcepub fn new_with_start_end_inc(
start: ArrayIndices,
end: ArrayIndices
) -> Result<Self, IncompatibleStartEndIndicesError>
pub fn new_with_start_end_inc( start: ArrayIndices, end: ArrayIndices ) -> Result<Self, IncompatibleStartEndIndicesError>
Create a new array subset from a start and end (inclusive).
§Errors
Returns IncompatibleStartEndIndicesError
if start
and end
are incompatible, such as if any element of end
is less than start
or they differ in length.
sourcepub unsafe fn new_with_start_end_inc_unchecked(
start: ArrayIndices,
end: ArrayIndices
) -> Self
pub unsafe fn new_with_start_end_inc_unchecked( start: ArrayIndices, end: ArrayIndices ) -> Self
Create a new array subset from a start and end (inclusive).
§Safety
The length of start
and end
must match.
sourcepub fn new_with_start_end_exc(
start: ArrayIndices,
end: ArrayIndices
) -> Result<Self, IncompatibleStartEndIndicesError>
pub fn new_with_start_end_exc( start: ArrayIndices, end: ArrayIndices ) -> Result<Self, IncompatibleStartEndIndicesError>
Create a new array subset from a start and end (exclusive).
§Errors
Returns IncompatibleStartEndIndicesError
if start
and end
are incompatible, such as if any element of end
is less than start
or they differ in length.
sourcepub unsafe fn new_with_start_end_exc_unchecked(
start: ArrayIndices,
end: ArrayIndices
) -> Self
pub unsafe fn new_with_start_end_exc_unchecked( start: ArrayIndices, end: ArrayIndices ) -> Self
Create a new array subset from a start and end (exclusive).
§Safety
The length of start
and end
must match.
sourcepub fn bound(
&self,
end: &[u64]
) -> Result<Self, IncompatibleDimensionalityError>
pub fn bound( &self, end: &[u64] ) -> Result<Self, IncompatibleDimensionalityError>
Bound the array subset to the domain within end
(exclusive).
§Errors
Returns an error if end
does not match the array subset dimensionality.
sourcepub unsafe fn bound_unchecked(&self, end: &[u64]) -> Self
pub unsafe fn bound_unchecked(&self, end: &[u64]) -> Self
Bound the array subset to the domain within end
(exclusive).
§Safety
The length of end
must match the array subset dimensionality.
sourcepub fn start(&self) -> &[u64]
pub fn start(&self) -> &[u64]
Return the start of the array subset.
Examples found in repository?
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178
/// Synchronous `zarrs` write/read example (ndarray variant).
///
/// Creates a group and an 8x8 `float32` array with regular 4x4 chunks in an
/// in-memory store, writes data via whole chunks, multi-chunk writes, array
/// subsets and chunk subsets, then reads everything back and prints the
/// hierarchy. Returns an error if any store/array operation fails.
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use zarrs::{
        array::{DataType, FillValue, ZARR_NAN_F32},
        array_subset::ArraySubset,
        node::Node,
        storage::store,
    };
    // Create a store (in-memory; filesystem alternatives kept for reference)
    // let path = tempfile::TempDir::new()?;
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
    //     "tests/data/array_write_read.zarr",
    // )?);
    let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
    // Optionally wrap the store in a usage-logging transformer when the first
    // CLI argument is `--usage-log`; each storage call is logged to stdout
    // with a timestamp prefix.
    if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
        if arg1 == "--usage-log" {
            let log_writer = Arc::new(std::sync::Mutex::new(
                // std::io::BufWriter::new(
                std::io::stdout(),
                // )
            ));
            let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
                chrono::Utc::now().format("[%T%.3f] ").to_string()
            }));
            store = usage_log
                .clone()
                .create_readable_writable_listable_transformer(store);
        }
    }
    // Create a group
    let group_path = "/group";
    let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
    // Update group metadata
    group
        .attributes_mut()
        .insert("foo".into(), serde_json::Value::String("bar".into()));
    // Write group metadata to store
    group.store_metadata()?;
    println!(
        "The group metadata is:\n{}\n",
        serde_json::to_string_pretty(&group.metadata()).unwrap()
    );
    // Create an array
    let array_path = "/group/array";
    let array = zarrs::array::ArrayBuilder::new(
        vec![8, 8], // array shape
        DataType::Float32,
        vec![4, 4].try_into()?, // regular chunk shape
        FillValue::from(ZARR_NAN_F32), // fill value: NaN (unwritten regions)
    )
    // .bytes_to_bytes_codecs(vec![]) // uncompressed
    .dimension_names(["y", "x"].into())
    // .storage_transformers(vec![].into())
    .build(store.clone(), array_path)?;
    // Write array metadata to store
    array.store_metadata()?;
    println!(
        "The array metadata is:\n{}\n",
        serde_json::to_string_pretty(&array.metadata()).unwrap()
    );
    // Write some chunks (chunks [0, 0] and [0, 1], in parallel via rayon)
    (0..2).into_par_iter().try_for_each(|i| {
        let chunk_indices: Vec<u64> = vec![0, i];
        // The chunk subset gives this chunk's extent within the array.
        let chunk_subset = array
            .chunk_grid()
            .subset(&chunk_indices, array.shape())?
            .ok_or_else(|| {
                zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
            })?;
        // Fill the whole chunk with a constant (0.0 or 0.1 depending on i).
        array.store_chunk_ndarray(
            &chunk_indices,
            ArrayD::<f32>::from_shape_vec(
                chunk_subset.shape_usize(),
                vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
            )
            .unwrap(),
        )
    })?;
    // Read back the whole array after each write to show its evolution.
    let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
    // Store multiple chunks in one call (chunk block [1..2, 0..2])
    let ndarray_chunks: Array2<f32> = array![
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
    ];
    array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
    // Write a subset spanning multiple chunks, including updating chunks already written
    let ndarray_subset: Array2<f32> =
        array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
    array.store_array_subset_ndarray(
        // only the subset origin is passed; the extent comes from the ndarray
        ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
    // Store array subset (a single column, rows 0..8 of column 6)
    let ndarray_subset: Array2<f32> = array![
        [-0.6],
        [-1.6],
        [-2.6],
        [-3.6],
        [-4.6],
        [-5.6],
        [-6.6],
        [-7.6],
    ];
    array.store_array_subset_ndarray(
        ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
    // Store chunk subset (a row within a single chunk)
    let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
    array.store_chunk_subset_ndarray(
        // chunk indices
        &[1, 1],
        // subset within chunk
        ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
        ndarray_chunk_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
    // Erase a chunk (its region reads back as the fill value)
    array.erase_chunk(&[0, 0])?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
    // Read a chunk
    let chunk_indices = vec![0, 1];
    let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
    println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
    // Read chunks
    let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
    let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
    println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
    // Retrieve an array subset
    let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
    let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
    println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
    // Show the hierarchy
    let node = Node::new(&*store, "/").unwrap();
    let tree = node.hierarchy_tree();
    println!("hierarchy_tree:\n{}", tree);
    Ok(())
}
sourcepub fn shape_usize(&self) -> Vec<usize>
pub fn shape_usize(&self) -> Vec<usize>
Return the shape of the array subset with usize elements (e.g. for use as an ndarray shape).
Examples found in repository?
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178
/// Synchronous `zarrs` write/read example (ndarray variant).
///
/// Creates a group and an 8x8 `float32` array with regular 4x4 chunks in an
/// in-memory store, writes data via whole chunks, multi-chunk writes, array
/// subsets and chunk subsets, then reads everything back and prints the
/// hierarchy. Returns an error if any store/array operation fails.
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use zarrs::{
        array::{DataType, FillValue, ZARR_NAN_F32},
        array_subset::ArraySubset,
        node::Node,
        storage::store,
    };
    // Create a store (in-memory; filesystem alternatives kept for reference)
    // let path = tempfile::TempDir::new()?;
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
    //     "tests/data/array_write_read.zarr",
    // )?);
    let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
    // Optionally wrap the store in a usage-logging transformer when the first
    // CLI argument is `--usage-log`; each storage call is logged to stdout
    // with a timestamp prefix.
    if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
        if arg1 == "--usage-log" {
            let log_writer = Arc::new(std::sync::Mutex::new(
                // std::io::BufWriter::new(
                std::io::stdout(),
                // )
            ));
            let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
                chrono::Utc::now().format("[%T%.3f] ").to_string()
            }));
            store = usage_log
                .clone()
                .create_readable_writable_listable_transformer(store);
        }
    }
    // Create a group
    let group_path = "/group";
    let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
    // Update group metadata
    group
        .attributes_mut()
        .insert("foo".into(), serde_json::Value::String("bar".into()));
    // Write group metadata to store
    group.store_metadata()?;
    println!(
        "The group metadata is:\n{}\n",
        serde_json::to_string_pretty(&group.metadata()).unwrap()
    );
    // Create an array
    let array_path = "/group/array";
    let array = zarrs::array::ArrayBuilder::new(
        vec![8, 8], // array shape
        DataType::Float32,
        vec![4, 4].try_into()?, // regular chunk shape
        FillValue::from(ZARR_NAN_F32), // fill value: NaN (unwritten regions)
    )
    // .bytes_to_bytes_codecs(vec![]) // uncompressed
    .dimension_names(["y", "x"].into())
    // .storage_transformers(vec![].into())
    .build(store.clone(), array_path)?;
    // Write array metadata to store
    array.store_metadata()?;
    println!(
        "The array metadata is:\n{}\n",
        serde_json::to_string_pretty(&array.metadata()).unwrap()
    );
    // Write some chunks (chunks [0, 0] and [0, 1], in parallel via rayon)
    (0..2).into_par_iter().try_for_each(|i| {
        let chunk_indices: Vec<u64> = vec![0, i];
        // The chunk subset gives this chunk's extent within the array.
        let chunk_subset = array
            .chunk_grid()
            .subset(&chunk_indices, array.shape())?
            .ok_or_else(|| {
                zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
            })?;
        // Fill the whole chunk with a constant (0.0 or 0.1 depending on i).
        array.store_chunk_ndarray(
            &chunk_indices,
            ArrayD::<f32>::from_shape_vec(
                chunk_subset.shape_usize(),
                vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
            )
            .unwrap(),
        )
    })?;
    // Read back the whole array after each write to show its evolution.
    let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
    // Store multiple chunks in one call (chunk block [1..2, 0..2])
    let ndarray_chunks: Array2<f32> = array![
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
    ];
    array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
    // Write a subset spanning multiple chunks, including updating chunks already written
    let ndarray_subset: Array2<f32> =
        array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
    array.store_array_subset_ndarray(
        // only the subset origin is passed; the extent comes from the ndarray
        ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
    // Store array subset (a single column, rows 0..8 of column 6)
    let ndarray_subset: Array2<f32> = array![
        [-0.6],
        [-1.6],
        [-2.6],
        [-3.6],
        [-4.6],
        [-5.6],
        [-6.6],
        [-7.6],
    ];
    array.store_array_subset_ndarray(
        ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
    // Store chunk subset (a row within a single chunk)
    let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
    array.store_chunk_subset_ndarray(
        // chunk indices
        &[1, 1],
        // subset within chunk
        ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
        ndarray_chunk_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
    // Erase a chunk (its region reads back as the fill value)
    array.erase_chunk(&[0, 0])?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
    // Read a chunk
    let chunk_indices = vec![0, 1];
    let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
    println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
    // Read chunks
    let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
    let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
    println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
    // Retrieve an array subset
    let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
    let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
    println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
    // Show the hierarchy
    let node = Node::new(&*store, "/").unwrap();
    let tree = node.hierarchy_tree();
    println!("hierarchy_tree:\n{}", tree);
    Ok(())
}
sourcepub fn is_empty(&self) -> bool
pub fn is_empty(&self) -> bool
Returns if the array subset is empty (i.e. has a zero element in its shape).
sourcepub fn dimensionality(&self) -> usize
pub fn dimensionality(&self) -> usize
Return the dimensionality of the array subset.
sourcepub fn end_inc(&self) -> Option<ArrayIndices>
pub fn end_inc(&self) -> Option<ArrayIndices>
Return the end (inclusive) of the array subset.
Returns None
if the array subset is empty.
sourcepub fn end_exc(&self) -> ArrayIndices
pub fn end_exc(&self) -> ArrayIndices
Return the end (exclusive) of the array subset.
sourcepub fn num_elements(&self) -> u64
pub fn num_elements(&self) -> u64
Return the number of elements of the array subset.
Equal to the product of the components of its shape.
Examples found in repository?
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
/// Synchronous `zarrs` write/read example (element-vector `_elements` variant).
///
/// Same scenario as the ndarray variant — a group plus an 8x8 `float32` array
/// with 4x4 chunks in an in-memory store — but data is written with the
/// `store_*_elements` methods, which take flat `Vec<f32>` buffers in row-major
/// order instead of ndarrays. Returns an error if any store/array operation fails.
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use zarrs::{
        array::{DataType, FillValue, ZARR_NAN_F32},
        array_subset::ArraySubset,
        node::Node,
        storage::store,
    };
    // Create a store (in-memory; filesystem alternatives kept for reference)
    // let path = tempfile::TempDir::new()?;
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
    //     "tests/data/array_write_read.zarr",
    // )?);
    let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
    // Optionally wrap the store in a usage-logging transformer when the first
    // CLI argument is `--usage-log`.
    if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
        if arg1 == "--usage-log" {
            let log_writer = Arc::new(std::sync::Mutex::new(
                // std::io::BufWriter::new(
                std::io::stdout(),
                // )
            ));
            let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
                chrono::Utc::now().format("[%T%.3f] ").to_string()
            }));
            store = usage_log
                .clone()
                .create_readable_writable_listable_transformer(store);
        }
    }
    // Create a group
    let group_path = "/group";
    let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
    // Update group metadata
    group
        .attributes_mut()
        .insert("foo".into(), serde_json::Value::String("bar".into()));
    // Write group metadata to store
    group.store_metadata()?;
    println!(
        "The group metadata is:\n{}\n",
        serde_json::to_string_pretty(&group.metadata()).unwrap()
    );
    // Create an array
    let array_path = "/group/array";
    let array = zarrs::array::ArrayBuilder::new(
        vec![8, 8], // array shape
        DataType::Float32,
        vec![4, 4].try_into()?, // regular chunk shape
        FillValue::from(ZARR_NAN_F32), // fill value: NaN (unwritten regions)
    )
    // .bytes_to_bytes_codecs(vec![]) // uncompressed
    .dimension_names(["y", "x"].into())
    // .storage_transformers(vec![].into())
    .build(store.clone(), array_path)?;
    // Write array metadata to store
    array.store_metadata()?;
    println!(
        "The array metadata is:\n{}\n",
        serde_json::to_string_pretty(&array.metadata()).unwrap()
    );
    // Write some chunks (chunks [0, 0] and [0, 1], in parallel via rayon)
    (0..2).into_par_iter().try_for_each(|i| {
        let chunk_indices: Vec<u64> = vec![0, i];
        // The chunk subset gives this chunk's extent within the array;
        // num_elements() sizes the flat element buffer below.
        let chunk_subset = array
            .chunk_grid()
            .subset(&chunk_indices, array.shape())?
            .ok_or_else(|| {
                zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
            })?;
        array.store_chunk_elements(
            &chunk_indices,
            vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
        )
    })?;
    // Read back the whole array after each write to show its evolution.
    let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
    // Store multiple chunks in one call (chunk block [1..2, 0..2], flat row-major data)
    array.store_chunks_elements::<f32>(
        &ArraySubset::new_with_ranges(&[1..2, 0..2]),
        vec![
            //
            1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
            //
            1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
        ],
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
    // Write a subset spanning multiple chunks, including updating chunks already written
    array.store_array_subset_elements::<f32>(
        &ArraySubset::new_with_ranges(&[3..6, 3..6]),
        vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
    // Store array subset (a single column, rows 0..8 of column 6)
    array.store_array_subset_elements::<f32>(
        &ArraySubset::new_with_ranges(&[0..8, 6..7]),
        vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
    // Store chunk subset (a row within a single chunk)
    array.store_chunk_subset_elements::<f32>(
        // chunk indices
        &[1, 1],
        // subset within chunk
        &ArraySubset::new_with_ranges(&[3..4, 0..4]),
        vec![-7.4, -7.5, -7.6, -7.7],
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
    // Erase a chunk (its region reads back as the fill value)
    array.erase_chunk(&[0, 0])?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
    // Read a chunk
    let chunk_indices = vec![0, 1];
    let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
    println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
    // Read chunks
    let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
    let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
    println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
    // Retrieve an array subset
    let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
    let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
    println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
    // Show the hierarchy
    let node = Node::new(&*store, "/").unwrap();
    let tree = node.hierarchy_tree();
    println!("hierarchy_tree:\n{}", tree);
    Ok(())
}
More examples
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178
/// Synchronous `zarrs` write/read example (ndarray variant).
///
/// Creates a group and an 8x8 `float32` array with regular 4x4 chunks in an
/// in-memory store, writes data via whole chunks, multi-chunk writes, array
/// subsets and chunk subsets, then reads everything back and prints the
/// hierarchy. Returns an error if any store/array operation fails.
fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use zarrs::{
        array::{DataType, FillValue, ZARR_NAN_F32},
        array_subset::ArraySubset,
        node::Node,
        storage::store,
    };
    // Create a store (in-memory; filesystem alternatives kept for reference)
    // let path = tempfile::TempDir::new()?;
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(path.path())?);
    // let mut store: ReadableWritableListableStorage = Arc::new(store::FilesystemStore::new(
    //     "tests/data/array_write_read.zarr",
    // )?);
    let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
    // Optionally wrap the store in a usage-logging transformer when the first
    // CLI argument is `--usage-log`; each storage call is logged to stdout
    // with a timestamp prefix.
    if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
        if arg1 == "--usage-log" {
            let log_writer = Arc::new(std::sync::Mutex::new(
                // std::io::BufWriter::new(
                std::io::stdout(),
                // )
            ));
            let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
                chrono::Utc::now().format("[%T%.3f] ").to_string()
            }));
            store = usage_log
                .clone()
                .create_readable_writable_listable_transformer(store);
        }
    }
    // Create a group
    let group_path = "/group";
    let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
    // Update group metadata
    group
        .attributes_mut()
        .insert("foo".into(), serde_json::Value::String("bar".into()));
    // Write group metadata to store
    group.store_metadata()?;
    println!(
        "The group metadata is:\n{}\n",
        serde_json::to_string_pretty(&group.metadata()).unwrap()
    );
    // Create an array
    let array_path = "/group/array";
    let array = zarrs::array::ArrayBuilder::new(
        vec![8, 8], // array shape
        DataType::Float32,
        vec![4, 4].try_into()?, // regular chunk shape
        FillValue::from(ZARR_NAN_F32), // fill value: NaN (unwritten regions)
    )
    // .bytes_to_bytes_codecs(vec![]) // uncompressed
    .dimension_names(["y", "x"].into())
    // .storage_transformers(vec![].into())
    .build(store.clone(), array_path)?;
    // Write array metadata to store
    array.store_metadata()?;
    println!(
        "The array metadata is:\n{}\n",
        serde_json::to_string_pretty(&array.metadata()).unwrap()
    );
    // Write some chunks (chunks [0, 0] and [0, 1], in parallel via rayon)
    (0..2).into_par_iter().try_for_each(|i| {
        let chunk_indices: Vec<u64> = vec![0, i];
        // The chunk subset gives this chunk's extent within the array.
        let chunk_subset = array
            .chunk_grid()
            .subset(&chunk_indices, array.shape())?
            .ok_or_else(|| {
                zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
            })?;
        // Fill the whole chunk with a constant (0.0 or 0.1 depending on i).
        array.store_chunk_ndarray(
            &chunk_indices,
            ArrayD::<f32>::from_shape_vec(
                chunk_subset.shape_usize(),
                vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
            )
            .unwrap(),
        )
    })?;
    // Read back the whole array after each write to show its evolution.
    let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
    // Store multiple chunks in one call (chunk block [1..2, 0..2])
    let ndarray_chunks: Array2<f32> = array![
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
        [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
    ];
    array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
    // Write a subset spanning multiple chunks, including updating chunks already written
    let ndarray_subset: Array2<f32> =
        array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
    array.store_array_subset_ndarray(
        // only the subset origin is passed; the extent comes from the ndarray
        ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
    // Store array subset (a single column, rows 0..8 of column 6)
    let ndarray_subset: Array2<f32> = array![
        [-0.6],
        [-1.6],
        [-2.6],
        [-3.6],
        [-4.6],
        [-5.6],
        [-6.6],
        [-7.6],
    ];
    array.store_array_subset_ndarray(
        ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
        ndarray_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
    // Store chunk subset (a row within a single chunk)
    let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
    array.store_chunk_subset_ndarray(
        // chunk indices
        &[1, 1],
        // subset within chunk
        ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
        ndarray_chunk_subset,
    )?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
    // Erase a chunk (its region reads back as the fill value)
    array.erase_chunk(&[0, 0])?;
    let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
    println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
    // Read a chunk
    let chunk_indices = vec![0, 1];
    let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
    println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
    // Read chunks
    let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
    let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
    println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
    // Retrieve an array subset
    let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
    let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
    println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
    // Show the hierarchy
    let node = Node::new(&*store, "/").unwrap();
    let tree = node.hierarchy_tree();
    println!("hierarchy_tree:\n{}", tree);
    Ok(())
}
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199
async fn async_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
use futures::{stream::FuturesUnordered, StreamExt};
use std::sync::Arc;
use zarrs::{
array::{DataType, FillValue, ZARR_NAN_F32},
array_subset::ArraySubset,
node::Node,
storage::store,
};
// Create a store
// let path = tempfile::TempDir::new()?;
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(path.path())?);
// let mut store: ReadableWritableListableStorage = Arc::new(store::AsyncFilesystemStore::new(
// "tests/data/array_write_read.zarr",
// )?);
let mut store: AsyncReadableWritableListableStorage = Arc::new(store::AsyncObjectStore::new(
object_store::memory::InMemory::new(),
));
if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
if arg1 == "--usage-log" {
let log_writer = Arc::new(std::sync::Mutex::new(
// std::io::BufWriter::new(
std::io::stdout(),
// )
));
let usage_log = Arc::new(UsageLogStorageTransformer::new(log_writer, || {
chrono::Utc::now().format("[%T%.3f] ").to_string()
}));
store = usage_log
.clone()
.create_async_readable_writable_listable_transformer(store);
}
}
// Create a group
let group_path = "/group";
let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
// Update group metadata
group
.attributes_mut()
.insert("foo".into(), serde_json::Value::String("bar".into()));
// Write group metadata to store
group.async_store_metadata().await?;
println!(
"The group metadata is:\n{}\n",
serde_json::to_string_pretty(&group.metadata())?
);
// Create an array
let array_path = "/group/array";
let array = zarrs::array::ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape
FillValue::from(ZARR_NAN_F32),
)
// .bytes_to_bytes_codecs(vec![]) // uncompressed
.dimension_names(["y", "x"].into())
// .storage_transformers(vec![].into())
.build(store.clone(), array_path)?;
// Write array metadata to store
array.async_store_metadata().await?;
println!(
"The array metadata is:\n{}\n",
serde_json::to_string_pretty(&array.metadata())?
);
// Write some chunks
let subsets = (0..2)
.map(|i| {
let chunk_indices: Vec<u64> = vec![0, i];
array
.chunk_grid()
.subset(&chunk_indices, array.shape())?
.ok_or_else(|| {
zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
})
.map(|chunk_subset| (i, chunk_indices, chunk_subset))
})
.collect::<Result<Vec<_>, _>>()?;
let mut futures = subsets
.iter()
.map(|(i, chunk_indices, chunk_subset)| {
array.async_store_chunk_elements(
&chunk_indices,
vec![*i as f32 * 0.1; chunk_subset.num_elements() as usize],
)
})
.collect::<FuturesUnordered<_>>();
while let Some(item) = futures.next().await {
item?;
}
let subset_all = ArraySubset::new_with_shape(array.shape().to_vec());
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
// Store multiple chunks
array
.async_store_chunks_elements::<f32>(
&ArraySubset::new_with_ranges(&[1..2, 0..2]),
vec![
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
//
1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
// Write a subset spanning multiple chunks, including updating chunks already written
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[3..6, 3..6]),
vec![-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
// Store array subset
array
.async_store_array_subset_elements::<f32>(
&ArraySubset::new_with_ranges(&[0..8, 6..7]),
vec![-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
// Store chunk subset
array
.async_store_chunk_subset_elements::<f32>(
// chunk indices
&[1, 1],
// subset within chunk
&ArraySubset::new_with_ranges(&[3..4, 0..4]),
vec![-7.4, -7.5, -7.6, -7.7],
)
.await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
// Erase a chunk
array.async_erase_chunk(&[0, 0]).await?;
let data_all = array
.async_retrieve_array_subset_ndarray::<f32>(&subset_all)
.await?;
println!("async_erase_chunk [0, 0]:\n{data_all:+4.1}\n");
// Read a chunk
let chunk_indices = vec![0, 1];
let data_chunk = array
.async_retrieve_chunk_ndarray::<f32>(&chunk_indices)
.await?;
println!("async_retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
// Read chunks
let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
let data_chunks = array.async_retrieve_chunks_ndarray::<f32>(&chunks).await?;
println!("async_retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
// Retrieve an array subset
let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
let data_subset = array
.async_retrieve_array_subset_ndarray::<f32>(&subset)
.await?;
println!("async_retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
// Show the hierarchy
let node = Node::async_new(&*store, "/").await.unwrap();
let tree = node.hierarchy_tree();
println!("hierarchy_tree:\n{}", tree);
Ok(())
}
sourcepub fn num_elements_usize(&self) -> usize
pub fn num_elements_usize(&self) -> usize
Return the number of elements of the array subset as a usize
.
§Panics
Panics if num_elements()
is greater than usize::MAX
.
sourcepub fn byte_ranges(
&self,
array_shape: &[u64],
element_size: usize
) -> Result<Vec<ByteRange>, IncompatibleArraySubsetAndShapeError>
pub fn byte_ranges( &self, array_shape: &[u64], element_size: usize ) -> Result<Vec<ByteRange>, IncompatibleArraySubsetAndShapeError>
Return the byte ranges of an array subset in an array with array_shape
and element_size
.
§Errors
Returns IncompatibleArraySubsetAndShapeError
if the array_shape
does not encapsulate this array subset.
sourcepub unsafe fn byte_ranges_unchecked(
&self,
array_shape: &[u64],
element_size: usize
) -> Vec<ByteRange>
pub unsafe fn byte_ranges_unchecked( &self, array_shape: &[u64], element_size: usize ) -> Vec<ByteRange>
Return the byte ranges of an array subset in an array with array_shape
and element_size
.
§Safety
The length of array_shape
must match the dimensionality of array_subset
.
sourcepub fn extract_bytes(
&self,
bytes: &[u8],
array_shape: &[u64],
element_size: usize
) -> Result<Vec<u8>, ArrayExtractBytesError>
pub fn extract_bytes( &self, bytes: &[u8], array_shape: &[u64], element_size: usize ) -> Result<Vec<u8>, ArrayExtractBytesError>
Return the bytes in this array subset from an array with shape array_shape
and element_size
.
§Errors
Returns ArrayExtractBytesError
if the length of array_shape
does not match the array subset dimensionality or the array subset is outside of the bounds of array_shape
.
§Panics
Panics if attempting to access a byte index beyond usize::MAX
.
sourcepub unsafe fn extract_bytes_unchecked(
&self,
bytes: &[u8],
array_shape: &[u64],
element_size: usize
) -> Vec<u8> ⓘ
pub unsafe fn extract_bytes_unchecked( &self, bytes: &[u8], array_shape: &[u64], element_size: usize ) -> Vec<u8> ⓘ
Return the bytes in this array subset from an array with shape array_shape
and element_size
.
§Safety
The length of array_shape
must match the array subset dimensionality and the array subset must be within the bounds of array_shape
.
§Panics
Panics if attempting to reference a byte beyond usize::MAX
.
sourcepub fn extract_elements<T: Copy>(
&self,
elements: &[T],
array_shape: &[u64]
) -> Result<Vec<T>, IncompatibleArraySubsetAndShapeError>
pub fn extract_elements<T: Copy>( &self, elements: &[T], array_shape: &[u64] ) -> Result<Vec<T>, IncompatibleArraySubsetAndShapeError>
Return the elements in this array subset from an array with shape array_shape
.
§Errors
Returns IncompatibleArraySubsetAndShapeError
if the length of array_shape
does not match the array subset dimensionality or the array subset is outside of the bounds of array_shape
.
§Panics
Panics if attempting to access a byte index beyond usize::MAX
.
sourcepub unsafe fn extract_elements_unchecked<T: Copy>(
&self,
elements: &[T],
array_shape: &[u64]
) -> Vec<T>
pub unsafe fn extract_elements_unchecked<T: Copy>( &self, elements: &[T], array_shape: &[u64] ) -> Vec<T>
Return the elements in this array subset from an array with shape array_shape
.
§Safety
The length of array_shape
must match the array subset dimensionality and the array subset must be within the bounds of array_shape
.
sourcepub fn store_bytes(
&self,
bytes_subset: &[u8],
bytes_array: &mut [u8],
array_shape: &[u64],
element_size: usize
) -> Result<(), ArrayStoreBytesError>
pub fn store_bytes( &self, bytes_subset: &[u8], bytes_array: &mut [u8], array_shape: &[u64], element_size: usize ) -> Result<(), ArrayStoreBytesError>
Store bytes_subset
corresponding to the bytes of an array (array_bytes
) with shape array_shape
and element_size
.
§Errors
Returns ArrayStoreBytesError
if:
- the length of
array_shape
does not match the array subset dimensionality or the array subset is outside of the bounds ofarray_shape
. - the length of
bytes_array
is not compatible with thearray_shape
andelement size
, or - the length of
bytes_subset
is not compatible with the shape of this subset andelement_size
.
§Panics
Panics if attempting to reference a byte beyond usize::MAX
.
sourcepub unsafe fn store_bytes_unchecked(
&self,
bytes_subset: &[u8],
bytes_array: &mut [u8],
array_shape: &[u64],
element_size: usize
)
pub unsafe fn store_bytes_unchecked( &self, bytes_subset: &[u8], bytes_array: &mut [u8], array_shape: &[u64], element_size: usize )
Store bytes_subset
corresponding to the bytes of an array (array_bytes
) with shape array_shape
and element_size
.
§Safety
The length of array_shape
must match the array subset dimensionality and the array subset must be within the bounds of array_shape
.
The length of bytes_array
must match the product of the array_shape
components and element_size
.
The length of bytes_subset
must match the product of the array subset shape components and element_size
.
§Panics
Panics if attempting to reference a byte beyond usize::MAX
.
sourcepub fn indices(&self) -> Indices
pub fn indices(&self) -> Indices
Returns an iterator over the indices of elements within the subset.
sourcepub fn linearised_indices(
&self,
array_shape: &[u64]
) -> Result<LinearisedIndices, IncompatibleArraySubsetAndShapeError>
pub fn linearised_indices( &self, array_shape: &[u64] ) -> Result<LinearisedIndices, IncompatibleArraySubsetAndShapeError>
Returns an iterator over the linearised indices of elements within the subset.
§Errors
Returns IncompatibleArraySubsetAndShapeError
if the array_shape
does not encapsulate this array subset.
sourcepub unsafe fn linearised_indices_unchecked(
&self,
array_shape: &[u64]
) -> LinearisedIndices
pub unsafe fn linearised_indices_unchecked( &self, array_shape: &[u64] ) -> LinearisedIndices
Returns an iterator over the indices of elements within the subset.
§Safety
array_shape
must match the dimensionality and encapsulate this array subset.
sourcepub fn contiguous_indices(
&self,
array_shape: &[u64]
) -> Result<ContiguousIndices, IncompatibleArraySubsetAndShapeError>
pub fn contiguous_indices( &self, array_shape: &[u64] ) -> Result<ContiguousIndices, IncompatibleArraySubsetAndShapeError>
Returns an iterator over the indices of contiguous elements within the subset.
§Errors
Returns IncompatibleArraySubsetAndShapeError
if the array_shape
does not encapsulate this array subset.
sourcepub unsafe fn contiguous_indices_unchecked(
&self,
array_shape: &[u64]
) -> ContiguousIndices
pub unsafe fn contiguous_indices_unchecked( &self, array_shape: &[u64] ) -> ContiguousIndices
Returns an iterator over the indices of contiguous elements within the subset.
§Safety
The length of array_shape
must match the array subset dimensionality.
sourcepub fn contiguous_linearised_indices(
&self,
array_shape: &[u64]
) -> Result<ContiguousLinearisedIndices, IncompatibleArraySubsetAndShapeError>
pub fn contiguous_linearised_indices( &self, array_shape: &[u64] ) -> Result<ContiguousLinearisedIndices, IncompatibleArraySubsetAndShapeError>
Returns an iterator over the linearised indices of contiguous elements within the subset.
§Errors
Returns IncompatibleArraySubsetAndShapeError
if the array_shape
does not encapsulate this array subset.
sourcepub unsafe fn contiguous_linearised_indices_unchecked(
&self,
array_shape: &[u64]
) -> ContiguousLinearisedIndices
pub unsafe fn contiguous_linearised_indices_unchecked( &self, array_shape: &[u64] ) -> ContiguousLinearisedIndices
Returns an iterator over the linearised indices of contiguous elements within the subset.
§Safety
The length of array_shape
must match the array subset dimensionality.
sourcepub fn chunks(
&self,
chunk_shape: &[NonZeroU64]
) -> Result<Chunks, IncompatibleDimensionalityError>
pub fn chunks( &self, chunk_shape: &[NonZeroU64] ) -> Result<Chunks, IncompatibleDimensionalityError>
Returns the Chunks
with chunk_shape
in the array subset which can be iterated over.
All chunks overlapping the array subset are returned, and they all have the same shape chunk_shape
.
Thus, the subsets of the chunks may extend out over the subset.
§Errors
Returns an error if chunk_shape
does not match the array subset dimensionality.
sourcepub unsafe fn chunks_unchecked(&self, chunk_shape: &[NonZeroU64]) -> Chunks
pub unsafe fn chunks_unchecked(&self, chunk_shape: &[NonZeroU64]) -> Chunks
Returns the Chunks
with chunk_shape
in the array subset which can be iterated over.
All chunks overlapping the array subset are returned, and they all have the same shape chunk_shape
.
Thus, the subsets of the chunks may extend out over the subset.
§Safety
The length of chunk_shape
must match the array subset dimensionality.
sourcepub fn overlap(
&self,
subset_other: &Self
) -> Result<Self, IncompatibleDimensionalityError>
pub fn overlap( &self, subset_other: &Self ) -> Result<Self, IncompatibleDimensionalityError>
Return the overlapping subset between this array subset and subset_other
.
§Errors
Returns IncompatibleDimensionalityError
if the dimensionality of subset_other
does not match the dimensionality of this array subset.
sourcepub unsafe fn overlap_unchecked(&self, subset_other: &Self) -> Self
pub unsafe fn overlap_unchecked(&self, subset_other: &Self) -> Self
Return the overlapping subset between this array subset and subset_other
.
§Safety
Panics if the dimensionality of subset_other
does not match the dimensionality of this array subset.
sourcepub fn relative_to(
&self,
start: &[u64]
) -> Result<Self, IncompatibleDimensionalityError>
pub fn relative_to( &self, start: &[u64] ) -> Result<Self, IncompatibleDimensionalityError>
Return the subset relative to start
.
Creates an array subset starting at ArraySubset::start()
- start
.
§Errors
Returns IncompatibleDimensionalityError
if the length of start
does not match the dimensionality of this array subset.
sourcepub unsafe fn relative_to_unchecked(&self, start: &[u64]) -> Self
pub unsafe fn relative_to_unchecked(&self, start: &[u64]) -> Self
Return the subset relative to start
.
Creates an array subset starting at ArraySubset::start()
- start
.
§Safety
Panics if the length of start
does not match the dimensionality of this array subset.
Trait Implementations§
source§impl Clone for ArraySubset
impl Clone for ArraySubset
source§fn clone(&self) -> ArraySubset
fn clone(&self) -> ArraySubset
1.0.0 · source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source
. Read moresource§impl Debug for ArraySubset
impl Debug for ArraySubset
source§impl Default for ArraySubset
impl Default for ArraySubset
source§fn default() -> ArraySubset
fn default() -> ArraySubset
source§impl Display for ArraySubset
impl Display for ArraySubset
source§impl Hash for ArraySubset
impl Hash for ArraySubset
source§impl Ord for ArraySubset
impl Ord for ArraySubset
source§fn cmp(&self, other: &ArraySubset) -> Ordering
fn cmp(&self, other: &ArraySubset) -> Ordering
1.21.0 · source§fn max(self, other: Self) -> Selfwhere
Self: Sized,
fn max(self, other: Self) -> Selfwhere
Self: Sized,
source§impl PartialEq for ArraySubset
impl PartialEq for ArraySubset
source§fn eq(&self, other: &ArraySubset) -> bool
fn eq(&self, other: &ArraySubset) -> bool
This method tests for self
and other
values to be equal, and is used
by ==
.source§impl PartialOrd for ArraySubset
impl PartialOrd for ArraySubset
source§fn partial_cmp(&self, other: &ArraySubset) -> Option<Ordering>
fn partial_cmp(&self, other: &ArraySubset) -> Option<Ordering>
1.0.0 · source§fn le(&self, other: &Rhs) -> bool
fn le(&self, other: &Rhs) -> bool
This method tests less than or equal to (for self
and other
) and is used by the <=
operator. Read moreimpl Eq for ArraySubset
impl StructuralPartialEq for ArraySubset
Auto Trait Implementations§
impl Freeze for ArraySubset
impl RefUnwindSafe for ArraySubset
impl Send for ArraySubset
impl Sync for ArraySubset
impl Unpin for ArraySubset
impl UnwindSafe for ArraySubset
Blanket Implementations§
source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
source§impl<Q, K> Comparable<K> for Q
impl<Q, K> Comparable<K> for Q
source§impl<Q, K> Equivalent<K> for Q
impl<Q, K> Equivalent<K> for Q
source§impl<Q, K> Equivalent<K> for Q
impl<Q, K> Equivalent<K> for Q
source§fn equivalent(&self, key: &K) -> bool
fn equivalent(&self, key: &K) -> bool
Compare self to key
and return true
if they are equal.