pub struct ArrayBuilder {
pub shape: ArrayShape,
pub data_type: DataType,
pub chunk_grid: ChunkGrid,
pub chunk_key_encoding: ChunkKeyEncoding,
pub fill_value: FillValue,
pub array_to_array_codecs: Vec<NamedArrayToArrayCodec>,
pub array_to_bytes_codec: NamedArrayToBytesCodec,
pub bytes_to_bytes_codecs: Vec<NamedBytesToBytesCodec>,
pub storage_transformers: StorageTransformerChain,
pub attributes: Map<String, Value>,
pub dimension_names: Option<Vec<DimensionName>>,
pub additional_fields: AdditionalFieldsV3,
}
An Array builder.
The array builder is initialised from an array shape, data type, chunk grid, and fill value.
- The only codec enabled by default is bytes (with native endian encoding), so the output is uncompressed.
- The default chunk key encoding is default with the / chunk key separator.
- Attributes, storage transformers, and dimension names are empty.
- Codecs are configured to use multiple threads where possible.
Use the methods in the array builder to change the configuration away from these defaults, and then build the array at a path of some storage with ArrayBuilder::build.
Note that build does not modify the store; the array metadata has to be explicitly written with Array::store_metadata.
For example:
use std::sync::Arc;
use zarrs::array::{ArrayBuilder, DataType, FillValue, ZARR_NAN_F32};

// Any writable store will do; an in-memory store is assumed here for illustration.
let store = Arc::new(zarrs::storage::store::MemoryStore::new());
let mut array = ArrayBuilder::new(
vec![8, 8], // array shape
DataType::Float32,
vec![4, 4].try_into()?, // regular chunk shape (elements must be non-zero)
FillValue::from(ZARR_NAN_F32),
)
.bytes_to_bytes_codecs(vec![
#[cfg(feature = "gzip")]
Arc::new(zarrs::array::codec::GzipCodec::new(5)?),
])
.dimension_names(["y", "x"].into())
.build(store.clone(), "/group/array")?;
array.store_metadata()?; // write metadata to the store
// array.store_chunk(...)
// array.store_array_subset(...)
array.set_shape(vec![16, 16]); // revise the shape if needed
array.store_metadata()?; // update stored metadata
Fields
shape: ArrayShape
Array shape.
data_type: DataType
Data type.
chunk_grid: ChunkGrid
Chunk grid.
chunk_key_encoding: ChunkKeyEncoding
Chunk key encoding.
fill_value: FillValue
Fill value.
array_to_array_codecs: Vec<NamedArrayToArrayCodec>
Array to array codecs.
array_to_bytes_codec: NamedArrayToBytesCodec
Array to bytes codec.
bytes_to_bytes_codecs: Vec<NamedBytesToBytesCodec>
Bytes to bytes codecs.
storage_transformers: StorageTransformerChain
Storage transformer chain.
attributes: Map<String, Value>
Attributes.
dimension_names: Option<Vec<DimensionName>>
Dimension names.
additional_fields: AdditionalFieldsV3
Additional fields.
Implementations
impl ArrayBuilder
pub fn new(
    shape: ArrayShape,
    data_type: DataType,
    chunk_grid: ChunkGrid,
    fill_value: FillValue,
) -> Self
Create a new array builder for an array at some path (the path is supplied later to ArrayBuilder::build).
The length of the array shape must match the dimensionality of the intended array, but it can be all zeros on initialisation.
The shape of the Array can be updated as required.
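For instance, a minimal sketch (assuming store and the imports from the example above): the array can be created with an all-zero shape and grown later once its extent is known.

let mut array = ArrayBuilder::new(
    vec![0, 0], // the shape may be all zeros on initialisation
    DataType::Float32,
    vec![4, 4].try_into()?, // chunk shape elements must still be non-zero
    FillValue::from(ZARR_NAN_F32),
)
.build(store.clone(), "/group/array_growable")?;
array.set_shape(vec![100, 100]); // grow the array once its extent is known
array.store_metadata()?; // persist the updated shape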
Examples found in repository
153fn main() {
154 let store = std::sync::Arc::new(MemoryStore::default());
155 let array_path = "/array";
156 let array = ArrayBuilder::new(
157 vec![4, 1], // array shape
158 DataType::Extension(Arc::new(CustomDataTypeVariableSize)),
159 vec![3, 1].try_into().unwrap(), // regular chunk shape
160 FillValue::from(vec![]),
161 )
162 .array_to_array_codecs(vec![
163 #[cfg(feature = "transpose")]
164 Arc::new(zarrs::array::codec::TransposeCodec::new(
165 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
166 )),
167 ])
168 .bytes_to_bytes_codecs(vec![
169 #[cfg(feature = "gzip")]
170 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
171 #[cfg(feature = "crc32c")]
172 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
173 ])
174 // .storage_transformers(vec![].into())
175 .build(store, array_path)
176 .unwrap();
177 println!("{}", array.metadata().to_string_pretty());
178
179 let data = [
180 CustomDataTypeVariableSizeElement::from(Some(1.0)),
181 CustomDataTypeVariableSizeElement::from(None),
182 CustomDataTypeVariableSizeElement::from(Some(3.0)),
183 ];
184 array.store_chunk_elements(&[0, 0], &data).unwrap();
185
186 let data = array
187 .retrieve_array_subset_elements::<CustomDataTypeVariableSizeElement>(&array.subset_all())
188 .unwrap();
189
190 assert_eq!(data[0], CustomDataTypeVariableSizeElement::from(Some(1.0)));
191 assert_eq!(data[1], CustomDataTypeVariableSizeElement::from(None));
192 assert_eq!(data[2], CustomDataTypeVariableSizeElement::from(Some(3.0)));
193 assert_eq!(data[3], CustomDataTypeVariableSizeElement::from(None));
194
195 println!("{data:#?}");
196}
More examples
269fn main() {
270 let store = std::sync::Arc::new(MemoryStore::default());
271 let array_path = "/array";
272 let fill_value = CustomDataTypeFixedSizeElement { x: 1, y: 2.3 };
273 let array = ArrayBuilder::new(
274 vec![4, 1], // array shape
275 DataType::Extension(Arc::new(CustomDataTypeFixedSize)),
276 vec![2, 1].try_into().unwrap(), // regular chunk shape
277 FillValue::new(fill_value.to_ne_bytes().to_vec()),
278 )
279 .array_to_array_codecs(vec![
280 #[cfg(feature = "transpose")]
281 Arc::new(zarrs::array::codec::TransposeCodec::new(
282 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
283 )),
284 ])
285 .bytes_to_bytes_codecs(vec![
286 #[cfg(feature = "gzip")]
287 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
288 #[cfg(feature = "crc32c")]
289 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
290 ])
291 // .storage_transformers(vec![].into())
292 .build(store, array_path)
293 .unwrap();
294 println!("{}", array.metadata().to_string_pretty());
295
296 let data = [
297 CustomDataTypeFixedSizeElement { x: 3, y: 4.5 },
298 CustomDataTypeFixedSizeElement { x: 6, y: 7.8 },
299 ];
300 array.store_chunk_elements(&[0, 0], &data).unwrap();
301
302 let data = array
303 .retrieve_array_subset_elements::<CustomDataTypeFixedSizeElement>(&array.subset_all())
304 .unwrap();
305
306 assert_eq!(data[0], CustomDataTypeFixedSizeElement { x: 3, y: 4.5 });
307 assert_eq!(data[1], CustomDataTypeFixedSizeElement { x: 6, y: 7.8 });
308 assert_eq!(data[2], CustomDataTypeFixedSizeElement { x: 1, y: 2.3 });
309 assert_eq!(data[3], CustomDataTypeFixedSizeElement { x: 1, y: 2.3 });
310
311 println!("{data:#?}");
312}
205fn main() {
206 let store = std::sync::Arc::new(MemoryStore::default());
207 let array_path = "/array";
208 let fill_value = CustomDataTypeUInt12Element::try_from(15).unwrap();
209 let array = ArrayBuilder::new(
210 vec![4096, 1], // array shape
211 DataType::Extension(Arc::new(CustomDataTypeUInt12)),
212 vec![5, 1].try_into().unwrap(), // regular chunk shape
213 FillValue::new(fill_value.to_le_bytes().to_vec()),
214 )
215 .array_to_array_codecs(vec![
216 #[cfg(feature = "transpose")]
217 Arc::new(zarrs::array::codec::TransposeCodec::new(
218 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
219 )),
220 ])
221 .array_to_bytes_codec(Arc::new(zarrs::array::codec::PackBitsCodec::default()))
222 .bytes_to_bytes_codecs(vec![
223 #[cfg(feature = "gzip")]
224 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
225 #[cfg(feature = "crc32c")]
226 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
227 ])
228 // .storage_transformers(vec![].into())
229 .build(store, array_path)
230 .unwrap();
231 println!("{}", array.metadata().to_string_pretty());
232
233 let data: Vec<CustomDataTypeUInt12Element> = (0..4096)
234 .into_iter()
235 .map(|i| CustomDataTypeUInt12Element::try_from(i).unwrap())
236 .collect();
237
238 array
239 .store_array_subset_elements(&array.subset_all(), &data)
240 .unwrap();
241
242 let data = array
243 .retrieve_array_subset_elements::<CustomDataTypeUInt12Element>(&array.subset_all())
244 .unwrap();
245
246 for i in 0usize..4096 {
247 let element = CustomDataTypeUInt12Element::try_from(i as u64).unwrap();
248 assert_eq!(data[i], element);
249 let element_pd = array
250 .retrieve_array_subset_elements::<CustomDataTypeUInt12Element>(
251 &ArraySubset::new_with_ranges(&[(i as u64)..i as u64 + 1, 0..1]),
252 )
253 .unwrap()[0];
254 assert_eq!(element_pd, element);
255 }
256}
217fn main() {
218 let store = std::sync::Arc::new(MemoryStore::default());
219 let array_path = "/array";
220 let fill_value = CustomDataTypeFloat8e3m4Element::from(1.23);
221 let array = ArrayBuilder::new(
222 vec![6, 1], // array shape
223 DataType::Extension(Arc::new(CustomDataTypeFloat8e3m4)),
224 vec![5, 1].try_into().unwrap(), // regular chunk shape
225 FillValue::new(fill_value.to_ne_bytes().to_vec()),
226 )
227 .array_to_array_codecs(vec![
228 #[cfg(feature = "transpose")]
229 Arc::new(zarrs::array::codec::TransposeCodec::new(
230 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
231 )),
232 ])
233 .bytes_to_bytes_codecs(vec![
234 #[cfg(feature = "gzip")]
235 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
236 #[cfg(feature = "crc32c")]
237 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
238 ])
239 // .storage_transformers(vec![].into())
240 .build(store, array_path)
241 .unwrap();
242 println!("{}", array.metadata().to_string_pretty());
243
244 let data = [
245 CustomDataTypeFloat8e3m4Element::from(2.34),
246 CustomDataTypeFloat8e3m4Element::from(3.45),
247 CustomDataTypeFloat8e3m4Element::from(f32::INFINITY),
248 CustomDataTypeFloat8e3m4Element::from(f32::NEG_INFINITY),
249 CustomDataTypeFloat8e3m4Element::from(f32::NAN),
250 ];
251 array.store_chunk_elements(&[0, 0], &data).unwrap();
252
253 let data = array
254 .retrieve_array_subset_elements::<CustomDataTypeFloat8e3m4Element>(&array.subset_all())
255 .unwrap();
256
257 for f in &data {
258 println!(
259 "float8_e3m4: {:08b} f32: {}",
260 f.to_ne_bytes()[0],
261 f.as_f32()
262 );
263 }
264
265 assert_eq!(data[0], CustomDataTypeFloat8e3m4Element::from(2.34));
266 assert_eq!(data[1], CustomDataTypeFloat8e3m4Element::from(3.45));
267 assert_eq!(
268 data[2],
269 CustomDataTypeFloat8e3m4Element::from(f32::INFINITY)
270 );
271 assert_eq!(
272 data[3],
273 CustomDataTypeFloat8e3m4Element::from(f32::NEG_INFINITY)
274 );
275 assert_eq!(data[4], CustomDataTypeFloat8e3m4Element::from(f32::NAN));
276 assert_eq!(data[5], CustomDataTypeFloat8e3m4Element::from(1.23));
277}
203fn main() {
204 let store = std::sync::Arc::new(MemoryStore::default());
205 let array_path = "/array";
206 let fill_value = CustomDataTypeUInt4Element::try_from(15).unwrap();
207 let array = ArrayBuilder::new(
208 vec![6, 1], // array shape
209 DataType::Extension(Arc::new(CustomDataTypeUInt4)),
210 vec![5, 1].try_into().unwrap(), // regular chunk shape
211 FillValue::new(fill_value.to_ne_bytes().to_vec()),
212 )
213 .array_to_array_codecs(vec![
214 #[cfg(feature = "transpose")]
215 Arc::new(zarrs::array::codec::TransposeCodec::new(
216 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
217 )),
218 ])
219 .array_to_bytes_codec(Arc::new(zarrs::array::codec::PackBitsCodec::default()))
220 .bytes_to_bytes_codecs(vec![
221 #[cfg(feature = "gzip")]
222 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
223 #[cfg(feature = "crc32c")]
224 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
225 ])
226 // .storage_transformers(vec![].into())
227 .build(store, array_path)
228 .unwrap();
229 println!("{}", array.metadata().to_string_pretty());
230
231 let data = [
232 CustomDataTypeUInt4Element::try_from(1).unwrap(),
233 CustomDataTypeUInt4Element::try_from(2).unwrap(),
234 CustomDataTypeUInt4Element::try_from(3).unwrap(),
235 CustomDataTypeUInt4Element::try_from(4).unwrap(),
236 CustomDataTypeUInt4Element::try_from(5).unwrap(),
237 ];
238 array.store_chunk_elements(&[0, 0], &data).unwrap();
239
240 let data = array
241 .retrieve_array_subset_elements::<CustomDataTypeUInt4Element>(&array.subset_all())
242 .unwrap();
243
244 for f in &data {
245 println!("uint4: {:08b} u8: {}", f.as_u8(), f.as_u8());
246 }
247
248 assert_eq!(data[0], CustomDataTypeUInt4Element::try_from(1).unwrap());
249 assert_eq!(data[1], CustomDataTypeUInt4Element::try_from(2).unwrap());
250 assert_eq!(data[2], CustomDataTypeUInt4Element::try_from(3).unwrap());
251 assert_eq!(data[3], CustomDataTypeUInt4Element::try_from(4).unwrap());
252 assert_eq!(data[4], CustomDataTypeUInt4Element::try_from(5).unwrap());
253 assert_eq!(data[5], CustomDataTypeUInt4Element::try_from(15).unwrap());
254
255 let data = array
256 .retrieve_array_subset_elements::<CustomDataTypeUInt4Element>(
257 &ArraySubset::new_with_ranges(&[1..3, 0..1]),
258 )
259 .unwrap();
260 assert_eq!(data[0], CustomDataTypeUInt4Element::try_from(2).unwrap());
261 assert_eq!(data[1], CustomDataTypeUInt4Element::try_from(3).unwrap());
262}
10fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
11 use std::sync::Arc;
12 use zarrs::{
13 array::{DataType, FillValue},
14 array_subset::ArraySubset,
15 storage::store,
16 };
17
18 // Create a store
19 // let path = tempfile::TempDir::new()?;
20 // let mut store: ReadableWritableListableStorage =
21 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
22 // let mut store: ReadableWritableListableStorage = Arc::new(
23 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/array_write_read.zarr")?,
24 // );
25 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
26 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
27 if arg1 == "--usage-log" {
28 let log_writer = Arc::new(std::sync::Mutex::new(
29 // std::io::BufWriter::new(
30 std::io::stdout(),
31 // )
32 ));
33 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
34 chrono::Utc::now().format("[%T%.3f] ").to_string()
35 }));
36 }
37 }
38
39 // Create the root group
40 zarrs::group::GroupBuilder::new()
41 .build(store.clone(), "/")?
42 .store_metadata()?;
43
44 // Create a group with attributes
45 let group_path = "/group";
46 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
47 group
48 .attributes_mut()
49 .insert("foo".into(), serde_json::Value::String("bar".into()));
50 group.store_metadata()?;
51
52 println!(
53 "The group metadata is:\n{}\n",
54 group.metadata().to_string_pretty()
55 );
56
57 // Create an array
58 let array_path = "/group/array";
59 let array = zarrs::array::ArrayBuilder::new(
60 vec![4, 4], // array shape
61 DataType::String,
62 vec![2, 2].try_into()?, // regular chunk shape
63 FillValue::from("_"),
64 )
65 // .bytes_to_bytes_codecs(vec![]) // uncompressed
66 .dimension_names(["y", "x"].into())
67 // .storage_transformers(vec![].into())
68 .build(store.clone(), array_path)?;
69
70 // Write array metadata to store
71 array.store_metadata()?;
72
73 println!(
74 "The array metadata is:\n{}\n",
75 array.metadata().to_string_pretty()
76 );
77
78 // Write some chunks
79 array.store_chunk_ndarray(
80 &[0, 0],
81 ArrayD::<&str>::from_shape_vec(vec![2, 2], vec!["a", "bb", "ccc", "dddd"]).unwrap(),
82 )?;
83 array.store_chunk_ndarray(
84 &[0, 1],
85 ArrayD::<&str>::from_shape_vec(vec![2, 2], vec!["4444", "333", "22", "1"]).unwrap(),
86 )?;
87 let subset_all = array.subset_all();
88 let data_all = array.retrieve_array_subset_ndarray::<String>(&subset_all)?;
89 println!("store_chunk [0, 0] and [0, 1]:\n{data_all}\n");
90
91 // Write a subset spanning multiple chunks, including updating chunks already written
92 let ndarray_subset: Array2<&str> = array![["!", "@@"], ["###", "$$$$"]];
93 array.store_array_subset_ndarray(
94 ArraySubset::new_with_ranges(&[1..3, 1..3]).start(),
95 ndarray_subset,
96 )?;
97 let data_all = array.retrieve_array_subset_ndarray::<String>(&subset_all)?;
98 println!("store_array_subset [1..3, 1..3]:\nndarray::ArrayD<String>\n{data_all}");
99
100 // Retrieve bytes directly, convert into a single string allocation, create a &str ndarray
101 // TODO: Add a convenience function for this?
102 let data_all = array.retrieve_array_subset(&subset_all)?;
103 let (bytes, offsets) = data_all.into_variable()?;
104 let string = String::from_utf8(bytes.into_owned())?;
105 let elements = offsets
106 .iter()
107 .tuple_windows()
108 .map(|(&curr, &next)| &string[curr..next])
109 .collect::<Vec<&str>>();
110 let ndarray = ArrayD::<&str>::from_shape_vec(subset_all.shape_usize(), elements)?;
111 println!("ndarray::ArrayD<&str>:\n{ndarray}");
112
113 Ok(())
114}
pub fn from_array<T: ?Sized>(array: &Array<T>) -> Self
Create a new builder copying the configuration of an existing array.
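A hedged sketch (array, store, and imports as in the examples above; the new path is illustrative): copy the configuration of an existing array, override a single setting, and build a second array.

let derived = ArrayBuilder::from_array(&array)
    .shape(vec![32, 32]) // keep everything else, change only the shape
    .build(store.clone(), "/group/array_derived")?;
derived.store_metadata()?;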
pub fn shape(&mut self, shape: ArrayShape) -> &mut Self
Set the shape.
pub fn chunk_grid(&mut self, chunk_grid: ChunkGrid) -> &mut Self
Set the chunk grid.
pub fn fill_value(&mut self, fill_value: FillValue) -> &mut Self
Set the fill value.
pub fn chunk_key_encoding(
    &mut self,
    chunk_key_encoding: ChunkKeyEncoding,
) -> &mut Self
Set the chunk key encoding.
If left unmodified, the array will use the default chunk key encoding with the / chunk key separator.
pub fn chunk_key_encoding_default_separator(
    &mut self,
    separator: ChunkKeySeparator,
) -> &mut Self
Set the chunk key encoding to default with separator.
If left unmodified, the array will use the default chunk key encoding with the / chunk key separator.
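A minimal sketch (store and imports as in the example above; ChunkKeySeparator is assumed to be re-exported under zarrs::array::chunk_key_encoding): keep the default encoding but use the . separator, so chunk keys are written as e.g. "c.0.0" rather than "c/0/0".

use zarrs::array::chunk_key_encoding::ChunkKeySeparator;

let array = ArrayBuilder::new(
    vec![8, 8],
    DataType::Float32,
    vec![4, 4].try_into()?,
    FillValue::from(ZARR_NAN_F32),
)
.chunk_key_encoding_default_separator(ChunkKeySeparator::Dot)
.build(store.clone(), "/group/array_dot_keys")?;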
pub fn array_to_array_codecs(
    &mut self,
    array_to_array_codecs: Vec<Arc<dyn ArrayToArrayCodecTraits>>,
) -> &mut Self
Set the array to array codecs.
If left unmodified, the array will have no array to array codecs.
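A condensed sketch mirroring the repository examples shown earlier on this page (store and imports as in the example at the top; the transpose order is an illustrative choice):

let array = ArrayBuilder::new(
    vec![8, 8],
    DataType::Float32,
    vec![4, 4].try_into()?,
    FillValue::from(ZARR_NAN_F32),
)
.array_to_array_codecs(vec![
    #[cfg(feature = "transpose")]
    Arc::new(zarrs::array::codec::TransposeCodec::new(
        zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
    )),
])
.build(store.clone(), "/group/array_transposed")?;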
pub fn array_to_array_codecs_named(
    &mut self,
    array_to_array_codecs: Vec<impl Into<NamedArrayToArrayCodec>>,
) -> &mut Self
Set the array to array codecs with non-default names.
If left unmodified, the array will have no array to array codecs.
pub fn array_to_bytes_codec(
    &mut self,
    array_to_bytes_codec: Arc<dyn ArrayToBytesCodecTraits>,
) -> &mut Self
Set the array to bytes codec.
If left unmodified, the array will default to using the bytes codec with native endian encoding.
Examples found in repository
11fn sharded_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
12 use zarrs::{
13 array::{
14 codec::{self, array_to_bytes::sharding::ShardingCodecBuilder},
15 DataType, FillValue,
16 },
17 array_subset::ArraySubset,
18 node::Node,
19 storage::store,
20 };
21
22 use rayon::prelude::{IntoParallelIterator, ParallelIterator};
23 use std::sync::Arc;
24
25 // Create a store
26 // let path = tempfile::TempDir::new()?;
27 // let mut store: ReadableWritableListableStorage =
28 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
29 // let mut store: ReadableWritableListableStorage = Arc::new(
30 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/sharded_array_write_read.zarr")?,
31 // );
32 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
33 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
34 if arg1 == "--usage-log" {
35 let log_writer = Arc::new(std::sync::Mutex::new(
36 // std::io::BufWriter::new(
37 std::io::stdout(),
38 // )
39 ));
40 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
41 chrono::Utc::now().format("[%T%.3f] ").to_string()
42 }));
43 }
44 }
45
46 // Create the root group
47 zarrs::group::GroupBuilder::new()
48 .build(store.clone(), "/")?
49 .store_metadata()?;
50
51 // Create a group with attributes
52 let group_path = "/group";
53 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
54 group
55 .attributes_mut()
56 .insert("foo".into(), serde_json::Value::String("bar".into()));
57 group.store_metadata()?;
58
59 // Create an array
60 let array_path = "/group/array";
61 let shard_shape = vec![4, 8];
62 let inner_chunk_shape = vec![4, 4];
63 let mut sharding_codec_builder =
64 ShardingCodecBuilder::new(inner_chunk_shape.as_slice().try_into()?);
65 sharding_codec_builder.bytes_to_bytes_codecs(vec![
66 #[cfg(feature = "gzip")]
67 Arc::new(codec::GzipCodec::new(5)?),
68 ]);
69 let array = zarrs::array::ArrayBuilder::new(
70 vec![8, 8], // array shape
71 DataType::UInt16,
72 shard_shape.try_into()?,
73 FillValue::from(0u16),
74 )
75 .array_to_bytes_codec(Arc::new(sharding_codec_builder.build()))
76 .dimension_names(["y", "x"].into())
77 // .storage_transformers(vec![].into())
78 .build(store.clone(), array_path)?;
79
80 // Write array metadata to store
81 array.store_metadata()?;
82
83 // The array metadata is
84 println!(
85 "The array metadata is:\n{}\n",
86 array.metadata().to_string_pretty()
87 );
88
89 // Use default codec options (concurrency etc)
90 let options = CodecOptions::default();
91
92 // Write some shards (in parallel)
93 (0..2).into_par_iter().try_for_each(|s| {
94 let chunk_grid = array.chunk_grid();
95 let chunk_indices = vec![s, 0];
96 if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
97 let chunk_array = ndarray::ArrayD::<u16>::from_shape_fn(
98 chunk_shape
99 .iter()
100 .map(|u| u.get() as usize)
101 .collect::<Vec<_>>(),
102 |ij| {
103 (s * chunk_shape[0].get() * chunk_shape[1].get()
104 + ij[0] as u64 * chunk_shape[1].get()
105 + ij[1] as u64) as u16
106 },
107 );
108 array.store_chunk_ndarray(&chunk_indices, chunk_array)
109 } else {
110 Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
111 chunk_indices.to_vec(),
112 ))
113 }
114 })?;
115
116 // Read the whole array
117 let data_all = array.retrieve_array_subset_ndarray::<u16>(&array.subset_all())?;
118 println!("The whole array is:\n{data_all}\n");
119
120 // Read a shard back from the store
121 let shard_indices = vec![1, 0];
122 let data_shard = array.retrieve_chunk_ndarray::<u16>(&shard_indices)?;
123 println!("Shard [1,0] is:\n{data_shard}\n");
124
125 // Read an inner chunk from the store
126 let subset_chunk_1_0 = ArraySubset::new_with_ranges(&[4..8, 0..4]);
127 let data_chunk = array.retrieve_array_subset_ndarray::<u16>(&subset_chunk_1_0)?;
128 println!("Chunk [1,0] is:\n{data_chunk}\n");
129
130 // Read the central 4x2 subset of the array
131 let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
132 let data_4x2 = array.retrieve_array_subset_ndarray::<u16>(&subset_4x2)?;
133 println!("The middle 4x2 subset is:\n{data_4x2}\n");
134
135 // Decode inner chunks
136 // In some cases, it might be preferable to decode inner chunks in a shard directly.
137 // If using the partial decoder, then the shard index will only be read once from the store.
138 let partial_decoder = array.partial_decoder(&[0, 0])?;
139 let inner_chunks_to_decode = vec![
140 ArraySubset::new_with_start_shape(vec![0, 0], inner_chunk_shape.clone())?,
141 ArraySubset::new_with_start_shape(vec![0, 4], inner_chunk_shape.clone())?,
142 ];
143 let decoded_inner_chunks_bytes =
144 partial_decoder.partial_decode(&inner_chunks_to_decode, &options)?;
145 println!("Decoded inner chunks:");
146 for (inner_chunk_subset, decoded_inner_chunk) in
147 std::iter::zip(inner_chunks_to_decode, decoded_inner_chunks_bytes)
148 {
149 let ndarray = bytes_to_ndarray::<u16>(
150 &inner_chunk_shape,
151 decoded_inner_chunk.into_fixed()?.into_owned(),
152 )?;
153 println!("{inner_chunk_subset}\n{ndarray}\n");
154 }
155
156 // Show the hierarchy
157 let node = Node::open(&store, "/").unwrap();
158 let tree = node.hierarchy_tree();
159 println!("The Zarr hierarchy tree is:\n{}", tree);
160
161 println!(
162 "The keys in the store are:\n[{}]",
163 store.list().unwrap_or_default().iter().format(", ")
164 );
165
166 Ok(())
167}
pub fn array_to_bytes_codec_named(
    &mut self,
    array_to_bytes_codec: impl Into<NamedArrayToBytesCodec>,
) -> &mut Self
Set the array to bytes codec with a non-default name.
If left unmodified, the array will default to using the bytes codec with native endian encoding.
pub fn bytes_to_bytes_codecs(
    &mut self,
    bytes_to_bytes_codecs: Vec<Arc<dyn BytesToBytesCodecTraits>>,
) -> &mut Self
Set the bytes to bytes codecs.
If left unmodified, the array will have no bytes to bytes codecs.
Examples found in repository
8fn rectangular_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
9 use rayon::prelude::{IntoParallelIterator, ParallelIterator};
10 use zarrs::array::ChunkGrid;
11 use zarrs::{
12 array::{chunk_grid::RectangularChunkGrid, codec, FillValue},
13 node::Node,
14 };
15 use zarrs::{
16 array::{DataType, ZARR_NAN_F32},
17 array_subset::ArraySubset,
18 storage::store,
19 };
20
21 // Create a store
22 // let path = tempfile::TempDir::new()?;
23 // let mut store: ReadableWritableListableStorage =
24 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
25 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
26 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
27 if arg1 == "--usage-log" {
28 let log_writer = Arc::new(std::sync::Mutex::new(
29 // std::io::BufWriter::new(
30 std::io::stdout(),
31 // )
32 ));
33 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
34 chrono::Utc::now().format("[%T%.3f] ").to_string()
35 }));
36 }
37 }
38
39 // Create the root group
40 zarrs::group::GroupBuilder::new()
41 .build(store.clone(), "/")?
42 .store_metadata()?;
43
44 // Create a group with attributes
45 let group_path = "/group";
46 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
47 group
48 .attributes_mut()
49 .insert("foo".into(), serde_json::Value::String("bar".into()));
50 group.store_metadata()?;
51
52 println!(
53 "The group metadata is:\n{}\n",
54 group.metadata().to_string_pretty()
55 );
56
57 // Create an array
58 let array_path = "/group/array";
59 let array = zarrs::array::ArrayBuilder::new(
60 vec![8, 8], // array shape
61 DataType::Float32,
62 ChunkGrid::new(RectangularChunkGrid::new(&[
63 [1, 2, 3, 2].try_into()?,
64 4.try_into()?,
65 ])),
66 FillValue::from(ZARR_NAN_F32),
67 )
68 .bytes_to_bytes_codecs(vec![
69 #[cfg(feature = "gzip")]
70 Arc::new(codec::GzipCodec::new(5)?),
71 ])
72 .dimension_names(["y", "x"].into())
73 // .storage_transformers(vec![].into())
74 .build(store.clone(), array_path)?;
75
76 // Write array metadata to store
77 array.store_metadata()?;
78
79 // Write some chunks (in parallel)
80 (0..4).into_par_iter().try_for_each(|i| {
81 let chunk_grid = array.chunk_grid();
82 let chunk_indices = vec![i, 0];
83 if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
84 let chunk_array = ndarray::ArrayD::<f32>::from_elem(
85 chunk_shape
86 .iter()
87 .map(|u| u.get() as usize)
88 .collect::<Vec<_>>(),
89 i as f32,
90 );
91 array.store_chunk_ndarray(&chunk_indices, chunk_array)
92 } else {
93 Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
94 chunk_indices.to_vec(),
95 ))
96 }
97 })?;
98
99 println!(
100 "The array metadata is:\n{}\n",
101 array.metadata().to_string_pretty()
102 );
103
104 // Write a subset spanning multiple chunks, including updating chunks already written
105 array.store_array_subset_ndarray(
106 &[3, 3], // start
107 ndarray::ArrayD::<f32>::from_shape_vec(
108 vec![3, 3],
109 vec![0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
110 )?,
111 )?;
112
113 // Store elements directly, in this case set the 7th column to 123.0
114 array.store_array_subset_elements::<f32>(
115 &ArraySubset::new_with_ranges(&[0..8, 6..7]),
116 &[123.0; 8],
117 )?;
118
119 // Store elements directly in a chunk, in this case set the last row of the bottom right chunk
120 array.store_chunk_subset_elements::<f32>(
121 // chunk indices
122 &[3, 1],
123 // subset within chunk
124 &ArraySubset::new_with_ranges(&[1..2, 0..4]),
125 &[-4.0; 4],
126 )?;
127
128 // Read the whole array
129 let data_all = array.retrieve_array_subset_ndarray::<f32>(&array.subset_all())?;
130 println!("The whole array is:\n{data_all}\n");
131
132 // Read a chunk back from the store
133 let chunk_indices = vec![1, 0];
134 let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
135 println!("Chunk [1,0] is:\n{data_chunk}\n");
136
137 // Read the central 4x2 subset of the array
138 let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
139 let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
140 println!("The middle 4x2 subset is:\n{data_4x2}\n");
141
142 // Show the hierarchy
143 let node = Node::open(&store, "/").unwrap();
144 let tree = node.hierarchy_tree();
145 println!("The Zarr hierarchy tree is:\n{tree}");
146
147 Ok(())
148}
pub fn bytes_to_bytes_codecs_named(
    &mut self,
    bytes_to_bytes_codecs: Vec<impl Into<NamedBytesToBytesCodec>>,
) -> &mut Self
Set the bytes to bytes codecs with non-default names.
If left unmodified, the array will have no bytes to bytes codecs.
pub fn attributes(&mut self, attributes: Map<String, Value>) -> &mut Self
Set the user defined attributes.
If left unmodified, the user defined attributes of the array will be empty.
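A minimal sketch (store and imports as in the example at the top of this page; the attribute names are illustrative): user-defined attributes are an arbitrary JSON object stored alongside the array metadata.

let mut attributes = serde_json::Map::new();
attributes.insert("units".to_string(), serde_json::Value::from("metres"));
attributes.insert("scale_factor".to_string(), serde_json::Value::from(0.5));

let array = ArrayBuilder::new(
    vec![8, 8],
    DataType::Float32,
    vec![4, 4].try_into()?,
    FillValue::from(ZARR_NAN_F32),
)
.attributes(attributes)
.build(store.clone(), "/group/array_with_attrs")?;
array.store_metadata()?; // the attributes are serialised as part of the array metadata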
pub fn additional_fields(
    &mut self,
    additional_fields: AdditionalFieldsV3,
) -> &mut Self
Set the additional fields.
Set additional fields not defined in the Zarr specification.
Use this cautiously. In general, store user defined attributes using ArrayBuilder::attributes.
zarrs and other implementations are expected to error when opening an array with unsupported additional fields, unless they are a JSON object containing "must_understand": false.
pub fn dimension_names<I, D>(&mut self, dimension_names: Option<I>) -> &mut Self
where
    I: IntoIterator<Item = D>,
    D: IntoDimensionName,
Set the dimension names.
If left unmodified, all dimension names are “unnamed”.
71 ])
72 .dimension_names(["y", "x"].into())
73 // .storage_transformers(vec![].into())
74 .build(store.clone(), array_path)?;
75
76 // Write array metadata to store
77 array.store_metadata()?;
78
79 // Write some chunks (in parallel)
80 (0..4).into_par_iter().try_for_each(|i| {
81 let chunk_grid = array.chunk_grid();
82 let chunk_indices = vec![i, 0];
83 if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
84 let chunk_array = ndarray::ArrayD::<f32>::from_elem(
85 chunk_shape
86 .iter()
87 .map(|u| u.get() as usize)
88 .collect::<Vec<_>>(),
89 i as f32,
90 );
91 array.store_chunk_ndarray(&chunk_indices, chunk_array)
92 } else {
93 Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
94 chunk_indices.to_vec(),
95 ))
96 }
97 })?;
98
99 println!(
100 "The array metadata is:\n{}\n",
101 array.metadata().to_string_pretty()
102 );
103
104 // Write a subset spanning multiple chunks, including updating chunks already written
105 array.store_array_subset_ndarray(
106 &[3, 3], // start
107 ndarray::ArrayD::<f32>::from_shape_vec(
108 vec![3, 3],
109 vec![0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
110 )?,
111 )?;
112
113 // Store elements directly, in this case set the 7th column to 123.0
114 array.store_array_subset_elements::<f32>(
115 &ArraySubset::new_with_ranges(&[0..8, 6..7]),
116 &[123.0; 8],
117 )?;
118
119 // Store elements directly in a chunk, in this case set the last row of the bottom right chunk
120 array.store_chunk_subset_elements::<f32>(
121 // chunk indices
122 &[3, 1],
123 // subset within chunk
124 &ArraySubset::new_with_ranges(&[1..2, 0..4]),
125 &[-4.0; 4],
126 )?;
127
128 // Read the whole array
129 let data_all = array.retrieve_array_subset_ndarray::<f32>(&array.subset_all())?;
130 println!("The whole array is:\n{data_all}\n");
131
132 // Read a chunk back from the store
133 let chunk_indices = vec![1, 0];
134 let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
135 println!("Chunk [1,0] is:\n{data_chunk}\n");
136
137 // Read the central 4x2 subset of the array
138 let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
139 let data_4x2 = array.retrieve_array_subset_ndarray::<f32>(&subset_4x2)?;
140 println!("The middle 4x2 subset is:\n{data_4x2}\n");
141
142 // Show the hierarchy
143 let node = Node::open(&store, "/").unwrap();
144 let tree = node.hierarchy_tree();
145 println!("The Zarr hierarchy tree is:\n{tree}");
146
147 Ok(())
148}
8fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
9 use std::sync::Arc;
10 use zarrs::{
11 array::{DataType, FillValue, ZARR_NAN_F32},
12 array_subset::ArraySubset,
13 node::Node,
14 storage::store,
15 };
16
17 // Create a store
18 // let path = tempfile::TempDir::new()?;
19 // let mut store: ReadableWritableListableStorage =
20 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
21 // let mut store: ReadableWritableListableStorage = Arc::new(
22 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/array_write_read.zarr")?,
23 // );
24 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
25 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
26 if arg1 == "--usage-log" {
27 let log_writer = Arc::new(std::sync::Mutex::new(
28 // std::io::BufWriter::new(
29 std::io::stdout(),
30 // )
31 ));
32 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
33 chrono::Utc::now().format("[%T%.3f] ").to_string()
34 }));
35 }
36 }
37
38 // Create the root group
39 zarrs::group::GroupBuilder::new()
40 .build(store.clone(), "/")?
41 .store_metadata()?;
42
43 // Create a group with attributes
44 let group_path = "/group";
45 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
46 group
47 .attributes_mut()
48 .insert("foo".into(), serde_json::Value::String("bar".into()));
49 group.store_metadata()?;
50
51 println!(
52 "The group metadata is:\n{}\n",
53 group.metadata().to_string_pretty()
54 );
55
56 // Create an array
57 let array_path = "/group/array";
58 let array = zarrs::array::ArrayBuilder::new(
59 vec![8, 8], // array shape
60 DataType::Float32,
61 vec![4, 4].try_into()?, // regular chunk shape
62 FillValue::from(ZARR_NAN_F32),
63 )
64 // .bytes_to_bytes_codecs(vec![]) // uncompressed
65 .dimension_names(["y", "x"].into())
66 // .storage_transformers(vec![].into())
67 .build(store.clone(), array_path)?;
68
69 // Write array metadata to store
70 array.store_metadata()?;
71
72 println!(
73 "The array metadata is:\n{}\n",
74 array.metadata().to_string_pretty()
75 );
76
77 // Write some chunks
78 (0..2).into_par_iter().try_for_each(|i| {
79 let chunk_indices: Vec<u64> = vec![0, i];
80 let chunk_subset = array
81 .chunk_grid()
82 .subset(&chunk_indices, array.shape())?
83 .ok_or_else(|| {
84 zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
85 })?;
86 array.store_chunk_elements(
87 &chunk_indices,
88 &vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
89 )
90 })?;
91
92 let subset_all = array.subset_all();
93 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
94 println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
95
96 // Store multiple chunks
97 array.store_chunks_elements::<f32>(
98 &ArraySubset::new_with_ranges(&[1..2, 0..2]),
99 &[
100 //
101 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
102 //
103 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
104 ],
105 )?;
106 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
107 println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
108
109 // Write a subset spanning multiple chunks, including updating chunks already written
110 array.store_array_subset_elements::<f32>(
111 &ArraySubset::new_with_ranges(&[3..6, 3..6]),
112 &[-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
113 )?;
114 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
115 println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
116
117 // Store array subset
118 array.store_array_subset_elements::<f32>(
119 &ArraySubset::new_with_ranges(&[0..8, 6..7]),
120 &[-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
121 )?;
122 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
123 println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
124
125 // Store chunk subset
126 array.store_chunk_subset_elements::<f32>(
127 // chunk indices
128 &[1, 1],
129 // subset within chunk
130 &ArraySubset::new_with_ranges(&[3..4, 0..4]),
131 &[-7.4, -7.5, -7.6, -7.7],
132 )?;
133 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
134 println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
135
136 // Erase a chunk
137 array.erase_chunk(&[0, 0])?;
138 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
139 println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
140
141 // Read a chunk
142 let chunk_indices = vec![0, 1];
143 let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
144 println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
145
146 // Read chunks
147 let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
148 let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
149 println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
150
151 // Retrieve an array subset
152 let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
153 let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
154 println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
155
156 // Show the hierarchy
157 let node = Node::open(&store, "/").unwrap();
158 let tree = node.hierarchy_tree();
159 println!("hierarchy_tree:\n{}", tree);
160
161 Ok(())
162}
11fn sharded_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
12 use zarrs::{
13 array::{
14 codec::{self, array_to_bytes::sharding::ShardingCodecBuilder},
15 DataType, FillValue,
16 },
17 array_subset::ArraySubset,
18 node::Node,
19 storage::store,
20 };
21
22 use rayon::prelude::{IntoParallelIterator, ParallelIterator};
23 use std::sync::Arc;
24
25 // Create a store
26 // let path = tempfile::TempDir::new()?;
27 // let mut store: ReadableWritableListableStorage =
28 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
29 // let mut store: ReadableWritableListableStorage = Arc::new(
30 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/sharded_array_write_read.zarr")?,
31 // );
32 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
33 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
34 if arg1 == "--usage-log" {
35 let log_writer = Arc::new(std::sync::Mutex::new(
36 // std::io::BufWriter::new(
37 std::io::stdout(),
38 // )
39 ));
40 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
41 chrono::Utc::now().format("[%T%.3f] ").to_string()
42 }));
43 }
44 }
45
46 // Create the root group
47 zarrs::group::GroupBuilder::new()
48 .build(store.clone(), "/")?
49 .store_metadata()?;
50
51 // Create a group with attributes
52 let group_path = "/group";
53 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
54 group
55 .attributes_mut()
56 .insert("foo".into(), serde_json::Value::String("bar".into()));
57 group.store_metadata()?;
58
59 // Create an array
60 let array_path = "/group/array";
61 let shard_shape = vec![4, 8];
62 let inner_chunk_shape = vec![4, 4];
63 let mut sharding_codec_builder =
64 ShardingCodecBuilder::new(inner_chunk_shape.as_slice().try_into()?);
65 sharding_codec_builder.bytes_to_bytes_codecs(vec![
66 #[cfg(feature = "gzip")]
67 Arc::new(codec::GzipCodec::new(5)?),
68 ]);
69 let array = zarrs::array::ArrayBuilder::new(
70 vec![8, 8], // array shape
71 DataType::UInt16,
72 shard_shape.try_into()?,
73 FillValue::from(0u16),
74 )
75 .array_to_bytes_codec(Arc::new(sharding_codec_builder.build()))
76 .dimension_names(["y", "x"].into())
77 // .storage_transformers(vec![].into())
78 .build(store.clone(), array_path)?;
79
80 // Write array metadata to store
81 array.store_metadata()?;
82
83 // The array metadata is
84 println!(
85 "The array metadata is:\n{}\n",
86 array.metadata().to_string_pretty()
87 );
88
89 // Use default codec options (concurrency etc)
90 let options = CodecOptions::default();
91
92 // Write some shards (in parallel)
93 (0..2).into_par_iter().try_for_each(|s| {
94 let chunk_grid = array.chunk_grid();
95 let chunk_indices = vec![s, 0];
96 if let Some(chunk_shape) = chunk_grid.chunk_shape(&chunk_indices, array.shape())? {
97 let chunk_array = ndarray::ArrayD::<u16>::from_shape_fn(
98 chunk_shape
99 .iter()
100 .map(|u| u.get() as usize)
101 .collect::<Vec<_>>(),
102 |ij| {
103 (s * chunk_shape[0].get() * chunk_shape[1].get()
104 + ij[0] as u64 * chunk_shape[1].get()
105 + ij[1] as u64) as u16
106 },
107 );
108 array.store_chunk_ndarray(&chunk_indices, chunk_array)
109 } else {
110 Err(zarrs::array::ArrayError::InvalidChunkGridIndicesError(
111 chunk_indices.to_vec(),
112 ))
113 }
114 })?;
115
116 // Read the whole array
117 let data_all = array.retrieve_array_subset_ndarray::<u16>(&array.subset_all())?;
118 println!("The whole array is:\n{data_all}\n");
119
120 // Read a shard back from the store
121 let shard_indices = vec![1, 0];
122 let data_shard = array.retrieve_chunk_ndarray::<u16>(&shard_indices)?;
123 println!("Shard [1,0] is:\n{data_shard}\n");
124
125 // Read an inner chunk from the store
126 let subset_chunk_1_0 = ArraySubset::new_with_ranges(&[4..8, 0..4]);
127 let data_chunk = array.retrieve_array_subset_ndarray::<u16>(&subset_chunk_1_0)?;
128 println!("Chunk [1,0] is:\n{data_chunk}\n");
129
130 // Read the central 4x2 subset of the array
131 let subset_4x2 = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
132 let data_4x2 = array.retrieve_array_subset_ndarray::<u16>(&subset_4x2)?;
133 println!("The middle 4x2 subset is:\n{data_4x2}\n");
134
135 // Decode inner chunks
136 // In some cases, it might be preferable to decode inner chunks in a shard directly.
137 // If using the partial decoder, then the shard index will only be read once from the store.
138 let partial_decoder = array.partial_decoder(&[0, 0])?;
139 let inner_chunks_to_decode = vec![
140 ArraySubset::new_with_start_shape(vec![0, 0], inner_chunk_shape.clone())?,
141 ArraySubset::new_with_start_shape(vec![0, 4], inner_chunk_shape.clone())?,
142 ];
143 let decoded_inner_chunks_bytes =
144 partial_decoder.partial_decode(&inner_chunks_to_decode, &options)?;
145 println!("Decoded inner chunks:");
146 for (inner_chunk_subset, decoded_inner_chunk) in
147 std::iter::zip(inner_chunks_to_decode, decoded_inner_chunks_bytes)
148 {
149 let ndarray = bytes_to_ndarray::<u16>(
150 &inner_chunk_shape,
151 decoded_inner_chunk.into_fixed()?.into_owned(),
152 )?;
153 println!("{inner_chunk_subset}\n{ndarray}\n");
154 }
155
156 // Show the hierarchy
157 let node = Node::open(&store, "/").unwrap();
158 let tree = node.hierarchy_tree();
159 println!("The Zarr hierarchy tree is:\n{}", tree);
160
161 println!(
162 "The keys in the store are:\n[{}]",
163 store.list().unwrap_or_default().iter().format(", ")
164 );
165
166 Ok(())
167}
9fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
10 use std::sync::Arc;
11 use zarrs::{
12 array::{DataType, FillValue, ZARR_NAN_F32},
13 array_subset::ArraySubset,
14 node::Node,
15 storage::store,
16 };
17
18 // Create a store
19 // let path = tempfile::TempDir::new()?;
20 // let mut store: ReadableWritableListableStorage =
21 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
22 // let mut store: ReadableWritableListableStorage = Arc::new(
23 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/array_write_read.zarr")?,
24 // );
25 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
26 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
27 if arg1 == "--usage-log" {
28 let log_writer = Arc::new(std::sync::Mutex::new(
29 // std::io::BufWriter::new(
30 std::io::stdout(),
31 // )
32 ));
33 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
34 chrono::Utc::now().format("[%T%.3f] ").to_string()
35 }));
36 }
37 }
38
39 // Create the root group
40 zarrs::group::GroupBuilder::new()
41 .build(store.clone(), "/")?
42 .store_metadata()?;
43
44 // Create a group with attributes
45 let group_path = "/group";
46 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
47 group
48 .attributes_mut()
49 .insert("foo".into(), serde_json::Value::String("bar".into()));
50 group.store_metadata()?;
51
52 println!(
53 "The group metadata is:\n{}\n",
54 group.metadata().to_string_pretty()
55 );
56
57 // Create an array
58 let array_path = "/group/array";
59 let array = zarrs::array::ArrayBuilder::new(
60 vec![8, 8], // array shape
61 DataType::Float32,
62 vec![4, 4].try_into()?, // regular chunk shape
63 FillValue::from(ZARR_NAN_F32),
64 )
65 // .bytes_to_bytes_codecs(vec![]) // uncompressed
66 .dimension_names(["y", "x"].into())
67 // .storage_transformers(vec![].into())
68 .build(store.clone(), array_path)?;
69
70 // Write array metadata to store
71 array.store_metadata()?;
72
73 println!(
74 "The array metadata is:\n{}\n",
75 array.metadata().to_string_pretty()
76 );
77
78 // Write some chunks
79 (0..2).into_par_iter().try_for_each(|i| {
80 let chunk_indices: Vec<u64> = vec![0, i];
81 let chunk_subset = array
82 .chunk_grid()
83 .subset(&chunk_indices, array.shape())?
84 .ok_or_else(|| {
85 zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
86 })?;
87 array.store_chunk_ndarray(
88 &chunk_indices,
89 ArrayD::<f32>::from_shape_vec(
90 chunk_subset.shape_usize(),
91 vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
92 )
93 .unwrap(),
94 )
95 })?;
96
97 let subset_all = array.subset_all();
98 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
99 println!("store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
100
101 // Store multiple chunks
102 let ndarray_chunks: Array2<f32> = array![
103 [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
104 [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
105 [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
106 [1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,],
107 ];
108 array.store_chunks_ndarray(&ArraySubset::new_with_ranges(&[1..2, 0..2]), ndarray_chunks)?;
109 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
110 println!("store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
111
112 // Write a subset spanning multiple chunks, including updating chunks already written
113 let ndarray_subset: Array2<f32> =
114 array![[-3.3, -3.4, -3.5,], [-4.3, -4.4, -4.5,], [-5.3, -5.4, -5.5],];
115 array.store_array_subset_ndarray(
116 ArraySubset::new_with_ranges(&[3..6, 3..6]).start(),
117 ndarray_subset,
118 )?;
119 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
120 println!("store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
121
122 // Store array subset
123 let ndarray_subset: Array2<f32> = array![
124 [-0.6],
125 [-1.6],
126 [-2.6],
127 [-3.6],
128 [-4.6],
129 [-5.6],
130 [-6.6],
131 [-7.6],
132 ];
133 array.store_array_subset_ndarray(
134 ArraySubset::new_with_ranges(&[0..8, 6..7]).start(),
135 ndarray_subset,
136 )?;
137 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
138 println!("store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
139
140 // Store chunk subset
141 let ndarray_chunk_subset: Array2<f32> = array![[-7.4, -7.5, -7.6, -7.7],];
142 array.store_chunk_subset_ndarray(
143 // chunk indices
144 &[1, 1],
145 // subset within chunk
146 ArraySubset::new_with_ranges(&[3..4, 0..4]).start(),
147 ndarray_chunk_subset,
148 )?;
149 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
150 println!("store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
151
152 // Erase a chunk
153 array.erase_chunk(&[0, 0])?;
154 let data_all = array.retrieve_array_subset_ndarray::<f32>(&subset_all)?;
155 println!("erase_chunk [0, 0]:\n{data_all:+4.1}\n");
156
157 // Read a chunk
158 let chunk_indices = vec![0, 1];
159 let data_chunk = array.retrieve_chunk_ndarray::<f32>(&chunk_indices)?;
160 println!("retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
161
162 // Read chunks
163 let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
164 let data_chunks = array.retrieve_chunks_ndarray::<f32>(&chunks)?;
165 println!("retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
166
167 // Retrieve an array subset
168 let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
169 let data_subset = array.retrieve_array_subset_ndarray::<f32>(&subset)?;
170 println!("retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
171
172 // Show the hierarchy
173 let node = Node::open(&store, "/").unwrap();
174 let tree = node.hierarchy_tree();
175 println!("hierarchy_tree:\n{}", tree);
176
177 Ok(())
178}
8async fn async_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
9 use futures::StreamExt;
10 use std::sync::Arc;
11 use zarrs::{
12 array::{DataType, FillValue, ZARR_NAN_F32},
13 array_subset::ArraySubset,
14 node::Node,
15 };
16
17 // Create a store
18 let mut store: AsyncReadableWritableListableStorage = Arc::new(
19 zarrs_object_store::AsyncObjectStore::new(object_store::memory::InMemory::new()),
20 );
21 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
22 if arg1 == "--usage-log" {
23 let log_writer = Arc::new(std::sync::Mutex::new(
24 // std::io::BufWriter::new(
25 std::io::stdout(),
26 // )
27 ));
28 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
29 chrono::Utc::now().format("[%T%.3f] ").to_string()
30 }));
31 }
32 }
33
34 // Create the root group
35 zarrs::group::GroupBuilder::new()
36 .build(store.clone(), "/")?
37 .async_store_metadata()
38 .await?;
39
40 // Create a group with attributes
41 let group_path = "/group";
42 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
43 group
44 .attributes_mut()
45 .insert("foo".into(), serde_json::Value::String("bar".into()));
46 group.async_store_metadata().await?;
47
48 println!(
49 "The group metadata is:\n{}\n",
50 group.metadata().to_string_pretty()
51 );
52
53 // Create an array
54 let array_path = "/group/array";
55 let array = zarrs::array::ArrayBuilder::new(
56 vec![8, 8], // array shape
57 DataType::Float32,
58 vec![4, 4].try_into()?, // regular chunk shape
59 FillValue::from(ZARR_NAN_F32),
60 )
61 // .bytes_to_bytes_codecs(vec![]) // uncompressed
62 .dimension_names(["y", "x"].into())
63 // .storage_transformers(vec![].into())
64 .build_arc(store.clone(), array_path)?;
65
66 // Write array metadata to store
67 array.async_store_metadata().await?;
68
69 println!(
70 "The array metadata is:\n{}\n",
71 array.metadata().to_string_pretty()
72 );
73
74 // Write some chunks
75 let store_chunk = |i: u64| {
76 let array = array.clone();
77 async move {
78 let chunk_indices: Vec<u64> = vec![0, i];
79 let chunk_subset = array
80 .chunk_grid()
81 .subset(&chunk_indices, array.shape())?
82 .ok_or_else(|| {
83 zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
84 })?;
85 array
86 .async_store_chunk_elements(
87 &chunk_indices,
88 &vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
89 )
90 .await
91 }
92 };
93 futures::stream::iter(0..2)
94 .map(Ok)
95 .try_for_each_concurrent(None, store_chunk)
96 .await?;
97
98 let subset_all = array.subset_all();
99 let data_all = array
100 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
101 .await?;
102 println!("async_store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
103
104 // Store multiple chunks
105 array
106 .async_store_chunks_elements::<f32>(
107 &ArraySubset::new_with_ranges(&[1..2, 0..2]),
108 &[
109 //
110 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
111 //
112 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
113 ],
114 )
115 .await?;
116 let data_all = array
117 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
118 .await?;
119 println!("async_store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
120
121 // Write a subset spanning multiple chunks, including updating chunks already written
122 array
123 .async_store_array_subset_elements::<f32>(
124 &ArraySubset::new_with_ranges(&[3..6, 3..6]),
125 &[-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
126 )
127 .await?;
128 let data_all = array
129 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
130 .await?;
131 println!("async_store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
132
133 // Store array subset
134 array
135 .async_store_array_subset_elements::<f32>(
136 &ArraySubset::new_with_ranges(&[0..8, 6..7]),
137 &[-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
138 )
139 .await?;
140 let data_all = array
141 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
142 .await?;
143 println!("async_store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
144
145 // Store chunk subset
146 array
147 .async_store_chunk_subset_elements::<f32>(
148 // chunk indices
149 &[1, 1],
150 // subset within chunk
151 &ArraySubset::new_with_ranges(&[3..4, 0..4]),
152 &[-7.4, -7.5, -7.6, -7.7],
153 )
154 .await?;
155 let data_all = array
156 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
157 .await?;
158 println!("async_store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
159
160 // Erase a chunk
161 array.async_erase_chunk(&[0, 0]).await?;
162 let data_all = array
163 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
164 .await?;
165 println!("async_erase_chunk [0, 0]:\n{data_all:+4.1}\n");
166
167 // Read a chunk
168 let chunk_indices = vec![0, 1];
169 let data_chunk = array
170 .async_retrieve_chunk_ndarray::<f32>(&chunk_indices)
171 .await?;
172 println!("async_retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
173
174 // Read chunks
175 let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
176 let data_chunks = array.async_retrieve_chunks_ndarray::<f32>(&chunks).await?;
177 println!("async_retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
178
179 // Retrieve an array subset
180 let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
181 let data_subset = array
182 .async_retrieve_array_subset_ndarray::<f32>(&subset)
183 .await?;
184 println!("async_retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
185
186 // Show the hierarchy
187 let node = Node::async_open(store, "/").await.unwrap();
188 let tree = node.hierarchy_tree();
189 println!("hierarchy_tree:\n{}", tree);
190
191 Ok(())
192}
pub fn storage_transformers(
    &mut self,
    storage_transformers: StorageTransformerChain,
) -> &mut Self
Set the storage transformers.
If left unmodified, there are no storage transformers.
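The repository examples on this page leave the chain empty (see the commented-out .storage_transformers(vec![].into()) calls); a minimal sketch making that explicit, assuming the same empty-Vec conversion into StorageTransformerChain used in those examples:
use zarrs::array::{ArrayBuilder, DataType, FillValue, ZARR_NAN_F32};

let mut builder = ArrayBuilder::new(
    vec![8, 8],             // array shape
    DataType::Float32,
    vec![4, 4].try_into()?, // regular chunk shape
    FillValue::from(ZARR_NAN_F32),
);
// Explicitly set an empty storage transformer chain (equivalent to the default).
builder.storage_transformers(vec![].into());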
pub fn build<TStorage: ?Sized>(
    &self,
    storage: Arc<TStorage>,
    path: &str,
) -> Result<Array<TStorage>, ArrayCreateError>
Build into an Array.
§Errors
Returns ArrayCreateError if there is an error creating the array.
This can be due to a storage error, an invalid path, or a problem with array configuration.
Examples found in repository
153fn main() {
154 let store = std::sync::Arc::new(MemoryStore::default());
155 let array_path = "/array";
156 let array = ArrayBuilder::new(
157 vec![4, 1], // array shape
158 DataType::Extension(Arc::new(CustomDataTypeVariableSize)),
159 vec![3, 1].try_into().unwrap(), // regular chunk shape
160 FillValue::from(vec![]),
161 )
162 .array_to_array_codecs(vec![
163 #[cfg(feature = "transpose")]
164 Arc::new(zarrs::array::codec::TransposeCodec::new(
165 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
166 )),
167 ])
168 .bytes_to_bytes_codecs(vec![
169 #[cfg(feature = "gzip")]
170 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
171 #[cfg(feature = "crc32c")]
172 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
173 ])
174 // .storage_transformers(vec![].into())
175 .build(store, array_path)
176 .unwrap();
177 println!("{}", array.metadata().to_string_pretty());
178
179 let data = [
180 CustomDataTypeVariableSizeElement::from(Some(1.0)),
181 CustomDataTypeVariableSizeElement::from(None),
182 CustomDataTypeVariableSizeElement::from(Some(3.0)),
183 ];
184 array.store_chunk_elements(&[0, 0], &data).unwrap();
185
186 let data = array
187 .retrieve_array_subset_elements::<CustomDataTypeVariableSizeElement>(&array.subset_all())
188 .unwrap();
189
190 assert_eq!(data[0], CustomDataTypeVariableSizeElement::from(Some(1.0)));
191 assert_eq!(data[1], CustomDataTypeVariableSizeElement::from(None));
192 assert_eq!(data[2], CustomDataTypeVariableSizeElement::from(Some(3.0)));
193 assert_eq!(data[3], CustomDataTypeVariableSizeElement::from(None));
194
195 println!("{data:#?}");
196}
More examples
269fn main() {
270 let store = std::sync::Arc::new(MemoryStore::default());
271 let array_path = "/array";
272 let fill_value = CustomDataTypeFixedSizeElement { x: 1, y: 2.3 };
273 let array = ArrayBuilder::new(
274 vec![4, 1], // array shape
275 DataType::Extension(Arc::new(CustomDataTypeFixedSize)),
276 vec![2, 1].try_into().unwrap(), // regular chunk shape
277 FillValue::new(fill_value.to_ne_bytes().to_vec()),
278 )
279 .array_to_array_codecs(vec![
280 #[cfg(feature = "transpose")]
281 Arc::new(zarrs::array::codec::TransposeCodec::new(
282 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
283 )),
284 ])
285 .bytes_to_bytes_codecs(vec![
286 #[cfg(feature = "gzip")]
287 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
288 #[cfg(feature = "crc32c")]
289 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
290 ])
291 // .storage_transformers(vec![].into())
292 .build(store, array_path)
293 .unwrap();
294 println!("{}", array.metadata().to_string_pretty());
295
296 let data = [
297 CustomDataTypeFixedSizeElement { x: 3, y: 4.5 },
298 CustomDataTypeFixedSizeElement { x: 6, y: 7.8 },
299 ];
300 array.store_chunk_elements(&[0, 0], &data).unwrap();
301
302 let data = array
303 .retrieve_array_subset_elements::<CustomDataTypeFixedSizeElement>(&array.subset_all())
304 .unwrap();
305
306 assert_eq!(data[0], CustomDataTypeFixedSizeElement { x: 3, y: 4.5 });
307 assert_eq!(data[1], CustomDataTypeFixedSizeElement { x: 6, y: 7.8 });
308 assert_eq!(data[2], CustomDataTypeFixedSizeElement { x: 1, y: 2.3 });
309 assert_eq!(data[3], CustomDataTypeFixedSizeElement { x: 1, y: 2.3 });
310
311 println!("{data:#?}");
312}
205fn main() {
206 let store = std::sync::Arc::new(MemoryStore::default());
207 let array_path = "/array";
208 let fill_value = CustomDataTypeUInt12Element::try_from(15).unwrap();
209 let array = ArrayBuilder::new(
210 vec![4096, 1], // array shape
211 DataType::Extension(Arc::new(CustomDataTypeUInt12)),
212 vec![5, 1].try_into().unwrap(), // regular chunk shape
213 FillValue::new(fill_value.to_le_bytes().to_vec()),
214 )
215 .array_to_array_codecs(vec![
216 #[cfg(feature = "transpose")]
217 Arc::new(zarrs::array::codec::TransposeCodec::new(
218 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
219 )),
220 ])
221 .array_to_bytes_codec(Arc::new(zarrs::array::codec::PackBitsCodec::default()))
222 .bytes_to_bytes_codecs(vec![
223 #[cfg(feature = "gzip")]
224 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
225 #[cfg(feature = "crc32c")]
226 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
227 ])
228 // .storage_transformers(vec![].into())
229 .build(store, array_path)
230 .unwrap();
231 println!("{}", array.metadata().to_string_pretty());
232
233 let data: Vec<CustomDataTypeUInt12Element> = (0..4096)
234 .into_iter()
235 .map(|i| CustomDataTypeUInt12Element::try_from(i).unwrap())
236 .collect();
237
238 array
239 .store_array_subset_elements(&array.subset_all(), &data)
240 .unwrap();
241
242 let data = array
243 .retrieve_array_subset_elements::<CustomDataTypeUInt12Element>(&array.subset_all())
244 .unwrap();
245
246 for i in 0usize..4096 {
247 let element = CustomDataTypeUInt12Element::try_from(i as u64).unwrap();
248 assert_eq!(data[i], element);
249 let element_pd = array
250 .retrieve_array_subset_elements::<CustomDataTypeUInt12Element>(
251 &ArraySubset::new_with_ranges(&[(i as u64)..i as u64 + 1, 0..1]),
252 )
253 .unwrap()[0];
254 assert_eq!(element_pd, element);
255 }
256}
217fn main() {
218 let store = std::sync::Arc::new(MemoryStore::default());
219 let array_path = "/array";
220 let fill_value = CustomDataTypeFloat8e3m4Element::from(1.23);
221 let array = ArrayBuilder::new(
222 vec![6, 1], // array shape
223 DataType::Extension(Arc::new(CustomDataTypeFloat8e3m4)),
224 vec![5, 1].try_into().unwrap(), // regular chunk shape
225 FillValue::new(fill_value.to_ne_bytes().to_vec()),
226 )
227 .array_to_array_codecs(vec![
228 #[cfg(feature = "transpose")]
229 Arc::new(zarrs::array::codec::TransposeCodec::new(
230 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
231 )),
232 ])
233 .bytes_to_bytes_codecs(vec![
234 #[cfg(feature = "gzip")]
235 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
236 #[cfg(feature = "crc32c")]
237 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
238 ])
239 // .storage_transformers(vec![].into())
240 .build(store, array_path)
241 .unwrap();
242 println!("{}", array.metadata().to_string_pretty());
243
244 let data = [
245 CustomDataTypeFloat8e3m4Element::from(2.34),
246 CustomDataTypeFloat8e3m4Element::from(3.45),
247 CustomDataTypeFloat8e3m4Element::from(f32::INFINITY),
248 CustomDataTypeFloat8e3m4Element::from(f32::NEG_INFINITY),
249 CustomDataTypeFloat8e3m4Element::from(f32::NAN),
250 ];
251 array.store_chunk_elements(&[0, 0], &data).unwrap();
252
253 let data = array
254 .retrieve_array_subset_elements::<CustomDataTypeFloat8e3m4Element>(&array.subset_all())
255 .unwrap();
256
257 for f in &data {
258 println!(
259 "float8_e3m4: {:08b} f32: {}",
260 f.to_ne_bytes()[0],
261 f.as_f32()
262 );
263 }
264
265 assert_eq!(data[0], CustomDataTypeFloat8e3m4Element::from(2.34));
266 assert_eq!(data[1], CustomDataTypeFloat8e3m4Element::from(3.45));
267 assert_eq!(
268 data[2],
269 CustomDataTypeFloat8e3m4Element::from(f32::INFINITY)
270 );
271 assert_eq!(
272 data[3],
273 CustomDataTypeFloat8e3m4Element::from(f32::NEG_INFINITY)
274 );
275 assert_eq!(data[4], CustomDataTypeFloat8e3m4Element::from(f32::NAN));
276 assert_eq!(data[5], CustomDataTypeFloat8e3m4Element::from(1.23));
277}
203fn main() {
204 let store = std::sync::Arc::new(MemoryStore::default());
205 let array_path = "/array";
206 let fill_value = CustomDataTypeUInt4Element::try_from(15).unwrap();
207 let array = ArrayBuilder::new(
208 vec![6, 1], // array shape
209 DataType::Extension(Arc::new(CustomDataTypeUInt4)),
210 vec![5, 1].try_into().unwrap(), // regular chunk shape
211 FillValue::new(fill_value.to_ne_bytes().to_vec()),
212 )
213 .array_to_array_codecs(vec![
214 #[cfg(feature = "transpose")]
215 Arc::new(zarrs::array::codec::TransposeCodec::new(
216 zarrs::array::codec::array_to_array::transpose::TransposeOrder::new(&[1, 0]).unwrap(),
217 )),
218 ])
219 .array_to_bytes_codec(Arc::new(zarrs::array::codec::PackBitsCodec::default()))
220 .bytes_to_bytes_codecs(vec![
221 #[cfg(feature = "gzip")]
222 Arc::new(zarrs::array::codec::GzipCodec::new(5).unwrap()),
223 #[cfg(feature = "crc32c")]
224 Arc::new(zarrs::array::codec::Crc32cCodec::new()),
225 ])
226 // .storage_transformers(vec![].into())
227 .build(store, array_path)
228 .unwrap();
229 println!("{}", array.metadata().to_string_pretty());
230
231 let data = [
232 CustomDataTypeUInt4Element::try_from(1).unwrap(),
233 CustomDataTypeUInt4Element::try_from(2).unwrap(),
234 CustomDataTypeUInt4Element::try_from(3).unwrap(),
235 CustomDataTypeUInt4Element::try_from(4).unwrap(),
236 CustomDataTypeUInt4Element::try_from(5).unwrap(),
237 ];
238 array.store_chunk_elements(&[0, 0], &data).unwrap();
239
240 let data = array
241 .retrieve_array_subset_elements::<CustomDataTypeUInt4Element>(&array.subset_all())
242 .unwrap();
243
244 for f in &data {
245 println!("uint4: {:08b} u8: {}", f.as_u8(), f.as_u8());
246 }
247
248 assert_eq!(data[0], CustomDataTypeUInt4Element::try_from(1).unwrap());
249 assert_eq!(data[1], CustomDataTypeUInt4Element::try_from(2).unwrap());
250 assert_eq!(data[2], CustomDataTypeUInt4Element::try_from(3).unwrap());
251 assert_eq!(data[3], CustomDataTypeUInt4Element::try_from(4).unwrap());
252 assert_eq!(data[4], CustomDataTypeUInt4Element::try_from(5).unwrap());
253 assert_eq!(data[5], CustomDataTypeUInt4Element::try_from(15).unwrap());
254
255 let data = array
256 .retrieve_array_subset_elements::<CustomDataTypeUInt4Element>(
257 &ArraySubset::new_with_ranges(&[1..3, 0..1]),
258 )
259 .unwrap();
260 assert_eq!(data[0], CustomDataTypeUInt4Element::try_from(2).unwrap());
261 assert_eq!(data[1], CustomDataTypeUInt4Element::try_from(3).unwrap());
262}
10fn array_write_read() -> Result<(), Box<dyn std::error::Error>> {
11 use std::sync::Arc;
12 use zarrs::{
13 array::{DataType, FillValue},
14 array_subset::ArraySubset,
15 storage::store,
16 };
17
18 // Create a store
19 // let path = tempfile::TempDir::new()?;
20 // let mut store: ReadableWritableListableStorage =
21 // Arc::new(zarrs::filesystem::FilesystemStore::new(path.path())?);
22 // let mut store: ReadableWritableListableStorage = Arc::new(
23 // zarrs::filesystem::FilesystemStore::new("zarrs/tests/data/array_write_read.zarr")?,
24 // );
25 let mut store: ReadableWritableListableStorage = Arc::new(store::MemoryStore::new());
26 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
27 if arg1 == "--usage-log" {
28 let log_writer = Arc::new(std::sync::Mutex::new(
29 // std::io::BufWriter::new(
30 std::io::stdout(),
31 // )
32 ));
33 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
34 chrono::Utc::now().format("[%T%.3f] ").to_string()
35 }));
36 }
37 }
38
39 // Create the root group
40 zarrs::group::GroupBuilder::new()
41 .build(store.clone(), "/")?
42 .store_metadata()?;
43
44 // Create a group with attributes
45 let group_path = "/group";
46 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
47 group
48 .attributes_mut()
49 .insert("foo".into(), serde_json::Value::String("bar".into()));
50 group.store_metadata()?;
51
52 println!(
53 "The group metadata is:\n{}\n",
54 group.metadata().to_string_pretty()
55 );
56
57 // Create an array
58 let array_path = "/group/array";
59 let array = zarrs::array::ArrayBuilder::new(
60 vec![4, 4], // array shape
61 DataType::String,
62 vec![2, 2].try_into()?, // regular chunk shape
63 FillValue::from("_"),
64 )
65 // .bytes_to_bytes_codecs(vec![]) // uncompressed
66 .dimension_names(["y", "x"].into())
67 // .storage_transformers(vec![].into())
68 .build(store.clone(), array_path)?;
69
70 // Write array metadata to store
71 array.store_metadata()?;
72
73 println!(
74 "The array metadata is:\n{}\n",
75 array.metadata().to_string_pretty()
76 );
77
78 // Write some chunks
79 array.store_chunk_ndarray(
80 &[0, 0],
81 ArrayD::<&str>::from_shape_vec(vec![2, 2], vec!["a", "bb", "ccc", "dddd"]).unwrap(),
82 )?;
83 array.store_chunk_ndarray(
84 &[0, 1],
85 ArrayD::<&str>::from_shape_vec(vec![2, 2], vec!["4444", "333", "22", "1"]).unwrap(),
86 )?;
87 let subset_all = array.subset_all();
88 let data_all = array.retrieve_array_subset_ndarray::<String>(&subset_all)?;
89 println!("store_chunk [0, 0] and [0, 1]:\n{data_all}\n");
90
91 // Write a subset spanning multiple chunks, including updating chunks already written
92 let ndarray_subset: Array2<&str> = array![["!", "@@"], ["###", "$$$$"]];
93 array.store_array_subset_ndarray(
94 ArraySubset::new_with_ranges(&[1..3, 1..3]).start(),
95 ndarray_subset,
96 )?;
97 let data_all = array.retrieve_array_subset_ndarray::<String>(&subset_all)?;
98 println!("store_array_subset [1..3, 1..3]:\nndarray::ArrayD<String>\n{data_all}");
99
100 // Retrieve bytes directly, convert into a single string allocation, create a &str ndarray
101 // TODO: Add a convenience function for this?
102 let data_all = array.retrieve_array_subset(&subset_all)?;
103 let (bytes, offsets) = data_all.into_variable()?;
104 let string = String::from_utf8(bytes.into_owned())?;
105 let elements = offsets
106 .iter()
107 .tuple_windows()
108 .map(|(&curr, &next)| &string[curr..next])
109 .collect::<Vec<&str>>();
110 let ndarray = ArrayD::<&str>::from_shape_vec(subset_all.shape_usize(), elements)?;
111 println!("ndarray::ArrayD<&str>:\n{ndarray}");
112
113 Ok(())
114}
pub fn build_arc<TStorage: ?Sized>(
    &self,
    storage: Arc<TStorage>,
    path: &str,
) -> Result<Arc<Array<TStorage>>, ArrayCreateError>
Build into an Arc<Array>.
§Errors
Returns ArrayCreateError if there is an error creating the array.
This can be due to a storage error, an invalid path, or a problem with array configuration.
Examples found in repository
8async fn async_array_write_read() -> Result<(), Box<dyn std::error::Error>> {
9 use futures::StreamExt;
10 use std::sync::Arc;
11 use zarrs::{
12 array::{DataType, FillValue, ZARR_NAN_F32},
13 array_subset::ArraySubset,
14 node::Node,
15 };
16
17 // Create a store
18 let mut store: AsyncReadableWritableListableStorage = Arc::new(
19 zarrs_object_store::AsyncObjectStore::new(object_store::memory::InMemory::new()),
20 );
21 if let Some(arg1) = std::env::args().collect::<Vec<_>>().get(1) {
22 if arg1 == "--usage-log" {
23 let log_writer = Arc::new(std::sync::Mutex::new(
24 // std::io::BufWriter::new(
25 std::io::stdout(),
26 // )
27 ));
28 store = Arc::new(UsageLogStorageAdapter::new(store, log_writer, || {
29 chrono::Utc::now().format("[%T%.3f] ").to_string()
30 }));
31 }
32 }
33
34 // Create the root group
35 zarrs::group::GroupBuilder::new()
36 .build(store.clone(), "/")?
37 .async_store_metadata()
38 .await?;
39
40 // Create a group with attributes
41 let group_path = "/group";
42 let mut group = zarrs::group::GroupBuilder::new().build(store.clone(), group_path)?;
43 group
44 .attributes_mut()
45 .insert("foo".into(), serde_json::Value::String("bar".into()));
46 group.async_store_metadata().await?;
47
48 println!(
49 "The group metadata is:\n{}\n",
50 group.metadata().to_string_pretty()
51 );
52
53 // Create an array
54 let array_path = "/group/array";
55 let array = zarrs::array::ArrayBuilder::new(
56 vec![8, 8], // array shape
57 DataType::Float32,
58 vec![4, 4].try_into()?, // regular chunk shape
59 FillValue::from(ZARR_NAN_F32),
60 )
61 // .bytes_to_bytes_codecs(vec![]) // uncompressed
62 .dimension_names(["y", "x"].into())
63 // .storage_transformers(vec![].into())
64 .build_arc(store.clone(), array_path)?;
65
66 // Write array metadata to store
67 array.async_store_metadata().await?;
68
69 println!(
70 "The array metadata is:\n{}\n",
71 array.metadata().to_string_pretty()
72 );
73
74 // Write some chunks
75 let store_chunk = |i: u64| {
76 let array = array.clone();
77 async move {
78 let chunk_indices: Vec<u64> = vec![0, i];
79 let chunk_subset = array
80 .chunk_grid()
81 .subset(&chunk_indices, array.shape())?
82 .ok_or_else(|| {
83 zarrs::array::ArrayError::InvalidChunkGridIndicesError(chunk_indices.to_vec())
84 })?;
85 array
86 .async_store_chunk_elements(
87 &chunk_indices,
88 &vec![i as f32 * 0.1; chunk_subset.num_elements() as usize],
89 )
90 .await
91 }
92 };
93 futures::stream::iter(0..2)
94 .map(Ok)
95 .try_for_each_concurrent(None, store_chunk)
96 .await?;
97
98 let subset_all = array.subset_all();
99 let data_all = array
100 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
101 .await?;
102 println!("async_store_chunk [0, 0] and [0, 1]:\n{data_all:+4.1}\n");
103
104 // Store multiple chunks
105 array
106 .async_store_chunks_elements::<f32>(
107 &ArraySubset::new_with_ranges(&[1..2, 0..2]),
108 &[
109 //
110 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
111 //
112 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1, 1.0, 1.0, 1.0, 1.0, 1.1, 1.1, 1.1, 1.1,
113 ],
114 )
115 .await?;
116 let data_all = array
117 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
118 .await?;
119 println!("async_store_chunks [1..2, 0..2]:\n{data_all:+4.1}\n");
120
121 // Write a subset spanning multiple chunks, including updating chunks already written
122 array
123 .async_store_array_subset_elements::<f32>(
124 &ArraySubset::new_with_ranges(&[3..6, 3..6]),
125 &[-3.3, -3.4, -3.5, -4.3, -4.4, -4.5, -5.3, -5.4, -5.5],
126 )
127 .await?;
128 let data_all = array
129 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
130 .await?;
131 println!("async_store_array_subset [3..6, 3..6]:\n{data_all:+4.1}\n");
132
133 // Store array subset
134 array
135 .async_store_array_subset_elements::<f32>(
136 &ArraySubset::new_with_ranges(&[0..8, 6..7]),
137 &[-0.6, -1.6, -2.6, -3.6, -4.6, -5.6, -6.6, -7.6],
138 )
139 .await?;
140 let data_all = array
141 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
142 .await?;
143 println!("async_store_array_subset [0..8, 6..7]:\n{data_all:+4.1}\n");
144
145 // Store chunk subset
146 array
147 .async_store_chunk_subset_elements::<f32>(
148 // chunk indices
149 &[1, 1],
150 // subset within chunk
151 &ArraySubset::new_with_ranges(&[3..4, 0..4]),
152 &[-7.4, -7.5, -7.6, -7.7],
153 )
154 .await?;
155 let data_all = array
156 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
157 .await?;
158 println!("async_store_chunk_subset [3..4, 0..4] of chunk [1, 1]:\n{data_all:+4.1}\n");
159
160 // Erase a chunk
161 array.async_erase_chunk(&[0, 0]).await?;
162 let data_all = array
163 .async_retrieve_array_subset_ndarray::<f32>(&subset_all)
164 .await?;
165 println!("async_erase_chunk [0, 0]:\n{data_all:+4.1}\n");
166
167 // Read a chunk
168 let chunk_indices = vec![0, 1];
169 let data_chunk = array
170 .async_retrieve_chunk_ndarray::<f32>(&chunk_indices)
171 .await?;
172 println!("async_retrieve_chunk [0, 1]:\n{data_chunk:+4.1}\n");
173
174 // Read chunks
175 let chunks = ArraySubset::new_with_ranges(&[0..2, 1..2]);
176 let data_chunks = array.async_retrieve_chunks_ndarray::<f32>(&chunks).await?;
177 println!("async_retrieve_chunks [0..2, 1..2]:\n{data_chunks:+4.1}\n");
178
179 // Retrieve an array subset
180 let subset = ArraySubset::new_with_ranges(&[2..6, 3..5]); // the center 4x2 region
181 let data_subset = array
182 .async_retrieve_array_subset_ndarray::<f32>(&subset)
183 .await?;
184 println!("async_retrieve_array_subset [2..6, 3..5]:\n{data_subset:+4.1}\n");
185
186 // Show the hierarchy
187 let node = Node::async_open(store, "/").await.unwrap();
188 let tree = node.hierarchy_tree();
189 println!("hierarchy_tree:\n{}", tree);
190
191 Ok(())
192}
Trait Implementations§
Auto Trait Implementations§
impl Freeze for ArrayBuilder
impl !RefUnwindSafe for ArrayBuilder
impl Send for ArrayBuilder
impl Sync for ArrayBuilder
impl Unpin for ArrayBuilder
impl !UnwindSafe for ArrayBuilder
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.