// anndata_zarr/lib.rs — Zarr storage backend for anndata.

1use anndata::{
2    backend::*,
3    data::{DynArray, DynCowArray, SelectInfoBounds, SelectInfoElem, SelectInfoElemBounds, Shape},
4};
5
6use anyhow::{bail, Context, Result};
7use ndarray::{Array, ArrayD, ArrayView, CowArray, Dimension, IxDyn, SliceInfoElem};
8use std::{
9    borrow::Cow, ops::{Deref, Index}, path::{Path, PathBuf}
10};
11use std::{sync::Arc, vec};
12use zarrs::{array::{data_type::DataType, Element}, array_subset::ArraySubset, storage::StorePrefix};
13use zarrs::filesystem::FilesystemStore;
14use zarrs::group::Group;
15use zarrs::{array::ElementOwned, storage::ReadableWritableListableStorageTraits};
16use zarrs::array::codec::bytes_to_bytes::zstd::ZstdCodec;
17
/// The Zarr backend: a stateless marker type implementing the anndata
/// `Backend` trait (see the `impl Backend for Zarr` below).
pub struct Zarr;
20
/// Handle to a Zarr store on the local filesystem.
///
/// Clones share the same underlying storage via `Arc`; `path` records the
/// on-disk location so `filename()` can report it.
#[derive(Clone)]
pub struct ZarrStore {
    // Shared zarrs storage backend (readable, writable, and listable).
    inner: Arc<dyn ReadableWritableListableStorageTraits>,
    // Filesystem location the store was created/opened at.
    path: PathBuf,
}
26
27impl Deref for ZarrStore {
28    type Target = Arc<dyn ReadableWritableListableStorageTraits>;
29
30    fn deref(&self) -> &Self::Target {
31        &self.inner
32    }
33}
34
/// A Zarr group (an interior node of the hierarchy) plus the store it
/// belongs to.
pub struct ZarrGroup {
    group: Group<dyn ReadableWritableListableStorageTraits>,
    // Kept so group operations can reach (and clone) the owning store.
    store: ZarrStore,
}
39
/// A Zarr array (leaf node holding data) plus the store it belongs to.
pub struct ZarrDataset {
    dataset: zarrs::array::Array<dyn ReadableWritableListableStorageTraits>,
    // Kept so dataset operations can reach (and clone) the owning store.
    store: ZarrStore,
}
44
impl Backend for Zarr {
    const NAME: &'static str = "zarr";

    type Store = ZarrStore;

    type Group = ZarrGroup;

    /// datasets contain arrays.
    type Dataset = ZarrDataset;

    /// Create a new store at `path`, replacing anything already there.
    fn new<P: AsRef<Path>>(path: P) -> Result<Self::Store> {
        // Wipe whatever currently occupies `path` (file or directory) so
        // the new store starts empty.
        if path.as_ref().try_exists()? {
            let metadata = std::fs::metadata(&path)?;
            if metadata.is_file() {
                std::fs::remove_file(&path)?;
            } else {
                std::fs::remove_dir_all(&path)?;
            }
        }

        let inner = Arc::new(FilesystemStore::new(path.as_ref())?);
        // Write the root group's metadata immediately so the directory is a
        // valid Zarr hierarchy from the start.
        zarrs::group::GroupBuilder::new().build(inner.clone(), "/")?.store_metadata()?;
        Ok(ZarrStore {
            path: path.as_ref().to_path_buf(),
            inner,
        })
    }

    /// Open an existing store.
    ///
    /// NOTE(review): despite the original "read-only" wording, this builds
    /// the same read/write `FilesystemStore` as `open_rw` — confirm whether
    /// a read-only wrapper is actually wanted here.
    fn open<P: AsRef<Path>>(path: P) -> Result<Self::Store> {
        Ok(ZarrStore {
            path: path.as_ref().to_path_buf(),
            inner: Arc::new(FilesystemStore::new(path)?),
        })
    }

    /// Opens a store as read/write; the directory must exist.
    fn open_rw<P: AsRef<Path>>(path: P) -> Result<Self::Store> {
        Ok(ZarrStore {
            path: path.as_ref().to_path_buf(),
            inner: Arc::new(FilesystemStore::new(path)?),
        })
    }
}
89
90impl StoreOp<Zarr> for ZarrStore {
91    /// Returns the file path.
92    fn filename(&self) -> PathBuf {
93        self.path.clone()
94    }
95
96    /// Close the file.
97    fn close(self) -> Result<()> {
98        drop(self);
99        Ok(())
100    }
101}
102
103impl GroupOp<Zarr> for ZarrStore {
104    /// List all groups and datasets in this group.
105    fn list(&self) -> Result<Vec<String>> {
106        let result = self.list_dir(&StorePrefix::root())?;
107        Ok(result.prefixes().into_iter().map(|x| x.as_str().trim_end_matches("/").to_string()).collect())
108    }
109
110    /// Create a new group.
111    fn new_group(&self, name: &str) -> Result<<Zarr as Backend>::Group> {
112        let path = canoincalize_path(name);
113        let group = zarrs::group::GroupBuilder::new().build(self.inner.clone(), &path)?;
114        group.store_metadata()?;
115        Ok(ZarrGroup {
116            group,
117            store: self.clone(),
118        })
119    }
120
121    /// Open an existing group.
122    fn open_group(&self, name: &str) -> Result<<Zarr as Backend>::Group> {
123        let group = zarrs::group::Group::open(self.inner.clone(), &canoincalize_path(name))?;
124        Ok(ZarrGroup {
125            group,
126            store: self.clone(),
127        })
128    }
129
130    /// Create an empty dataset holding an array value.
131    fn new_empty_dataset<T: BackendData>(
132        &self,
133        name: &str,
134        shape: &Shape,
135        config: WriteConfig,
136    ) -> Result<<Zarr as Backend>::Dataset> {
137        let path = canoincalize_path(name);
138        let shape = shape.as_ref();
139        let sizes: Vec<u64> = match config.block_size {
140            Some(s) => s.as_ref().into_iter().map(|x| (*x).max(1) as u64).collect(),
141            _ => {
142                if shape.len() == 1 {
143                    vec![shape[0].min(10000).max(1) as u64]
144                } else {
145                    shape.iter().map(|&x| x.min(100).max(1) as u64).collect()
146                }
147            }
148        };
149        let chunk_size = zarrs::array::chunk_grid::ChunkGrid::new(
150            zarrs::array::chunk_grid::regular::RegularChunkGrid::new(sizes.try_into().unwrap()),
151        );
152
153        let (datatype, fill) = match T::DTYPE {
154            ScalarType::U8 => (DataType::UInt8, 0u8.into()),
155            ScalarType::U16 => (DataType::UInt16, 0u16.into()),
156            ScalarType::U32 => (DataType::UInt32, 0u32.into()),
157            ScalarType::U64 => (DataType::UInt64, 0u64.into()),
158            ScalarType::I8 => (DataType::Int8, 0i8.into()),
159            ScalarType::I16 => (DataType::Int16, 0i16.into()),
160            ScalarType::I32 => (DataType::Int32, 0i32.into()),
161            ScalarType::I64 => (DataType::Int64, 0i64.into()),
162            ScalarType::F32 => (DataType::Float32, zarrs::array::ZARR_NAN_F32.into()),
163            ScalarType::F64 => (DataType::Float64, zarrs::array::ZARR_NAN_F64.into()),
164            ScalarType::Bool => (DataType::Bool, false.into()),
165            ScalarType::String => (DataType::String, "".into()),
166        };
167
168        let array = zarrs::array::ArrayBuilder::new(
169            shape.iter().map(|x| *x as u64).collect(),
170            datatype,
171            chunk_size,
172            fill,
173        )
174        .bytes_to_bytes_codecs(vec![
175            Arc::new(ZstdCodec::new(7, false))
176        ])
177        .build(self.inner.clone(), &path)?;
178        array.store_metadata()?;
179        Ok(ZarrDataset {
180            dataset: array,
181            store: self.clone(),
182        })
183    }
184
185    fn open_dataset(&self, name: &str) -> Result<<Zarr as Backend>::Dataset> {
186        let array = zarrs::array::Array::open(self.inner.clone(), &canoincalize_path(name))?;
187        Ok(ZarrDataset {
188            dataset: array,
189            store: self.clone(),
190        })
191    }
192
193    /// Delete a group or dataset.
194    fn delete(&self, name: &str) -> Result<()> {
195        self.inner.erase_prefix(&str_to_prefix(name))?;
196        Ok(())
197    }
198
199    /// Check if a group or dataset exists.
200    fn exists(&self, name: &str) -> Result<bool> {
201        let path = format!("/{}", name);
202        Ok(zarrs::node::node_exists(
203            &self.inner,
204            &path.as_str().try_into()?,
205        )?)
206    }
207}
208
impl GroupOp<Zarr> for ZarrGroup {
    /// List the names of all groups and datasets directly under this group.
    fn list(&self) -> Result<Vec<String>> {
        // The group's node path ("/a/b") expressed as a store prefix ("a/b/").
        let current_path = str_to_prefix(self.group.path().as_str());
        let result = self
            .store
            .list_dir(&current_path.as_str().try_into()?)?
            .prefixes()
            .into_iter()
            // Child prefixes look like "a/b/child/": drop the parent prefix
            // and the trailing slash to recover the bare child name.
            .map(|x| x.as_str().strip_prefix(current_path.as_str()).unwrap().strip_suffix("/").unwrap().to_owned())
            .collect();
        Ok(result)
    }

    /// Create a new group.
    fn new_group(&self, name: &str) -> Result<<Zarr as Backend>::Group> {
        // Child node path, e.g. "/parent" + "name" -> "/parent/name".
        let path = self.group.path().as_path().join(name);
        let group = zarrs::group::GroupBuilder::new().build(self.store.inner.clone(), path.to_str().unwrap())?;
        // Persist the group's metadata so it exists on disk immediately.
        group.store_metadata()?;
        Ok(ZarrGroup {
            group,
            store: self.store.clone(),
        })
    }

    /// Open an existing group.
    fn open_group(&self, name: &str) -> Result<<Zarr as Backend>::Group> {
        let path = self.group.path().as_path().join(name);
        let group = zarrs::group::Group::open(self.store.inner.clone(), path.to_str().unwrap())?;
        Ok(ZarrGroup {
            group,
            store: self.store.clone(),
        })
    }

    /// Create an empty dataset holding an array value.
    ///
    /// Chunk sizes come from `config.block_size` when given; otherwise a
    /// heuristic default is used (up to 20000 elements for 1-D arrays, up to
    /// 500 per dimension otherwise).
    /// NOTE(review): these defaults differ from the store-level
    /// `new_empty_dataset` (10000 / 100) — confirm the asymmetry is
    /// intentional.
    fn new_empty_dataset<T: BackendData>(
        &self,
        name: &str,
        shape: &Shape,
        config: WriteConfig,
    ) -> Result<<Zarr as Backend>::Dataset> {
        let shape = shape.as_ref();
        // Chunk extents must be >= 1 on every axis, even for empty arrays.
        let sizes: Vec<u64> = match config.block_size {
            Some(s) => s.as_ref().into_iter().map(|x| (*x).max(1) as u64).collect(),
            _ => {
                if shape.len() == 1 {
                    vec![shape[0].min(20000).max(1) as u64]
                } else {
                    shape.iter().map(|&x| x.min(500).max(1) as u64).collect()
                }
            }
        };
        let chunk_size = zarrs::array::chunk_grid::ChunkGrid::new(
            zarrs::array::chunk_grid::regular::RegularChunkGrid::new(sizes.try_into().unwrap()),
        );

        // Pick the zarr data type and fill value (used for unwritten chunks)
        // matching the requested scalar type; floats fill with the zarr
        // canonical NaN.
        let (datatype, fill) = match T::DTYPE {
            ScalarType::U8 => (DataType::UInt8, 0u8.into()),
            ScalarType::U16 => (DataType::UInt16, 0u16.into()),
            ScalarType::U32 => (DataType::UInt32, 0u32.into()),
            ScalarType::U64 => (DataType::UInt64, 0u64.into()),
            ScalarType::I8 => (DataType::Int8, 0i8.into()),
            ScalarType::I16 => (DataType::Int16, 0i16.into()),
            ScalarType::I32 => (DataType::Int32, 0i32.into()),
            ScalarType::I64 => (DataType::Int64, 0i64.into()),
            ScalarType::F32 => (DataType::Float32, zarrs::array::ZARR_NAN_F32.into()),
            ScalarType::F64 => (DataType::Float64, zarrs::array::ZARR_NAN_F64.into()),
            ScalarType::Bool => (DataType::Bool, false.into()),
            ScalarType::String => (DataType::String, "".into()),
        };

        let path = self.group.path().as_path().join(name);
        let array = zarrs::array::ArrayBuilder::new(
            shape.iter().map(|x| *x as u64).collect(),
            datatype,
            chunk_size,
            fill,
        )
        // Compress chunk bytes with zstd (level 7, no checksum).
        .bytes_to_bytes_codecs(vec![
            Arc::new(ZstdCodec::new(7, false))
        ])
        .build(self.store.inner.clone(), path.to_str().unwrap())?;
        array.store_metadata()?;
        Ok(ZarrDataset {
            dataset: array,
            store: self.store.clone(),
        })
    }

    /// Open an existing dataset under this group.
    fn open_dataset(&self, name: &str) -> Result<<Zarr as Backend>::Dataset> {
        let path = self.group.path().as_path().join(name);
        let array = zarrs::array::Array::open(self.store.inner.clone(), path.to_str().unwrap())?;
        Ok(ZarrDataset {
            dataset: array,
            store: self.store.clone(),
        })
    }

    /// Delete a group or dataset.
    fn delete(&self, name: &str) -> Result<()> {
        // str_to_prefix trims redundant slashes, so "/parent" + "/" + name
        // always yields a valid prefix.
        let path = format!("{}/{}", self.group.path().as_str(), name);
        self.store.erase_prefix(&str_to_prefix(&path))?;
        Ok(())
    }

    /// Check if a group or dataset exists.
    fn exists(&self, name: &str) -> Result<bool> {
        // `to_str().unwrap()` cannot fail here: the path was built entirely
        // from UTF-8 strings.
        let path = self
            .group
            .path()
            .as_path()
            .join(name)
            .as_os_str()
            .to_str()
            .unwrap()
            .try_into()?;
        Ok(zarrs::node::node_exists(&self.store.inner, &path)?)
    }
}
328
329impl AttributeOp<Zarr> for ZarrGroup {
330    /// Returns the Root.
331    fn store(&self) -> Result<<Zarr as Backend>::Store> {
332        Ok(self.store.clone())
333    }
334
335    /// Returns the path of the location relative to the file root.
336    fn path(&self) -> PathBuf {
337        self.group.path().as_path().to_path_buf()
338    }
339
340    /// Write an attribute at a given location.
341    fn new_json_attr(&mut self, name: &str, value: &Value) -> Result<()> {
342        self.group.attributes_mut().insert(name.to_string(), value.clone());
343        self.group.store_metadata()?;
344        Ok(())
345    }
346
347    fn get_json_attr(&self, name: &str) -> Result<Value> {
348        Ok(self
349            .group
350            .attributes()
351            .get(name)
352            .with_context(|| format!("Attribute {} not found", name))?.clone()
353        )
354    }
355}
356
357impl AttributeOp<Zarr> for ZarrDataset {
358    /// Returns the Root.
359    fn store(&self) -> Result<<Zarr as Backend>::Store> {
360        Ok(self.store.clone())
361    }
362
363    /// Returns the path of the location relative to the file root.
364    fn path(&self) -> PathBuf {
365        self.dataset.path().as_path().to_path_buf()
366    }
367
368    /// Write an attribute at a given location.
369    fn new_json_attr(&mut self, name: &str, value: &Value) -> Result<()> {
370        self.dataset.attributes_mut().insert(name.to_string(), value.clone());
371        self.dataset.store_metadata()?;
372        Ok(())
373    }
374
375    fn get_json_attr(&self, name: &str) -> Result<Value> {
376        Ok(self
377            .dataset
378            .attributes()
379            .get(name)
380            .with_context(|| format!("Attribute {} not found", name))?.clone()
381        )
382    }
383}
384
385impl DatasetOp<Zarr> for ZarrDataset {
386    fn dtype(&self) -> Result<ScalarType> {
387        match self.dataset.data_type() {
388            DataType::UInt8 => Ok(ScalarType::U8),
389            DataType::UInt16 => Ok(ScalarType::U16),
390            DataType::UInt32 => Ok(ScalarType::U32),
391            DataType::UInt64 => Ok(ScalarType::U64),
392            DataType::Int8 => Ok(ScalarType::I8),
393            DataType::Int16 => Ok(ScalarType::I16),
394            DataType::Int32 => Ok(ScalarType::I32),
395            DataType::Int64 => Ok(ScalarType::I64),
396            DataType::Float32 => Ok(ScalarType::F32),
397            DataType::Float64 => Ok(ScalarType::F64),
398            DataType::Bool => Ok(ScalarType::Bool),
399            DataType::String => Ok(ScalarType::String),
400            ty => bail!("Unsupported type: {:?}", ty),
401        }
402    }
403
404    fn shape(&self) -> Shape {
405        self.dataset
406            .shape()
407            .into_iter()
408            .map(|x| *x as usize)
409            .collect()
410    }
411
412    fn reshape(&mut self, shape: &Shape) -> Result<()> {
413        self.dataset
414            .set_shape(shape.as_ref().iter().map(|x| *x as u64).collect());
415        self.dataset.store_metadata()?;
416        Ok(())
417    }
418
419    /// TODO: current implementation reads the entire array and then selects the slice.
420    fn read_array_slice<T: BackendData, S, D>(&self, selection: &[S]) -> Result<Array<T, D>>
421    where
422        S: AsRef<SelectInfoElem>,
423        D: Dimension,
424    {
425        fn read_arr<T, S, D>(dataset: &ZarrDataset, selection: &[S]) -> Result<Array<T, D>>
426        where
427            T: ElementOwned + BackendData,
428            S: AsRef<SelectInfoElem>,
429            D: Dimension,
430        {
431            let sel = SelectInfoBounds::new(&selection, &dataset.shape());
432            if let Some(subset) = to_array_subset(sel) {
433                let arr = dataset
434                    .dataset
435                    .retrieve_array_subset_ndarray(&subset)?
436                    .into_dimensionality::<D>()?;
437                Ok(arr)
438            } else {
439                // Read the entire array and then select the slice.
440                let arr = dataset
441                    .dataset
442                    .retrieve_array_subset_ndarray(&dataset.dataset.subset_all())?
443                    .into_dimensionality::<D>()?;
444                Ok(select(arr.view(), selection))
445            }
446        }
447
448        let array: DynArray = match T::DTYPE {
449            ScalarType::U8 => read_arr::<u8, _, D>(self, selection)?.into(),
450            ScalarType::U16 => read_arr::<u16, _, D>(self, selection)?.into(),
451            ScalarType::U32 => read_arr::<u32, _, D>(self, selection)?.into(),
452            ScalarType::U64 => read_arr::<u64, _, D>(self, selection)?.into(),
453            ScalarType::I8 => read_arr::<i8, _, D>(self, selection)?.into(),
454            ScalarType::I16 => read_arr::<i16, _, D>(self, selection)?.into(),
455            ScalarType::I32 => read_arr::<i32, _, D>(self, selection)?.into(),
456            ScalarType::I64 => read_arr::<i64, _, D>(self, selection)?.into(),
457            ScalarType::F32 => read_arr::<f32, _, D>(self, selection)?.into(),
458            ScalarType::F64 => read_arr::<f64, _, D>(self, selection)?.into(),
459            ScalarType::Bool => read_arr::<bool, _, D>(self, selection)?.into(),
460            ScalarType::String => read_arr::<String, _, D>(self, selection)?.into(),
461        };
462        Ok(BackendData::from_dyn_arr(array)?.into_dimensionality::<D>()?)
463    }
464
465    fn write_array_slice<S, T, D>(&self, arr: CowArray<'_, T, D>, selection: &[S]) -> Result<()>
466    where
467        T: BackendData,
468        S: AsRef<SelectInfoElem>,
469        D: Dimension,
470    {
471        fn write_array_impl<T, S>(
472            container: &ZarrDataset,
473            arr: CowArray<'_, T, IxDyn>,
474            selection: &[S],
475        ) -> Result<()>
476        where
477            T: Element + 'static,
478            S: AsRef<SelectInfoElem>,
479        {
480            let selection = SelectInfoBounds::new(&selection, &container.shape());
481            let starts: Vec<_> = selection
482                .iter()
483                .flat_map(|x| {
484                    if let SelectInfoElemBounds::Slice(slice) = x {
485                        if slice.step == 1 {
486                            Some(slice.start as u64)
487                        } else {
488                            None
489                        }
490                    } else {
491                        None
492                    }
493                })
494                .collect();
495            if starts.len() == selection.ndim() {
496                container
497                    .dataset
498                    .store_array_subset_ndarray(starts.as_slice(), arr.into_owned())?;
499            } else {
500                panic!("Not implemented");
501            }
502            Ok(())
503        }
504
505        match BackendData::into_dyn_arr(arr.into_dyn()) {
506            DynCowArray::U8(x) => write_array_impl(self, x, selection),
507            DynCowArray::U16(x) => write_array_impl(self, x, selection),
508            DynCowArray::U32(x) => write_array_impl(self, x, selection),
509            DynCowArray::U64(x) => write_array_impl(self, x, selection),
510            DynCowArray::I8(x) => write_array_impl(self, x, selection),
511            DynCowArray::I16(x) => write_array_impl(self, x, selection),
512            DynCowArray::I32(x) => write_array_impl(self, x, selection),
513            DynCowArray::I64(x) => write_array_impl(self, x, selection),
514            DynCowArray::F32(x) => write_array_impl(self, x, selection),
515            DynCowArray::F64(x) => write_array_impl(self, x, selection),
516            DynCowArray::Bool(x) => write_array_impl(self, x, selection),
517            DynCowArray::String(x) => write_array_impl(self, x, selection),
518        }
519    }
520}
521
/// Apply a selection to an in-memory array view, returning an owned array.
///
/// Fast path: when every selection element is a slice, ndarray's native
/// slicing is used. Otherwise the output is built element by element,
/// mapping each output index through the bounded selection on every axis.
fn select<'a, S, T, D>(arr: ArrayView<'a, T, D>, info: &[S]) -> Array<T, D>
where
    S: AsRef<SelectInfoElem>,
    T: Clone,
    D: Dimension,
{
    let arr = arr.into_dyn();
    // Collect slice elements; `None` overall if any element is not a slice.
    let slices = info
        .as_ref()
        .into_iter()
        .map(|x| match x.as_ref() {
            SelectInfoElem::Slice(slice) => Some(SliceInfoElem::from(slice.clone())),
            _ => None,
        })
        .collect::<Option<Vec<_>>>();
    if let Some(slices) = slices {
        arr.slice(slices.as_slice()).into_owned()
    } else {
        let shape = arr.shape();
        // Bound each selection element by the corresponding axis length.
        let select: Vec<_> = info
            .as_ref()
            .into_iter()
            .zip(shape)
            .map(|(x, n)| SelectInfoElemBounds::new(x.as_ref(), *n))
            .collect();
        let new_shape = select.iter().map(|x| x.len()).collect::<Vec<_>>();
        // For each output index, look up the corresponding source index on
        // every axis and clone that element.
        ArrayD::from_shape_fn(new_shape, |idx| {
            let new_idx: Vec<_> = (0..idx.ndim())
                .into_iter()
                .map(|i| select[i].index(idx[i]))
                .collect();
            arr.index(new_idx.as_slice()).clone()
        })
    }
    .into_dimensionality::<D>()
    .unwrap()
}
559
560fn str_to_prefix(s: &str) -> StorePrefix {
561    if s.is_empty() {
562        StorePrefix::root()
563    } else {
564        let s = s.trim_matches('/').to_string();
565        StorePrefix::new((s + "/").as_str()).unwrap()
566    }
567}
568
/// Ensure `path` is absolute by prepending a leading slash when missing;
/// borrows the input when no change is needed. (The name is a historical
/// misspelling of "canonicalize", kept for compatibility with callers.)
fn canoincalize_path<'a>(path: &'a str) -> Cow<'a, str> {
    match path.strip_prefix('/') {
        Some(_) => Cow::Borrowed(path),
        None => Cow::Owned(format!("/{}", path)),
    }
}
576
577fn to_array_subset(info: SelectInfoBounds) -> Option<ArraySubset> {
578    let ranges = info.iter().map(|x| {
579        if let SelectInfoElemBounds::Slice(slice) = x {
580            if slice.step == 1 {
581                Some(slice.start as u64 .. slice.end as u64)
582            } else {
583                None
584            }
585        } else {
586            None
587        }
588    })
589    .collect::<Option<Vec<_>>>()?;
590    Some(ArraySubset::new_with_ranges(&ranges))
591}
592
593
594
/// Tests for the Zarr backend.
#[cfg(test)]
mod tests {
    use super::*;
    use anndata::s;
    use ndarray::{array, concatenate, Array2, Axis, Ix2};
    use ndarray_rand::rand_distr::Uniform;
    use ndarray_rand::RandomExt;
    use std::path::PathBuf;
    use tempfile::tempdir;

    /// Run `func` with a fresh temporary directory; the directory is
    /// removed when this function returns.
    pub fn with_tmp_dir<T, F: FnMut(PathBuf) -> T>(mut func: F) -> T {
        let dir = tempdir().unwrap();
        let path = dir.path().to_path_buf();
        func(path)
    }

    /// Run `func` with a path to a not-yet-existing location inside a
    /// temporary directory.
    fn with_tmp_path<T, F: Fn(PathBuf) -> T>(func: F) -> T {
        with_tmp_dir(|dir| func(dir.join("temp")))
    }

    #[test]
    fn test_basic() -> Result<()> {
        with_tmp_path(|path| {
            let store = Zarr::new(&path)?;
            store.open_group("/")?;

            store.new_scalar_dataset("data", &4)?;
            store.open_dataset("data")?;

            let group = store.new_group("group")?;
            assert!(store.exists("group")?);

            let subgroup = group.new_group("group")?;
            assert!(group.exists("group")?);

            let subsubgroup = subgroup.new_group("group")?;
            assert!(subgroup.exists("group")?);

            let data = subsubgroup.new_scalar_dataset("group", &4)?;
            assert!(subsubgroup.exists("group")?);
            subsubgroup.open_dataset("group")?;

            // Re-open the store from disk and make sure the hierarchy is
            // readable.
            {
                let store = Zarr::open(&path)?;
                DataContainer::open(&store, "group")?;
            }

            assert_eq!(group.path(), PathBuf::from("/group"));
            assert_eq!(subgroup.path(), PathBuf::from("/group/group"));
            assert_eq!(subsubgroup.path(), PathBuf::from("/group/group/group"));
            assert_eq!(data.path(), PathBuf::from("/group/group/group/group"));
            Ok(())
        })
    }

    #[test]
    fn test_write_empty() -> Result<()> {
        with_tmp_path(|path| {
            let store = Zarr::new(&path)?;
            let group = store.new_group("group")?;
            let config = WriteConfig {
                ..Default::default()
            };

            let empty: Array2<i64> = array![[]];
            let dataset = group.new_array_dataset("test", empty.view().into(), config)?;
            assert_eq!(empty, dataset.read_array::<i64, Ix2>()?);
            Ok(())
        })
    }

    #[test]
    fn test_write_slice() -> Result<()> {
        // Previously this wrote to a hard-coded "test_zarr" directory in the
        // current working directory and never removed it; use a temporary
        // path like the other tests so nothing is left behind.
        with_tmp_path(|path| {
            let store = Zarr::new(&path)?;
            let config = WriteConfig {
                block_size: Some(vec![2, 2].as_slice().into()),
                ..Default::default()
            };

            let group = store.new_group("group")?;
            let mut dataset = group.new_empty_dataset::<i32>("test", &[20, 50].as_slice().into(), config)?;

            let arr = Array::random((10, 10), Uniform::new(0, 100));
            dataset.write_array_slice(arr.view().into(), s![5..15, 10..20].as_ref())?;
            assert_eq!(
                arr,
                dataset.read_array_slice::<i32, _, _>(s![5..15, 10..20].as_ref())?
            );

            // Repeated writes to the same region are allowed.
            let arr = Array::random((20, 50), Uniform::new(0, 100));
            dataset.write_array_slice(arr.view().into(), s![.., ..].as_ref())?;
            dataset.write_array_slice(arr.view().into(), s![.., ..].as_ref())?;

            // Out-of-bounds writes should fail
            //assert!(dataset.write_array_slice(&arr, s![20..40, ..].as_ref()).is_err());

            // Reshape and write
            dataset.reshape(&[40, 50].as_slice().into())?;
            dataset.write_array_slice(arr.view().into(), s![20..40, ..].as_ref())?;

            // Read back is OK
            let merged = concatenate(Axis(0), &[arr.view(), arr.view()])?;
            assert_eq!(merged, dataset.read_array::<i32, _>()?);

            // Shrinking is OK
            dataset.reshape(&[20, 50].as_slice().into())?;
            assert_eq!(arr, dataset.read_array::<i32, _>()?);

            dataset.reshape(&[50, 50].as_slice().into())?;
            assert_eq!(
                [50, 50],
                store
                    .open_group("group")?
                    .open_dataset("test")?
                    .shape()
                    .as_ref(),
            );

            assert_eq!(vec!["group"], store.list()?);
            assert_eq!(vec!["test"], group.list()?);

            assert!(store.exists("group")?);
            assert!(group.exists("test")?);

            store.delete("group")?;
            assert!(!store.exists("group")?);
            assert!(!group.exists("test")?);

            Ok(())
        })
    }
}