// ferray_ma/masked_array.rs
// ferray-ma: MaskedArray<T, D> type (REQ-1, REQ-2, REQ-3)

use std::sync::{Arc, OnceLock};

use ferray_core::Array;
use ferray_core::dimension::Dimension;
use ferray_core::dtype::Element;
use ferray_core::error::{FerrayError, FerrayResult};
/// A masked array that pairs data with a boolean mask.
///
/// Each element position has a corresponding mask bit:
/// - `true` means the element is **masked** (invalid / missing)
/// - `false` means the element is valid
///
/// All operations (arithmetic, reductions, ufuncs) respect the mask by
/// skipping masked elements.
///
/// The `fill_value` field is the replacement value for masked positions when
/// the masked array participates in operations or when [`MaskedArray::filled`]
/// is called without an explicit override. It defaults to `T::zero()`.
///
/// # Nomask sentinel (#506)
///
/// When a [`MaskedArray`] is constructed via [`MaskedArray::from_data`]
/// the mask is logically "all-false" but is NOT allocated as a full
/// `Array<bool, D>` up front — the lazy `OnceLock` inside stores
/// nothing until the first call to [`MaskedArray::mask`]. For arrays
/// that never touch their mask (e.g. masked ops that short-circuit
/// via [`MaskedArray::has_real_mask`]), this saves a full bool-sized
/// allocation proportional to the data size.
///
/// The `.mask()` accessor still returns `&Array<bool, D>` so all
/// existing code continues to work unchanged; the cost is one
/// lazy allocation on first access. Hot-path code that wants to
/// avoid the materialization should check `has_real_mask()` first
/// and skip any mask work when it returns `false`.
pub struct MaskedArray<T: Element, D: Dimension> {
    /// The underlying data array. Its shape is the source of truth; the
    /// mask (when materialized) must always match it.
    data: Array<T, D>,
    /// Boolean mask (`true` = masked/invalid). Lazily materialized
    /// when explicitly queried via [`MaskedArray::mask`] — a
    /// `from_data`-constructed array with no masked elements pays
    /// zero allocation cost until that first query.
    ///
    /// Wrapped in `Arc` for structural sharing across clones (#512):
    /// cloning a `MaskedArray` bumps the Arc refcount instead of
    /// deep-copying the mask, and any mutation that needs a unique
    /// mask does copy-on-write via [`Self::make_mask_unique`].
    mask: Arc<OnceLock<Array<bool, D>>>,
    /// `true` when a non-trivial mask has been explicitly provided
    /// (via [`MaskedArray::new`] or [`MaskedArray::set_mask`]),
    /// `false` when the array is in the nomask-sentinel state.
    ///
    /// Note: lazy materialization via `.mask()` does NOT flip this flag;
    /// only explicit mask installation does.
    ///
    /// Hot-path consumers should branch on this flag and skip the
    /// mask-iteration entirely when it is `false` — see
    /// [`MaskedArray::has_real_mask`].
    real_mask: bool,
    /// Whether the mask is hardened (cannot be cleared by assignment).
    pub(crate) hard_mask: bool,
    /// Replacement value for masked positions during operations and filling.
    /// Defaults to `T::zero()`.
    pub(crate) fill_value: T,
}

66impl<T: Element, D: Dimension> std::fmt::Debug for MaskedArray<T, D> {
67    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68        f.debug_struct("MaskedArray")
69            .field("data", &self.data)
70            .field("real_mask", &self.real_mask)
71            .field("hard_mask", &self.hard_mask)
72            .field("fill_value", &self.fill_value)
73            .finish_non_exhaustive()
74    }
75}
76
77impl<T: Element + Clone, D: Dimension> Clone for MaskedArray<T, D> {
78    fn clone(&self) -> Self {
79        // Structural sharing (#512): just bump the Arc refcount instead
80        // of cloning the underlying mask array. Copy-on-write kicks in
81        // via `make_mask_unique` whenever either the parent or the
82        // clone tries to mutate its mask.
83        //
84        // The data array is still deep-cloned because ferray-core's
85        // Array doesn't have Arc-based structural sharing; sharing the
86        // mask alone still saves the larger-of-the-two allocations in
87        // the common "unmasked data transformations" path.
88        Self {
89            data: self.data.clone(),
90            mask: Arc::clone(&self.mask),
91            real_mask: self.real_mask,
92            hard_mask: self.hard_mask,
93            fill_value: self.fill_value.clone(),
94        }
95    }
96}
97
impl<T: Element, D: Dimension> MaskedArray<T, D> {
    /// Create a new masked array from data and mask arrays.
    ///
    /// The provided mask is installed eagerly, so the array starts in the
    /// "real mask" state (see [`MaskedArray::has_real_mask`]).
    ///
    /// The `fill_value` defaults to `T::zero()`. Use [`MaskedArray::with_fill_value`]
    /// to set a custom replacement value.
    ///
    /// # Errors
    /// Returns `FerrayError::ShapeMismatch` if data and mask shapes differ.
    pub fn new(data: Array<T, D>, mask: Array<bool, D>) -> FerrayResult<Self> {
        if data.shape() != mask.shape() {
            return Err(FerrayError::shape_mismatch(format!(
                "MaskedArray::new: data shape {:?} does not match mask shape {:?}",
                data.shape(),
                mask.shape()
            )));
        }
        // `set` on a freshly created OnceLock cannot fail; `let _ =`
        // just discards the Ok(()).
        let lock = OnceLock::new();
        let _ = lock.set(mask);
        Ok(Self {
            data,
            mask: Arc::new(lock),
            real_mask: true,
            hard_mask: false,
            fill_value: T::zero(),
        })
    }

    /// Create a masked array with no masked elements (all-false mask).
    ///
    /// Does NOT allocate the mask up front — the array is in the
    /// nomask-sentinel state (#506) until [`MaskedArray::mask`] is
    /// explicitly called. For code that only uses `data()` or
    /// short-circuits via [`MaskedArray::has_real_mask`], this saves
    /// a full-sized bool allocation.
    ///
    /// # Errors
    /// Always returns `Ok` — the `FerrayResult` is preserved for API
    /// parity with the previous eager implementation.
    pub fn from_data(data: Array<T, D>) -> FerrayResult<Self> {
        Ok(Self {
            data,
            mask: Arc::new(OnceLock::new()),
            real_mask: false,
            hard_mask: false,
            fill_value: T::zero(),
        })
    }

    /// Return `true` if this masked array holds a real (explicitly
    /// provided or materialized) mask. Returns `false` when the array
    /// is in the nomask-sentinel state and the mask is logically
    /// all-false.
    ///
    /// Hot-path iteration code should branch on this flag to skip
    /// mask scanning entirely when it returns `false` (#506).
    #[inline]
    pub const fn has_real_mask(&self) -> bool {
        self.real_mask
    }

    /// Return the fill value used to replace masked positions.
    ///
    /// See [`MaskedArray::with_fill_value`] for setting it.
    #[inline]
    pub const fn fill_value(&self) -> T
    where
        T: Copy,
    {
        self.fill_value
    }

    /// Set the fill value, returning the modified array.
    ///
    /// The fill value is used by [`MaskedArray::filled`] (when called
    /// without an explicit override) and by arithmetic operations as the
    /// replacement for masked positions in the result data.
    #[must_use]
    pub fn with_fill_value(mut self, fill_value: T) -> Self {
        self.fill_value = fill_value;
        self
    }

    /// Replace the fill value in place.
    pub fn set_fill_value(&mut self, fill_value: T) {
        self.fill_value = fill_value;
    }

    /// Return a reference to the underlying data array.
    #[inline]
    pub const fn data(&self) -> &Array<T, D> {
        &self.data
    }

    /// Return a reference to the mask array.
    ///
    /// If the array is in the nomask-sentinel state (constructed via
    /// [`MaskedArray::from_data`] or otherwise) this lazily allocates
    /// a full all-false `Array<bool, D>` and caches it for subsequent
    /// calls. Note that this materialization does NOT promote
    /// `has_real_mask()` to `true` — the contents are still logically
    /// all-false. Use [`MaskedArray::has_real_mask`] to check whether
    /// the mask is known to be trivial first, and skip calling `.mask()`
    /// entirely on the hot path when you can.
    pub fn mask(&self) -> &Array<bool, D> {
        self.mask.get_or_init(|| {
            Array::<bool, D>::from_elem(self.data.dim().clone(), false)
                .expect("from_elem with matching dim cannot fail")
        })
    }

    /// Return a reference to the mask array if one has been
    /// materialized, or `None` when the array is still in the
    /// nomask-sentinel state.
    ///
    /// Unlike [`MaskedArray::mask`], this does NOT trigger lazy
    /// allocation — it's the fast-path query for hot code that
    /// wants to branch on whether any mask bits are set (#506).
    #[inline]
    pub fn mask_opt(&self) -> Option<&Array<bool, D>> {
        if self.real_mask {
            // A real mask was set via `new` or `set_mask`; the
            // OnceLock is guaranteed to be initialized.
            self.mask.get()
        } else {
            None
        }
    }

    /// Return a mutable element slice into the underlying data array.
    ///
    /// The masked-array invariant (data shape == mask shape) requires
    /// the data length to remain fixed. Returning `&mut [T]` here
    /// (instead of `&mut Array<T, D>`) lets callers update individual
    /// element values while blocking any reshape/resize that would
    /// break the invariant (#273). Returns `None` for non-contiguous
    /// data layouts where a flat slice can't be exposed.
    #[inline]
    pub fn data_mut(&mut self) -> Option<&mut [T]> {
        self.data.as_slice_mut()
    }

    /// Return the shape of the masked array.
    #[inline]
    pub fn shape(&self) -> &[usize] {
        self.data.shape()
    }

    /// Return the number of dimensions.
    #[inline]
    pub fn ndim(&self) -> usize {
        self.data.ndim()
    }

    /// Return the total number of elements (including masked).
    #[inline]
    pub fn size(&self) -> usize {
        self.data.size()
    }

    /// Return the dimension descriptor.
    #[inline]
    pub const fn dim(&self) -> &D {
        self.data.dim()
    }

    /// Return whether the mask is hardened.
    #[inline]
    pub const fn is_hard_mask(&self) -> bool {
        self.hard_mask
    }

    /// Internal helper: force the lazy nomask sentinel to materialize a
    /// concrete `Array<bool, D>` AND ensure the mask's `Arc` is
    /// uniquely owned (copy-on-write), then return a mutable reference
    /// to the inner mask.
    ///
    /// After this call `real_mask` is `true`, `self.mask` is guaranteed
    /// to contain an initialized `Array<bool, D>`, and the underlying
    /// `Arc` has refcount exactly 1 so it's safe to mutate without
    /// aliasing any other `MaskedArray` that may have cloned from us.
    fn ensure_materialized_mut(&mut self) -> &mut Array<bool, D> {
        // Step 1: materialize if we're still in the nomask sentinel
        // state. We install a fresh Arc<OnceLock> containing an
        // all-false mask. (A lazily materialized but still-trivial
        // mask — `!real_mask` with an initialized OnceLock — is
        // discarded and rebuilt here, which also detaches us from any
        // clones sharing that Arc.)
        if !self.real_mask || self.mask.get().is_none() {
            let fresh = Array::<bool, D>::from_elem(self.data.dim().clone(), false)
                .expect("from_elem with matching dim cannot fail");
            let lock = OnceLock::new();
            let _ = lock.set(fresh);
            self.mask = Arc::new(lock);
            self.real_mask = true;
        }

        // Step 2: copy-on-write — if this Arc is shared with any
        // clones, deep-copy the inner mask into a fresh Arc so our
        // mutation doesn't affect the clones. Arc::get_mut returns
        // None when refcount > 1.
        if Arc::get_mut(&mut self.mask).is_none() {
            let cloned_mask = self
                .mask
                .get()
                .expect("real_mask implies OnceLock set")
                .clone();
            let new_lock = OnceLock::new();
            let _ = new_lock.set(cloned_mask);
            self.mask = Arc::new(new_lock);
        }

        // Step 3: now we're the unique owner — get_mut on the OnceLock
        // for the inner Array<bool, D>.
        Arc::get_mut(&mut self.mask)
            .expect("just made the Arc unique above")
            .get_mut()
            .expect("OnceLock was initialized above")
    }

    /// Set a mask value at a flat index.
    ///
    /// If the mask is hardened, only `true` (masking) is allowed; attempts to
    /// clear a mask bit are silently ignored.
    ///
    /// Setting a mask bit materializes the lazy nomask sentinel into a
    /// real mask array (#506) — if you set even one bit, the full
    /// `Array<bool, D>` is allocated.
    ///
    /// # Errors
    /// Returns `FerrayError::IndexOutOfBounds` if `flat_idx >= size`.
    pub fn set_mask_flat(&mut self, flat_idx: usize, value: bool) -> FerrayResult<()> {
        let size = self.size();
        if flat_idx >= size {
            return Err(FerrayError::index_out_of_bounds(flat_idx as isize, 0, size));
        }
        if self.hard_mask && !value {
            // Hard mask: cannot clear mask bits
            return Ok(());
        }
        // Setting a nomask-sentinel to false is a no-op (mask is
        // already logically all-false); skip the allocation entirely.
        if !self.real_mask && !value {
            return Ok(());
        }
        let mask = self.ensure_materialized_mut();
        // Fast path: contiguous mask — direct O(1) slice indexing.
        if let Some(slice) = mask.as_slice_mut() {
            slice[flat_idx] = value;
        } else {
            // Non-contiguous: fall back to iterator (rare case).
            // `nth` is always Some here because flat_idx < size was
            // checked above.
            if let Some(m) = mask.iter_mut().nth(flat_idx) {
                *m = value;
            }
        }
        Ok(())
    }

    /// Replace the mask with a new one.
    ///
    /// If the mask is hardened, only bits that are `true` in both the old and
    /// new masks (or newly set to `true`) are allowed; cleared bits are ignored.
    ///
    /// Passing a new mask always materializes the array out of the
    /// nomask-sentinel state — the stored mask becomes the provided
    /// one (possibly unioned with the existing mask if hardened).
    ///
    /// # Errors
    /// Returns `FerrayError::ShapeMismatch` if shapes differ.
    pub fn set_mask(&mut self, new_mask: Array<bool, D>) -> FerrayResult<()> {
        if self.data.shape() != new_mask.shape() {
            return Err(FerrayError::shape_mismatch(format!(
                "set_mask: mask shape {:?} does not match array shape {:?}",
                new_mask.shape(),
                self.data.shape()
            )));
        }
        if self.hard_mask && self.real_mask {
            // Hard-mask union: merge the new mask with the existing
            // one, keeping any `true` bits and never clearing.
            let existing = self.mask.get().expect("real_mask implies OnceLock set");
            let merged: Vec<bool> = existing
                .iter()
                .zip(new_mask.iter())
                .map(|(old, new)| *old || *new)
                .collect();
            let merged_arr = Array::from_vec(self.data.dim().clone(), merged)?;
            let lock = OnceLock::new();
            let _ = lock.set(merged_arr);
            // Install a fresh Arc; any clones keep their own snapshot.
            self.mask = Arc::new(lock);
        } else {
            // Either not hardened or currently in the nomask sentinel
            // state — unconditionally install the new mask in a fresh
            // Arc (copy-on-write: clones remain unaffected).
            let lock = OnceLock::new();
            let _ = lock.set(new_mask);
            self.mask = Arc::new(lock);
        }
        self.real_mask = true;
        Ok(())
    }

    /// Return `true` when this masked array's underlying mask is
    /// structurally shared with at least one other `MaskedArray`.
    ///
    /// After a `clone()` the original and the clone share the same
    /// mask via `Arc` until one of them mutates it (copy-on-write, #512).
    /// Hot-path code can use this to reason about memory sharing —
    /// `shares_mask() == false` means the mask is uniquely owned and
    /// can be mutated without affecting any other `MaskedArray`.
    #[inline]
    pub fn shares_mask(&self) -> bool {
        Arc::strong_count(&self.mask) > 1
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use ferray_core::Array;
    use ferray_core::dimension::Ix1;

    /// Test helper: build a 1-D f64 array from a Vec.
    fn arr_f64(data: Vec<f64>) -> Array<f64, Ix1> {
        let n = data.len();
        Array::<f64, Ix1>::from_vec(Ix1::new([n]), data).unwrap()
    }

    /// Test helper: build a 1-D bool array from a Vec.
    fn arr_bool(data: Vec<bool>) -> Array<bool, Ix1> {
        let n = data.len();
        Array::<bool, Ix1>::from_vec(Ix1::new([n]), data).unwrap()
    }

    // ---- nomask sentinel (#506) ----

    #[test]
    fn from_data_starts_in_nomask_sentinel_state() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!ma.has_real_mask());
        assert!(ma.mask_opt().is_none());
    }

    #[test]
    fn new_with_explicit_mask_is_real_mask() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        assert!(ma.has_real_mask());
        assert!(ma.mask_opt().is_some());
    }

    #[test]
    fn mask_accessor_lazily_materializes_nomask_sentinel() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // Before calling .mask(), the OnceLock is empty.
        assert!(ma.mask_opt().is_none());
        // After calling .mask(), we get a full all-false Array<bool, D>.
        let m = ma.mask();
        assert_eq!(m.shape(), &[3]);
        assert_eq!(
            m.iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Subsequent calls return the same cached array (no re-alloc).
        let m2 = ma.mask();
        assert_eq!(std::ptr::from_ref(m), std::ptr::from_ref(m2));
        // BUT `has_real_mask` still reports `false` — the lazy
        // materialization doesn't promote the sentinel to a "real" mask
        // because the contents are still logically all-false. Hot-path
        // code can keep skipping.
        assert!(!ma.has_real_mask());
    }

    #[test]
    fn set_mask_flat_false_on_nomask_stays_zero_allocation() {
        // Setting a position to false on a nomask-sentinel array is a
        // no-op and should NOT materialize the mask.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, false).unwrap();
        assert!(!ma.has_real_mask());
        assert!(ma.mask_opt().is_none());
    }

    #[test]
    fn set_mask_flat_true_on_nomask_materializes_and_promotes() {
        // Setting a position to true forces materialization and
        // promotes `real_mask` to true.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, true).unwrap();
        assert!(ma.has_real_mask());
        let m: Vec<bool> = ma.mask().iter().copied().collect();
        assert_eq!(m, vec![false, true, false]);
    }

    #[test]
    fn set_mask_promotes_and_keeps_provided_values() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!ma.has_real_mask());
        ma.set_mask(arr_bool(vec![true, false, true])).unwrap();
        assert!(ma.has_real_mask());
        assert_eq!(
            ma.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, false, true]
        );
    }

    #[test]
    fn set_mask_shape_mismatch_errors() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(ma.set_mask(arr_bool(vec![false; 4])).is_err());
    }

    #[test]
    fn clone_preserves_nomask_sentinel_state() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // BUGFIX: this previously wrote `let cloned = ma;`, which MOVES
        // the value instead of cloning it — the test never exercised the
        // `Clone` impl at all. Call `.clone()` explicitly.
        let cloned = ma.clone();
        assert!(!cloned.has_real_mask());
        assert!(cloned.mask_opt().is_none());
    }

    #[test]
    fn clone_after_materialization_copies_the_mask() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // Force materialization.
        let _ = ma.mask();
        // BUGFIX: was `let cloned = ma;` (a move) — now actually clones.
        let cloned = ma.clone();
        // The clone has the same mask contents (all-false).
        assert_eq!(
            cloned.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
    }

    #[test]
    fn clone_preserves_real_mask_state() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        // BUGFIX: was `let cloned = ma;` (a move) — now actually clones.
        let cloned = ma.clone();
        assert!(cloned.has_real_mask());
        assert_eq!(
            cloned.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, true, false]
        );
    }

    // ---- shared mask with copy-on-write (#512) ----

    #[test]
    fn clone_shares_mask_via_arc() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        let cloned = ma.clone();
        // Both copies should report structural sharing.
        assert!(ma.shares_mask());
        assert!(cloned.shares_mask());
    }

    #[test]
    fn unique_masked_array_does_not_share() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        assert!(!ma.shares_mask());
    }

    #[test]
    fn copy_on_write_isolates_parent_from_child_mutation() {
        // Clone, then mutate the mask of the clone. The parent's mask
        // must be unchanged even though they started sharing an Arc.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());
        assert!(child.shares_mask());

        // Mutate the child — triggers copy-on-write.
        child.set_mask_flat(1, true).unwrap();

        // Parent's mask is still the original all-false.
        assert_eq!(
            parent.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Child's mask reflects the mutation.
        assert_eq!(
            child.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, true, false]
        );

        // Parent no longer shares (the child's CoW broke the Arc's
        // dual ownership by installing its own).
        assert!(!parent.shares_mask());
        assert!(!child.shares_mask());
    }

    #[test]
    fn copy_on_write_via_set_mask() {
        // set_mask replaces the Arc entirely, which also implicitly
        // isolates the two.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());

        child.set_mask(arr_bool(vec![true, true, true])).unwrap();
        // Parent still has the original mask.
        assert_eq!(
            parent.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Child has the new mask.
        assert_eq!(
            child.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, true, true]
        );
        assert!(!parent.shares_mask());
    }

    #[test]
    fn nomask_sentinel_clones_share_empty_arc() {
        // A from_data-constructed array in the nomask-sentinel state
        // still uses an Arc; clones share it.
        let parent = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        let cloned = parent.clone();
        assert!(parent.shares_mask());
        assert!(cloned.shares_mask());
        // Neither has a real mask yet.
        assert!(!parent.has_real_mask());
        assert!(!cloned.has_real_mask());
    }

    #[test]
    fn hard_mask_union_on_real_mask() {
        let mut ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![true, false, false]),
        )
        .unwrap();
        ma.harden_mask().unwrap();
        // Try to clear position 0 and set position 2. With a hard
        // mask, the union keeps position 0's true bit.
        ma.set_mask(arr_bool(vec![false, false, true])).unwrap();
        assert_eq!(
            ma.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, false, true]
        );
    }
}
655}