// ferray_ma/masked_array.rs
1// ferray-ma: MaskedArray<T, D> type (REQ-1, REQ-2, REQ-3)
2
3use std::sync::{Arc, OnceLock};
4
5use ferray_core::Array;
6use ferray_core::dimension::Dimension;
7use ferray_core::dtype::Element;
8use ferray_core::error::{FerrayError, FerrayResult};
9
/// A masked array that pairs data with a boolean mask.
///
/// Each element position has a corresponding mask bit:
/// - `true` means the element is **masked** (invalid / missing)
/// - `false` means the element is valid
///
/// All operations (arithmetic, reductions, ufuncs) respect the mask by
/// skipping masked elements.
///
/// The `fill_value` field is the replacement value for masked positions when
/// the masked array participates in operations or when [`MaskedArray::filled`]
/// is called without an explicit override. It defaults to `T::zero()`.
///
/// # Nomask sentinel (#506)
///
/// When a [`MaskedArray`] is constructed via [`MaskedArray::from_data`]
/// the mask is logically "all-false" but is NOT allocated as a full
/// `Array<bool, D>` up front — the lazy `OnceLock` inside stores
/// nothing until the first call to [`MaskedArray::mask`]. For arrays
/// that never touch their mask (e.g. masked ops that short-circuit
/// via [`MaskedArray::has_real_mask`]), this saves a full bool-sized
/// allocation proportional to the data size.
///
/// The `.mask()` accessor still returns `&Array<bool, D>` so all
/// existing code continues to work unchanged; the cost is one
/// lazy allocation on first access. Hot-path code that wants to
/// avoid the materialization should check `has_real_mask()` first
/// and skip any mask work when it returns `false`.
pub struct MaskedArray<T: Element, D: Dimension> {
    /// The underlying data array.
    data: Array<T, D>,
    /// Boolean mask (`true` = masked/invalid). Lazily materialized
    /// when explicitly queried via [`MaskedArray::mask`] — a
    /// `from_data`-constructed array with no masked elements pays
    /// zero allocation cost until that first query.
    ///
    /// Wrapped in `Arc` for structural sharing across clones (#512):
    /// cloning a `MaskedArray` bumps the Arc refcount instead of
    /// deep-copying the mask, and any mutation that needs a unique
    /// mask does copy-on-write via [`Self::ensure_materialized_mut`].
    mask: Arc<OnceLock<Array<bool, D>>>,
    /// `true` when a non-trivial mask has been explicitly provided
    /// (via [`MaskedArray::new`] or [`MaskedArray::set_mask`]),
    /// `false` when the array is in the nomask-sentinel state.
    /// Note: lazy materialization through [`MaskedArray::mask`] does
    /// NOT set this flag — the contents stay logically all-false.
    ///
    /// Hot-path consumers should branch on this flag and skip the
    /// mask-iteration entirely when it is `false` — see
    /// [`MaskedArray::has_real_mask`].
    real_mask: bool,
    /// Whether the mask is hardened (cannot be cleared by assignment).
    pub(crate) hard_mask: bool,
    /// Replacement value for masked positions during operations and filling.
    /// Defaults to `T::zero()`.
    pub(crate) fill_value: T,
}
65
66impl<T: Element, D: Dimension> std::fmt::Debug for MaskedArray<T, D> {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 f.debug_struct("MaskedArray")
69 .field("data", &self.data)
70 .field("real_mask", &self.real_mask)
71 .field("hard_mask", &self.hard_mask)
72 .field("fill_value", &self.fill_value)
73 .finish_non_exhaustive()
74 }
75}
76
77impl<T: Element + Clone, D: Dimension> Clone for MaskedArray<T, D> {
78 fn clone(&self) -> Self {
79 // Structural sharing (#512): just bump the Arc refcount instead
80 // of cloning the underlying mask array. Copy-on-write kicks in
81 // via `make_mask_unique` whenever either the parent or the
82 // clone tries to mutate its mask.
83 //
84 // The data array is still deep-cloned because ferray-core's
85 // Array doesn't have Arc-based structural sharing; sharing the
86 // mask alone still saves the larger-of-the-two allocations in
87 // the common "unmasked data transformations" path.
88 Self {
89 data: self.data.clone(),
90 mask: Arc::clone(&self.mask),
91 real_mask: self.real_mask,
92 hard_mask: self.hard_mask,
93 fill_value: self.fill_value.clone(),
94 }
95 }
96}
97
98impl<T: Element, D: Dimension> MaskedArray<T, D> {
99 /// Create a new masked array from data and mask arrays.
100 ///
101 /// The `fill_value` defaults to `T::zero()`. Use [`MaskedArray::with_fill_value`]
102 /// to set a custom replacement value.
103 ///
104 /// # Errors
105 /// Returns `FerrayError::ShapeMismatch` if data and mask shapes differ.
106 pub fn new(data: Array<T, D>, mask: Array<bool, D>) -> FerrayResult<Self> {
107 if data.shape() != mask.shape() {
108 return Err(FerrayError::shape_mismatch(format!(
109 "MaskedArray::new: data shape {:?} does not match mask shape {:?}",
110 data.shape(),
111 mask.shape()
112 )));
113 }
114 let lock = OnceLock::new();
115 let _ = lock.set(mask);
116 Ok(Self {
117 data,
118 mask: Arc::new(lock),
119 real_mask: true,
120 hard_mask: false,
121 fill_value: T::zero(),
122 })
123 }
124
125 /// Create a masked array with no masked elements (all-false mask).
126 ///
127 /// Does NOT allocate the mask up front — the array is in the
128 /// nomask-sentinel state (#506) until [`MaskedArray::mask`] is
129 /// explicitly called. For code that only uses `data()` or
130 /// short-circuits via [`MaskedArray::has_real_mask`], this saves
131 /// a full-sized bool allocation.
132 ///
133 /// # Errors
134 /// Always returns `Ok` — the `FerrayResult` is preserved for API
135 /// parity with the previous eager implementation.
136 pub fn from_data(data: Array<T, D>) -> FerrayResult<Self> {
137 Ok(Self {
138 data,
139 mask: Arc::new(OnceLock::new()),
140 real_mask: false,
141 hard_mask: false,
142 fill_value: T::zero(),
143 })
144 }
145
    /// Return `true` if this masked array holds a real (explicitly
    /// provided) mask — set via [`MaskedArray::new`],
    /// [`MaskedArray::set_mask`], or a mask-bit mutation. Returns
    /// `false` when the array is in the nomask-sentinel state and the
    /// mask is logically all-false (even if [`MaskedArray::mask`] has
    /// lazily materialized the all-false backing array).
    ///
    /// Hot-path iteration code should branch on this flag to skip
    /// mask scanning entirely when it returns `false` (#506).
    #[inline]
    pub const fn has_real_mask(&self) -> bool {
        self.real_mask
    }
157
158 /// Return the fill value used to replace masked positions.
159 ///
160 /// See [`MaskedArray::with_fill_value`] for setting it.
161 #[inline]
162 pub const fn fill_value(&self) -> T
163 where
164 T: Copy,
165 {
166 self.fill_value
167 }
168
169 /// Set the fill value, returning the modified array.
170 ///
171 /// The fill value is used by [`MaskedArray::filled`] (when called
172 /// without an explicit override) and by arithmetic operations as the
173 /// replacement for masked positions in the result data.
174 #[must_use]
175 pub fn with_fill_value(mut self, fill_value: T) -> Self {
176 self.fill_value = fill_value;
177 self
178 }
179
180 /// Replace the fill value in place.
181 pub fn set_fill_value(&mut self, fill_value: T) {
182 self.fill_value = fill_value;
183 }
184
    /// Return a shared reference to the underlying data array.
    ///
    /// Values at masked positions are still present in the data; they
    /// are simply flagged invalid by the mask.
    #[inline]
    pub const fn data(&self) -> &Array<T, D> {
        &self.data
    }
190
191 /// Return a reference to the mask array.
192 ///
193 /// If the array is in the nomask-sentinel state (constructed via
194 /// [`MaskedArray::from_data`] or otherwise) this lazily allocates
195 /// a full all-false `Array<bool, D>` and caches it for subsequent
196 /// calls. Use [`MaskedArray::has_real_mask`] to check whether the
197 /// mask is known to be trivial first, and skip calling `.mask()`
198 /// entirely on the hot path when you can.
199 pub fn mask(&self) -> &Array<bool, D> {
200 self.mask.get_or_init(|| {
201 Array::<bool, D>::from_elem(self.data.dim().clone(), false)
202 .expect("from_elem with matching dim cannot fail")
203 })
204 }
205
    /// Return a reference to the mask array when a *real* mask has
    /// been installed (via [`MaskedArray::new`], [`MaskedArray::set_mask`],
    /// or a mask-bit mutation), or `None` while the array is in the
    /// nomask-sentinel state.
    ///
    /// Note that this is gated on the `real_mask` flag, not on whether
    /// the `OnceLock` holds a value: after a plain [`MaskedArray::mask`]
    /// call lazily materializes the all-false backing array, this still
    /// returns `None` — which preserves the hot-path meaning
    /// "`None` ⇒ no mask bits can be set" (#506). Unlike
    /// [`MaskedArray::mask`], it never triggers the lazy allocation.
    #[inline]
    pub fn mask_opt(&self) -> Option<&Array<bool, D>> {
        if self.real_mask {
            // A real mask was set via `new` or `set_mask`; the
            // OnceLock is guaranteed to be initialized.
            self.mask.get()
        } else {
            None
        }
    }
223
    /// Return a mutable reference to the underlying data array.
    ///
    /// Mutating data does not touch the mask; positions flagged as
    /// masked keep whatever values are written there.
    #[inline]
    pub const fn data_mut(&mut self) -> &mut Array<T, D> {
        &mut self.data
    }

    /// Return the shape of the masked array (same as the data's shape).
    #[inline]
    pub fn shape(&self) -> &[usize] {
        self.data.shape()
    }

    /// Return the number of dimensions.
    #[inline]
    pub fn ndim(&self) -> usize {
        self.data.ndim()
    }

    /// Return the total number of elements, counting masked ones too.
    #[inline]
    pub fn size(&self) -> usize {
        self.data.size()
    }

    /// Return the dimension descriptor.
    #[inline]
    pub const fn dim(&self) -> &D {
        self.data.dim()
    }

    /// Return whether the mask is hardened (mask bits cannot be cleared).
    #[inline]
    pub const fn is_hard_mask(&self) -> bool {
        self.hard_mask
    }
259
260 /// Internal helper: force the lazy nomask sentinel to materialize a
261 /// concrete `Array<bool, D>` AND ensure the mask's `Arc` is
262 /// uniquely owned (copy-on-write), then return a mutable reference
263 /// to the inner mask.
264 ///
265 /// After this call `real_mask` is `true`, `self.mask` is guaranteed
266 /// to contain an initialized `Array<bool, D>`, and the underlying
267 /// `Arc` has refcount exactly 1 so it's safe to mutate without
268 /// aliasing any other `MaskedArray` that may have cloned from us.
269 fn ensure_materialized_mut(&mut self) -> &mut Array<bool, D> {
270 // Step 1: materialize if we're still in the nomask sentinel
271 // state. We install a fresh Arc<OnceLock> containing an
272 // all-false mask.
273 if !self.real_mask || self.mask.get().is_none() {
274 let fresh = Array::<bool, D>::from_elem(self.data.dim().clone(), false)
275 .expect("from_elem with matching dim cannot fail");
276 let lock = OnceLock::new();
277 let _ = lock.set(fresh);
278 self.mask = Arc::new(lock);
279 self.real_mask = true;
280 }
281
282 // Step 2: copy-on-write — if this Arc is shared with any
283 // clones, deep-copy the inner mask into a fresh Arc so our
284 // mutation doesn't affect the clones. Arc::get_mut returns
285 // None when refcount > 1.
286 if Arc::get_mut(&mut self.mask).is_none() {
287 let cloned_mask = self
288 .mask
289 .get()
290 .expect("real_mask implies OnceLock set")
291 .clone();
292 let new_lock = OnceLock::new();
293 let _ = new_lock.set(cloned_mask);
294 self.mask = Arc::new(new_lock);
295 }
296
297 // Step 3: now we're the unique owner — get_mut on the OnceLock
298 // for the inner Array<bool, D>.
299 Arc::get_mut(&mut self.mask)
300 .expect("just made the Arc unique above")
301 .get_mut()
302 .expect("OnceLock was initialized above")
303 }
304
305 /// Set a mask value at a flat index.
306 ///
307 /// If the mask is hardened, only `true` (masking) is allowed; attempts to
308 /// clear a mask bit are silently ignored.
309 ///
310 /// Setting a mask bit materializes the lazy nomask sentinel into a
311 /// real mask array (#506) — if you set even one bit, the full
312 /// `Array<bool, D>` is allocated.
313 ///
314 /// # Errors
315 /// Returns `FerrayError::IndexOutOfBounds` if `flat_idx >= size`.
316 pub fn set_mask_flat(&mut self, flat_idx: usize, value: bool) -> FerrayResult<()> {
317 let size = self.size();
318 if flat_idx >= size {
319 return Err(FerrayError::index_out_of_bounds(flat_idx as isize, 0, size));
320 }
321 if self.hard_mask && !value {
322 // Hard mask: cannot clear mask bits
323 return Ok(());
324 }
325 // Setting a nomask-sentinel to false is a no-op (mask is
326 // already logically all-false); skip the allocation entirely.
327 if !self.real_mask && !value {
328 return Ok(());
329 }
330 let mask = self.ensure_materialized_mut();
331 // Fast path: contiguous mask — direct O(1) slice indexing.
332 if let Some(slice) = mask.as_slice_mut() {
333 slice[flat_idx] = value;
334 } else {
335 // Non-contiguous: fall back to iterator (rare case).
336 if let Some(m) = mask.iter_mut().nth(flat_idx) {
337 *m = value;
338 }
339 }
340 Ok(())
341 }
342
    /// Replace the mask with a new one.
    ///
    /// If the mask is hardened, the stored mask becomes the element-wise
    /// union (logical OR) of the old and new masks: existing `true` bits
    /// are never cleared, and bits newly set to `true` are honored.
    /// Attempts to clear a hardened bit are therefore silently ignored.
    ///
    /// Passing a new mask always materializes the array out of the
    /// nomask-sentinel state — the stored mask becomes the provided
    /// one (possibly unioned with the existing mask if hardened).
    ///
    /// # Errors
    /// Returns `FerrayError::ShapeMismatch` if shapes differ.
    pub fn set_mask(&mut self, new_mask: Array<bool, D>) -> FerrayResult<()> {
        if self.data.shape() != new_mask.shape() {
            return Err(FerrayError::shape_mismatch(format!(
                "set_mask: mask shape {:?} does not match array shape {:?}",
                new_mask.shape(),
                self.data.shape()
            )));
        }
        if self.hard_mask && self.real_mask {
            // Hard-mask union: merge the new mask with the existing
            // one, keeping any `true` bits and never clearing.
            let existing = self.mask.get().expect("real_mask implies OnceLock set");
            let merged: Vec<bool> = existing
                .iter()
                .zip(new_mask.iter())
                .map(|(old, new)| *old || *new)
                .collect();
            let merged_arr = Array::from_vec(self.data.dim().clone(), merged)?;
            let lock = OnceLock::new();
            let _ = lock.set(merged_arr);
            // Install a fresh Arc; any clones keep their own snapshot.
            self.mask = Arc::new(lock);
        } else {
            // Either not hardened or currently in the nomask sentinel
            // state — unconditionally install the new mask in a fresh
            // Arc (copy-on-write: clones remain unaffected).
            let lock = OnceLock::new();
            let _ = lock.set(new_mask);
            self.mask = Arc::new(lock);
        }
        self.real_mask = true;
        Ok(())
    }
387
    /// Return `true` when this masked array's underlying mask is
    /// structurally shared with at least one other `MaskedArray`.
    ///
    /// After a `clone()` the original and the clone share the same
    /// mask via `Arc` until one of them mutates it (copy-on-write, #512).
    /// Hot-path code can use this to reason about memory sharing —
    /// `shares_mask() == false` means the mask is uniquely owned and
    /// can be mutated without affecting any other `MaskedArray`.
    ///
    /// NOTE(review): `Arc::strong_count` is a momentary snapshot; if
    /// clones are created or dropped concurrently on other threads the
    /// returned value may be stale by the time the caller acts on it.
    #[inline]
    pub fn shares_mask(&self) -> bool {
        Arc::strong_count(&self.mask) > 1
    }
400}
401
#[cfg(test)]
mod tests {
    use super::*;
    use ferray_core::Array;
    use ferray_core::dimension::Ix1;

    /// Build a 1-D f64 test array from a Vec.
    fn arr_f64(data: Vec<f64>) -> Array<f64, Ix1> {
        let n = data.len();
        Array::<f64, Ix1>::from_vec(Ix1::new([n]), data).unwrap()
    }

    /// Build a 1-D bool test mask from a Vec.
    fn arr_bool(data: Vec<bool>) -> Array<bool, Ix1> {
        let n = data.len();
        Array::<bool, Ix1>::from_vec(Ix1::new([n]), data).unwrap()
    }

    // ---- nomask sentinel (#506) ----

    #[test]
    fn from_data_starts_in_nomask_sentinel_state() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!ma.has_real_mask());
        assert!(ma.mask_opt().is_none());
    }

    #[test]
    fn new_with_explicit_mask_is_real_mask() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        assert!(ma.has_real_mask());
        assert!(ma.mask_opt().is_some());
    }

    #[test]
    fn mask_accessor_lazily_materializes_nomask_sentinel() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // Before calling .mask(), the OnceLock is empty.
        assert!(ma.mask_opt().is_none());
        // After calling .mask(), we get a full all-false Array<bool, D>.
        let m = ma.mask();
        assert_eq!(m.shape(), &[3]);
        assert_eq!(
            m.iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Subsequent calls return the same cached array (no re-alloc).
        let m2 = ma.mask();
        assert_eq!(std::ptr::from_ref(m), std::ptr::from_ref(m2));
        // BUT `has_real_mask` still reports `false` — the lazy
        // materialization doesn't promote the sentinel to a "real" mask
        // because the contents are still logically all-false. Hot-path
        // code can keep skipping.
        assert!(!ma.has_real_mask());
    }

    #[test]
    fn set_mask_flat_false_on_nomask_stays_zero_allocation() {
        // Setting a position to false on a nomask-sentinel array is a
        // no-op and should NOT materialize the mask.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, false).unwrap();
        assert!(!ma.has_real_mask());
        assert!(ma.mask_opt().is_none());
    }

    #[test]
    fn set_mask_flat_true_on_nomask_materializes_and_promotes() {
        // Setting a position to true forces materialization and
        // promotes `real_mask` to true.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, true).unwrap();
        assert!(ma.has_real_mask());
        let m: Vec<bool> = ma.mask().iter().copied().collect();
        assert_eq!(m, vec![false, true, false]);
    }

    #[test]
    fn set_mask_promotes_and_keeps_provided_values() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!ma.has_real_mask());
        ma.set_mask(arr_bool(vec![true, false, true])).unwrap();
        assert!(ma.has_real_mask());
        assert_eq!(
            ma.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, false, true]
        );
    }

    #[test]
    fn set_mask_shape_mismatch_errors() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(ma.set_mask(arr_bool(vec![false; 4])).is_err());
    }

    #[test]
    fn clone_preserves_nomask_sentinel_state() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // FIX: this was `let cloned = ma;` — a move, which trivially
        // preserves state without exercising `Clone` at all. Actually
        // clone, as the test name promises.
        let cloned = ma.clone();
        assert!(!cloned.has_real_mask());
        assert!(cloned.mask_opt().is_none());
    }

    #[test]
    fn clone_after_materialization_copies_the_mask() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // Force materialization.
        let _ = ma.mask();
        // FIX: was `let cloned = ma;` (a move) — clone for real.
        let cloned = ma.clone();
        // The clone has the same mask contents (all-false).
        assert_eq!(
            cloned.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
    }

    #[test]
    fn clone_preserves_real_mask_state() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        // FIX: was `let cloned = ma;` (a move) — clone for real.
        let cloned = ma.clone();
        assert!(cloned.has_real_mask());
        assert_eq!(
            cloned.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, true, false]
        );
    }

    // ---- shared mask with copy-on-write (#512) ----

    #[test]
    fn clone_shares_mask_via_arc() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        let cloned = ma.clone();
        // Both copies should report structural sharing.
        assert!(ma.shares_mask());
        assert!(cloned.shares_mask());
    }

    #[test]
    fn unique_masked_array_does_not_share() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        assert!(!ma.shares_mask());
    }

    #[test]
    fn copy_on_write_isolates_parent_from_child_mutation() {
        // Clone, then mutate the mask of the clone. The parent's mask
        // must be unchanged even though they started sharing an Arc.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());
        assert!(child.shares_mask());

        // Mutate the child — triggers copy-on-write.
        child.set_mask_flat(1, true).unwrap();

        // Parent's mask is still the original all-false.
        assert_eq!(
            parent.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Child's mask reflects the mutation.
        assert_eq!(
            child.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, true, false]
        );

        // Parent no longer shares (the child's CoW broke the Arc's
        // dual ownership by installing its own).
        assert!(!parent.shares_mask());
        assert!(!child.shares_mask());
    }

    #[test]
    fn copy_on_write_via_set_mask() {
        // set_mask replaces the Arc entirely, which also implicitly
        // isolates the two.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());

        child.set_mask(arr_bool(vec![true, true, true])).unwrap();
        // Parent still has the original mask.
        assert_eq!(
            parent.mask().iter().copied().collect::<Vec<_>>(),
            vec![false, false, false]
        );
        // Child has the new mask.
        assert_eq!(
            child.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, true, true]
        );
        assert!(!parent.shares_mask());
    }

    #[test]
    fn nomask_sentinel_clones_share_empty_arc() {
        // A from_data-constructed array in the nomask-sentinel state
        // still uses an Arc; clones share it.
        let parent = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        let cloned = parent.clone();
        assert!(parent.shares_mask());
        assert!(cloned.shares_mask());
        // Neither has a real mask yet.
        assert!(!parent.has_real_mask());
        assert!(!cloned.has_real_mask());
    }

    #[test]
    fn hard_mask_union_on_real_mask() {
        let mut ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![true, false, false]),
        )
        .unwrap();
        ma.harden_mask().unwrap();
        // Try to clear position 0 and set position 2. With a hard
        // mask, the union keeps position 0's true bit.
        ma.set_mask(arr_bool(vec![false, false, true])).unwrap();
        assert_eq!(
            ma.mask().iter().copied().collect::<Vec<_>>(),
            vec![true, false, true]
        );
    }
}
648}