ferray_ma/masked_array.rs
1// ferray-ma: MaskedArray<T, D> type (REQ-1, REQ-2, REQ-3)
2
3use std::sync::{Arc, OnceLock};
4
5use ferray_core::Array;
6use ferray_core::dimension::Dimension;
7use ferray_core::dtype::Element;
8use ferray_core::error::{FerrayError, FerrayResult};
9
/// A masked array that pairs data with a boolean mask.
///
/// Each element position has a corresponding mask bit:
/// - `true` means the element is **masked** (invalid / missing)
/// - `false` means the element is valid
///
/// All operations (arithmetic, reductions, ufuncs) respect the mask by
/// skipping masked elements.
///
/// The `fill_value` field is the replacement value for masked positions when
/// the masked array participates in operations or when [`MaskedArray::filled`]
/// is called without an explicit override. It defaults to `T::zero()`.
///
/// # Nomask sentinel (#506)
///
/// When a [`MaskedArray`] is constructed via [`MaskedArray::from_data`]
/// the mask is logically "all-false" but is NOT allocated as a full
/// `Array<bool, D>` up front — the lazy `OnceLock` inside stores
/// nothing until the first call to [`MaskedArray::mask`]. For arrays
/// that never touch their mask (e.g. masked ops that short-circuit
/// via [`MaskedArray::has_real_mask`]), this saves a full bool-sized
/// allocation proportional to the data size.
///
/// The `.mask()` accessor still returns `&Array<bool, D>` so all
/// existing code continues to work unchanged; the cost is one
/// lazy allocation on first access. Hot-path code that wants to
/// avoid the materialization should check `has_real_mask()` first
/// and skip any mask work when it returns `false`.
pub struct MaskedArray<T: Element, D: Dimension> {
    /// The underlying data array.
    data: Array<T, D>,
    /// Boolean mask (`true` = masked/invalid). Lazily materialized
    /// when explicitly queried via [`MaskedArray::mask`] — a
    /// `from_data`-constructed array with no masked elements pays
    /// zero allocation cost until that first query.
    ///
    /// Wrapped in `Arc` for structural sharing across clones (#512):
    /// cloning a `MaskedArray` bumps the Arc refcount instead of
    /// deep-copying the mask; any mutation that needs a unique mask
    /// does copy-on-write via `ensure_materialized_mut` (the private
    /// helper that materializes the sentinel AND un-shares the `Arc`
    /// before handing out a `&mut` to the inner array).
    mask: Arc<OnceLock<Array<bool, D>>>,
    /// `true` when a non-trivial mask has been explicitly provided
    /// (via [`MaskedArray::new`] or [`MaskedArray::set_mask`]),
    /// `false` when the array is in the nomask-sentinel state.
    ///
    /// Note: lazy materialization through [`MaskedArray::mask`] does
    /// NOT flip this flag — the contents are still logically all-false.
    /// Hot-path consumers should branch on this flag and skip the
    /// mask-iteration entirely when it is `false` — see
    /// [`MaskedArray::has_real_mask`].
    real_mask: bool,
    /// Whether the mask is hardened (cannot be cleared by assignment).
    pub(crate) hard_mask: bool,
    /// Replacement value for masked positions during operations and filling.
    /// Defaults to `T::zero()`.
    pub(crate) fill_value: T,
}
65
66impl<T: Element, D: Dimension> std::fmt::Debug for MaskedArray<T, D> {
67 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
68 f.debug_struct("MaskedArray")
69 .field("data", &self.data)
70 .field("real_mask", &self.real_mask)
71 .field("hard_mask", &self.hard_mask)
72 .field("fill_value", &self.fill_value)
73 .finish_non_exhaustive()
74 }
75}
76
77impl<T: Element + Clone, D: Dimension> Clone for MaskedArray<T, D> {
78 fn clone(&self) -> Self {
79 // Structural sharing (#512): just bump the Arc refcount instead
80 // of cloning the underlying mask array. Copy-on-write kicks in
81 // via `make_mask_unique` whenever either the parent or the
82 // clone tries to mutate its mask.
83 //
84 // The data array is still deep-cloned because ferray-core's
85 // Array doesn't have Arc-based structural sharing; sharing the
86 // mask alone still saves the larger-of-the-two allocations in
87 // the common "unmasked data transformations" path.
88 Self {
89 data: self.data.clone(),
90 mask: Arc::clone(&self.mask),
91 real_mask: self.real_mask,
92 hard_mask: self.hard_mask,
93 fill_value: self.fill_value.clone(),
94 }
95 }
96}
97
98impl<T: Element, D: Dimension> MaskedArray<T, D> {
99 /// Create a new masked array from data and mask arrays.
100 ///
101 /// The `fill_value` defaults to `T::zero()`. Use [`MaskedArray::with_fill_value`]
102 /// to set a custom replacement value.
103 ///
104 /// # Errors
105 /// Returns `FerrayError::ShapeMismatch` if data and mask shapes differ.
106 pub fn new(data: Array<T, D>, mask: Array<bool, D>) -> FerrayResult<Self> {
107 if data.shape() != mask.shape() {
108 return Err(FerrayError::shape_mismatch(format!(
109 "MaskedArray::new: data shape {:?} does not match mask shape {:?}",
110 data.shape(),
111 mask.shape()
112 )));
113 }
114 let lock = OnceLock::new();
115 let _ = lock.set(mask);
116 Ok(Self {
117 data,
118 mask: Arc::new(lock),
119 real_mask: true,
120 hard_mask: false,
121 fill_value: T::zero(),
122 })
123 }
124
125 /// Create a masked array with no masked elements (all-false mask).
126 ///
127 /// Does NOT allocate the mask up front — the array is in the
128 /// nomask-sentinel state (#506) until [`MaskedArray::mask`] is
129 /// explicitly called. For code that only uses `data()` or
130 /// short-circuits via [`MaskedArray::has_real_mask`], this saves
131 /// a full-sized bool allocation.
132 ///
133 /// # Errors
134 /// Always returns `Ok` — the `FerrayResult` is preserved for API
135 /// parity with the previous eager implementation.
136 pub fn from_data(data: Array<T, D>) -> FerrayResult<Self> {
137 Ok(Self {
138 data,
139 mask: Arc::new(OnceLock::new()),
140 real_mask: false,
141 hard_mask: false,
142 fill_value: T::zero(),
143 })
144 }
145
146 /// Return `true` if this masked array holds a real (explicitly
147 /// provided or materialized) mask. Returns `false` when the array
148 /// is in the nomask-sentinel state and the mask is logically
149 /// all-false.
150 ///
151 /// Hot-path iteration code should branch on this flag to skip
152 /// mask scanning entirely when it returns `false` (#506).
153 #[inline]
154 pub fn has_real_mask(&self) -> bool {
155 self.real_mask
156 }
157
158 /// Return the fill value used to replace masked positions.
159 ///
160 /// See [`MaskedArray::with_fill_value`] for setting it.
161 #[inline]
162 pub fn fill_value(&self) -> T
163 where
164 T: Copy,
165 {
166 self.fill_value
167 }
168
169 /// Set the fill value, returning the modified array.
170 ///
171 /// The fill value is used by [`MaskedArray::filled`] (when called
172 /// without an explicit override) and by arithmetic operations as the
173 /// replacement for masked positions in the result data.
174 pub fn with_fill_value(mut self, fill_value: T) -> Self {
175 self.fill_value = fill_value;
176 self
177 }
178
179 /// Replace the fill value in place.
180 pub fn set_fill_value(&mut self, fill_value: T) {
181 self.fill_value = fill_value;
182 }
183
184 /// Return a reference to the underlying data array.
185 #[inline]
186 pub fn data(&self) -> &Array<T, D> {
187 &self.data
188 }
189
190 /// Return a reference to the mask array.
191 ///
192 /// If the array is in the nomask-sentinel state (constructed via
193 /// [`MaskedArray::from_data`] or otherwise) this lazily allocates
194 /// a full all-false `Array<bool, D>` and caches it for subsequent
195 /// calls. Use [`MaskedArray::has_real_mask`] to check whether the
196 /// mask is known to be trivial first, and skip calling `.mask()`
197 /// entirely on the hot path when you can.
198 pub fn mask(&self) -> &Array<bool, D> {
199 self.mask.get_or_init(|| {
200 Array::<bool, D>::from_elem(self.data.dim().clone(), false)
201 .expect("from_elem with matching dim cannot fail")
202 })
203 }
204
205 /// Return a reference to the mask array if one has been
206 /// materialized, or `None` when the array is still in the
207 /// nomask-sentinel state.
208 ///
209 /// Unlike [`MaskedArray::mask`], this does NOT trigger lazy
210 /// allocation — it's the fast-path query for hot code that
211 /// wants to branch on whether any mask bits are set (#506).
212 #[inline]
213 pub fn mask_opt(&self) -> Option<&Array<bool, D>> {
214 if self.real_mask {
215 // A real mask was set via `new` or `set_mask`; the
216 // OnceLock is guaranteed to be initialized.
217 self.mask.get()
218 } else {
219 None
220 }
221 }
222
223 /// Return a mutable reference to the underlying data array.
224 #[inline]
225 pub fn data_mut(&mut self) -> &mut Array<T, D> {
226 &mut self.data
227 }
228
229 /// Return the shape of the masked array.
230 #[inline]
231 pub fn shape(&self) -> &[usize] {
232 self.data.shape()
233 }
234
235 /// Return the number of dimensions.
236 #[inline]
237 pub fn ndim(&self) -> usize {
238 self.data.ndim()
239 }
240
241 /// Return the total number of elements (including masked).
242 #[inline]
243 pub fn size(&self) -> usize {
244 self.data.size()
245 }
246
247 /// Return the dimension descriptor.
248 #[inline]
249 pub fn dim(&self) -> &D {
250 self.data.dim()
251 }
252
253 /// Return whether the mask is hardened.
254 #[inline]
255 pub fn is_hard_mask(&self) -> bool {
256 self.hard_mask
257 }
258
259 /// Internal helper: force the lazy nomask sentinel to materialize a
260 /// concrete `Array<bool, D>` AND ensure the mask's `Arc` is
261 /// uniquely owned (copy-on-write), then return a mutable reference
262 /// to the inner mask.
263 ///
264 /// After this call `real_mask` is `true`, `self.mask` is guaranteed
265 /// to contain an initialized `Array<bool, D>`, and the underlying
266 /// `Arc` has refcount exactly 1 so it's safe to mutate without
267 /// aliasing any other `MaskedArray` that may have cloned from us.
268 fn ensure_materialized_mut(&mut self) -> &mut Array<bool, D> {
269 // Step 1: materialize if we're still in the nomask sentinel
270 // state. We install a fresh Arc<OnceLock> containing an
271 // all-false mask.
272 if !self.real_mask || self.mask.get().is_none() {
273 let fresh = Array::<bool, D>::from_elem(self.data.dim().clone(), false)
274 .expect("from_elem with matching dim cannot fail");
275 let lock = OnceLock::new();
276 let _ = lock.set(fresh);
277 self.mask = Arc::new(lock);
278 self.real_mask = true;
279 }
280
281 // Step 2: copy-on-write — if this Arc is shared with any
282 // clones, deep-copy the inner mask into a fresh Arc so our
283 // mutation doesn't affect the clones. Arc::get_mut returns
284 // None when refcount > 1.
285 if Arc::get_mut(&mut self.mask).is_none() {
286 let cloned_mask = self
287 .mask
288 .get()
289 .expect("real_mask implies OnceLock set")
290 .clone();
291 let new_lock = OnceLock::new();
292 let _ = new_lock.set(cloned_mask);
293 self.mask = Arc::new(new_lock);
294 }
295
296 // Step 3: now we're the unique owner — get_mut on the OnceLock
297 // for the inner Array<bool, D>.
298 Arc::get_mut(&mut self.mask)
299 .expect("just made the Arc unique above")
300 .get_mut()
301 .expect("OnceLock was initialized above")
302 }
303
304 /// Set a mask value at a flat index.
305 ///
306 /// If the mask is hardened, only `true` (masking) is allowed; attempts to
307 /// clear a mask bit are silently ignored.
308 ///
309 /// Setting a mask bit materializes the lazy nomask sentinel into a
310 /// real mask array (#506) — if you set even one bit, the full
311 /// `Array<bool, D>` is allocated.
312 ///
313 /// # Errors
314 /// Returns `FerrayError::IndexOutOfBounds` if `flat_idx >= size`.
315 pub fn set_mask_flat(&mut self, flat_idx: usize, value: bool) -> FerrayResult<()> {
316 let size = self.size();
317 if flat_idx >= size {
318 return Err(FerrayError::index_out_of_bounds(flat_idx as isize, 0, size));
319 }
320 if self.hard_mask && !value {
321 // Hard mask: cannot clear mask bits
322 return Ok(());
323 }
324 // Setting a nomask-sentinel to false is a no-op (mask is
325 // already logically all-false); skip the allocation entirely.
326 if !self.real_mask && !value {
327 return Ok(());
328 }
329 let mask = self.ensure_materialized_mut();
330 // Fast path: contiguous mask — direct O(1) slice indexing.
331 if let Some(slice) = mask.as_slice_mut() {
332 slice[flat_idx] = value;
333 } else {
334 // Non-contiguous: fall back to iterator (rare case).
335 if let Some(m) = mask.iter_mut().nth(flat_idx) {
336 *m = value;
337 }
338 }
339 Ok(())
340 }
341
342 /// Replace the mask with a new one.
343 ///
344 /// If the mask is hardened, only bits that are `true` in both the old and
345 /// new masks (or newly set to `true`) are allowed; cleared bits are ignored.
346 ///
347 /// Passing a new mask always materializes the array out of the
348 /// nomask-sentinel state — the stored mask becomes the provided
349 /// one (possibly unioned with the existing mask if hardened).
350 ///
351 /// # Errors
352 /// Returns `FerrayError::ShapeMismatch` if shapes differ.
353 pub fn set_mask(&mut self, new_mask: Array<bool, D>) -> FerrayResult<()> {
354 if self.data.shape() != new_mask.shape() {
355 return Err(FerrayError::shape_mismatch(format!(
356 "set_mask: mask shape {:?} does not match array shape {:?}",
357 new_mask.shape(),
358 self.data.shape()
359 )));
360 }
361 if self.hard_mask && self.real_mask {
362 // Hard-mask union: merge the new mask with the existing
363 // one, keeping any `true` bits and never clearing.
364 let existing = self.mask.get().expect("real_mask implies OnceLock set");
365 let merged: Vec<bool> = existing
366 .iter()
367 .zip(new_mask.iter())
368 .map(|(old, new)| *old || *new)
369 .collect();
370 let merged_arr = Array::from_vec(self.data.dim().clone(), merged)?;
371 let lock = OnceLock::new();
372 let _ = lock.set(merged_arr);
373 // Install a fresh Arc; any clones keep their own snapshot.
374 self.mask = Arc::new(lock);
375 } else {
376 // Either not hardened or currently in the nomask sentinel
377 // state — unconditionally install the new mask in a fresh
378 // Arc (copy-on-write: clones remain unaffected).
379 let lock = OnceLock::new();
380 let _ = lock.set(new_mask);
381 self.mask = Arc::new(lock);
382 }
383 self.real_mask = true;
384 Ok(())
385 }
386
387 /// Return `true` when this masked array's underlying mask is
388 /// structurally shared with at least one other `MaskedArray`.
389 ///
390 /// After a `clone()` the original and the clone share the same
391 /// mask via `Arc` until one of them mutates it (copy-on-write, #512).
392 /// Hot-path code can use this to reason about memory sharing —
393 /// `shares_mask() == false` means the mask is uniquely owned and
394 /// can be mutated without affecting any other MaskedArray.
395 #[inline]
396 pub fn shares_mask(&self) -> bool {
397 Arc::strong_count(&self.mask) > 1
398 }
399}
400
#[cfg(test)]
mod tests {
    use super::*;
    use ferray_core::Array;
    use ferray_core::dimension::Ix1;

    /// Build a 1-D f64 array from the given values.
    fn arr_f64(values: Vec<f64>) -> Array<f64, Ix1> {
        let len = values.len();
        Array::<f64, Ix1>::from_vec(Ix1::new([len]), values).unwrap()
    }

    /// Build a 1-D bool array from the given flags.
    fn arr_bool(flags: Vec<bool>) -> Array<bool, Ix1> {
        let len = flags.len();
        Array::<bool, Ix1>::from_vec(Ix1::new([len]), flags).unwrap()
    }

    /// Collect the (possibly lazily materialized) mask bits into a Vec.
    fn mask_bits<T: Element>(ma: &MaskedArray<T, Ix1>) -> Vec<bool> {
        ma.mask().iter().copied().collect()
    }

    // ---- nomask sentinel (#506) ----

    #[test]
    fn from_data_starts_in_nomask_sentinel_state() {
        let sentinel = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!sentinel.has_real_mask());
        assert!(sentinel.mask_opt().is_none());
    }

    #[test]
    fn new_with_explicit_mask_is_real_mask() {
        let data = arr_f64(vec![1.0, 2.0, 3.0]);
        let mask = arr_bool(vec![false, true, false]);
        let ma = MaskedArray::new(data, mask).unwrap();
        assert!(ma.has_real_mask());
        assert!(ma.mask_opt().is_some());
    }

    #[test]
    fn mask_accessor_lazily_materializes_nomask_sentinel() {
        let ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // The OnceLock starts empty — mask_opt doesn't allocate.
        assert!(ma.mask_opt().is_none());
        // First .mask() call allocates a full all-false Array<bool, D>.
        let first = ma.mask();
        assert_eq!(first.shape(), &[3]);
        let bits: Vec<bool> = first.iter().copied().collect();
        assert_eq!(bits, [false, false, false]);
        // A second call hands back the cached allocation, not a new one.
        let second = ma.mask();
        assert!(std::ptr::eq(first, second));
        // Lazy materialization does NOT promote the sentinel: the
        // contents are still logically all-false, so hot-path code can
        // keep skipping via has_real_mask().
        assert!(!ma.has_real_mask());
    }

    #[test]
    fn set_mask_flat_false_on_nomask_stays_zero_allocation() {
        // Clearing a bit that is already logically false must be a
        // no-op and must not materialize the mask.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, false).unwrap();
        assert!(!ma.has_real_mask());
        assert!(ma.mask_opt().is_none());
    }

    #[test]
    fn set_mask_flat_true_on_nomask_materializes_and_promotes() {
        // Masking even a single position forces the full allocation
        // and flips the real_mask flag.
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        ma.set_mask_flat(1, true).unwrap();
        assert!(ma.has_real_mask());
        assert_eq!(mask_bits(&ma), [false, true, false]);
    }

    #[test]
    fn set_mask_promotes_and_keeps_provided_values() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        assert!(!ma.has_real_mask());
        ma.set_mask(arr_bool(vec![true, false, true])).unwrap();
        assert!(ma.has_real_mask());
        assert_eq!(mask_bits(&ma), [true, false, true]);
    }

    #[test]
    fn set_mask_shape_mismatch_errors() {
        let mut ma = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        let wrong_len = arr_bool(vec![false; 4]);
        assert!(ma.set_mask(wrong_len).is_err());
    }

    #[test]
    fn clone_preserves_nomask_sentinel_state() {
        let original = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        let copy = original.clone();
        assert!(!copy.has_real_mask());
        assert!(copy.mask_opt().is_none());
    }

    #[test]
    fn clone_after_materialization_copies_the_mask() {
        let original = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        // Force the lazy allocation before cloning.
        let _ = original.mask();
        let copy = original.clone();
        // The clone observes the same (all-false) contents.
        assert_eq!(mask_bits(&copy), [false, false, false]);
    }

    #[test]
    fn clone_preserves_real_mask_state() {
        let original = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        let copy = original.clone();
        assert!(copy.has_real_mask());
        assert_eq!(mask_bits(&copy), [false, true, false]);
    }

    // ---- shared mask with copy-on-write (#512) ----

    #[test]
    fn clone_shares_mask_via_arc() {
        let original = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        let copy = original.clone();
        // Both sides of the clone report structural sharing.
        assert!(original.shares_mask());
        assert!(copy.shares_mask());
    }

    #[test]
    fn unique_masked_array_does_not_share() {
        let ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, true, false]),
        )
        .unwrap();
        assert!(!ma.shares_mask());
    }

    #[test]
    fn copy_on_write_isolates_parent_from_child_mutation() {
        // Clone, then mutate the clone's mask. The parent's mask must
        // stay untouched even though both started on one shared Arc.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());
        assert!(child.shares_mask());

        // This mutation triggers copy-on-write in the child.
        child.set_mask_flat(1, true).unwrap();

        // Parent still sees the original all-false mask...
        assert_eq!(mask_bits(&parent), [false, false, false]);
        // ...while the child sees its own edit.
        assert_eq!(mask_bits(&child), [false, true, false]);

        // The child's CoW installed a private Arc, so neither side
        // shares any longer.
        assert!(!parent.shares_mask());
        assert!(!child.shares_mask());
    }

    #[test]
    fn copy_on_write_via_set_mask() {
        // set_mask swaps in a brand-new Arc, which also implicitly
        // breaks the sharing between parent and child.
        let parent = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![false, false, false]),
        )
        .unwrap();
        let mut child = parent.clone();
        assert!(parent.shares_mask());

        child.set_mask(arr_bool(vec![true, true, true])).unwrap();
        // Parent keeps its original mask.
        assert_eq!(mask_bits(&parent), [false, false, false]);
        // Child carries the replacement.
        assert_eq!(mask_bits(&child), [true, true, true]);
        assert!(!parent.shares_mask());
    }

    #[test]
    fn nomask_sentinel_clones_share_empty_arc() {
        // Even in the nomask-sentinel state the (empty) OnceLock lives
        // behind an Arc, so clones share it.
        let parent = MaskedArray::from_data(arr_f64(vec![1.0, 2.0, 3.0])).unwrap();
        let copy = parent.clone();
        assert!(parent.shares_mask());
        assert!(copy.shares_mask());
        // Neither side has been promoted to a real mask.
        assert!(!parent.has_real_mask());
        assert!(!copy.has_real_mask());
    }

    #[test]
    fn hard_mask_union_on_real_mask() {
        let mut ma = MaskedArray::new(
            arr_f64(vec![1.0, 2.0, 3.0]),
            arr_bool(vec![true, false, false]),
        )
        .unwrap();
        ma.harden_mask().unwrap();
        // Attempt to clear position 0 while setting position 2: the
        // hardened union must preserve position 0's true bit.
        ma.set_mask(arr_bool(vec![false, false, true])).unwrap();
        assert_eq!(mask_bits(&ma), [true, false, true]);
    }
}
647}