use crate::{Index, Shape, ShapeIdx, SignedIndex, Unbind, assert};
use core::fmt;
use core::marker::PhantomData;
use core::ops::Range;
use generativity::Guard;

/// Function pointer alias used to make the brand lifetime `'a` invariant.
type Invariant<'a> = fn(&'a ()) -> &'a ();

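/// Result of splitting a bound dimension `'n` into a `head` part and a `tail` part, each bound
/// to its own brand lifetime.
///
/// A minimal usage sketch, assuming the items of this module are in scope (`make_guard!` is the
/// guard constructor from the `generativity` crate):
///
/// ```ignore
/// use generativity::make_guard;
///
/// Dim::with(10, |n| {
///     let mid = n.idx_inc(4usize);
///     make_guard!(head);
///     make_guard!(tail);
///     let split = n.partition(mid, head, tail);
///     assert_eq!(*split.head + *split.tail, *n);
/// });
/// ```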
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Partition<'head, 'tail, 'n> {
    /// Head (leading) part of the dimension.
    pub head: Dim<'head>,
    /// Tail (trailing) part of the dimension.
    pub tail: Dim<'tail>,
    __marker: PhantomData<Invariant<'n>>,
}

impl<'head, 'tail, 'n> Partition<'head, 'tail, 'n> {
    /// Returns the midpoint of the partition as an inclusive index into `'n`.
    #[inline]
    pub const fn midpoint(&self) -> IdxInc<'n> {
        unsafe { IdxInc::new_unbound(self.head.unbound) }
    }

    /// Returns the partition with `head` and `tail` swapped.
    #[inline]
    pub const fn flip(&self) -> Partition<'tail, 'head, 'n> {
        Partition {
            head: self.tail,
            tail: self.head,
            __marker: PhantomData,
        }
    }
}

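/// Dimension (size) bound to the brand lifetime `'n`. Any two `Dim<'n>` values carrying the same
/// brand are guaranteed to hold the same size.
///
/// A minimal sketch of binding a runtime size and iterating over its indices, assuming the items
/// of this module are in scope:
///
/// ```ignore
/// Dim::with(3, |n| {
///     for i in n.indices() {
///         // `i` is an `Idx` that is statically known to be in bounds for `n`
///         assert!(*i < *n);
///     }
/// });
/// ```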
#[derive(Copy, Clone)]
#[repr(transparent)]
pub struct Dim<'n> {
    unbound: usize,
    __marker: PhantomData<Invariant<'n>>,
}
impl PartialEq for Dim<'_> {
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        // two `Dim`s sharing the brand `'n` always hold the same size
        equator::debug_assert!(self.unbound == other.unbound);
        true
    }
}
impl Eq for Dim<'_> {}

impl PartialOrd for Dim<'_> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        equator::debug_assert!(self.unbound == other.unbound);
        Some(core::cmp::Ordering::Equal)
    }
}
impl Ord for Dim<'_> {
    #[inline(always)]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        equator::debug_assert!(self.unbound == other.unbound);
        core::cmp::Ordering::Equal
    }
}

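/// Index that is statically known to be in bounds for the dimension bound to `'n`, i.e. strictly
/// less than the corresponding `Dim<'n>`.
///
/// A minimal sketch of the checked constructors, assuming the items of this module are in scope:
///
/// ```ignore
/// Dim::with(4, |n| {
///     let i = n.idx(2usize); // panics if the index is out of bounds
///     assert!(i < n);
///     assert!(n.try_check(5usize).is_none());
/// });
/// ```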
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct Idx<'n, I: Index = usize> {
    unbound: I,
    __marker: PhantomData<Invariant<'n>>,
}

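/// Inclusive index bound to `'n`: a value in `0..=dim`, suitable for marking the start or end of
/// a range within the dimension.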
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct IdxInc<'n, I: Index = usize> {
    unbound: I,
    __marker: PhantomData<Invariant<'n>>,
}

impl fmt::Debug for Dim<'_> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.unbound.fmt(f)
    }
}
impl<I: Index> fmt::Debug for Idx<'_, I> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.unbound.fmt(f)
    }
}
impl<I: Index> fmt::Debug for IdxInc<'_, I> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.unbound.fmt(f)
    }
}
impl<I: Index> fmt::Debug for MaybeIdx<'_, I> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // a negative value (when viewed as signed) encodes the absence of an index
        if self.unbound.to_signed() >= I::Signed::truncate(0) {
            self.unbound.fmt(f)
        } else {
            f.write_str("None")
        }
    }
}

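// Comparisons between a branded index and its dimension can be resolved statically: an
// `Idx<'n>` is always strictly less than `Dim<'n>`, and an `IdxInc<'n>` is at most equal to it.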
impl<'n, I: Index> PartialEq<Dim<'n>> for Idx<'n, I> {
    #[inline(always)]
    fn eq(&self, other: &Dim<'n>) -> bool {
        equator::debug_assert!(self.unbound.zx() < other.unbound);

        false
    }
}
impl<'n, I: Index> PartialOrd<Dim<'n>> for Idx<'n, I> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Dim<'n>) -> Option<core::cmp::Ordering> {
        equator::debug_assert!(self.unbound.zx() < other.unbound);

        Some(core::cmp::Ordering::Less)
    }
}

impl<'n, I: Index> PartialEq<Dim<'n>> for IdxInc<'n, I> {
    #[inline(always)]
    fn eq(&self, other: &Dim<'n>) -> bool {
        equator::debug_assert!(self.unbound.zx() <= other.unbound);

        self.unbound.zx() == other.unbound
    }
}

impl<'n, I: Index> PartialOrd<Dim<'n>> for IdxInc<'n, I> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Dim<'n>) -> Option<core::cmp::Ordering> {
        equator::debug_assert!(self.unbound.zx() <= other.unbound);

        Some(if self.unbound.zx() == other.unbound {
            core::cmp::Ordering::Equal
        } else {
            core::cmp::Ordering::Less
        })
    }
}

impl<'n> Dim<'n> {
    /// Creates a new dimension bound to a unique brand lifetime, valid for the duration of the
    /// closure call.
    #[inline(always)]
    pub fn with<R>(dim: usize, f: impl for<'dim> FnOnce(Dim<'dim>) -> R) -> R {
        f(unsafe { Self::new_unbound(dim) })
    }

    /// Creates a new dimension without binding it to any particular brand.
    ///
    /// # Safety
    /// The size must be consistent with any other value carrying the same brand `'n`.
    #[inline(always)]
    pub const unsafe fn new_unbound(dim: usize) -> Self {
        Self {
            unbound: dim,
            __marker: PhantomData,
        }
    }

    /// Creates a new dimension bound to the brand of the given guard.
    #[inline(always)]
    pub fn new(dim: usize, guard: Guard<'n>) -> Self {
        _ = guard;
        Self {
            unbound: dim,
            __marker: PhantomData,
        }
    }

    /// Returns the unbound size as a `usize`.
    #[inline(always)]
    pub const fn unbound(self) -> usize {
        self.unbound
    }

    /// Splits the dimension at `midpoint` into a head part and a tail part, branded by the given
    /// guards.
    #[inline]
    pub const fn partition<'head, 'tail>(self, midpoint: IdxInc<'n>, head: Guard<'head>, tail: Guard<'tail>) -> Partition<'head, 'tail, 'n> {
        _ = (head, tail);
        unsafe {
            Partition {
                head: Dim::new_unbound(midpoint.unbound),
                tail: Dim::new_unbound(self.unbound - midpoint.unbound),
                __marker: PhantomData,
            }
        }
    }

    /// Splits the dimension into an already bound head part and a freshly branded tail part.
    ///
    /// # Panics
    /// Panics if `head` is larger than `self`.
    #[inline]
    #[track_caller]
    pub fn head_partition<'head, 'tail>(self, head: Dim<'head>, tail: Guard<'tail>) -> Partition<'head, 'tail, 'n> {
        _ = (head, tail);
        let midpoint = IdxInc::new_checked(head.unbound(), self);
        unsafe {
            Partition {
                head,
                tail: Dim::new_unbound(self.unbound - midpoint.unbound),
                __marker: PhantomData,
            }
        }
    }

    /// Advances `start` by at most `len`, without going past the end of the dimension.
    #[inline]
    pub fn advance(self, start: Idx<'n>, len: usize) -> IdxInc<'n> {
        let len = Ord::min(self.unbound.saturating_sub(start.unbound), len);
        IdxInc {
            unbound: start.unbound + len,
            __marker: PhantomData,
        }
    }

    /// Returns an iterator over the indices `0..dim`.
    #[inline]
    pub fn indices(self) -> impl Clone + ExactSizeIterator + DoubleEndedIterator<Item = Idx<'n>> {
        (0..self.unbound).map(|i| unsafe { Idx::new_unbound(i) })
    }

    /// Returns a parallel iterator over the indices `0..dim`.
    #[inline]
    #[cfg(feature = "rayon")]
    pub fn par_indices(self) -> impl rayon::iter::IndexedParallelIterator<Item = Idx<'n>> {
        use rayon::prelude::*;
        (0..self.unbound).into_par_iter().map(|i| unsafe { Idx::new_unbound(i) })
    }
}

impl<'n, I: Index> Idx<'n, I> {
    /// Creates a new index without binding it to any particular dimension.
    ///
    /// # Safety
    /// The index must be in bounds for the dimension bound to `'n`.
    #[inline(always)]
    pub const unsafe fn new_unbound(idx: I) -> Self {
        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Creates a new index, assuming without checking that it is in bounds for `dim`.
    ///
    /// # Safety
    /// `idx` must be strictly less than `dim`, and at most `I::Signed::MAX`.
    #[inline(always)]
    pub unsafe fn new_unchecked(idx: I, dim: Dim<'n>) -> Self {
        equator::debug_assert!(all(idx.zx() < dim.unbound, idx <= I::from_signed(I::Signed::MAX),));

        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Creates a new index, panicking if it is out of bounds for `dim`.
    #[inline(always)]
    #[track_caller]
    pub fn new_checked(idx: I, dim: Dim<'n>) -> Self {
        equator::assert!(all(idx.zx() < dim.unbound, idx <= I::from_signed(I::Signed::MAX),));

        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Returns the unbound index value.
    #[inline(always)]
    pub const fn unbound(self) -> I {
        self.unbound
    }

    /// Zero-extends the index to a `usize` index.
    #[inline(always)]
    pub fn zx(self) -> Idx<'n> {
        Idx {
            unbound: self.unbound.zx(),
            __marker: PhantomData,
        }
    }
}

impl<'n> IdxInc<'n> {
    /// Inclusive index equal to zero.
    pub const ZERO: Self = unsafe { Self::new_unbound(0) };
}

impl<'n, I: Index> IdxInc<'n, I> {
    /// Creates a new inclusive index without binding it to any particular dimension.
    ///
    /// # Safety
    /// The index must be at most the dimension bound to `'n`.
    #[inline(always)]
    pub const unsafe fn new_unbound(idx: I) -> Self {
        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Creates a new inclusive index, assuming without checking that it is in bounds for `dim`.
    ///
    /// # Safety
    /// `idx` must be less than or equal to `dim`, and at most `I::Signed::MAX`.
    #[inline(always)]
    pub unsafe fn new_unchecked(idx: I, dim: Dim<'n>) -> Self {
        equator::debug_assert!(all(idx.zx() <= dim.unbound, idx <= I::from_signed(I::Signed::MAX),));

        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Creates a new inclusive index, panicking if it is out of bounds for `dim`.
    #[inline(always)]
    #[track_caller]
    pub fn new_checked(idx: I, dim: Dim<'n>) -> Self {
        equator::assert!(all(idx.zx() <= dim.unbound, idx <= I::from_signed(I::Signed::MAX),));

        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Returns the unbound index value.
    #[inline(always)]
    pub const fn unbound(self) -> I {
        self.unbound
    }

    /// Zero-extends the index to a `usize` index.
    #[inline(always)]
    pub fn zx(self) -> IdxInc<'n> {
        IdxInc {
            unbound: self.unbound.zx(),
            __marker: PhantomData,
        }
    }
}

impl<'n> IdxInc<'n> {
    /// Returns an iterator over the indices in `self..upper`.
    #[inline]
    pub fn to(self, upper: IdxInc<'n>) -> impl Clone + ExactSizeIterator + DoubleEndedIterator<Item = Idx<'n>> {
        (self.unbound..upper.unbound).map(|i| unsafe { Idx::new_unbound(i) })
    }

    /// Returns an iterator over the indices in `self..upper`.
    #[inline]
    pub fn range_to(self, upper: IdxInc<'n>) -> impl Clone + ExactSizeIterator + DoubleEndedIterator<Item = Idx<'n>> {
        (self.unbound..upper.unbound).map(|i| unsafe { Idx::new_unbound(i) })
    }
}

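// `Unbind` converts between the branded wrappers and their raw representations, erasing the
// brand lifetime.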
impl Unbind for Dim<'_> {
    #[inline(always)]
    unsafe fn new_unbound(idx: usize) -> Self {
        Self::new_unbound(idx)
    }

    #[inline(always)]
    fn unbound(self) -> usize {
        self.unbound
    }
}
impl<I: Index> Unbind<I> for Idx<'_, I> {
    #[inline(always)]
    unsafe fn new_unbound(idx: I) -> Self {
        Self::new_unbound(idx)
    }

    #[inline(always)]
    fn unbound(self) -> I {
        self.unbound
    }
}
impl<I: Index> Unbind<I> for IdxInc<'_, I> {
    #[inline(always)]
    unsafe fn new_unbound(idx: I) -> Self {
        Self::new_unbound(idx)
    }

    #[inline(always)]
    fn unbound(self) -> I {
        self.unbound
    }
}

impl<I: Index> Unbind<I::Signed> for MaybeIdx<'_, I> {
    #[inline(always)]
    unsafe fn new_unbound(idx: I::Signed) -> Self {
        Self::new_unbound(I::from_signed(idx))
    }

    #[inline(always)]
    fn unbound(self) -> I::Signed {
        self.unbound.to_signed()
    }
}

impl<'dim> ShapeIdx for Dim<'dim> {
    type Idx<I: Index> = Idx<'dim, I>;
    type IdxInc<I: Index> = IdxInc<'dim, I>;
    type MaybeIdx<I: Index> = MaybeIdx<'dim, I>;
}

impl<'dim> Shape for Dim<'dim> {}

impl<'n, I: Index> From<Idx<'n, I>> for IdxInc<'n, I> {
    #[inline(always)]
    fn from(value: Idx<'n, I>) -> Self {
        Self {
            unbound: value.unbound,
            __marker: PhantomData,
        }
    }
}

impl<'n> From<Dim<'n>> for IdxInc<'n> {
    #[inline(always)]
    fn from(value: Dim<'n>) -> Self {
        Self {
            unbound: value.unbound,
            __marker: PhantomData,
        }
    }
}

impl<'n, I: Index> From<Idx<'n, I>> for MaybeIdx<'n, I> {
    #[inline(always)]
    fn from(value: Idx<'n, I>) -> Self {
        Self {
            unbound: value.unbound,
            __marker: PhantomData,
        }
    }
}

impl<'size> Dim<'size> {
    /// Checks that `idx` is in bounds, panicking otherwise.
    #[track_caller]
    #[inline]
    pub fn check<I: Index>(self, idx: I) -> Idx<'size, I> {
        Idx::new_checked(idx, self)
    }

    /// Checks that `idx` is in bounds, panicking otherwise.
    #[track_caller]
    #[inline]
    pub fn idx<I: Index>(self, idx: I) -> Idx<'size, I> {
        Idx::new_checked(idx, self)
    }

    /// Checks that `idx` is a valid inclusive index, panicking otherwise.
    #[track_caller]
    #[inline]
    pub fn idx_inc<I: Index>(self, idx: I) -> IdxInc<'size, I> {
        IdxInc::new_checked(idx, self)
    }

    /// Checks that `idx` is in bounds, returning `None` otherwise.
    #[inline]
    pub fn try_check<I: Index>(self, idx: I) -> Option<Idx<'size, I>> {
        if idx.zx() < self.unbound() {
            Some(unsafe { Idx::new_unbound(idx) })
        } else {
            None
        }
    }
}

impl<'n> Idx<'n> {
    /// Truncates the index to the index type `I`.
    pub fn truncate<I: Index>(self) -> Idx<'n, I> {
        unsafe { Idx::new_unbound(I::truncate(self.unbound())) }
    }
}

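/// Conversions between `Idx` and `IdxInc`, and checked rebranding of raw index slices.
///
/// A minimal sketch of rebranding a slice of raw indices once every entry has been checked
/// against the dimension, assuming the items of this module are in scope:
///
/// ```ignore
/// Dim::with(4, |n| {
///     let raw = [0usize, 3, 1];
///     let idx: &[Idx<'_, usize>] = Idx::from_slice_ref_checked(&raw, n);
///     assert_eq!(idx.len(), 3);
/// });
/// ```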
impl<'n, I: Index> Idx<'n, I> {
    /// Converts the index to an inclusive index with the same value.
    #[inline]
    pub const fn to_incl(self) -> IdxInc<'n, I> {
        unsafe { IdxInc::new_unbound(self.unbound()) }
    }

    /// Returns the next inclusive index, i.e. `self + 1`.
    #[inline]
    pub fn next(self) -> IdxInc<'n, I> {
        unsafe { IdxInc::new_unbound(self.unbound() + I::truncate(1)) }
    }

    /// Returns the index as an `IdxInc` with the same value.
    #[inline]
    pub fn excl(self) -> IdxInc<'n, I> {
        unsafe { IdxInc::new_unbound(self.unbound()) }
    }

    /// Rebrands a mutable slice of indices, panicking if any entry is out of bounds for `size`.
    #[track_caller]
    #[inline]
    pub fn from_slice_mut_checked<'a>(slice: &'a mut [I], size: Dim<'n>) -> &'a mut [Idx<'n, I>] {
        Self::from_slice_ref_checked(slice, size);
        unsafe { &mut *(slice as *mut _ as *mut _) }
    }

    /// Rebrands a mutable slice of indices without checking its entries.
    ///
    /// # Safety
    /// Every entry must be in bounds for the dimension bound to `'n`.
    #[track_caller]
    #[inline]
    pub unsafe fn from_slice_mut_unchecked<'a>(slice: &'a mut [I]) -> &'a mut [Idx<'n, I>] {
        unsafe { &mut *(slice as *mut _ as *mut _) }
    }

    /// Rebrands a slice of indices, panicking if any entry is out of bounds for `size`.
    #[track_caller]
    pub fn from_slice_ref_checked<'a>(slice: &'a [I], size: Dim<'n>) -> &'a [Idx<'n, I>] {
        for &idx in slice {
            Self::new_checked(idx, size);
        }
        unsafe { &*(slice as *const _ as *const _) }
    }

    /// Rebrands a slice of indices without checking its entries.
    ///
    /// # Safety
    /// Every entry must be in bounds for the dimension bound to `'n`.
    #[track_caller]
    #[inline]
    pub unsafe fn from_slice_ref_unchecked<'a>(slice: &'a [I]) -> &'a [Idx<'n, I>] {
        unsafe { &*(slice as *const _ as *const _) }
    }
}

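/// Index bound to `'n` that may be absent: a non-negative value (when viewed as `I::Signed`) is
/// an in-bounds index, while a negative value encodes `None`.
///
/// A minimal usage sketch, assuming the items of this module are in scope:
///
/// ```ignore
/// Dim::with(4, |n| {
///     let some = MaybeIdx::from_index(n.idx(1usize));
///     let none = MaybeIdx::<usize>::none();
///     assert!(some.idx().is_some());
///     assert!(none.idx().is_none());
/// });
/// ```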
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct MaybeIdx<'n, I: Index = usize> {
    unbound: I,
    __marker: PhantomData<Invariant<'n>>,
}

impl<'n, I: Index> MaybeIdx<'n, I> {
    /// Creates a `MaybeIdx` holding the given index.
    #[inline]
    pub fn from_index(idx: Idx<'n, I>) -> Self {
        unsafe { Self::new_unbound(idx.unbound()) }
    }

    /// Creates an empty `MaybeIdx`.
    #[inline]
    pub fn none() -> Self {
        unsafe { Self::new_unbound(I::truncate(usize::MAX)) }
    }

    /// Creates a `MaybeIdx` from a signed value, panicking if it is not less than `size`.
    #[inline]
    #[track_caller]
    pub fn new_checked(idx: I::Signed, size: Dim<'n>) -> Self {
        assert!((idx.sx() as isize) < size.unbound() as isize);
        Self {
            unbound: I::from_signed(idx),
            __marker: PhantomData,
        }
    }

    /// Creates a `MaybeIdx` from a signed value, assuming without checking that it is less than
    /// `size`.
    #[inline]
    pub unsafe fn new_unchecked(idx: I::Signed, size: Dim<'n>) -> Self {
        debug_assert!((idx.sx() as isize) < size.unbound() as isize);
        Self {
            unbound: I::from_signed(idx),
            __marker: PhantomData,
        }
    }

    /// Creates a `MaybeIdx` without binding it to any particular dimension.
    #[inline]
    pub unsafe fn new_unbound(idx: I) -> Self {
        Self {
            unbound: idx,
            __marker: PhantomData,
        }
    }

    /// Returns the unbound value.
    #[inline]
    pub fn unbound(self) -> I {
        self.unbound
    }

    /// Returns the index if present, otherwise `None`.
    #[inline]
    pub fn idx(self) -> Option<Idx<'n, I>> {
        if self.unbound.to_signed() >= I::Signed::truncate(0) {
            Some(unsafe { Idx::new_unbound(self.unbound()) })
        } else {
            None
        }
    }

    /// Sign-extends the value to a `usize` `MaybeIdx`.
    #[inline]
    pub fn sx(self) -> MaybeIdx<'n> {
        unsafe { MaybeIdx::new_unbound(self.unbound.to_signed().sx()) }
    }

    /// Rebrands a mutable slice of signed indices, panicking if any entry is out of bounds for
    /// `size`.
    #[track_caller]
    #[inline]
    pub fn from_slice_mut_checked<'a>(slice: &'a mut [I::Signed], size: Dim<'n>) -> &'a mut [MaybeIdx<'n, I>] {
        Self::from_slice_ref_checked(slice, size);
        unsafe { &mut *(slice as *mut _ as *mut _) }
    }

    /// Rebrands a mutable slice of signed indices without checking its entries.
    #[track_caller]
    #[inline]
    pub unsafe fn from_slice_mut_unchecked<'a>(slice: &'a mut [I::Signed]) -> &'a mut [MaybeIdx<'n, I>] {
        unsafe { &mut *(slice as *mut _ as *mut _) }
    }

    /// Rebrands a slice of signed indices, panicking if any entry is out of bounds for `size`.
    #[track_caller]
    pub fn from_slice_ref_checked<'a>(slice: &'a [I::Signed], size: Dim<'n>) -> &'a [MaybeIdx<'n, I>] {
        for &idx in slice {
            Self::new_checked(idx, size);
        }
        unsafe { &*(slice as *const _ as *const _) }
    }

    /// Returns the raw signed values underlying a slice of `MaybeIdx`.
    #[track_caller]
    pub fn as_slice_ref<'a>(slice: &'a [MaybeIdx<'n, I>]) -> &'a [I::Signed] {
        unsafe { &*(slice as *const _ as *const _) }
    }

    /// Rebrands a slice of signed indices without checking its entries.
    #[track_caller]
    #[inline]
    pub unsafe fn from_slice_ref_unchecked<'a>(slice: &'a [I::Signed]) -> &'a [MaybeIdx<'n, I>] {
        unsafe { &*(slice as *const _ as *const _) }
    }
}

impl core::ops::Deref for Dim<'_> {
    type Target = usize;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.unbound
    }
}
impl<I: Index> core::ops::Deref for MaybeIdx<'_, I> {
    type Target = I::Signed;

    #[inline]
    fn deref(&self) -> &Self::Target {
        bytemuck::cast_ref(&self.unbound)
    }
}
impl<I: Index> core::ops::Deref for Idx<'_, I> {
    type Target = I;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.unbound
    }
}
impl<I: Index> core::ops::Deref for IdxInc<'_, I> {
    type Target = I;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.unbound
    }
}

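/// Slice whose length is tied to the dimension bound to `'n`, so that indexing with `Idx<'n>`
/// needs no runtime bounds check in release builds.
///
/// A minimal usage sketch, assuming the items of this module are in scope:
///
/// ```ignore
/// Dim::with(3, |n| {
///     let data = [1.0_f64, 2.0, 3.0];
///     let arr = Array::from_ref(&data, n);
///     let i = n.idx(2usize);
///     assert_eq!(arr[i], 3.0);
/// });
/// ```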
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct Array<'n, T> {
    __marker: PhantomData<Invariant<'n>>,
    unbound: [T],
}

impl<'n, T> Array<'n, T> {
    /// Rebrands a slice whose length matches `size`, panicking otherwise.
    #[inline]
    #[track_caller]
    pub fn from_ref<'a>(slice: &'a [T], size: Dim<'n>) -> &'a Self {
        assert!(slice.len() == size.unbound());
        unsafe { &*(slice as *const [T] as *const Self) }
    }

    /// Rebrands a mutable slice whose length matches `size`, panicking otherwise.
    #[inline]
    #[track_caller]
    pub fn from_mut<'a>(slice: &'a mut [T], size: Dim<'n>) -> &'a mut Self {
        assert!(slice.len() == size.unbound());
        unsafe { &mut *(slice as *mut [T] as *mut Self) }
    }

    /// Returns the underlying slice.
    #[inline]
    #[track_caller]
    pub fn as_ref(&self) -> &[T] {
        unsafe { &*(self as *const _ as *const _) }
    }

    /// Returns the underlying slice mutably.
    #[inline]
    #[track_caller]
    pub fn as_mut(&mut self) -> &mut [T] {
        unsafe { &mut *(self as *mut _ as *mut _) }
    }

    /// Returns the length of the array as a bound dimension.
    #[inline]
    pub fn len(&self) -> Dim<'n> {
        unsafe { Dim::new_unbound(self.unbound.len()) }
    }
}

impl<T: core::fmt::Debug> core::fmt::Debug for Array<'_, T> {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.unbound.fmt(f)
    }
}

// Branded indices are proven to be in bounds, so release builds skip the bounds checks; debug
// builds keep them as an extra sanity check.
impl<'n, T> core::ops::Index<Range<IdxInc<'n>>> for Array<'n, T> {
    type Output = [T];

    #[track_caller]
    fn index(&self, idx: Range<IdxInc<'n>>) -> &Self::Output {
        #[cfg(debug_assertions)]
        {
            &self.unbound[idx.start.unbound()..idx.end.unbound()]
        }
        #[cfg(not(debug_assertions))]
        unsafe {
            self.unbound.get_unchecked(idx.start.unbound()..idx.end.unbound())
        }
    }
}
impl<'n, T> core::ops::IndexMut<Range<IdxInc<'n>>> for Array<'n, T> {
    #[track_caller]
    fn index_mut(&mut self, idx: Range<IdxInc<'n>>) -> &mut Self::Output {
        #[cfg(debug_assertions)]
        {
            &mut self.unbound[idx.start.unbound()..idx.end.unbound()]
        }
        #[cfg(not(debug_assertions))]
        unsafe {
            self.unbound.get_unchecked_mut(idx.start.unbound()..idx.end.unbound())
        }
    }
}
impl<'n, T> core::ops::Index<Idx<'n>> for Array<'n, T> {
    type Output = T;

    #[track_caller]
    fn index(&self, idx: Idx<'n>) -> &Self::Output {
        #[cfg(debug_assertions)]
        {
            &self.unbound[idx.unbound()]
        }
        #[cfg(not(debug_assertions))]
        unsafe {
            self.unbound.get_unchecked(idx.unbound())
        }
    }
}
impl<'n, T> core::ops::IndexMut<Idx<'n>> for Array<'n, T> {
    #[track_caller]
    fn index_mut(&mut self, idx: Idx<'n>) -> &mut Self::Output {
        #[cfg(debug_assertions)]
        {
            &mut self.unbound[idx.unbound()]
        }
        #[cfg(not(debug_assertions))]
        unsafe {
            self.unbound.get_unchecked_mut(idx.unbound())
        }
    }
}

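/// Dimension statically known to be equal to `1`.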
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct One;

/// Index statically known to be equal to `0`, for a dimension equal to `1`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Zero;

/// Inclusive index into a dimension equal to `1`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct IdxIncOne<I: Index = usize> {
    inner: I,
}

/// Possibly absent index into a dimension equal to `1`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct MaybeIdxOne<I: Index = usize> {
    inner: I,
}

impl<I: Index> Unbind<I> for IdxIncOne<I> {
    #[inline]
    unsafe fn new_unbound(idx: I) -> Self {
        Self { inner: idx }
    }

    #[inline]
    fn unbound(self) -> I {
        self.inner
    }
}

impl<I: Index> Unbind<I::Signed> for MaybeIdxOne<I> {
    #[inline]
    unsafe fn new_unbound(idx: I::Signed) -> Self {
        Self { inner: I::from_signed(idx) }
    }

    #[inline]
    fn unbound(self) -> I::Signed {
        self.inner.to_signed()
    }
}

impl<I: Index> Unbind<I> for Zero {
    #[inline]
    unsafe fn new_unbound(idx: I) -> Self {
        equator::debug_assert!(idx.zx() == 0);
        Zero
    }

    #[inline]
    fn unbound(self) -> I {
        I::truncate(0)
    }
}

impl Unbind for One {
    #[inline]
    unsafe fn new_unbound(idx: usize) -> Self {
        equator::debug_assert!(idx == 1);
        One
    }

    #[inline]
    fn unbound(self) -> usize {
        1
    }
}

impl<I: Index> From<Zero> for IdxIncOne<I> {
    fn from(_: Zero) -> Self {
        Self { inner: I::truncate(0) }
    }
}

impl ShapeIdx for One {
    type Idx<I: Index> = Zero;
    type IdxInc<I: Index> = IdxIncOne<I>;
    type MaybeIdx<I: Index> = MaybeIdxOne<I>;
}

impl PartialEq<One> for IdxIncOne {
    #[inline]
    fn eq(&self, _: &One) -> bool {
        self.inner == 1
    }
}
impl PartialOrd<One> for IdxIncOne {
    #[inline]
    fn partial_cmp(&self, _: &One) -> Option<core::cmp::Ordering> {
        if self.inner == 1 {
            Some(core::cmp::Ordering::Equal)
        } else {
            Some(core::cmp::Ordering::Less)
        }
    }
}

impl PartialEq<One> for Zero {
    #[inline]
    fn eq(&self, _: &One) -> bool {
        false
    }
}
impl PartialOrd<One> for Zero {
    #[inline]
    fn partial_cmp(&self, _: &One) -> Option<core::cmp::Ordering> {
        Some(core::cmp::Ordering::Less)
    }
}
impl Shape for One {
    const IS_BOUND: bool = true;
919}