1use core::{self, cmp, convert, fmt, marker, mem, slice};
15
16use super::limb::{
17 LIMB_BITS, LIMB_BYTES, LimbChoice, LimbType, ct_find_first_set_bit_l, ct_find_last_set_bit_l,
18 ct_find_last_set_byte_l, ct_is_nonzero_l, ct_is_zero_l, ct_lsb_mask_l,
19};
20use super::usize_ct_cmp::ct_eq_usize_usize;
21
22pub const fn ct_mp_nlimbs(len: usize) -> usize {
30 (len + LIMB_BYTES - 1) / LIMB_BYTES
31}
32
33pub const fn ct_mp_limbs_align_len(len: usize) -> usize {
34 ct_mp_nlimbs(len) * LIMB_BYTES
35}
36
// Exercise the limb count computation at the byte-length boundaries around
// one and two limbs.
#[test]
fn test_ct_mp_nlimbs() {
    assert_eq!(ct_mp_nlimbs(0), 0);
    assert_eq!(ct_mp_nlimbs(1), 1);
    assert_eq!(ct_mp_nlimbs(LIMB_BYTES - 1), 1);
    assert_eq!(ct_mp_nlimbs(LIMB_BYTES), 1);
    assert_eq!(ct_mp_nlimbs(LIMB_BYTES + 1), 2);
    assert_eq!(ct_mp_nlimbs(2 * LIMB_BYTES - 1), 2);
}
46
47fn _be_mp_load_l_full(limbs: &[u8], src_end: usize) -> LimbType {
67 let src_begin = src_end - LIMB_BYTES;
68 let src = &limbs[src_begin..src_end];
69 let src = <[u8; LIMB_BYTES] as TryFrom<&[u8]>>::try_from(src).unwrap();
70 LimbType::from_be_bytes(src)
71}
72
73fn _be_mp_load_l_high_partial(limbs: &[u8], src_end: usize) -> LimbType {
94 let mut src: [u8; LIMB_BYTES] = [0; LIMB_BYTES];
95 src[LIMB_BYTES - src_end..LIMB_BYTES].copy_from_slice(&limbs[0..src_end]);
96 LimbType::from_be_bytes(src)
97}
98
99fn be_mp_load_l_full(limbs: &[u8], i: usize) -> LimbType {
122 debug_assert!(i * LIMB_BYTES < limbs.len());
123 let src_end = limbs.len() - i * LIMB_BYTES;
124 _be_mp_load_l_full(limbs, src_end)
125}
126
127fn be_mp_load_l(limbs: &[u8], i: usize) -> LimbType {
149 debug_assert!(i * LIMB_BYTES <= limbs.len());
150 let src_end = limbs.len() - i * LIMB_BYTES;
151 if src_end >= LIMB_BYTES {
152 _be_mp_load_l_full(limbs, src_end)
153 } else {
154 _be_mp_load_l_high_partial(limbs, src_end)
155 }
156}
157
// Load from big-endian buffers of aligned, unaligned and sub-limb lengths,
// cross-checking against the full-limb fast path where applicable.
#[test]
fn test_be_mp_load_l() {
    // All-zero, limb-aligned buffer.
    let limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 0);

    // One bit set on each side of the limb boundary: slice index LIMB_BYTES
    // is limb 0's most significant byte, LIMB_BYTES - 1 is limb 1's least
    // significant one.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    limbs[LIMB_BYTES] = 0x80;
    limbs[LIMB_BYTES - 1] = 1;
    assert_eq!(be_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(be_mp_load_l_full(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(be_mp_load_l(&limbs, 1), 1);
    assert_eq!(be_mp_load_l_full(&limbs, 1), 1);

    // Sub-limb length buffer: partial (high) limb path only.
    let limbs: [u8; 1] = [0; 1];
    assert_eq!(be_mp_load_l(&limbs, 0), 0);

    // Unaligned lengths: the top limb is partial.
    let limbs: [u8; LIMB_BYTES + 1] = [0; LIMB_BYTES + 1];
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 0);

    // Least significant byte is at the slice's tail.
    let limbs: [u8; 2] = [0, 1];
    assert_eq!(be_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    limbs[1] = 1;
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 1);

    // Second-least significant byte in a two byte partial limb.
    let limbs: [u8; 2] = [1, 0];
    assert_eq!(be_mp_load_l(&limbs, 0), 0x0100);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    limbs[0] = 1;
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 0x0100);
}
198
199fn _be_mp_store_l_full(limbs: &mut [u8], dst_end: usize, value: LimbType) {
218 let dst_begin = dst_end - LIMB_BYTES;
219 let dst = &mut limbs[dst_begin..dst_end];
220 let dst = <&mut [u8; LIMB_BYTES] as TryFrom<&mut [u8]>>::try_from(dst).unwrap();
221 *dst = value.to_be_bytes();
222}
223
224fn _be_mp_store_l_high_partial(limbs: &mut [u8], dst_end: usize, value: LimbType) {
246 let dst = &mut limbs[0..dst_end];
247 let src: [u8; LIMB_BYTES] = value.to_be_bytes();
248 dst.copy_from_slice(&src[LIMB_BYTES - dst_end..LIMB_BYTES]);
249}
250
251fn be_mp_store_l_full(limbs: &mut [u8], i: usize, value: LimbType) {
273 debug_assert!(i * LIMB_BYTES < limbs.len());
274 let dst_end = limbs.len() - i * LIMB_BYTES;
275 _be_mp_store_l_full(limbs, dst_end, value);
276}
277
278fn be_mp_store_l(limbs: &mut [u8], i: usize, value: LimbType) {
302 debug_assert!(i * LIMB_BYTES <= limbs.len());
303 let dst_end = limbs.len() - i * LIMB_BYTES;
304 if dst_end >= LIMB_BYTES {
305 _be_mp_store_l_full(limbs, dst_end, value);
306 } else {
307 debug_assert_eq!(value >> (8 * dst_end), 0);
308 _be_mp_store_l_high_partial(limbs, dst_end, value);
309 }
310}
311
// Store/load round trips on big-endian buffers of aligned, unaligned and
// sub-limb lengths.
#[test]
fn test_be_mp_store_l() {
    // Limb-aligned buffer, general store path.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    be_mp_store_l(&mut limbs, 0, 1 << (LIMB_BITS - 1));
    be_mp_store_l(&mut limbs, 1, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(be_mp_load_l(&limbs, 1), 1);

    // Same values through the full-limb fast path.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    be_mp_store_l_full(&mut limbs, 0, 1 << (LIMB_BITS - 1));
    be_mp_store_l_full(&mut limbs, 1, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(be_mp_load_l(&limbs, 1), 1);

    // Sub-limb length buffers: partial (high) limb store path.
    let mut limbs: [u8; 1] = [0; 1];
    be_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; LIMB_BYTES - 1] = [0; LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1);

    // Highest bit still representable in a LIMB_BYTES - 1 buffer.
    let mut limbs: [u8; LIMB_BYTES - 1] = [0; LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, 1 << LIMB_BITS - 8 - 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1 << LIMB_BITS - 8 - 1);

    let mut limbs: [u8; 2] = [0; 2];
    be_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; 2] = [0; 2];
    be_mp_store_l(&mut limbs, 0, 0x0100);
    assert_eq!(be_mp_load_l(&limbs, 0), 0x0100);

    // Unaligned lengths: partial store to the top limb only.
    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    be_mp_store_l(&mut limbs, 1, 1);
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 1);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    be_mp_store_l(&mut limbs, 1, 0x0100);
    assert_eq!(be_mp_load_l(&limbs, 0), 0);
    assert_eq!(be_mp_load_l(&limbs, 1), 0x0100);
}
356
/// Zero all bytes of significance >= `begin` in a big-endian multiprecision
/// integer byte buffer. In big-endian order these are the buffer's leading
/// `limbs.len() - begin` bytes; a `begin` at or past the buffer length is a
/// no-op.
fn be_mp_clear_bytes_above(limbs: &mut [u8], begin: usize) {
    if let Some(n_cleared) = limbs.len().checked_sub(begin) {
        limbs[..n_cleared].fill(0);
    }
}
364
// Clear high bytes at unaligned boundaries and verify the retained low part
// survives unmodified.
#[test]
fn test_be_mp_clear_bytes_above() {
    // Boundary within the (partial) top limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0 >> 8);
    be_mp_clear_bytes_above(&mut limbs, LIMB_BYTES + 1);
    assert_eq!(be_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(be_mp_load_l(&mut limbs, 1), !0 & 0xff);

    // Boundary within the low limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0 >> 8);
    be_mp_clear_bytes_above(&mut limbs, LIMB_BYTES - 1);
    assert_eq!(be_mp_load_l(&mut limbs, 0), !0 >> 8);
    assert_eq!(be_mp_load_l(&mut limbs, 1), 0);

    // Boundary at the buffer end: nothing gets cleared.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0);
    be_mp_clear_bytes_above(&mut limbs, 2 * LIMB_BYTES);
    assert_eq!(be_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(be_mp_load_l(&mut limbs, 1), !0);
}
388
/// Zero all bytes of significance < `end` in a big-endian multiprecision
/// integer byte buffer. In big-endian order these are the buffer's trailing
/// `end` bytes; an `end` exceeding the buffer length clears everything.
fn be_mp_clear_bytes_below(limbs: &mut [u8], end: usize) {
    let n_retained = limbs.len().saturating_sub(end);
    limbs[n_retained..].fill(0);
}
394
// Clear low bytes at unaligned boundaries and verify the retained high part
// survives unmodified.
#[test]
fn test_be_mp_clear_bytes_below() {
    // Boundary within the (partial) top limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0 >> 8);
    be_mp_clear_bytes_below(&mut limbs, LIMB_BYTES + 1);
    assert_eq!(be_mp_load_l(&mut limbs, 0), 0);
    assert_eq!(be_mp_load_l(&mut limbs, 1), (!0 >> 8) & !0xff);

    // Boundary within the low limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0 >> 8);
    be_mp_clear_bytes_below(&mut limbs, LIMB_BYTES - 1);
    assert_eq!(be_mp_load_l(&mut limbs, 0), 0xff << 8 * (LIMB_BYTES - 1));
    assert_eq!(be_mp_load_l(&mut limbs, 1), !0 >> 8);

    // Boundary at position zero: nothing gets cleared.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    be_mp_store_l(&mut limbs, 0, !0);
    be_mp_store_l(&mut limbs, 1, !0);
    be_mp_clear_bytes_below(&mut limbs, 0);
    assert_eq!(be_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(be_mp_load_l(&mut limbs, 1), !0);
}
418
419fn _le_mp_load_l_full(limbs: &[u8], src_begin: usize) -> LimbType {
438 let src_end = src_begin + LIMB_BYTES;
439 let src = &limbs[src_begin..src_end];
440 let src = <[u8; LIMB_BYTES] as TryFrom<&[u8]>>::try_from(src).unwrap();
441 LimbType::from_le_bytes(src)
442}
443
444fn _le_mp_load_l_high_partial(limbs: &[u8], src_begin: usize) -> LimbType {
464 let mut src: [u8; LIMB_BYTES] = [0; LIMB_BYTES];
465 src[..limbs.len() - src_begin].copy_from_slice(&limbs[src_begin..]);
466 LimbType::from_le_bytes(src)
467}
468
469fn le_mp_load_l_full(limbs: &[u8], i: usize) -> LimbType {
492 let src_begin = i * LIMB_BYTES;
493 debug_assert!(src_begin < limbs.len());
494 _le_mp_load_l_full(limbs, src_begin)
495}
496
497fn le_mp_load_l(limbs: &[u8], i: usize) -> LimbType {
519 let src_begin = i * LIMB_BYTES;
520 debug_assert!(src_begin < limbs.len());
521 if src_begin + LIMB_BYTES <= limbs.len() {
522 _le_mp_load_l_full(limbs, src_begin)
523 } else {
524 _le_mp_load_l_high_partial(limbs, src_begin)
525 }
526}
527
// Load from little-endian buffers of aligned, unaligned and sub-limb
// lengths, cross-checking against the full-limb fast path where applicable.
#[test]
fn test_le_mp_load_l() {
    // All-zero, limb-aligned buffer.
    let limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 0);

    // One bit set on each side of the limb boundary: slice index
    // LIMB_BYTES - 1 is limb 0's most significant byte, LIMB_BYTES is
    // limb 1's least significant one.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    limbs[LIMB_BYTES - 1] = 0x80;
    limbs[LIMB_BYTES] = 1;
    assert_eq!(le_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(le_mp_load_l_full(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(le_mp_load_l(&limbs, 1), 1);
    assert_eq!(le_mp_load_l_full(&limbs, 1), 1);

    // Sub-limb length buffer: partial (high) limb path only.
    let limbs: [u8; 1] = [0; 1];
    assert_eq!(le_mp_load_l(&limbs, 0), 0);

    // Unaligned lengths: the top limb is partial.
    let limbs: [u8; LIMB_BYTES + 1] = [0; LIMB_BYTES + 1];
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 0);

    // Least significant byte is at the slice's head.
    let limbs: [u8; 2] = [1, 0];
    assert_eq!(le_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    limbs[LIMB_BYTES] = 1;
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 1);

    // Second-least significant byte in a two byte partial limb.
    let limbs: [u8; 2] = [0, 1];
    assert_eq!(le_mp_load_l(&limbs, 0), 0x0100);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    limbs[LIMB_BYTES + 1] = 1;
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l_full(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 0x0100);
}
568
569fn _le_mp_store_l_full(limbs: &mut [u8], dst_begin: usize, value: LimbType) {
587 let dst_end = dst_begin + LIMB_BYTES;
588 let dst = &mut limbs[dst_begin..dst_end];
589 let dst = <&mut [u8; LIMB_BYTES] as TryFrom<&mut [u8]>>::try_from(dst).unwrap();
590 *dst = value.to_le_bytes();
591}
592
593fn _le_mp_store_l_high_partial(limbs: &mut [u8], dst_begin: usize, value: LimbType) {
614 let dst_end = limbs.len();
615 let dst = &mut limbs[dst_begin..];
616 let src: [u8; LIMB_BYTES] = value.to_le_bytes();
617 dst.copy_from_slice(&src[0..dst_end - dst_begin]);
618}
619
620fn le_mp_store_l_full(limbs: &mut [u8], i: usize, value: LimbType) {
642 let dst_begin = i * LIMB_BYTES;
643 debug_assert!(dst_begin < limbs.len());
644 _le_mp_store_l_full(limbs, dst_begin, value);
645}
646
647fn le_mp_store_l(limbs: &mut [u8], i: usize, value: LimbType) {
671 let dst_begin = i * LIMB_BYTES;
672 debug_assert!(dst_begin < limbs.len());
673 if dst_begin + LIMB_BYTES <= limbs.len() {
674 _le_mp_store_l_full(limbs, dst_begin, value);
675 } else {
676 debug_assert_eq!(value >> (8 * (limbs.len() - dst_begin)), 0);
677 _le_mp_store_l_high_partial(limbs, dst_begin, value);
678 }
679}
680
// Store/load round trips on little-endian buffers of aligned, unaligned and
// sub-limb lengths.
#[test]
fn test_le_mp_store_l() {
    // Limb-aligned buffer, general store path.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    le_mp_store_l(&mut limbs, 0, 1 << (LIMB_BITS - 1));
    le_mp_store_l(&mut limbs, 1, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(le_mp_load_l(&limbs, 1), 1);

    // Same values through the full-limb fast path.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    le_mp_store_l_full(&mut limbs, 0, 1 << (LIMB_BITS - 1));
    le_mp_store_l_full(&mut limbs, 1, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1 << (LIMB_BITS - 1));
    assert_eq!(le_mp_load_l(&limbs, 1), 1);

    // Sub-limb length buffers: partial (high) limb store path.
    let mut limbs: [u8; 1] = [0; 1];
    le_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; LIMB_BYTES - 1] = [0; LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1);

    // Highest bit still representable in a LIMB_BYTES - 1 buffer.
    let mut limbs: [u8; LIMB_BYTES - 1] = [0; LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, 1 << LIMB_BITS - 8 - 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1 << LIMB_BITS - 8 - 1);

    let mut limbs: [u8; 2] = [0; 2];
    le_mp_store_l(&mut limbs, 0, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 1);

    let mut limbs: [u8; 2] = [0; 2];
    le_mp_store_l(&mut limbs, 0, 0x0100);
    assert_eq!(le_mp_load_l(&limbs, 0), 0x0100);

    // Unaligned lengths: partial store to the top limb only.
    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    le_mp_store_l(&mut limbs, 1, 1);
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 1);

    let mut limbs: [u8; LIMB_BYTES + 2] = [0; LIMB_BYTES + 2];
    le_mp_store_l(&mut limbs, 1, 0x0100);
    assert_eq!(le_mp_load_l(&limbs, 0), 0);
    assert_eq!(le_mp_load_l(&limbs, 1), 0x0100);
}
725
/// Zero all bytes of significance >= `begin` in a little-endian
/// multiprecision integer byte buffer. In little-endian order these are the
/// buffer's bytes from slice index `begin` onwards; a `begin` at or past the
/// buffer length is a no-op.
fn le_mp_clear_bytes_above(limbs: &mut [u8], begin: usize) {
    if begin < limbs.len() {
        limbs[begin..].fill(0);
    }
}
732
// Clear high bytes at unaligned boundaries and verify the retained low part
// survives unmodified.
#[test]
fn test_le_mp_clear_bytes_above() {
    // Boundary within the (partial) top limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0 >> 8);
    le_mp_clear_bytes_above(&mut limbs, LIMB_BYTES + 1);
    assert_eq!(le_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(le_mp_load_l(&mut limbs, 1), !0 & 0xff);

    // Boundary within the low limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0 >> 8);
    le_mp_clear_bytes_above(&mut limbs, LIMB_BYTES - 1);
    assert_eq!(le_mp_load_l(&mut limbs, 0), !0 >> 8);
    assert_eq!(le_mp_load_l(&mut limbs, 1), 0);

    // Boundary at the buffer end: nothing gets cleared.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0);
    le_mp_clear_bytes_above(&mut limbs, 2 * LIMB_BYTES);
    assert_eq!(le_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(le_mp_load_l(&mut limbs, 1), !0);
}
756
757fn le_mp_clear_bytes_below(limbs: &mut [u8], end: usize) {
758 let end = end.min(limbs.len());
759 limbs[..end].fill(0);
760}
761
// Clear low bytes at unaligned boundaries and verify the retained high part
// survives unmodified.
#[test]
fn test_le_mp_clear_bytes_below() {
    // Boundary within the (partial) top limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0 >> 8);
    le_mp_clear_bytes_below(&mut limbs, LIMB_BYTES + 1);
    assert_eq!(le_mp_load_l(&mut limbs, 0), 0);
    assert_eq!(le_mp_load_l(&mut limbs, 1), (!0 >> 8) & !0xff);

    // Boundary within the low limb.
    let mut limbs: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0 >> 8);
    le_mp_clear_bytes_below(&mut limbs, LIMB_BYTES - 1);
    assert_eq!(le_mp_load_l(&mut limbs, 0), 0xff << 8 * (LIMB_BYTES - 1));
    assert_eq!(le_mp_load_l(&mut limbs, 1), !0 >> 8);

    // Boundary at position zero: nothing gets cleared.
    let mut limbs: [u8; 2 * LIMB_BYTES] = [0; 2 * LIMB_BYTES];
    le_mp_store_l(&mut limbs, 0, !0);
    le_mp_store_l(&mut limbs, 1, !0);
    le_mp_clear_bytes_below(&mut limbs, 0);
    assert_eq!(le_mp_load_l(&mut limbs, 0), !0);
    assert_eq!(le_mp_load_l(&mut limbs, 1), !0);
}
785
/// Internal base interface common to all multiprecision unsigned integer
/// representations in this module.
pub trait MpUIntCommonPriv: Sized {
    /// Whether buffer lengths not aligned to a multiple of `LIMB_BYTES` are
    /// supported by the implementation.
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool;

    /// The underlying buffer length in bytes.
    fn _len(&self) -> usize;

    /// Mask selecting the bits of the most significant limb which are backed
    /// by the buffer. Zero for an empty buffer; all ones when unaligned
    /// buffer lengths are unsupported (the top limb is always full then).
    fn partial_high_mask(&self) -> LimbType {
        if Self::SUPPORTS_UNALIGNED_BUFFER_LENGTHS {
            // Number of buffer bytes backing the high limb, in
            // 1..=LIMB_BYTES for a non-empty buffer.
            let high_npartial = if self._len() != 0 {
                ((self._len()) - 1) % LIMB_BYTES + 1
            } else {
                0
            };
            ct_lsb_mask_l(8 * high_npartial as u32)
        } else {
            !0
        }
    }

    /// Eight times the number of bytes backing a partial high limb, zero if
    /// the buffer length is limb-aligned (or unaligned lengths are
    /// unsupported altogether).
    fn partial_high_shift(&self) -> u32 {
        if Self::SUPPORTS_UNALIGNED_BUFFER_LENGTHS {
            let high_npartial = self._len() % LIMB_BYTES;
            if high_npartial == 0 {
                0
            } else {
                8 * high_npartial as u32
            }
        } else {
            0
        }
    }
}
817
/// Error returned by the `try_into_u8()`/`try_into_u16()`/... conversions
/// when the multiprecision integer's value does not fit the requested native
/// integer type.
#[derive(Debug)]
pub struct MpUIntCommonTryIntoNativeError {}
820
/// Emit a `try_into_u8()`/`try_into_u16()`/... style conversion primitive
/// for the native unsigned integer type `$nt`, failing with
/// [`MpUIntCommonTryIntoNativeError`] if the value does not fit.
macro_rules! _mpu_try_into_native_u {
    ($nt:ty, $name:ident) => {
        fn $name(&self) -> Result<$nt, MpUIntCommonTryIntoNativeError> {
            // Number of limbs a $nt spans and the count of bytes it occupies
            // within the most significant of those limbs.
            let native_type_nlimbs = ct_mp_nlimbs(mem::size_of::<$nt>());
            let nbytes_from_last = ((mem::size_of::<$nt>() - 1) % LIMB_BYTES) + 1;
            // OR together all bits beyond $nt's range; any set bit means the
            // value is too large. The scan itself is branch-free in the
            // limb values, only the final error check branches on the
            // accumulated result.
            let mut head_is_nonzero = 0;
            for i in native_type_nlimbs..self.nlimbs() {
                head_is_nonzero |= self.load_l(i);
            }
            let last_val = self.load_l(native_type_nlimbs - 1);
            let last_mask = ct_lsb_mask_l(8 * nbytes_from_last as u32);
            head_is_nonzero |= last_val & !last_mask;
            if ct_is_nonzero_l(head_is_nonzero) != 0 {
                return Err(MpUIntCommonTryIntoNativeError {});
            }
            // Assemble the result, most significant limb first.
            let mut result: $nt = (last_val & last_mask) as $nt;
            let mut i = native_type_nlimbs - 1;
            while i > 0 {
                i -= 1;
                // The clamp to <$nt>::BITS - 1 only serves to keep the shift
                // expression compilable for instantiations where the loop
                // never runs: LIMB_BITS >= <$nt>::BITS implies
                // native_type_nlimbs == 1. Whenever the loop does run,
                // <$nt>::BITS > LIMB_BITS and the shift equals LIMB_BITS.
                result <<= LIMB_BITS.min(<$nt>::BITS - 1);
                result |= self.load_l_full(i) as $nt;
            }
            Ok(result)
        }
    };
}
850
/// Common read-only interface to multiprecision unsigned integers,
/// independent of the underlying representation's endianness.
pub trait MpUIntCommon: MpUIntCommonPriv + fmt::LowerHex {
    /// The underlying buffer length in bytes.
    fn len(&self) -> usize {
        self._len()
    }

    /// Number of limbs, including a potentially partial high one.
    fn nlimbs(&self) -> usize {
        ct_mp_nlimbs(self.len())
    }

    /// Whether the underlying buffer is empty.
    fn is_empty(&self) -> bool;

    /// Load the limb at index `i`, which must not be a partial high limb.
    /// Limb indices count in order of increasing significance.
    fn load_l_full(&self, i: usize) -> LimbType;
    /// Load the limb at index `i`, possibly a partial high one, whose
    /// unbacked high bytes read as zero.
    fn load_l(&self, i: usize) -> LimbType;

    /// Test the bit at (zero based) position `pos`. Out-of-range positions
    /// read as clear. Note that the limb index derived from `pos` is
    /// branched upon.
    fn test_bit(&self, pos: usize) -> LimbChoice {
        let limb_index = pos / LIMB_BITS as usize;
        if limb_index >= self.nlimbs() {
            return LimbChoice::from(0);
        }
        let pos_in_limb = pos % LIMB_BITS as usize;
        let l = self.load_l(limb_index);
        LimbChoice::from((l >> pos_in_limb) & 1)
    }

    /// Format the value in lower-case hexadecimal, most significant limb
    /// first, with limbs separated by `'_'`. Backs the `fmt::LowerHex`
    /// implementations.
    fn fmt_lower_hex(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map a value in 0x0..=0xf to its lower-case hex digit.
        fn nibble_to_hexchar(nibble: u8) -> char {
            let c = match nibble {
                0x0..=0x9 => b'0' + nibble,
                0xa..=0xf => b'a' + (nibble - 0xa),
                _ => unreachable!(),
            };
            c as char
        }

        // Emit one byte as two hex digits.
        fn fmt_byte(f: &mut fmt::Formatter<'_>, v: u8) -> fmt::Result {
            <fmt::Formatter<'_> as fmt::Write>::write_char(f, nibble_to_hexchar(v >> 4))?;
            <fmt::Formatter<'_> as fmt::Write>::write_char(f, nibble_to_hexchar(v & 0xf))?;
            Ok(())
        }

        // Emit the `len` least significant bytes of limb `v`, most
        // significant byte first.
        fn fmt_l(f: &mut fmt::Formatter<'_>, v: LimbType, len: usize) -> fmt::Result {
            for i in 0..len {
                fmt_byte(f, (v >> (8 * (len - i - 1))) as u8)?;
            }
            Ok(())
        }

        if f.alternate() {
            f.write_str("0x")?;
        }
        if self.is_empty() {
            f.write_str("(empty)")?;
            return Ok(());
        }

        // The high limb might be partial; print only its backed bytes.
        let v = self.load_l(self.nlimbs() - 1);
        fmt_l(f, v, (self.len() - 1) % LIMB_BYTES + 1)?;

        // Remaining limbs follow in decreasing order of significance.
        let mut i = 0;
        while i + 1 < self.nlimbs() {
            <fmt::Formatter<'_> as fmt::Write>::write_char(f, '_')?;
            let v = self.load_l(self.nlimbs() - 2 - i);
            fmt_l(f, v, LIMB_BYTES)?;
            i += 1;
        }

        Ok(())
    }

    /// Check whether the value would fit a buffer of `len` bytes: either the
    /// own buffer is not larger, or the excess is confined to a zero high
    /// portion of the top limb (only possible for limb-aligned-only
    /// implementations).
    fn len_is_compatible_with(&self, len: usize) -> bool {
        if self._len() <= len {
            true
        } else if !Self::SUPPORTS_UNALIGNED_BUFFER_LENGTHS && self._len() - len < LIMB_BYTES {
            self.load_l(self.nlimbs() - 1) >> (8 * (LIMB_BYTES - (self._len() - len))) == 0
        } else {
            false
        }
    }

    _mpu_try_into_native_u!(u8, try_into_u8);
    _mpu_try_into_native_u!(u16, try_into_u16);
    _mpu_try_into_native_u!(u32, try_into_u32);
    _mpu_try_into_native_u!(u64, try_into_u64);
}
936
/// Marker trait tagging read-only multiprecision unsigned integer
/// implementations.
pub trait MpUInt: MpUIntCommon {}
938
/// Emit a `set_to_u8()`/`set_to_u16()`/... style primitive assigning a
/// native unsigned `$nt` `value` to the multiprecision integer, whose buffer
/// must be large enough to hold it.
macro_rules! _mpu_set_to_native_u {
    ($nt:ty, $name:ident) => {
        fn $name(&mut self, mut value: $nt) {
            debug_assert!(self.len() >= mem::size_of::<$nt>());
            // Zero everything beyond $nt's range first.
            self.clear_bytes_above(mem::size_of::<$nt>());
            let native_type_nlimbs = ct_mp_nlimbs(mem::size_of::<$nt>());
            for i in 0..native_type_nlimbs {
                // Store one limb's worth of low bits; the mask truncates
                // `value` to LimbType range for $nt wider than a limb.
                self.store_l(i, (value & (!(0 as LimbType) as $nt)) as LimbType);
                // As in _mpu_try_into_native_u!(): the clamp to
                // <$nt>::BITS - 1 only keeps the shift compilable for $nt
                // not wider than a limb, in which case this is the sole
                // iteration and the shifted-out value is unused.
                value >>= LIMB_BITS.min(<$nt>::BITS - 1);
            }
        }
    };
}
955
956pub trait MpMutUInt: MpUIntCommon {
957 fn store_l_full(&mut self, i: usize, value: LimbType);
958 fn store_l(&mut self, i: usize, value: LimbType);
959 fn clear_bytes_above(&mut self, begin: usize);
960 fn clear_bytes_below(&mut self, end: usize);
961
962 fn clear_bytes_above_cond(&mut self, begin: usize, cond: LimbChoice) {
963 if begin >= self.len() {
964 return;
965 }
966
967 let mask = cond.select(!0, 0);
968 let begin_limb = begin / LIMB_BYTES;
969 let begin_in_limb = begin % LIMB_BYTES;
970 let val = self.load_l(begin_limb);
971 let first_mask = !ct_lsb_mask_l(8 * begin_in_limb as u32) | mask;
972 let val = val & first_mask;
973 self.store_l(begin_limb, val);
974
975 for i in begin_limb + 1..self.nlimbs() {
976 self.store_l(i, self.load_l(i) & mask);
977 }
978 }
979
980 fn copy_from<S: MpUIntCommon>(&'_ mut self, src: &S) {
981 let src_nlimbs = src.nlimbs();
982 let dst_nlimbs = self.nlimbs();
983 debug_assert!(find_last_set_byte_mp(src) <= self.len());
984
985 if src_nlimbs == 0 {
986 self.clear_bytes_above(0);
987 return;
988 } else if dst_nlimbs == 0 {
989 return;
990 }
991 let common_nlimbs = src_nlimbs.min(dst_nlimbs);
992 for i in 0..common_nlimbs - 1 {
993 self.store_l_full(i, src.load_l_full(i));
994 }
995 let high_limb = src.load_l(common_nlimbs - 1);
996 debug_assert!(src_nlimbs < dst_nlimbs || (high_limb & !self.partial_high_mask()) == 0);
997 self.store_l(common_nlimbs - 1, high_limb);
998 self.clear_bytes_above(src.len());
999 }
1000
1001 fn copy_from_cond<S: MpUIntCommon>(&'_ mut self, src: &S, cond: LimbChoice) {
1002 let src_nlimbs = src.nlimbs();
1003 let dst_nlimbs = self.nlimbs();
1004 debug_assert!(find_last_set_byte_mp(src) <= self.len());
1005
1006 if src_nlimbs == 0 {
1007 self.clear_bytes_above_cond(0, cond);
1008 return;
1009 } else if dst_nlimbs == 0 {
1010 return;
1011 }
1012 let common_nlimbs = src_nlimbs.min(dst_nlimbs);
1013 for i in 0..common_nlimbs - 1 {
1014 let val = cond.select(self.load_l_full(i), src.load_l_full(i));
1015 self.store_l_full(i, val);
1016 }
1017 let high_limb = cond.select(
1018 self.load_l(common_nlimbs - 1),
1019 src.load_l(common_nlimbs - 1),
1020 );
1021 debug_assert!(src_nlimbs < dst_nlimbs || (high_limb & !self.partial_high_mask()) == 0);
1022 self.store_l(common_nlimbs - 1, high_limb);
1023 self.clear_bytes_above_cond(src.len(), cond);
1024 }
1025
1026 fn set_bit_to(&mut self, pos: usize, val: bool) {
1027 let limb_index = pos / LIMB_BITS as usize;
1028 debug_assert!(limb_index < self.nlimbs());
1029 let pos_in_limb = pos % LIMB_BITS as usize;
1030 let mut l = self.load_l(limb_index);
1031 let bit_pos_mask = (1 as LimbType) << pos_in_limb;
1032 let val_mask = LimbChoice::from(val as LimbType).select(0, bit_pos_mask);
1033 l &= !bit_pos_mask;
1034 l |= val_mask;
1035 self.store_l(limb_index, l)
1036 }
1037
1038 _mpu_set_to_native_u!(u8, set_to_u8);
1039 _mpu_set_to_native_u!(u16, set_to_u16);
1040 _mpu_set_to_native_u!(u32, set_to_u32);
1041 _mpu_set_to_native_u!(u64, set_to_u64);
1042}
1043
/// Internal base interface for multiprecision integers backed by an element
/// slice.
pub trait MpUIntSliceCommonPriv: MpUIntCommonPriv {
    /// The backing slice's element type, e.g. `u8` for byte slice based
    /// implementations.
    type BackingSliceElementType: Sized + Copy + cmp::PartialEq + convert::From<u8> + fmt::Debug;

    /// Size of one backing slice element in bytes.
    const BACKING_ELEMENT_SIZE: usize = mem::size_of::<Self::BackingSliceElementType>();

    /// Align a byte length as required by the implementation: unchanged if
    /// unaligned buffer lengths are supported, rounded up to the next limb
    /// boundary otherwise.
    fn _limbs_align_len(nbytes: usize) -> usize {
        if Self::SUPPORTS_UNALIGNED_BUFFER_LENGTHS {
            nbytes
        } else {
            ct_mp_limbs_align_len(nbytes)
        }
    }

    /// Number of backing slice elements needed to store an integer of
    /// `nbytes` bytes.
    fn n_backing_elements_for_len(nbytes: usize) -> usize {
        let nbytes = Self::_limbs_align_len(nbytes);
        nbytes / Self::BACKING_ELEMENT_SIZE + (nbytes % Self::BACKING_ELEMENT_SIZE != 0) as usize
    }

    /// Number of backing slice elements in this instance's buffer.
    fn n_backing_elements(&self) -> usize;

    /// Split the buffer into a (more significant, less significant) pair of
    /// views, with the less significant one spanning `nbytes` bytes.
    fn take(self, nbytes: usize) -> (Self, Self);
}
1066
/// Composite of the internal and public interfaces common to slice-backed
/// multiprecision integers.
pub trait MpUIntSliceCommon: MpUIntSliceCommonPriv + MpUIntCommon {}
1068
/// Internal part of the read-only, slice-backed multiprecision integer
/// interface.
pub trait MpUIntSlicePriv: MpUIntSliceCommon + MpUInt {
    /// The `Self` type instantiated with a different lifetime.
    type SelfT<'a>: MpUIntSlice<BackingSliceElementType = Self::BackingSliceElementType>
    where
        Self: 'a;

    /// Error type returned by [`from_slice()`](Self::from_slice).
    type FromSliceError: fmt::Debug;

    /// Construct an instance from a backing slice.
    fn from_slice<'a: 'b, 'b>(
        s: &'a [Self::BackingSliceElementType],
    ) -> Result<Self::SelfT<'b>, Self::FromSliceError>
    where
        Self: 'b;

    /// Implementation backend of `shrink_to()`; `nbytes` has been clamped
    /// and validated by the caller.
    fn _shrink_to(&self, nbytes: usize) -> Self::SelfT<'_>;
}
1084
/// Public interface to read-only, slice-backed multiprecision unsigned
/// integers.
pub trait MpUIntSlice: MpUIntSlicePriv + MpUInt {
    /// Reborrow `self` as an instance with a shorter lifetime.
    fn coerce_lifetime(&self) -> Self::SelfT<'_>;

    /// Restrict the view to the `nbytes` least significant bytes. The
    /// stripped high bytes must not contain any set bits.
    fn shrink_to(&self, nbytes: usize) -> Self::SelfT<'_> {
        let nbytes = nbytes.min(self._len());
        debug_assert!(nbytes >= find_last_set_byte_mp(self));
        self._shrink_to(nbytes)
    }
}
1094
/// Internal part of the mutable, slice-backed multiprecision integer
/// interface.
pub trait MpMutUIntSlicePriv: MpUIntSliceCommon + MpMutUInt {
    /// The `Self` type instantiated with a different lifetime.
    type SelfT<'a>: MpMutUIntSlice<BackingSliceElementType = Self::BackingSliceElementType>
    where
        Self: 'a;

    /// Error type returned by [`from_slice()`](Self::from_slice).
    type FromSliceError: fmt::Debug;

    /// Construct an instance from a mutable backing slice.
    fn from_slice<'a: 'b, 'b>(
        s: &'a mut [Self::BackingSliceElementType],
    ) -> Result<Self::SelfT<'b>, Self::FromSliceError>
    where
        Self: 'b;

    /// Implementation backend of `shrink_to()`; `nbytes` has been clamped
    /// and validated by the caller.
    fn _shrink_to(&mut self, nbytes: usize) -> Self::SelfT<'_>;
}
1110
/// Public interface to mutable, slice-backed multiprecision unsigned
/// integers.
pub trait MpMutUIntSlice: MpMutUIntSlicePriv + MpMutUInt {
    /// Reborrow `self` as an instance with a shorter lifetime.
    fn coerce_lifetime(&mut self) -> Self::SelfT<'_>;

    /// Restrict the view to the `nbytes` least significant bytes. The
    /// stripped high bytes must not contain any set bits.
    fn shrink_to(&mut self, nbytes: usize) -> Self::SelfT<'_> {
        let nbytes = nbytes.min(self._len());
        debug_assert!(nbytes >= find_last_set_byte_mp(self));
        self._shrink_to(nbytes)
    }
}
1120
/// Read-only multiprecision unsigned integer view over a big-endian byte
/// slice.
#[derive(Clone)]
pub struct MpBigEndianUIntByteSlice<'a> {
    bytes: &'a [u8],
}
1125
1126impl<'a> MpBigEndianUIntByteSlice<'a> {
1127 pub fn from_bytes(bytes: &'a [u8]) -> Self {
1128 Self { bytes }
1129 }
1130
1131 pub fn as_bytes(&self) -> &'a [u8] {
1132 &self.bytes
1133 }
1134}
1135
// Byte slices support byte-granular, i.e. unaligned, buffer lengths.
impl<'a> MpUIntCommonPriv for MpBigEndianUIntByteSlice<'a> {
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = true;

    fn _len(&self) -> usize {
        self.bytes.len()
    }
}
1143
// Limb loads delegate to the big-endian buffer helpers.
impl<'a> MpUIntCommon for MpBigEndianUIntByteSlice<'a> {
    fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    fn load_l_full(&self, i: usize) -> LimbType {
        be_mp_load_l_full(self.bytes, i)
    }

    fn load_l(&self, i: usize) -> LimbType {
        be_mp_load_l(self.bytes, i)
    }
}
1157
// The immutable big-endian view is a read-only multiprecision integer.
impl<'a> MpUInt for MpBigEndianUIntByteSlice<'a> {}
1159
// Backing slice primitives: one backing element is one byte.
impl<'a> MpUIntSliceCommonPriv for MpBigEndianUIntByteSlice<'a> {
    type BackingSliceElementType = u8;

    fn n_backing_elements(&self) -> usize {
        self.bytes.len()
    }

    fn take(self, nbytes: usize) -> (Self, Self) {
        // In big-endian order, the `nbytes` least significant bytes are at
        // the slice's tail.
        let (h, l) = self.bytes.split_at(self.bytes.len() - nbytes);
        (Self { bytes: h }, Self { bytes: l })
    }
}
1172
// Composite slice interface for the read-only big-endian view.
impl<'a> MpUIntSliceCommon for MpBigEndianUIntByteSlice<'a> {}
1174
// Read-only slice instantiation for the big-endian view.
impl<'a> MpUIntSlicePriv for MpBigEndianUIntByteSlice<'a> {
    type SelfT<'b>
    = MpBigEndianUIntByteSlice<'b>
    where
        Self: 'b;

    // Wrapping a byte slice cannot fail.
    type FromSliceError = convert::Infallible;

    fn from_slice<'b: 'c, 'c>(
        s: &'b [Self::BackingSliceElementType],
    ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
    where
        Self: 'c,
    {
        Ok(Self::SelfT::<'c> { bytes: s })
    }

    fn _shrink_to(&self, nbytes: usize) -> Self::SelfT<'_> {
        // Keep the `nbytes` least significant bytes, i.e. the slice's tail.
        MpBigEndianUIntByteSlice {
            bytes: &self.bytes[self.bytes.len() - nbytes..],
        }
    }
}
1198
impl<'a> MpUIntSlice for MpBigEndianUIntByteSlice<'a> {
    // Reborrow the backing slice with a shorter lifetime.
    fn coerce_lifetime(&self) -> Self::SelfT<'_> {
        MpBigEndianUIntByteSlice { bytes: &self.bytes }
    }
}
1204
// Hex formatting delegates to the common MpUIntCommon helper.
impl<'a> fmt::LowerHex for MpBigEndianUIntByteSlice<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1210
// A mutable big-endian view can be reborrowed as a read-only one.
impl<'a, 'b> From<&'a MpMutBigEndianUIntByteSlice<'b>> for MpBigEndianUIntByteSlice<'a> {
    fn from(value: &'a MpMutBigEndianUIntByteSlice<'b>) -> Self {
        Self { bytes: value.bytes }
    }
}
1216
// Unwrap the view back into the underlying byte slice.
impl<'a> From<MpBigEndianUIntByteSlice<'a>> for &'a [u8] {
    fn from(value: MpBigEndianUIntByteSlice<'a>) -> Self {
        value.bytes
    }
}
1222
/// Mutable multiprecision unsigned integer view over a big-endian byte
/// slice.
pub struct MpMutBigEndianUIntByteSlice<'a> {
    bytes: &'a mut [u8],
}
1226
1227impl<'a> MpMutBigEndianUIntByteSlice<'a> {
1228 pub fn from_bytes(bytes: &'a mut [u8]) -> Self {
1229 Self { bytes }
1230 }
1231
1232 pub fn as_bytes<'b>(&'b self) -> &'b [u8] {
1233 &self.bytes
1234 }
1235
1236 pub fn as_bytes_mut<'b>(&'b mut self) -> &'b mut [u8] {
1237 &mut self.bytes
1238 }
1239}
1240
// Byte slices support byte-granular, i.e. unaligned, buffer lengths.
impl<'a> MpUIntCommonPriv for MpMutBigEndianUIntByteSlice<'a> {
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = true;

    fn _len(&self) -> usize {
        self.bytes.len()
    }
}
1248
// Limb loads delegate to the big-endian buffer helpers.
impl<'a> MpUIntCommon for MpMutBigEndianUIntByteSlice<'a> {
    fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    fn load_l_full(&self, i: usize) -> LimbType {
        be_mp_load_l_full(self.bytes, i)
    }

    fn load_l(&self, i: usize) -> LimbType {
        be_mp_load_l(self.bytes, i)
    }
}
1262
// Mutation primitives delegate to the big-endian buffer helpers.
impl<'a> MpMutUInt for MpMutBigEndianUIntByteSlice<'a> {
    fn store_l_full(&mut self, i: usize, value: LimbType) {
        be_mp_store_l_full(self.bytes, i, value)
    }

    fn store_l(&mut self, i: usize, value: LimbType) {
        be_mp_store_l(self.bytes, i, value)
    }

    fn clear_bytes_above(&mut self, begin: usize) {
        be_mp_clear_bytes_above(self.bytes, begin)
    }

    fn clear_bytes_below(&mut self, end: usize) {
        be_mp_clear_bytes_below(self.bytes, end)
    }
}
1280
// Backing slice primitives: one backing element is one byte.
impl<'a> MpUIntSliceCommonPriv for MpMutBigEndianUIntByteSlice<'a> {
    type BackingSliceElementType = u8;

    fn n_backing_elements(&self) -> usize {
        self.bytes.len()
    }

    fn take(self, nbytes: usize) -> (Self, Self) {
        // In big-endian order, the `nbytes` least significant bytes are at
        // the slice's tail.
        let (h, l) = self.bytes.split_at_mut(self.bytes.len() - nbytes);
        (Self { bytes: h }, Self { bytes: l })
    }
}
1293
// Composite slice interface for the mutable big-endian view.
impl<'a> MpUIntSliceCommon for MpMutBigEndianUIntByteSlice<'a> {}
1295
// Mutable slice instantiation for the big-endian view.
impl<'a> MpMutUIntSlicePriv for MpMutBigEndianUIntByteSlice<'a> {
    type SelfT<'b>
    = MpMutBigEndianUIntByteSlice<'b>
    where
        Self: 'b;

    // Wrapping a byte slice cannot fail.
    type FromSliceError = convert::Infallible;

    fn from_slice<'b: 'c, 'c>(
        s: &'b mut [Self::BackingSliceElementType],
    ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
    where
        Self: 'c,
    {
        Ok(Self::SelfT::<'c> { bytes: s })
    }

    fn _shrink_to(&mut self, nbytes: usize) -> Self::SelfT<'_> {
        // Keep the `nbytes` least significant bytes, i.e. the slice's tail.
        let l = self.bytes.len();
        MpMutBigEndianUIntByteSlice {
            bytes: &mut self.bytes[l - nbytes..],
        }
    }
}
1320
impl<'a> MpMutUIntSlice for MpMutBigEndianUIntByteSlice<'a> {
    // Reborrow the backing slice with a shorter lifetime.
    fn coerce_lifetime(&mut self) -> Self::SelfT<'_> {
        MpMutBigEndianUIntByteSlice {
            bytes: &mut self.bytes,
        }
    }
}
1328
// Hex formatting delegates to the common MpUIntCommon helper.
impl<'a> fmt::LowerHex for MpMutBigEndianUIntByteSlice<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1334
// Reborrow the underlying bytes mutably.
impl<'a, 'b> From<&'a mut MpMutBigEndianUIntByteSlice<'b>> for &'a mut [u8] {
    fn from(value: &'a mut MpMutBigEndianUIntByteSlice<'b>) -> Self {
        &mut value.bytes
    }
}
1340
// Reborrow the underlying bytes immutably.
impl<'a, 'b> From<&'a MpMutBigEndianUIntByteSlice<'b>> for &'a [u8] {
    fn from(value: &'a MpMutBigEndianUIntByteSlice<'b>) -> Self {
        &value.bytes
    }
}
1346
/// Immutable multiprecision integer view over a little-endian byte slice.
#[derive(Clone)]
pub struct MpLittleEndianUIntByteSlice<'a> {
    bytes: &'a [u8],
}
1351
1352impl<'a> MpLittleEndianUIntByteSlice<'a> {
1353 pub fn from_bytes(bytes: &'a [u8]) -> Self {
1354 Self { bytes }
1355 }
1356
1357 pub fn as_bytes(&self) -> &'a [u8] {
1358 &self.bytes
1359 }
1360}
1361
impl<'a> MpUIntCommonPriv for MpLittleEndianUIntByteSlice<'a> {
    // Byte slices may have lengths which are no multiple of LIMB_BYTES.
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = true;

    /// Length of the multiprecision integer in bytes.
    fn _len(&self) -> usize {
        self.bytes.len()
    }
}
1369
impl<'a> MpUIntCommon for MpLittleEndianUIntByteSlice<'a> {
    fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    /// Load the `i`th limb; `i` must index a fully backed limb.
    fn load_l_full(&self, i: usize) -> LimbType {
        le_mp_load_l_full(self.bytes, i)
    }

    /// Load the `i`th limb; the highest limb may be only partially backed.
    fn load_l(&self, i: usize) -> LimbType {
        le_mp_load_l(self.bytes, i)
    }
}
1383
// Marker impl: no items are overridden here.
impl<'a> MpUInt for MpLittleEndianUIntByteSlice<'a> {}
1385
1386impl<'a> MpUIntSliceCommonPriv for MpLittleEndianUIntByteSlice<'a> {
1387 type BackingSliceElementType = u8;
1388
1389 fn n_backing_elements(&self) -> usize {
1390 self.bytes.len()
1391 }
1392
1393 fn take(self, nbytes: usize) -> (Self, Self) {
1394 let (l, h) = self.bytes.split_at(nbytes);
1395 (Self { bytes: h }, Self { bytes: l })
1396 }
1397}
1398
// Marker impl: no items are overridden here.
impl<'a> MpUIntSliceCommon for MpLittleEndianUIntByteSlice<'a> {}
1400
1401impl<'a> MpUIntSlicePriv for MpLittleEndianUIntByteSlice<'a> {
1402 type SelfT<'b>
1403 = MpLittleEndianUIntByteSlice<'b>
1404 where
1405 Self: 'b;
1406
1407 type FromSliceError = convert::Infallible;
1408
1409 fn from_slice<'b: 'c, 'c>(
1410 s: &'b [Self::BackingSliceElementType],
1411 ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
1412 where
1413 Self: 'c,
1414 {
1415 Ok(Self::SelfT::<'c> { bytes: s })
1416 }
1417
1418 fn _shrink_to(&self, nbytes: usize) -> Self::SelfT<'_> {
1419 MpLittleEndianUIntByteSlice {
1420 bytes: &self.bytes[..nbytes],
1421 }
1422 }
1423}
1424
impl<'a> MpUIntSlice for MpLittleEndianUIntByteSlice<'a> {
    /// Reborrow as a new wrapper whose lifetime is tied to `self`.
    fn coerce_lifetime(&self) -> Self::SelfT<'_> {
        MpLittleEndianUIntByteSlice { bytes: &self.bytes }
    }
}
1430
impl<'a> fmt::LowerHex for MpLittleEndianUIntByteSlice<'a> {
    // Delegate to the common lower-hex formatting helper.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1436
impl<'a, 'b> From<&'a MpMutLittleEndianUIntByteSlice<'b>> for MpLittleEndianUIntByteSlice<'a> {
    /// Immutable view onto a mutable little-endian slice's bytes.
    fn from(value: &'a MpMutLittleEndianUIntByteSlice<'b>) -> Self {
        Self { bytes: value.bytes }
    }
}
1442
impl<'a> From<MpLittleEndianUIntByteSlice<'a>> for &'a [u8] {
    /// Unwrap back into the underlying byte slice.
    fn from(value: MpLittleEndianUIntByteSlice<'a>) -> Self {
        value.bytes
    }
}
1448
/// Mutable multiprecision integer view over a little-endian byte slice.
pub struct MpMutLittleEndianUIntByteSlice<'a> {
    bytes: &'a mut [u8],
}
1452
1453impl<'a> MpMutLittleEndianUIntByteSlice<'a> {
1454 pub fn from_bytes(bytes: &'a mut [u8]) -> Self {
1455 Self { bytes }
1456 }
1457
1458 pub fn as_bytes<'b>(&'b self) -> &'b [u8] {
1459 &self.bytes
1460 }
1461
1462 pub fn as_bytes_mut<'b>(&'b mut self) -> &'b mut [u8] {
1463 &mut self.bytes
1464 }
1465}
1466
impl<'a> MpUIntCommonPriv for MpMutLittleEndianUIntByteSlice<'a> {
    // Byte slices may have lengths which are no multiple of LIMB_BYTES.
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = true;

    /// Length of the multiprecision integer in bytes.
    fn _len(&self) -> usize {
        self.bytes.len()
    }
}
1474
impl<'a> MpUIntCommon for MpMutLittleEndianUIntByteSlice<'a> {
    fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    /// Load the `i`th limb; `i` must index a fully backed limb.
    fn load_l_full(&self, i: usize) -> LimbType {
        le_mp_load_l_full(self.bytes, i)
    }

    /// Load the `i`th limb; the highest limb may be only partially backed.
    fn load_l(&self, i: usize) -> LimbType {
        le_mp_load_l(self.bytes, i)
    }
}
1488
impl<'a> MpMutUInt for MpMutLittleEndianUIntByteSlice<'a> {
    /// Store the `i`th limb; `i` must index a fully backed limb.
    fn store_l_full(&mut self, i: usize, value: LimbType) {
        le_mp_store_l_full(self.bytes, i, value)
    }

    /// Store the `i`th limb; the highest limb may be only partially backed.
    fn store_l(&mut self, i: usize, value: LimbType) {
        le_mp_store_l(self.bytes, i, value)
    }

    /// Clear the bytes at and above byte position `begin`.
    fn clear_bytes_above(&mut self, begin: usize) {
        le_mp_clear_bytes_above(self.bytes, begin)
    }

    /// Clear the bytes below byte position `end`.
    fn clear_bytes_below(&mut self, end: usize) {
        le_mp_clear_bytes_below(self.bytes, end)
    }
}
1506
1507impl<'a> MpUIntSliceCommonPriv for MpMutLittleEndianUIntByteSlice<'a> {
1508 type BackingSliceElementType = u8;
1509
1510 fn n_backing_elements(&self) -> usize {
1511 self.bytes.len()
1512 }
1513
1514 fn take(self, nbytes: usize) -> (Self, Self) {
1515 let (l, h) = self.bytes.split_at_mut(nbytes);
1516 (Self { bytes: h }, Self { bytes: l })
1517 }
1518}
1519
// Marker impl: no items are overridden here.
impl<'a> MpUIntSliceCommon for MpMutLittleEndianUIntByteSlice<'a> {}
1521
1522impl<'a> MpMutUIntSlicePriv for MpMutLittleEndianUIntByteSlice<'a> {
1523 type SelfT<'b>
1524 = MpMutLittleEndianUIntByteSlice<'b>
1525 where
1526 Self: 'b;
1527
1528 type FromSliceError = convert::Infallible;
1529
1530 fn from_slice<'b: 'c, 'c>(
1531 s: &'b mut [Self::BackingSliceElementType],
1532 ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
1533 where
1534 Self: 'c,
1535 {
1536 Ok(Self::SelfT::<'c> { bytes: s })
1537 }
1538
1539 fn _shrink_to(&mut self, nbytes: usize) -> Self::SelfT<'_> {
1540 MpMutLittleEndianUIntByteSlice {
1541 bytes: &mut self.bytes[..nbytes],
1542 }
1543 }
1544}
1545
impl<'a> MpMutUIntSlice for MpMutLittleEndianUIntByteSlice<'a> {
    /// Reborrow as a new wrapper whose lifetime is tied to `self`.
    fn coerce_lifetime(&mut self) -> Self::SelfT<'_> {
        MpMutLittleEndianUIntByteSlice {
            bytes: &mut self.bytes,
        }
    }
}
1553
impl<'a> fmt::LowerHex for MpMutLittleEndianUIntByteSlice<'a> {
    // Delegate to the common lower-hex formatting helper.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1559
impl<'a, 'b> From<&'a mut MpMutLittleEndianUIntByteSlice<'b>> for &'a mut [u8] {
    /// Mutable access to the wrapped little-endian bytes.
    fn from(value: &'a mut MpMutLittleEndianUIntByteSlice<'b>) -> Self {
        &mut value.bytes
    }
}
1565
impl<'a, 'b> From<&'a MpMutLittleEndianUIntByteSlice<'b>> for &'a [u8] {
    /// Shared access to the wrapped little-endian bytes.
    fn from(value: &'a MpMutLittleEndianUIntByteSlice<'b>) -> Self {
        &value.bytes
    }
}
1571
/// Immutable multiprecision integer view over a native-endian `LimbType`
/// slice, least significant limb first.
pub struct MpNativeEndianUIntLimbsSlice<'a> {
    limbs: &'a [LimbType],
}
1575
impl<'a> MpNativeEndianUIntLimbsSlice<'a> {
    /// Construct a view over `limbs`, least significant limb first.
    pub fn from_limbs(limbs: &'a [LimbType]) -> Self {
        Self { limbs }
    }

    /// Number of limbs needed to back a multiprecision integer of `nbytes`.
    pub fn nlimbs_for_len(nbytes: usize) -> usize {
        Self::n_backing_elements_for_len(nbytes)
    }
}
1585
impl<'a> MpUIntCommonPriv for MpNativeEndianUIntLimbsSlice<'a> {
    // Backed by whole limbs only; byte lengths must be limb-aligned.
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = false;

    /// Length of the multiprecision integer in bytes.
    fn _len(&self) -> usize {
        self.limbs.len() * Self::BACKING_ELEMENT_SIZE
    }
}
1593
impl<'a> MpUIntCommon for MpNativeEndianUIntLimbsSlice<'a> {
    fn is_empty(&self) -> bool {
        self.limbs.is_empty()
    }

    /// Limbs are stored directly; a load is a plain array access.
    #[inline(always)]
    fn load_l_full(&self, i: usize) -> LimbType {
        self.limbs[i]
    }

    /// All limbs are fully backed for the native-endian limb slice, so this
    /// is identical to `load_l_full()`.
    #[inline(always)]
    fn load_l(&self, i: usize) -> LimbType {
        self.load_l_full(i)
    }
}
1609
// Marker impl: no items are overridden here.
impl<'a> MpUInt for MpNativeEndianUIntLimbsSlice<'a> {}
1611
1612impl<'a> MpUIntSliceCommonPriv for MpNativeEndianUIntLimbsSlice<'a> {
1613 type BackingSliceElementType = LimbType;
1614
1615 fn n_backing_elements(&self) -> usize {
1616 self.limbs.len()
1617 }
1618
1619 fn take(self, nbytes: usize) -> (Self, Self) {
1620 debug_assert_eq!(nbytes % LIMB_BYTES, 0);
1621 let (l, h) = self
1622 .limbs
1623 .split_at(Self::n_backing_elements_for_len(nbytes));
1624 (Self { limbs: h }, Self { limbs: l })
1625 }
1626}
1627
// Marker impl: no items are overridden here.
impl<'a> MpUIntSliceCommon for MpNativeEndianUIntLimbsSlice<'a> {}
1629
1630impl<'a> MpUIntSlicePriv for MpNativeEndianUIntLimbsSlice<'a> {
1631 type SelfT<'b>
1632 = MpNativeEndianUIntLimbsSlice<'b>
1633 where
1634 Self: 'b;
1635
1636 type FromSliceError = convert::Infallible;
1637
1638 fn from_slice<'b: 'c, 'c>(
1639 s: &'b [Self::BackingSliceElementType],
1640 ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
1641 where
1642 Self: 'c,
1643 {
1644 Ok(Self::SelfT::<'c> { limbs: s })
1645 }
1646
1647 fn _shrink_to(&self, nbytes: usize) -> Self::SelfT<'_> {
1648 let nlimbs = Self::n_backing_elements_for_len(nbytes);
1649 MpNativeEndianUIntLimbsSlice {
1650 limbs: &self.limbs[..nlimbs],
1651 }
1652 }
1653}
1654
impl<'a> MpUIntSlice for MpNativeEndianUIntLimbsSlice<'a> {
    /// Reborrow as a new wrapper whose lifetime is tied to `self`.
    fn coerce_lifetime(&self) -> Self::SelfT<'_> {
        MpNativeEndianUIntLimbsSlice { limbs: &self.limbs }
    }
}
1660
impl<'a> fmt::LowerHex for MpNativeEndianUIntLimbsSlice<'a> {
    // Delegate to the common lower-hex formatting helper.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1666
impl<'a, 'b> From<&'a MpMutNativeEndianUIntLimbsSlice<'b>> for MpNativeEndianUIntLimbsSlice<'a> {
    /// Immutable view onto a mutable native-endian slice's limbs.
    fn from(value: &'a MpMutNativeEndianUIntLimbsSlice<'b>) -> Self {
        Self { limbs: value.limbs }
    }
}
1672
/// Mutable multiprecision integer view over a native-endian `LimbType`
/// slice, least significant limb first.
pub struct MpMutNativeEndianUIntLimbsSlice<'a> {
    limbs: &'a mut [LimbType],
}
1676
impl<'a> MpMutNativeEndianUIntLimbsSlice<'a> {
    /// Construct a mutable view over `limbs`, least significant limb first.
    pub fn from_limbs(limbs: &'a mut [LimbType]) -> Self {
        Self { limbs }
    }

    /// Number of limbs needed to back a multiprecision integer of `nbytes`.
    pub fn nlimbs_for_len(nbytes: usize) -> usize {
        Self::n_backing_elements_for_len(nbytes)
    }
}
1686
impl<'a> MpUIntCommonPriv for MpMutNativeEndianUIntLimbsSlice<'a> {
    // Backed by whole limbs only; byte lengths must be limb-aligned.
    const SUPPORTS_UNALIGNED_BUFFER_LENGTHS: bool = false;

    /// Length of the multiprecision integer in bytes.
    fn _len(&self) -> usize {
        self.limbs.len() * Self::BACKING_ELEMENT_SIZE
    }
}
1694
impl<'a> MpUIntCommon for MpMutNativeEndianUIntLimbsSlice<'a> {
    fn is_empty(&self) -> bool {
        self.limbs.is_empty()
    }

    /// Limbs are stored directly; a load is a plain array access.
    #[inline(always)]
    fn load_l_full(&self, i: usize) -> LimbType {
        self.limbs[i]
    }

    /// All limbs are fully backed for the native-endian limb slice, so this
    /// is identical to `load_l_full()`.
    #[inline(always)]
    fn load_l(&self, i: usize) -> LimbType {
        self.load_l_full(i)
    }
}
1710
1711impl<'a> MpMutUInt for MpMutNativeEndianUIntLimbsSlice<'a> {
1712 #[inline(always)]
1713 fn store_l_full(&mut self, i: usize, value: LimbType) {
1714 self.limbs[i] = value;
1715 }
1716
1717 #[inline(always)]
1718 fn store_l(&mut self, i: usize, value: LimbType) {
1719 self.store_l_full(i, value);
1720 }
1721
1722 fn clear_bytes_above(&mut self, begin: usize) {
1723 let mut begin_limb = begin / LIMB_BYTES;
1724 if begin_limb >= self.limbs.len() {
1725 return;
1726 }
1727 let begin_in_limb = begin % LIMB_BYTES;
1728 if begin_in_limb != 0 {
1729 self.limbs[begin_limb] &= ct_lsb_mask_l(8 * begin_in_limb as u32);
1730 begin_limb += 1;
1731 }
1732 self.limbs[begin_limb..].fill(0);
1733 }
1734
1735 fn clear_bytes_below(&mut self, end: usize) {
1736 let mut end_limb = ct_mp_nlimbs(end).min(self.limbs.len());
1737 let end_in_limb = end % LIMB_BYTES;
1738 if end_in_limb != 0 {
1739 end_limb -= 1;
1740 let mut l = self.load_l(end_limb);
1741 l >>= 8 * end_in_limb;
1742 l <<= 8 * end_in_limb;
1743 self.limbs[end_limb] = l;
1744 }
1745 self.limbs[..end_limb].fill(0);
1746 }
1747}
1748
1749impl<'a> MpUIntSliceCommonPriv for MpMutNativeEndianUIntLimbsSlice<'a> {
1750 type BackingSliceElementType = LimbType;
1751
1752 fn n_backing_elements(&self) -> usize {
1753 self.limbs.len()
1754 }
1755
1756 fn take(self, nbytes: usize) -> (Self, Self) {
1757 debug_assert_eq!(nbytes % LIMB_BYTES, 0);
1758 let (l, h) = self
1759 .limbs
1760 .split_at_mut(Self::n_backing_elements_for_len(nbytes));
1761 (Self { limbs: h }, Self { limbs: l })
1762 }
1763}
1764
// Marker impl: no items are overridden here.
impl<'a> MpUIntSliceCommon for MpMutNativeEndianUIntLimbsSlice<'a> {}
1766
1767impl<'a> MpMutUIntSlicePriv for MpMutNativeEndianUIntLimbsSlice<'a> {
1768 type SelfT<'b>
1769 = MpMutNativeEndianUIntLimbsSlice<'b>
1770 where
1771 Self: 'b;
1772
1773 type FromSliceError = convert::Infallible;
1774
1775 fn from_slice<'b: 'c, 'c>(
1776 s: &'b mut [Self::BackingSliceElementType],
1777 ) -> Result<Self::SelfT<'c>, Self::FromSliceError>
1778 where
1779 Self: 'c,
1780 {
1781 Ok(Self::SelfT::<'c> { limbs: s })
1782 }
1783
1784 fn _shrink_to(&mut self, nbytes: usize) -> Self::SelfT<'_> {
1785 let nlimbs = Self::n_backing_elements_for_len(nbytes);
1786 MpMutNativeEndianUIntLimbsSlice {
1787 limbs: &mut self.limbs[..nlimbs],
1788 }
1789 }
1790}
1791
impl<'a> MpMutUIntSlice for MpMutNativeEndianUIntLimbsSlice<'a> {
    /// Reborrow as a new wrapper whose lifetime is tied to `self`.
    fn coerce_lifetime(&mut self) -> Self::SelfT<'_> {
        MpMutNativeEndianUIntLimbsSlice {
            limbs: &mut self.limbs,
        }
    }
}
1799
impl<'a> fmt::LowerHex for MpMutNativeEndianUIntLimbsSlice<'a> {
    // Delegate to the common lower-hex formatting helper.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt_lower_hex(f)
    }
}
1805
1806fn find_last_set_limb<T0: MpUIntCommon>(op0: &T0) -> usize {
1807 let mut nlimbs = op0.nlimbs();
1808 if nlimbs == 0 {
1809 return 0;
1810 }
1811
1812 if op0.load_l(nlimbs - 1) == 0 {
1813 nlimbs -= 1;
1814 while nlimbs > 0 {
1815 if op0.load_l_full(nlimbs - 1) != 0 {
1816 break;
1817 }
1818 nlimbs -= 1;
1819 }
1820 }
1821
1822 nlimbs
1823}
1824
#[cfg(test)]
fn test_find_last_set_limb_with_unaligned_lengths<
    T0: MpMutUIntSlice<BackingSliceElementType = u8>,
>() {
    // Buffer with a length which is no multiple of LIMB_BYTES.
    let mut buf = [0u8; 2 * LIMB_BYTES + 2];
    let mut op0 = T0::from_slice(buf.as_mut_slice()).unwrap();
    // Setting limb i makes i + 1 the expected result.
    for i in 0..3 {
        op0.store_l(i, 1);
        assert_eq!(find_last_set_limb(&op0), i + 1);
    }
}
1840
#[cfg(test)]
fn test_find_last_set_limb_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // An empty operand has no set limbs.
    let mut empty = tst_mk_mp_backing_vec!(T0, 0);
    let empty = T0::from_slice(empty.as_mut_slice()).unwrap();
    assert_eq!(find_last_set_limb(&empty), 0);

    // Two-limb zero operand, then set limbs from the bottom up.
    let mut buf = tst_mk_mp_backing_vec!(T0, 2 * LIMB_BYTES);
    let mut op0 = T0::from_slice(buf.as_mut_slice()).unwrap();
    assert_eq!(find_last_set_limb(&op0), 0);
    for i in 0..2 {
        op0.store_l(i, 1);
        assert_eq!(find_last_set_limb(&op0), i + 1);
    }
}
1857
#[test]
fn test_find_last_set_limb_be() {
    // Exercise the big-endian byte slice flavor.
    test_find_last_set_limb_with_unaligned_lengths::<MpMutBigEndianUIntByteSlice>();
    test_find_last_set_limb_with_aligned_lengths::<MpMutBigEndianUIntByteSlice>();
}
1863
#[test]
fn test_find_last_set_limb_le() {
    // Exercise the little-endian byte slice flavor.
    test_find_last_set_limb_with_unaligned_lengths::<MpMutLittleEndianUIntByteSlice>();
    test_find_last_set_limb_with_aligned_lengths::<MpMutLittleEndianUIntByteSlice>();
}
1869
#[test]
fn test_find_last_set_limb_ne() {
    // Native-endian limb slices support aligned lengths only.
    test_find_last_set_limb_with_aligned_lengths::<MpMutNativeEndianUIntLimbsSlice>();
}
1874
1875pub fn find_last_set_byte_mp<T0: MpUIntCommon>(op0: &T0) -> usize {
1876 let nlimbs = find_last_set_limb(op0);
1877 if nlimbs == 0 {
1878 return 0;
1879 }
1880 let nlimbs = nlimbs - 1;
1881 nlimbs * LIMB_BYTES + ct_find_last_set_byte_l(op0.load_l(nlimbs))
1882}
1883
#[cfg(test)]
fn test_find_last_set_byte_mp_with_unaligned_lengths<
    T0: MpMutUIntSlice<BackingSliceElementType = u8>,
>() {
    // Buffer with a length which is no multiple of LIMB_BYTES.
    let mut buf = [0u8; 2 * LIMB_BYTES + 2];
    let mut op0 = T0::from_slice(buf.as_mut_slice()).unwrap();
    // A 1 in limb i puts the last set byte at i * LIMB_BYTES + 1.
    for i in 0..3 {
        op0.store_l(i, 1);
        assert_eq!(find_last_set_byte_mp(&op0), i * LIMB_BYTES + 1);
    }
}
1899
#[cfg(test)]
fn test_find_last_set_byte_mp_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // An empty operand has no set bytes.
    let mut empty = tst_mk_mp_backing_vec!(T0, 0);
    let empty = T0::from_slice(empty.as_mut_slice()).unwrap();
    assert_eq!(find_last_set_byte_mp(&empty), 0);

    // A set top bit in limb i puts the last set byte at (i + 1) * LIMB_BYTES.
    let mut buf = tst_mk_mp_backing_vec!(T0, 2 * LIMB_BYTES);
    let mut op0 = T0::from_slice(buf.as_mut_slice()).unwrap();
    let top_bit = (1 as LimbType) << (LIMB_BITS - 1);
    for i in 0..2 {
        op0.store_l(i, top_bit);
        assert_eq!(find_last_set_byte_mp(&op0), (i + 1) * LIMB_BYTES);
    }
}
1914
#[test]
fn test_find_last_set_byte_be() {
    // Exercise the big-endian byte slice flavor.
    test_find_last_set_byte_mp_with_unaligned_lengths::<MpMutBigEndianUIntByteSlice>();
    test_find_last_set_byte_mp_with_aligned_lengths::<MpMutBigEndianUIntByteSlice>();
}
1920
#[test]
fn test_find_last_set_byte_le() {
    // Exercise the little-endian byte slice flavor.
    test_find_last_set_byte_mp_with_unaligned_lengths::<MpMutLittleEndianUIntByteSlice>();
    test_find_last_set_byte_mp_with_aligned_lengths::<MpMutLittleEndianUIntByteSlice>();
}
1926
#[test]
fn test_find_last_set_byte_ne() {
    // Native-endian limb slices support aligned lengths only.
    test_find_last_set_byte_mp_with_aligned_lengths::<MpMutNativeEndianUIntLimbsSlice>();
}
1931
/// Find the least significant set bit, in constant time.
///
/// Returns a [`LimbChoice`] which is set iff `op0` is non-zero, and the bit
/// position of the least significant set bit (0 for an all-zero operand).
pub fn ct_find_first_set_bit_mp<T0: MpUIntCommon>(op0: &T0) -> (LimbChoice, usize) {
    // Scan every limb unconditionally so the access pattern does not depend
    // on op0's value; accumulate trailing-zero counts only while all limbs
    // seen so far were zero.
    // NOTE(review): this relies on ct_find_first_set_bit_l() accounting for
    // a full limb's worth of trailing zeroes on a zero limb — confirm
    // against its definition.
    let mut tail_is_zero = LimbChoice::from(1);
    let mut ntrailing_zeroes: usize = 0;
    for i in 0..op0.nlimbs() {
        let op0_val = op0.load_l(i);
        ntrailing_zeroes +=
            tail_is_zero.select(0, ct_find_first_set_bit_l(op0_val) as LimbType) as usize;
        tail_is_zero &= LimbChoice::from(ct_is_zero_l(op0_val));
    }
    // For an all-zero operand report position 0 instead of the accumulated
    // total.
    (
        !tail_is_zero,
        tail_is_zero.select_usize(ntrailing_zeroes, 0),
    )
}
1946
#[cfg(test)]
fn test_ct_find_first_set_bit_mp<T0: MpMutUIntSlice>() {
    // An all-zero operand reports "not found" and position 0.
    let mut limbs = tst_mk_mp_backing_vec!(T0, 3 * LIMB_BYTES);
    let limbs = T0::from_slice(&mut limbs).unwrap();
    let (is_nonzero, first_set_bit_pos) = ct_find_first_set_bit_mp(&limbs);
    assert_eq!(is_nonzero.unwrap(), 0);
    assert_eq!(first_set_bit_pos, 0);

    // Walk a set bit across every bit position of a three-limb operand.
    for i in 0..3 * LIMB_BITS as usize {
        let mut limbs = tst_mk_mp_backing_vec!(T0, 3 * LIMB_BYTES);
        let mut limbs = T0::from_slice(&mut limbs).unwrap();
        let limb_index = i / LIMB_BITS as usize;
        let bit_pos_in_limb = i % LIMB_BITS as usize;

        // Exactly one bit set, at position i.
        limbs.store_l(limb_index, 1 << bit_pos_in_limb);
        let (is_nonzero, first_set_bit_pos) = ct_find_first_set_bit_mp(&limbs);
        assert!(is_nonzero.unwrap() != 0);
        assert_eq!(first_set_bit_pos, i);

        // All bits at and above position i set: first set bit is still i.
        limbs.store_l(limb_index, !((1 << bit_pos_in_limb) - 1));
        for j in limb_index + 1..limbs.nlimbs() {
            limbs.store_l(j, !0);
        }
        let (is_nonzero, first_set_bit_pos) = ct_find_first_set_bit_mp(&limbs);
        assert!(is_nonzero.unwrap() != 0);
        assert_eq!(first_set_bit_pos, i);
    }
}
1975
#[test]
fn test_ct_find_first_set_bit_be() {
    // Exercise the big-endian byte slice flavor.
    test_ct_find_first_set_bit_mp::<MpMutBigEndianUIntByteSlice>()
}
1980
#[test]
fn test_ct_find_first_set_bit_le() {
    // Exercise the little-endian byte slice flavor.
    test_ct_find_first_set_bit_mp::<MpMutLittleEndianUIntByteSlice>()
}
1985
#[test]
fn test_ct_find_first_set_bit_ne() {
    // Exercise the native-endian limb slice flavor.
    test_ct_find_first_set_bit_mp::<MpMutNativeEndianUIntLimbsSlice>()
}
1990
/// Find the most significant set bit, in constant time.
///
/// Returns a [`LimbChoice`] which is set iff `op0` is non-zero, and the
/// operand's bit length, i.e. the position of the most significant set bit
/// plus one (0 for an all-zero operand, as the tests below confirm).
pub fn ct_find_last_set_bit_mp<T0: MpUIntCommon>(op0: &T0) -> (LimbChoice, usize) {
    // Scan every limb unconditionally, from most to least significant;
    // accumulate leading-zero counts only while all limbs seen so far were
    // zero.
    let mut head_is_zero = LimbChoice::from(1);
    let mut nleading_zeroes: usize = 0;
    let mut i = op0.nlimbs();
    while i > 0 {
        i -= 1;
        let op0_val = op0.load_l(i);
        nleading_zeroes += head_is_zero.select(
            0,
            LIMB_BITS as LimbType - ct_find_last_set_bit_l(op0_val) as LimbType,
        ) as usize;
        head_is_zero &= LimbChoice::from(ct_is_zero_l(op0_val));
    }
    // Bit length = total bit capacity minus the leading zeroes.
    (
        !head_is_zero,
        op0.nlimbs() * LIMB_BITS as usize - nleading_zeroes,
    )
}
2009
#[cfg(test)]
fn test_ct_find_last_set_bit_mp<T0: MpMutUIntSlice>() {
    // An all-zero operand reports "not found" and bit length 0.
    let mut limbs = tst_mk_mp_backing_vec!(T0, 3 * LIMB_BYTES);
    let limbs = T0::from_slice(&mut limbs).unwrap();
    let (is_nonzero, first_set_bit_pos) = ct_find_last_set_bit_mp(&limbs);
    assert_eq!(is_nonzero.unwrap(), 0);
    assert_eq!(first_set_bit_pos, 0);

    // Walk a set bit across every bit position of a three-limb operand.
    for i in 0..3 * LIMB_BITS as usize {
        let mut limbs = tst_mk_mp_backing_vec!(T0, 3 * LIMB_BYTES);
        let mut limbs = T0::from_slice(&mut limbs).unwrap();
        let limb_index = i / LIMB_BITS as usize;
        let bit_pos_in_limb = i % LIMB_BITS as usize;

        // Exactly one bit set at position i: bit length is i + 1.
        limbs.store_l(limb_index, 1 << bit_pos_in_limb);
        let (is_nonzero, last_set_bit_pos) = ct_find_last_set_bit_mp(&limbs);
        assert!(is_nonzero.unwrap() != 0);
        assert_eq!(last_set_bit_pos, i + 1);

        // All bits below position i set: bit length is i (zero for i == 0).
        limbs.store_l(limb_index, (1 << bit_pos_in_limb) - 1);
        for j in 0..limb_index {
            limbs.store_l(j, !0);
        }
        let (is_nonzero, last_set_bit_pos) = ct_find_last_set_bit_mp(&limbs);
        assert_eq!(is_nonzero.unwrap() != 0, i != 0);
        assert_eq!(last_set_bit_pos, i);
    }
}
2038
#[test]
fn test_ct_find_last_set_bit_be() {
    // Exercise the big-endian byte slice flavor.
    test_ct_find_last_set_bit_mp::<MpMutBigEndianUIntByteSlice>()
}
2043
#[test]
fn test_ct_find_last_set_bit_le() {
    // Exercise the little-endian byte slice flavor.
    test_ct_find_last_set_bit_mp::<MpMutLittleEndianUIntByteSlice>()
}
2048
#[test]
fn test_ct_find_last_set_bit_ne() {
    // Exercise the native-endian limb slice flavor.
    test_ct_find_last_set_bit_mp::<MpMutNativeEndianUIntLimbsSlice>()
}
2053
/// Clear all bits at positions >= `begin`, in constant time.
///
/// Every limb is loaded, masked and stored back unconditionally, so the
/// access pattern does not depend on which limb `begin` falls into.
pub fn ct_clear_bits_above_mp<T0: MpMutUInt>(op0: &mut T0, begin: usize) {
    let first_limb_index = begin / LIMB_BITS as usize;
    // The boundary limb keeps only its low begin % LIMB_BITS bits.
    let first_limb_retain_nbits = begin % LIMB_BITS as usize;
    let first_limb_mask = ct_lsb_mask_l(first_limb_retain_nbits as u32);
    // next_mask is all-ones until the boundary limb has been processed and
    // zero afterwards, so limbs below begin are kept and limbs above are
    // cleared.
    let mut next_mask = !0;
    for i in 0..op0.nlimbs() {
        let is_first_limb = ct_eq_usize_usize(i, first_limb_index);
        let mask = is_first_limb.select(next_mask, first_limb_mask);
        next_mask = is_first_limb.select(next_mask, 0);
        let val = op0.load_l(i);
        op0.store_l(i, val & mask)
    }
}
2067
#[cfg(test)]
fn test_ct_clear_bits_above_mp_common<T0: MpMutUIntSlice>(op0_len: usize) {
    // Set every bit backed by the op0_len bytes; the topmost limb may be
    // only partially backed, hence the partial_high_mask().
    fn fill_with_ones<T0: MpMutUInt>(op0: &mut T0) {
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                op0.store_l_full(i, !0);
            } else {
                op0.store_l(i, op0.partial_high_mask());
            }
        }
    }

    // Clearing at or above the operand's total bit width must leave it
    // unmodified.
    let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
    let mut op0 = T0::from_slice(&mut op0).unwrap();
    fill_with_ones(&mut op0);
    for begin in [
        8 * op0_len,
        8 * op0_len + 1,
        8 * op0.nlimbs() * LIMB_BYTES,
        8 * op0.nlimbs() * LIMB_BYTES + 1,
    ] {
        ct_clear_bits_above_mp(&mut op0, begin);
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l_full(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }

    // Limb-aligned boundaries: limbs below stay all-ones, limbs at and
    // above get cleared completely.
    for j in 0..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize;

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        ct_clear_bits_above_mp(&mut op0, begin);
        for i in 0..j {
            assert_eq!(op0.load_l_full(i), !0);
        }

        for i in j..op0.nlimbs() {
            assert_eq!(op0.load_l(i), 0);
        }
    }

    // Boundaries one bit below a limb boundary (clamped to the operand's
    // width): the boundary limb keeps only its low begin % LIMB_BITS bits.
    for j in 1..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize - 1;
        let begin = begin.min(8 * op0_len - 1);

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        ct_clear_bits_above_mp(&mut op0, begin);
        for i in 0..j - 1 {
            assert_eq!(op0.load_l_full(i), !0);
        }

        let expected = ct_lsb_mask_l((begin % LIMB_BITS as usize) as u32);
        assert_eq!(op0.load_l(j - 1), expected);

        for i in j..op0.nlimbs() {
            assert_eq!(op0.load_l(i), 0);
        }
    }
}
2135
#[cfg(test)]
fn test_ct_clear_bits_above_mp_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // Limb-aligned operand lengths, including the empty one.
    test_ct_clear_bits_above_mp_common::<T0>(0);
    test_ct_clear_bits_above_mp_common::<T0>(LIMB_BYTES);
    test_ct_clear_bits_above_mp_common::<T0>(2 * LIMB_BYTES);
    test_ct_clear_bits_above_mp_common::<T0>(3 * LIMB_BYTES);
}
2143
#[cfg(test)]
fn test_ct_clear_bits_above_mp_with_unaligned_lengths<T0: MpMutUIntSlice>() {
    // Operand lengths one byte short of a limb boundary, so the top limb is
    // only partially backed.
    test_ct_clear_bits_above_mp_common::<T0>(LIMB_BYTES - 1);
    test_ct_clear_bits_above_mp_common::<T0>(2 * LIMB_BYTES - 1);
    test_ct_clear_bits_above_mp_common::<T0>(3 * LIMB_BYTES - 1);
}
2150
#[test]
fn test_ct_clear_bits_above_be() {
    // Exercise the big-endian byte slice flavor.
    test_ct_clear_bits_above_mp_with_aligned_lengths::<MpMutBigEndianUIntByteSlice>();
    test_ct_clear_bits_above_mp_with_unaligned_lengths::<MpMutBigEndianUIntByteSlice>();
}
2156
#[test]
fn test_ct_clear_bits_above_le() {
    // Exercise the little-endian byte slice flavor.
    test_ct_clear_bits_above_mp_with_aligned_lengths::<MpMutLittleEndianUIntByteSlice>();
    test_ct_clear_bits_above_mp_with_unaligned_lengths::<MpMutLittleEndianUIntByteSlice>();
}
2162
#[test]
fn test_ct_clear_bits_above_ne() {
    // Native-endian limb slices support aligned lengths only.
    test_ct_clear_bits_above_mp_with_aligned_lengths::<MpMutNativeEndianUIntLimbsSlice>();
}
2167
2168pub fn clear_bits_above_mp<T0: MpMutUInt>(op0: &mut T0, begin: usize) {
2169 let first_limb_index = begin / LIMB_BITS as usize;
2170 if op0.nlimbs() <= first_limb_index {
2171 return;
2172 }
2173 let first_limb_retain_nbits = begin % LIMB_BITS as usize;
2174 let first_limb_mask = ct_lsb_mask_l(first_limb_retain_nbits as u32);
2175 op0.store_l(
2176 first_limb_index,
2177 op0.load_l(first_limb_index) & first_limb_mask,
2178 );
2179
2180 op0.clear_bytes_above((begin + LIMB_BITS as usize - 1) / LIMB_BITS as usize * LIMB_BYTES);
2181}
2182
#[cfg(test)]
fn test_clear_bits_above_mp_common<T0: MpMutUIntSlice>(op0_len: usize) {
    // Set every bit backed by the op0_len bytes; the topmost limb may be
    // only partially backed, hence the partial_high_mask().
    fn fill_with_ones<T0: MpMutUInt>(op0: &mut T0) {
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                op0.store_l_full(i, !0);
            } else {
                op0.store_l(i, op0.partial_high_mask());
            }
        }
    }

    // Clearing at or above the operand's total bit width must leave it
    // unmodified.
    let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
    let mut op0 = T0::from_slice(&mut op0).unwrap();
    fill_with_ones(&mut op0);
    for begin in [
        8 * op0_len,
        8 * op0_len + 1,
        8 * op0.nlimbs() * LIMB_BYTES,
        8 * op0.nlimbs() * LIMB_BYTES + 1,
    ] {
        clear_bits_above_mp(&mut op0, begin);
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l_full(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }

    // Limb-aligned boundaries: limbs below stay all-ones, limbs at and
    // above get cleared completely.
    for j in 0..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize;

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        clear_bits_above_mp(&mut op0, begin);
        for i in 0..j {
            assert_eq!(op0.load_l_full(i), !0);
        }

        for i in j..op0.nlimbs() {
            assert_eq!(op0.load_l(i), 0);
        }
    }

    // Boundaries one bit below a limb boundary (clamped to the operand's
    // width): the boundary limb keeps only its low begin % LIMB_BITS bits.
    for j in 1..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize - 1;
        let begin = begin.min(8 * op0_len - 1);

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        clear_bits_above_mp(&mut op0, begin);
        for i in 0..j - 1 {
            assert_eq!(op0.load_l_full(i), !0);
        }

        let expected = ct_lsb_mask_l((begin % LIMB_BITS as usize) as u32);
        assert_eq!(op0.load_l(j - 1), expected);

        for i in j..op0.nlimbs() {
            assert_eq!(op0.load_l(i), 0);
        }
    }
}
2250
#[cfg(test)]
fn test_clear_bits_above_mp_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // Limb-aligned operand lengths, including the empty one.
    test_clear_bits_above_mp_common::<T0>(0);
    test_clear_bits_above_mp_common::<T0>(LIMB_BYTES);
    test_clear_bits_above_mp_common::<T0>(2 * LIMB_BYTES);
    test_clear_bits_above_mp_common::<T0>(3 * LIMB_BYTES);
}
2258
#[cfg(test)]
fn test_clear_bits_above_mp_with_unaligned_lengths<T0: MpMutUIntSlice>() {
    // Operand lengths one byte short of a limb boundary, so the top limb is
    // only partially backed.
    test_clear_bits_above_mp_common::<T0>(LIMB_BYTES - 1);
    test_clear_bits_above_mp_common::<T0>(2 * LIMB_BYTES - 1);
    test_clear_bits_above_mp_common::<T0>(3 * LIMB_BYTES - 1);
}
2265
#[test]
fn test_clear_bits_above_be() {
    // Exercise the big-endian byte slice flavor.
    test_clear_bits_above_mp_with_aligned_lengths::<MpMutBigEndianUIntByteSlice>();
    test_clear_bits_above_mp_with_unaligned_lengths::<MpMutBigEndianUIntByteSlice>();
}
2271
#[test]
fn test_clear_bits_above_le() {
    // Exercise the little-endian byte slice flavor.
    test_clear_bits_above_mp_with_aligned_lengths::<MpMutLittleEndianUIntByteSlice>();
    test_clear_bits_above_mp_with_unaligned_lengths::<MpMutLittleEndianUIntByteSlice>();
}
2277
#[test]
fn test_clear_bits_above_ne() {
    // Native-endian limb slices support aligned lengths only.
    test_clear_bits_above_mp_with_aligned_lengths::<MpMutNativeEndianUIntLimbsSlice>();
}
2282
/// Clear all bits at positions < `end`, in constant time.
///
/// Every limb is loaded, masked and stored back unconditionally, so the
/// access pattern does not depend on which limb `end` falls into.
pub fn ct_clear_bits_below_mp<T0: MpMutUInt>(op0: &mut T0, end: usize) {
    let last_limb_index = end / LIMB_BITS as usize;
    // The boundary limb keeps only its bits at positions >= end % LIMB_BITS.
    let last_limb_clear_nbits = end % LIMB_BITS as usize;
    let last_limb_mask = !ct_lsb_mask_l(last_limb_clear_nbits as u32);
    // next_mask is zero until the boundary limb has been processed and
    // all-ones afterwards, so limbs below end are cleared and limbs above
    // are kept.
    let mut next_mask = 0;
    for i in 0..op0.nlimbs() {
        let is_last_limb = ct_eq_usize_usize(i, last_limb_index);
        let mask = is_last_limb.select(next_mask, last_limb_mask);
        next_mask = is_last_limb.select(next_mask, !0);
        let val = op0.load_l(i);
        op0.store_l(i, val & mask)
    }
}
2296
#[cfg(test)]
fn test_ct_clear_bits_mp_below_common<T0: MpMutUIntSlice>(op0_len: usize) {
    // Set every bit backed by the op0_len bytes; the topmost limb may be
    // only partially backed, hence the partial_high_mask().
    fn fill_with_ones<T0: MpMutUInt>(op0: &mut T0) {
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                op0.store_l_full(i, !0);
            } else {
                op0.store_l(i, op0.partial_high_mask());
            }
        }
    }

    // Clearing below bit 0 must leave the operand unmodified.
    let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
    let mut op0 = T0::from_slice(&mut op0).unwrap();
    fill_with_ones(&mut op0);
    ct_clear_bits_below_mp(&mut op0, 0);
    for i in 0..op0.nlimbs() {
        if i + 1 != op0.nlimbs() {
            assert_eq!(op0.load_l_full(i), !0);
        } else {
            assert_eq!(op0.load_l(i), op0.partial_high_mask());
        }
    }

    // Limb-aligned boundaries: limbs below get cleared completely, limbs at
    // and above keep their all-ones (or partial-high-mask) fill.
    for j in 0..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize;

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        ct_clear_bits_below_mp(&mut op0, begin);
        for i in 0..j {
            assert_eq!(op0.load_l_full(i), 0);
        }

        for i in j..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }

    // Boundaries one bit below a limb boundary (clamped to the operand's
    // width): the boundary limb keeps only its bits at and above
    // begin % LIMB_BITS.
    for j in 1..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize - 1;
        let begin = begin.min(8 * op0_len - 1);

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        ct_clear_bits_below_mp(&mut op0, begin);
        for i in 0..j - 1 {
            assert_eq!(op0.load_l_full(i), 0);
        }

        let expected = !ct_lsb_mask_l((begin % LIMB_BITS as usize) as u32);
        assert_eq!(op0.load_l(j - 1), expected);

        for i in j..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }
}
2365
#[cfg(test)]
fn test_ct_clear_bits_below_mp_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // Limb-aligned operand lengths, including the empty one.
    test_ct_clear_bits_mp_below_common::<T0>(0);
    test_ct_clear_bits_mp_below_common::<T0>(LIMB_BYTES);
    test_ct_clear_bits_mp_below_common::<T0>(2 * LIMB_BYTES);
    test_ct_clear_bits_mp_below_common::<T0>(3 * LIMB_BYTES);
}
2373
#[cfg(test)]
fn test_ct_clear_bits_below_mp_with_unaligned_lengths<T0: MpMutUIntSlice>() {
    // Operand lengths one byte short of a limb boundary, so the top limb is
    // only partially backed.
    test_ct_clear_bits_mp_below_common::<T0>(LIMB_BYTES - 1);
    test_ct_clear_bits_mp_below_common::<T0>(2 * LIMB_BYTES - 1);
    test_ct_clear_bits_mp_below_common::<T0>(3 * LIMB_BYTES - 1);
}
2380
#[test]
fn test_ct_clear_bits_below_be() {
    // Exercise the big-endian byte slice flavor.
    test_ct_clear_bits_below_mp_with_aligned_lengths::<MpMutBigEndianUIntByteSlice>();
    test_ct_clear_bits_below_mp_with_unaligned_lengths::<MpMutBigEndianUIntByteSlice>();
}
2386
#[test]
fn test_ct_clear_bits_below_le() {
    // Exercise the little-endian byte slice flavor.
    test_ct_clear_bits_below_mp_with_aligned_lengths::<MpMutLittleEndianUIntByteSlice>();
    test_ct_clear_bits_below_mp_with_unaligned_lengths::<MpMutLittleEndianUIntByteSlice>();
}
2392
#[test]
fn test_ct_clear_bits_below_ne() {
    // Native-endian limb slices support aligned lengths only.
    test_ct_clear_bits_below_mp_with_aligned_lengths::<MpMutNativeEndianUIntLimbsSlice>();
}
2397
2398pub fn clear_bits_below_mp<T0: MpMutUInt>(op0: &mut T0, end: usize) {
2399 let last_limb_index = end / LIMB_BITS as usize;
2400 op0.clear_bytes_below(last_limb_index * LIMB_BYTES);
2401 if last_limb_index >= op0.nlimbs() {
2402 return;
2403 }
2404 let last_limb_clear_nbits = end % LIMB_BITS as usize;
2405 let last_limb_mask = !ct_lsb_mask_l(last_limb_clear_nbits as u32);
2406 op0.store_l(
2407 last_limb_index,
2408 op0.load_l(last_limb_index) & last_limb_mask,
2409 );
2410}
2411
#[cfg(test)]
fn test_clear_bits_below_mp_common<T0: MpMutUIntSlice>(op0_len: usize) {
    // Set every bit of op0, restricting the topmost limb to the bits actually
    // backed by the op0_len bytes of storage.
    fn fill_with_ones<T0: MpMutUInt>(op0: &mut T0) {
        for i in 0..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                op0.store_l_full(i, !0);
            } else {
                op0.store_l(i, op0.partial_high_mask());
            }
        }
    }

    // Clearing below bit position 0 must be a no-op: everything stays set.
    let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
    let mut op0 = T0::from_slice(&mut op0).unwrap();
    fill_with_ones(&mut op0);
    clear_bits_below_mp(&mut op0, 0);
    for i in 0..op0.nlimbs() {
        if i + 1 != op0.nlimbs() {
            assert_eq!(op0.load_l_full(i), !0);
        } else {
            assert_eq!(op0.load_l(i), op0.partial_high_mask());
        }
    }

    // Clear below limb-aligned bit positions: exactly the j low limbs must
    // come out zero, all limbs above stay untouched.
    for j in 0..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize;

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        clear_bits_below_mp(&mut op0, begin);
        for i in 0..j {
            assert_eq!(op0.load_l_full(i), 0);
        }

        for i in j..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }

    // Clear below positions one bit short of a limb boundary (clamped to the
    // operand's width): limbs below j - 1 are zero, limb j - 1 retains only
    // the bits at and above begin's in-limb position, the rest is untouched.
    for j in 1..ct_mp_nlimbs(op0_len) {
        let begin = j * LIMB_BITS as usize - 1;
        let begin = begin.min(8 * op0_len - 1);

        let mut op0 = tst_mk_mp_backing_vec!(T0, op0_len);
        let mut op0 = T0::from_slice(&mut op0).unwrap();
        fill_with_ones(&mut op0);
        clear_bits_below_mp(&mut op0, begin);
        for i in 0..j - 1 {
            assert_eq!(op0.load_l_full(i), 0);
        }

        let expected = !ct_lsb_mask_l((begin % LIMB_BITS as usize) as u32);
        assert_eq!(op0.load_l(j - 1), expected);

        for i in j..op0.nlimbs() {
            if i + 1 != op0.nlimbs() {
                assert_eq!(op0.load_l(i), !0);
            } else {
                assert_eq!(op0.load_l(i), op0.partial_high_mask());
            }
        }
    }
}
2480
#[cfg(test)]
fn test_clear_bits_below_mp_with_aligned_lengths<T0: MpMutUIntSlice>() {
    // Exercise limb-aligned operand lengths, including the empty one.
    for len in [0, LIMB_BYTES, 2 * LIMB_BYTES, 3 * LIMB_BYTES] {
        test_clear_bits_below_mp_common::<T0>(len);
    }
}
2488
#[cfg(test)]
fn test_clear_bits_below_mp_with_unaligned_lengths<T0: MpMutUIntSlice>() {
    // Exercise operand lengths one byte short of a limb multiple.
    for len in [LIMB_BYTES - 1, 2 * LIMB_BYTES - 1, 3 * LIMB_BYTES - 1] {
        test_clear_bits_below_mp_common::<T0>(len);
    }
}
2495
#[test]
fn test_clear_bits_below_be() {
    // Run the generic clear_bits_below_mp tests on big-endian byte slices.
    type T = MpMutBigEndianUIntByteSlice;
    test_clear_bits_below_mp_with_aligned_lengths::<T>();
    test_clear_bits_below_mp_with_unaligned_lengths::<T>();
}
2501
#[test]
fn test_clear_bits_below_le() {
    // Run the generic clear_bits_below_mp tests on little-endian byte slices.
    type T = MpMutLittleEndianUIntByteSlice;
    test_clear_bits_below_mp_with_aligned_lengths::<T>();
    test_clear_bits_below_mp_with_unaligned_lengths::<T>();
}
2507
#[test]
fn test_clear_bits_below_ne() {
    // Native-endian limb slices take only the limb-aligned lengths (the
    // byte-slice variants above additionally run the unaligned ones).
    type T = MpMutNativeEndianUIntLimbsSlice;
    test_clear_bits_below_mp_with_aligned_lengths::<T>();
}
2512
2513pub fn ct_swap_cond_mp<T0: MpMutUInt, T1: MpMutUInt>(op0: &mut T0, op1: &mut T1, cond: LimbChoice) {
2514 debug_assert_eq!(op0.nlimbs(), op1.nlimbs());
2515 let nlimbs = op0.nlimbs();
2516 let cond_mask = cond.select(0, !0);
2517 for i in 0..nlimbs {
2518 let mut op0_val = op0.load_l(i);
2519 let mut op1_val = op1.load_l(i);
2520 op0_val ^= op1_val;
2521 op1_val ^= op0_val & cond_mask;
2522 op0_val ^= op1_val;
2523 op0.store_l(i, op0_val);
2524 op1.store_l(i, op1_val);
2525 }
2526}
2527
#[cfg(test)]
fn test_ct_swap_cond_mp<T0: MpMutUIntSlice, T1: MpMutUIntSlice>() {
    use super::cmp_impl::ct_eq_mp_mp;

    // Fill the two operands with distinct byte patterns and keep pristine
    // copies around to compare against after the (non-)swaps.
    let len = 2 * LIMB_BYTES - 1;
    let mut op0_orig = tst_mk_mp_backing_vec!(T0, len);
    op0_orig.fill(0xccu8.into());
    let mut op0 = op0_orig.clone();
    let op0_orig = T0::from_slice(&mut op0_orig).unwrap();
    let mut op0 = T0::from_slice(&mut op0).unwrap();

    let mut op1_orig = tst_mk_mp_backing_vec!(T1, len);
    op1_orig.fill(0xbbu8.into());
    let mut op1 = op1_orig.clone();
    let op1_orig = T1::from_slice(&mut op1_orig).unwrap();
    let mut op1 = T1::from_slice(&mut op1).unwrap();

    // cond == 0: both operands must remain unmodified.
    ct_swap_cond_mp(&mut op0, &mut op1, LimbChoice::from(0));
    assert_ne!(ct_eq_mp_mp(&op0, &op0_orig).unwrap(), 0);
    assert_ne!(ct_eq_mp_mp(&op1, &op1_orig).unwrap(), 0);

    // cond == 1: the operands' contents must have been exchanged.
    ct_swap_cond_mp(&mut op0, &mut op1, LimbChoice::from(1));
    assert_ne!(ct_eq_mp_mp(&op0, &op1_orig).unwrap(), 0);
    assert_ne!(ct_eq_mp_mp(&op1, &op0_orig).unwrap(), 0);
}
2553
2554#[test]
2555fn test_ct_swap_cond_be_be() {
2556 test_ct_swap_cond_mp::<MpMutBigEndianUIntByteSlice, MpMutBigEndianUIntByteSlice>();
2557}
2558
2559#[test]
2560fn test_ct_swap_cond_le_le() {
2561 test_ct_swap_cond_mp::<MpMutLittleEndianUIntByteSlice, MpMutLittleEndianUIntByteSlice>();
2562}
2563
2564#[test]
2565fn test_ct_swap_cond_ne_ne() {
2566 test_ct_swap_cond_mp::<MpMutNativeEndianUIntLimbsSlice, MpMutNativeEndianUIntLimbsSlice>();
2567}
2568
/// Internal per-segment bookkeeping for a [`CompositeLimbsBuffer`].
struct CompositeLimbsBufferSegment<'a, ST: MpUIntCommon> {
    // Exclusive upper bound of the limb indices covered by this segment,
    // counted over the composed integer as a whole (c.f.
    // CompositeLimbsBuffer::new()).
    end: usize,
    // The segment's own bytes, after any bytes borrowed by the previous
    // segment's high_next_partial have been split off.
    segment: ST,
    // Bytes split off the subsequent segment to complete this segment's
    // topmost, partial limb. Empty if the segment ends limb-aligned, is the
    // last one, or the next segment had no bytes to contribute.
    high_next_partial: ST,

    _phantom: marker::PhantomData<&'a [u8]>,
}
2596
/// Access a multiprecision integer scattered over several disjoint buffers
/// as a single, virtually contiguous sequence of limbs.
pub struct CompositeLimbsBuffer<'a, ST: MpUIntCommon, const N_SEGMENTS: usize> {
    // The underlying segments, ordered from least to most significant limbs.
    segments: [CompositeLimbsBufferSegment<'a, ST>; N_SEGMENTS],
}
2624
impl<'a, ST: MpUIntSliceCommon, const N_SEGMENTS: usize> CompositeLimbsBuffer<'a, ST, N_SEGMENTS> {
    /// Construct a `CompositeLimbsBuffer` from individual segment buffers,
    /// ordered from least to most significant.
    pub fn new(segments: [ST; N_SEGMENTS]) -> Self {
        // Move the input array into an array of Options so the closure below
        // can take() ownership of individual segments one at a time.
        let mut segments = <[ST; N_SEGMENTS] as IntoIterator>::into_iter(segments);
        let mut segments: [Option<ST>; N_SEGMENTS] = core::array::from_fn(|_| segments.next());
        let mut n_bytes_total = 0;
        let mut create_segment = |i: usize| {
            let segment = segments[i].take().unwrap();
            let segment_len = segment.len();
            n_bytes_total += segment_len;

            // If the accumulated byte count is not limb-aligned, this
            // segment ends in a partial limb: borrow the missing bytes from
            // the next segment (which may itself be too short to complete
            // the limb — the remainder comes from segments after it).
            let n_high_partial = n_bytes_total % LIMB_BYTES;
            let (high_next_partial, segment) = if i + 1 != segments.len() && n_high_partial != 0 {
                let next_segment = segments[i + 1].take().unwrap();
                let next_segment_len = next_segment.len();
                let n_from_next = (LIMB_BYTES - n_high_partial).min(next_segment_len);
                let (next_segment, high_next_partial) = next_segment.take(n_from_next);
                segments[i + 1] = Some(next_segment);
                (high_next_partial, segment)
            } else {
                // Last segment or limb-aligned boundary: splitting at the
                // full length yields an empty high_next_partial.
                let (high_next_partial, segment) = segment.take(segment_len);
                (high_next_partial, segment)
            };

            let high_next_partial_len = high_next_partial.len();
            n_bytes_total += high_next_partial_len;
            // Exclusive upper bound of the limb indices this segment covers
            // within the composed integer.
            let end = ct_mp_nlimbs(n_bytes_total);
            CompositeLimbsBufferSegment {
                end,
                segment,
                high_next_partial,
                _phantom: marker::PhantomData,
            }
        };

        let segments: [CompositeLimbsBufferSegment<'a, ST>; N_SEGMENTS] =
            core::array::from_fn(&mut create_segment);
        Self { segments }
    }

    /// Map a limb index to the index of the segment holding it, plus that
    /// segment's first limb position within the composed integer.
    fn limb_index_to_segment(&self, i: usize) -> (usize, usize) {
        let mut segment_offset = 0;
        for segment_index in 0..N_SEGMENTS {
            let segment_end = self.segments[segment_index].end;
            if i < segment_end {
                return (segment_index, segment_offset);
            } else {
                segment_offset = segment_end;
            }
        }
        // Callers must pass an in-bounds limb index.
        unreachable!();
    }

    /// Load the limb at index `i` from the composed integer.
    pub fn load(&self, i: usize) -> LimbType {
        let (segment_index, segment_offset) = self.limb_index_to_segment(i);
        let segment = &self.segments[segment_index];
        let segment_slice = &segment.segment;
        if i != segment.end - 1 || !ST::SUPPORTS_UNALIGNED_BUFFER_LENGTHS {
            // Not the segment's topmost limb (or buffer lengths are always
            // limb-aligned): the limb lives entirely within this segment.
            segment_slice.load_l_full(i - segment_offset)
        } else if segment_index + 1 == N_SEGMENTS || segment_slice.len() % LIMB_BYTES == 0 {
            // Topmost limb of the last segment, or a limb-aligned boundary:
            // nothing was borrowed from subsequent segments.
            segment_slice.load_l(i - segment_offset)
        } else {
            // Topmost, partial limb straddling a segment boundary: assemble
            // it from this segment's remaining bytes and the parts borrowed
            // from the subsequent segment(s).
            let mut npartial = segment_slice.len() % LIMB_BYTES;
            let mut value = segment_slice.load_l(i - segment_offset);
            let mut segment_index = segment_index;
            while npartial != LIMB_BYTES && segment_index < self.segments.len() {
                let partial = &self.segments[segment_index].high_next_partial;
                if !partial.is_empty() {
                    value |= partial.load_l(0) << (8 * npartial);
                    npartial += partial.len();
                }
                segment_index += 1;
            }
            value
        }
    }
}
2736
impl<'a, ST: MpMutUIntSlice, const N_SEGMENTS: usize> CompositeLimbsBuffer<'a, ST, N_SEGMENTS> {
    /// Store `value` as the limb at index `i` of the composed integer.
    pub fn store(&mut self, i: usize, value: LimbType) {
        let (segment_index, segment_offset) = self.limb_index_to_segment(i);
        let segment = &mut self.segments[segment_index];
        let segment_slice = &mut segment.segment;
        if i != segment.end - 1 || !ST::SUPPORTS_UNALIGNED_BUFFER_LENGTHS {
            // Not the segment's topmost limb (or buffer lengths are always
            // limb-aligned): the limb lives entirely within this segment.
            segment_slice.store_l_full(i - segment_offset, value);
        } else if segment_index + 1 == N_SEGMENTS || segment_slice.len() % LIMB_BYTES == 0 {
            // Topmost limb of the last segment, or a limb-aligned boundary:
            // nothing was borrowed from subsequent segments.
            segment_slice.store_l(i - segment_offset, value)
        } else {
            // Topmost, partial limb straddling a segment boundary: scatter
            // the value over this segment's remaining bytes and the parts
            // borrowed from the subsequent segment(s), lowest bits first.
            let mut value = value;
            let mut npartial = segment_slice.len() % LIMB_BYTES;
            let value_mask = ct_lsb_mask_l(8 * npartial as u32);
            segment_slice.store_l(i - segment_offset, value & value_mask);
            value >>= 8 * npartial;
            let mut segment_index = segment_index;
            while npartial != LIMB_BYTES && segment_index < self.segments.len() {
                let partial = &mut self.segments[segment_index].high_next_partial;
                if !partial.is_empty() {
                    let value_mask = ct_lsb_mask_l(8 * partial.len() as u32);
                    partial.store_l(0, value & value_mask);
                    value >>= 8 * partial.len();
                    npartial += partial.len();
                }
                segment_index += 1;
            }
            // Every bit of the limb must have found a home by now.
            debug_assert!(value == 0);
        }
    }
}
2779
#[test]
fn test_composite_limbs_buffer_load_be() {
    // Three big-endian segments; buf1 is deliberately empty and buf0's
    // length is one byte short of limb-aligned, so limb 1 straddles the
    // buf0/buf2 boundary.
    let mut buf0: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    let buf1: [u8; 0] = [0; 0];
    let mut buf2: [u8; 2 * LIMB_BYTES + 2] = [0; 2 * LIMB_BYTES + 2];

    // Marker bytes ending up in limb 0 (see the l0 assertion below).
    buf0[LIMB_BYTES - 1 + LIMB_BYTES / 2] = 0x1;
    buf0[LIMB_BYTES - 1] = 0x2;

    // Marker bytes ending up in limb 1: buf0's high bytes plus the bytes
    // borrowed from buf2's tail (buf1 being empty contributes nothing).
    buf0[LIMB_BYTES / 2 - 1] = 0x3;
    buf0[0] = 0x4;
    buf2[1 + 2 * LIMB_BYTES] = 0x5;

    // Marker bytes ending up in limb 2.
    buf2[1 + LIMB_BYTES + LIMB_BYTES / 2] = 0x6;
    buf2[1 + LIMB_BYTES] = 0x7;

    // Marker bytes ending up in limb 3.
    buf2[1 + LIMB_BYTES / 2] = 0x8;
    buf2[1] = 0x9;

    // Marker byte ending up in the topmost, partial limb 4.
    buf2[0] = 0xa;

    let buf0 = MpBigEndianUIntByteSlice::from_bytes(buf0.as_slice());
    let buf1 = MpBigEndianUIntByteSlice::from_bytes(buf1.as_slice());
    let buf2 = MpBigEndianUIntByteSlice::from_bytes(buf2.as_slice());
    let limbs = CompositeLimbsBuffer::new([buf0, buf1, buf2]);

    let l0 = limbs.load(0);
    assert_eq!(l0, 0x2 << LIMB_BITS - 8 | 0x1 << LIMB_BITS / 2 - 8);
    let l1 = limbs.load(1);
    assert_eq!(l1, 0x0504 << LIMB_BITS - 16 | 0x3 << LIMB_BITS / 2 - 8);
    let l2 = limbs.load(2);
    assert_eq!(l2, 0x7 << LIMB_BITS - 8 | 0x6 << LIMB_BITS / 2 - 8);
    let l3 = limbs.load(3);
    assert_eq!(l3, 0x9 << LIMB_BITS - 8 | 0x8 << LIMB_BITS / 2 - 8);
    let l4 = limbs.load(4);
    assert_eq!(l4, 0xa);
}
2822
#[test]
fn test_composite_limbs_buffer_load_le() {
    // Little-endian mirror of test_composite_limbs_buffer_load_be(): same
    // segment geometry (empty buf1, unaligned buf0), same expected limb
    // values, with the marker byte positions adjusted for LE byte order.
    let mut buf0: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    let buf1: [u8; 0] = [0; 0];
    let mut buf2: [u8; 2 * LIMB_BYTES + 2] = [0; 2 * LIMB_BYTES + 2];

    // Marker bytes ending up in limb 0 (see the l0 assertion below).
    buf0[LIMB_BYTES / 2 - 1] = 0x1;
    buf0[LIMB_BYTES - 1] = 0x2;

    // Marker bytes ending up in limb 1: buf0's high bytes plus the bytes
    // borrowed from buf2's start (buf1 being empty contributes nothing).
    buf0[LIMB_BYTES + LIMB_BYTES / 2 - 1] = 0x3;
    buf0[2 * LIMB_BYTES - 2] = 0x4;
    buf2[0] = 0x5;

    // Marker bytes ending up in limb 2.
    buf2[1 + LIMB_BYTES / 2 - 1] = 0x6;
    buf2[1 + LIMB_BYTES - 1] = 0x7;

    // Marker bytes ending up in limb 3.
    buf2[1 + LIMB_BYTES + LIMB_BYTES / 2 - 1] = 0x8;
    buf2[1 + 2 * LIMB_BYTES - 1] = 0x9;

    // Marker byte ending up in the topmost, partial limb 4.
    buf2[1 + 2 * LIMB_BYTES] = 0xa;

    let buf0 = MpLittleEndianUIntByteSlice::from_bytes(buf0.as_slice());
    let buf1 = MpLittleEndianUIntByteSlice::from_bytes(buf1.as_slice());
    let buf2 = MpLittleEndianUIntByteSlice::from_bytes(buf2.as_slice());
    let limbs = CompositeLimbsBuffer::new([buf0, buf1, buf2]);

    let l0 = limbs.load(0);
    assert_eq!(l0, 0x2 << LIMB_BITS - 8 | 0x1 << LIMB_BITS / 2 - 8);
    let l1 = limbs.load(1);
    assert_eq!(l1, 0x0504 << LIMB_BITS - 16 | 0x3 << LIMB_BITS / 2 - 8);
    let l2 = limbs.load(2);
    assert_eq!(l2, 0x7 << LIMB_BITS - 8 | 0x6 << LIMB_BITS / 2 - 8);
    let l3 = limbs.load(3);
    assert_eq!(l3, 0x9 << LIMB_BITS - 8 | 0x8 << LIMB_BITS / 2 - 8);
    let l4 = limbs.load(4);
    assert_eq!(l4, 0xa);
}
2865
#[test]
fn test_composite_limbs_buffer_load_ne() {
    // Compose a native-endian limbs buffer from three parts, with the middle
    // one empty, and verify that limbs read back in segment order.
    let mut part0 = [0 as LimbType; 2];
    let part1 = [0 as LimbType; 0];
    let mut part2 = [0 as LimbType; 2 * LIMB_BYTES];

    part0[0] = 0x1;
    part0[1] = 0x2;
    part2[0] = 0x3;
    part2[1] = 0x4;

    let limbs = CompositeLimbsBuffer::new([
        MpNativeEndianUIntLimbsSlice::from_limbs(part0.as_slice()),
        MpNativeEndianUIntLimbsSlice::from_limbs(part1.as_slice()),
        MpNativeEndianUIntLimbsSlice::from_limbs(part2.as_slice()),
    ]);

    for (i, expected) in [0x1 as LimbType, 0x2, 0x3, 0x4].into_iter().enumerate() {
        assert_eq!(limbs.load(i), expected);
    }
}
2891
#[cfg(test)]
fn test_composite_limbs_buffer_store_with_unaligned_lengths<
    ST: MpMutUIntSlice<BackingSliceElementType = u8>,
>() {
    // This test only makes sense for buffer types that can represent
    // lengths which are not a multiple of the limb size.
    debug_assert_eq!(ST::SUPPORTS_UNALIGNED_BUFFER_LENGTHS, true);

    // Same segment geometry as the load tests above: buf0 one byte short of
    // limb-aligned, buf1 empty, so limbs straddle segment boundaries.
    let mut buf0: [u8; 2 * LIMB_BYTES - 1] = [0; 2 * LIMB_BYTES - 1];
    let mut buf1: [u8; 0] = [0; 0];
    let mut buf2: [u8; 2 * LIMB_BYTES + 2] = [0; 2 * LIMB_BYTES + 2];
    let buf0 = ST::from_slice(&mut buf0).unwrap();
    let buf1 = ST::from_slice(&mut buf1).unwrap();
    let buf2 = ST::from_slice(&mut buf2).unwrap();
    let mut limbs = CompositeLimbsBuffer::new([buf0, buf1, buf2]);

    // Limb values with distinctive marker bytes, mirroring the expected
    // values from the load tests above.
    let l0 = 0x2 << LIMB_BITS - 8 | 0x1 << LIMB_BITS / 2 - 8;
    let l1 = 0x0504 << LIMB_BITS - 16 | 0x3 << LIMB_BITS / 2 - 8;
    let l2 = 0x7 << LIMB_BITS - 8 | 0x6 << LIMB_BITS / 2 - 8;
    let l3 = 0x9 << LIMB_BITS - 8 | 0x8 << LIMB_BITS / 2 - 8;
    let l4 = 0xa;

    // Store all limbs, then verify they read back unchanged, i.e. that
    // store() and load() agree on the scattering across segments.
    limbs.store(0, l0);
    limbs.store(1, l1);
    limbs.store(2, l2);
    limbs.store(3, l3);
    limbs.store(4, l4);
    assert_eq!(l0, limbs.load(0));
    assert_eq!(l1, limbs.load(1));
    assert_eq!(l2, limbs.load(2));
    assert_eq!(l3, limbs.load(3));
    assert_eq!(l4, limbs.load(4));
}
2923
#[cfg(test)]
fn test_composite_limbs_buffer_store_with_aligned_lengths<ST: MpMutUIntSlice>() {
    // Compose a buffer from two limb-aligned segments with an empty one in
    // between, store distinctive limb values, and verify that they read
    // back unchanged.
    let mut backing0 = tst_mk_mp_backing_vec!(ST, 2 * LIMB_BYTES);
    let mut backing1 = tst_mk_mp_backing_vec!(ST, 0);
    let mut backing2 = tst_mk_mp_backing_vec!(ST, 2 * LIMB_BYTES);
    let seg0 = ST::from_slice(&mut backing0).unwrap();
    let seg1 = ST::from_slice(&mut backing1).unwrap();
    let seg2 = ST::from_slice(&mut backing2).unwrap();
    let mut limbs = CompositeLimbsBuffer::new([seg0, seg1, seg2]);

    // Limb values with distinctive marker bytes in the top and middle
    // byte positions of each limb.
    let values = [
        0x2 << (LIMB_BITS - 8) | 0x1 << (LIMB_BITS / 2 - 8),
        0x0504 << (LIMB_BITS - 16) | 0x3 << (LIMB_BITS / 2 - 8),
        0x7 << (LIMB_BITS - 8) | 0x6 << (LIMB_BITS / 2 - 8),
        0x9 << (LIMB_BITS - 8) | 0x8 << (LIMB_BITS / 2 - 8),
    ];

    for (i, value) in values.iter().enumerate() {
        limbs.store(i, *value);
    }
    for (i, value) in values.iter().enumerate() {
        assert_eq!(limbs.load(i), *value);
    }
}
2948
#[test]
fn test_composite_limbs_buffer_store_be() {
    // Run both store test variants on big-endian byte slices.
    type T = MpMutBigEndianUIntByteSlice;
    test_composite_limbs_buffer_store_with_unaligned_lengths::<T>();
    test_composite_limbs_buffer_store_with_aligned_lengths::<T>();
}
2954
#[test]
fn test_composite_limbs_buffer_store_le() {
    // Run both store test variants on little-endian byte slices.
    type T = MpMutLittleEndianUIntByteSlice;
    test_composite_limbs_buffer_store_with_unaligned_lengths::<T>();
    test_composite_limbs_buffer_store_with_aligned_lengths::<T>();
}
2960
#[test]
fn test_composite_limbs_buffer_store_ne() {
    // Native-endian limb slices only support limb-aligned lengths, so the
    // unaligned store test variant does not apply here.
    type T = MpMutNativeEndianUIntLimbsSlice;
    test_composite_limbs_buffer_store_with_aligned_lengths::<T>();
}
2965
2966pub fn limb_slice_as_bytes_mut(limbs: &mut [LimbType]) -> &mut [u8] {
2967 let len = mem::size_of_val(limbs);
2968 let ptr = limbs.as_mut_ptr() as *mut u8;
2969 unsafe { slice::from_raw_parts_mut(ptr, len) }
2970}
2971
2972pub fn limb_slice_as_bytes(limbs: &[LimbType]) -> &[u8] {
2973 let len = mem::size_of_val(limbs);
2974 let ptr = limbs.as_ptr() as *const u8;
2975 unsafe { slice::from_raw_parts(ptr, len) }
2976}