scrypt_opt/
pipeline.rs

1use crate::{
2    Align64, RoMix,
3    fixed_r::{Block, BufferSet},
4    pbkdf2_1::{CreatePbkdf2HmacSha256State, Pbkdf2HmacSha256State},
5};
6use core::num::{NonZeroU8, NonZeroU32, NonZeroU64};
7use generic_array::{
8    ArrayLength,
9    typenum::{NonZero, U1},
10};
11
// Comparison-operator bitflags for the const `OP` parameter of the `test*`
// search functions. `CMP_LE`/`CMP_GE` are unions of the primitive flags.
/// Equal to
pub const CMP_EQ: u32 = 0x00000001;
/// Less than
pub const CMP_LT: u32 = 0x00000002;
/// Greater than
pub const CMP_GT: u32 = 0x00000004;
/// Less than or equal to
pub const CMP_LE: u32 = CMP_EQ | CMP_LT;
/// Greater than or equal to
pub const CMP_GE: u32 = CMP_EQ | CMP_GT;
22
/// Empty `#[cold]` marker function: calling it inside a branch hints to the
/// optimizer that the branch is rarely taken. Used on the success path of the
/// searches below, which fires at most once per run.
#[cold]
fn unlikely() {}
25
/// A context for a pipeline computation.
///
/// `S` is caller-supplied state shared by every computation in one pipeline
/// run; `K` is the early-exit value that `drain` may return to stop the
/// pipeline and propagate a result.
///
/// A pass-through impl is provided below for
/// `(&'a Align64<Block<R>>, &'b mut Align64<Block<R>>)` (copy input in, copy
/// the raw salt output back out). Additional impls (e.g. byte-block variants)
/// may exist elsewhere in the crate — not visible from this file.
pub trait PipelineContext<
    S,
    Q: AsRef<[Align64<Block<R>>]> + AsMut<[Align64<Block<R>>]>,
    R: ArrayLength + NonZero,
    K,
>
{
    /// Called to initialize each computation (hydrate the buffer set's input).
    fn begin(&mut self, state: &mut S, buffer_set: &mut BufferSet<Q, R>);

    /// Called to process the result of each computation.
    ///
    /// Returns `Some(K)` if the computation should be terminated.
    fn drain(self, state: &mut S, buffer_set: &mut BufferSet<Q, R>) -> Option<K>;
}
44
// Pass-through context: the first tuple element is the input block copied into
// the buffer set, the second receives the raw salt output when the computation
// finishes. Always returns `None`, so it never terminates the pipeline early.
impl<
    'a,
    'b,
    S,
    Q: AsRef<[Align64<Block<R>>]> + AsMut<[Align64<Block<R>>]>,
    R: ArrayLength + NonZero,
> PipelineContext<S, Q, R, ()> for (&'a Align64<Block<R>>, &'b mut Align64<Block<R>>)
{
    // Seed the buffer set's input block from the borrowed input.
    #[inline(always)]
    fn begin(&mut self, _state: &mut S, buffer_set: &mut BufferSet<Q, R>) {
        buffer_set.input_buffer_mut().copy_from_slice(self.0);
    }

    // Copy the finished salt output into the caller-provided output block.
    #[inline(always)]
    fn drain(self, _state: &mut S, buffer_set: &mut BufferSet<Q, R>) -> Option<()> {
        self.1.copy_from_slice(buffer_set.raw_salt_output());
        None
    }
}
64
/// Brute force the scrypt function for a masked test for a given target and nonce generator at a given offset with a compile-time R and a fixed P of 1.
///
/// Fast path used by `test_static` when `p == 1`: each nonce needs exactly one
/// RoMix pass, so the whole search can be expressed as a single
/// `BufferSet::pipeline` run with one `PipelineContext` per candidate nonce.
///
/// Returns `Some((nonce, hmac_state))` for the first nonce whose masked 8-byte
/// big-endian window at byte `offset` satisfies `OP` against `target`, or
/// `None` when the generator is exhausted first.
///
/// # Panics
///
/// Panics if `OP` is not one of the `CMP_*` constants.
fn test_static_p1<
    const OP: u32,
    Q: AsRef<[Align64<crate::fixed_r::Block<R>>]> + AsMut<[Align64<crate::fixed_r::Block<R>>]>,
    R: ArrayLength + NonZero,
    N: CreatePbkdf2HmacSha256State,
>(
    buffer_sets: [&mut crate::fixed_r::BufferSet<Q, R>; 2],
    salt: &[u8],
    mask: NonZeroU64,
    target: u64,
    offset: usize,
    nonce_generator: impl IntoIterator<Item = N>,
) -> Option<(N, Pbkdf2HmacSha256State)> {
    // Validate the comparison selector before doing any work.
    match OP {
        CMP_EQ | CMP_LT | CMP_GT | CMP_LE | CMP_GE => {}
        _ => panic!("invalid OP: {}", OP),
    }

    // Read-only search parameters shared by every pipelined computation.
    struct State<'a, R: ArrayLength + NonZero> {
        mask: NonZeroU64,
        target: u64,
        offset: usize,
        salt: &'a [u8],
        _marker: core::marker::PhantomData<R>,
    }

    let mut state = State {
        mask,
        target,
        offset,
        salt,
        _marker: core::marker::PhantomData,
    };

    // One in-flight candidate: the nonce plus its derived PBKDF2/HMAC state.
    struct NonceState<R: ArrayLength + NonZero, const OP: u32, N> {
        nonce: N,
        hmac_state: Pbkdf2HmacSha256State,
        _marker: core::marker::PhantomData<R>,
    }

    impl<
        'a,
        const OP: u32,
        Q: AsRef<[Align64<crate::fixed_r::Block<R>>]> + AsMut<[Align64<crate::fixed_r::Block<R>>]>,
        R: ArrayLength + NonZero,
        N: CreatePbkdf2HmacSha256State,
    > PipelineContext<State<'a, R>, Q, R, (N, Pbkdf2HmacSha256State)> for NonceState<R, OP, N>
    {
        // Seed the buffer set's input from this nonce's HMAC state and the salt.
        #[inline(always)]
        fn begin(
            &mut self,
            pipeline_state: &mut State<'a, R>,
            buffer_set: &mut crate::fixed_r::BufferSet<Q, R>,
        ) {
            buffer_set.set_input(&self.hmac_state, &pipeline_state.salt);
        }

        // Inspect the finished RoMix output; returning `Some` stops the search.
        #[inline(always)]
        fn drain(
            self,
            pipeline_state: &mut State<'a, R>,
            buffer_set: &mut crate::fixed_r::BufferSet<Q, R>,
        ) -> Option<(N, Pbkdf2HmacSha256State)> {
            // Extract only the 8 bytes at `offset` rather than the full output.
            let mut output = [0u8; 8];
            self.hmac_state.partial_gather(
                [buffer_set.raw_salt_output()],
                pipeline_state.offset,
                &mut output,
            );

            let t = u64::from_be_bytes(output) & pipeline_state.mask.get();

            // `OP` is a const generic, so all non-matching arms are dead code.
            let succeeded = match OP {
                CMP_EQ => t == pipeline_state.target,
                CMP_LT => t < pipeline_state.target,
                CMP_GT => t > pipeline_state.target,
                CMP_LE => t <= pipeline_state.target,
                CMP_GE => t >= pipeline_state.target,
                _ => unreachable!(),
            };
            if succeeded {
                unlikely();
                // Clone the per-nonce state and fold in the full salt output so
                // the caller can emit the complete scrypt result later.
                let mut output_hmac_state = self.hmac_state.clone();
                // SAFETY (assumed): `raw_salt_output()` points at one properly
                // aligned `Align64<Block<R>>`, so viewing it as a one-element
                // slice of that type is sound — upheld by `BufferSet`; confirm
                // against its definition.
                output_hmac_state.ingest_salt(unsafe {
                    core::slice::from_raw_parts(
                        buffer_set
                            .raw_salt_output()
                            .as_ptr()
                            .cast::<Align64<crate::fixed_r::Block<R>>>(),
                        1,
                    )
                });
                return Some((self.nonce, output_hmac_state));
            }

            None
        }
    }

    let [buffer_set0, buffer_set1] = buffer_sets;

    // Drive both buffer sets through the software pipeline; `begin`/`drain`
    // above are invoked once per nonce produced by the generator.
    buffer_set0.pipeline(
        buffer_set1,
        nonce_generator.into_iter().map(|i| NonceState::<R, OP, N> {
            hmac_state: i.create_pbkdf2_hmac_sha256_state(),
            nonce: i,
            _marker: core::marker::PhantomData,
        }),
        &mut state,
    )
}
177
178/// Brute force the scrypt function for a masked test for a given target and nonce generator at a given offset with a compile-time R and a runtime P.
179pub fn test_static<
180    const OP: u32,
181    Q: AsRef<[Align64<crate::fixed_r::Block<R>>]> + AsMut<[Align64<crate::fixed_r::Block<R>>]>,
182    R: ArrayLength + NonZero,
183    N: CreatePbkdf2HmacSha256State,
184>(
185    buffer_sets: [&mut crate::fixed_r::BufferSet<Q, R>; 2],
186    p: NonZeroU32,
187    salt: &[u8],
188    mask: NonZeroU64,
189    target: u64,
190    offset: usize,
191    nonce_generator: impl IntoIterator<Item = N>,
192) -> Option<(N, Pbkdf2HmacSha256State)> {
193    match OP {
194        CMP_EQ | CMP_LT | CMP_GT | CMP_LE | CMP_GE => {}
195        _ => panic!("invalid OP: {}", OP),
196    }
197
198    if p.get() == 1 {
199        return test_static_p1::<OP, Q, R, N>(
200            buffer_sets,
201            salt,
202            mask,
203            target,
204            offset,
205            nonce_generator,
206        );
207    }
208
209    let [mut buffer_set0, mut buffer_set1] = buffer_sets;
210
211    let mut nonce_generator = nonce_generator.into_iter();
212
213    let mut current_nonce = nonce_generator.next()?;
214    let mut current_hmac_state = current_nonce.create_pbkdf2_hmac_sha256_state();
215    let mut output_hmac_state = current_hmac_state.clone();
216
217    // prologue of the global pipeline - hydrate the leading buffer set
218    current_hmac_state.emit_scatter(salt, [buffer_set0.input_buffer_mut()]);
219    buffer_set0.ro_mix_front();
220
221    loop {
222        // complete the current chunk except the last RoMixBack to join it with the first RoMixFront of the next chunk
223        for chunk_idx in 1..p.get() {
224            current_hmac_state.emit_scatter_offset(
225                salt,
226                [buffer_set1.input_buffer_mut()],
227                chunk_idx * 4 * R::U32,
228            );
229
230            buffer_set0.ro_mix_interleaved(&mut buffer_set1);
231
232            output_hmac_state.ingest_salt(std::slice::from_ref(buffer_set0.raw_salt_output()));
233
234            (buffer_set0, buffer_set1) = (buffer_set1, buffer_set0);
235        }
236
237        // figure out the next nonce and hmac state
238        let new_state = if let Some(next_nonce) = nonce_generator.next() {
239            let new_hmac_state = next_nonce.create_pbkdf2_hmac_sha256_state();
240            new_hmac_state.emit_scatter(salt, [buffer_set1.input_buffer_mut()]);
241
242            buffer_set0.ro_mix_interleaved(&mut buffer_set1);
243
244            Some((next_nonce, new_hmac_state))
245        } else {
246            buffer_set0.ro_mix_back();
247            None
248        };
249
250        // check the output of the current chunk
251        let mut tmp_output = [0u8; 8];
252
253        output_hmac_state.partial_gather([buffer_set0.raw_salt_output()], offset, &mut tmp_output);
254        let t = u64::from_be_bytes(tmp_output) & mask.get();
255
256        if match OP {
257            CMP_EQ => t == target,
258            CMP_LT => t < target,
259            CMP_GT => t > target,
260            CMP_LE => t <= target,
261            CMP_GE => t >= target,
262            _ => unreachable!(),
263        } {
264            unlikely();
265            output_hmac_state.ingest_salt(std::slice::from_ref(buffer_set0.raw_salt_output()));
266            return Some((current_nonce, output_hmac_state));
267        }
268
269        let Some((next_nonce, new_hmac_state)) = new_state else {
270            return None;
271        };
272
273        // rearrange variables for the next iteration
274        {
275            current_nonce = next_nonce;
276            current_hmac_state = new_hmac_state;
277            output_hmac_state = current_hmac_state.clone();
278
279            (buffer_set0, buffer_set1) = (buffer_set1, buffer_set0);
280        }
281    }
282}
283
/// Brute force the scrypt function for a masked test for a given target and nonce generator at a given offset with a runtime R and P.
///
/// Same two-stage pipeline as `test_static`, but over a flat slice of unit
/// blocks with `r` and the cost factor `cf` supplied at runtime. The slice is
/// split into two disjoint halves of `r * ((1 << cf) + 2)` elements each, one
/// per pipeline stage.
///
/// Returns `Some((nonce, hmac_state))` for the first nonce whose masked 8-byte
/// big-endian window at byte `offset` satisfies `OP` against `target`, or
/// `None` when `nonce_generator` is exhausted.
///
/// # Panics
///
/// Panics if `OP` is not a `CMP_*` constant, or if `buffer_sets` has fewer
/// than `2 * r * ((1 << cf) + 2)` elements.
pub fn test<const OP: u32, N: CreatePbkdf2HmacSha256State>(
    buffer_sets: &mut [Align64<crate::fixed_r::Block<U1>>],
    cf: NonZeroU8,
    r: NonZeroU32,
    p: NonZeroU32,
    salt: &[u8],
    mask: NonZeroU64,
    target: u64,
    offset: usize,
    nonce_generator: impl IntoIterator<Item = N>,
) -> Option<(N, Pbkdf2HmacSha256State)> {
    // Validate the comparison selector before doing any work.
    match OP {
        CMP_EQ | CMP_LT | CMP_GT | CMP_LE | CMP_GE => {}
        _ => panic!("invalid OP: {}", OP),
    }

    // Split the caller's slice into the two disjoint pipeline stages.
    // NOTE(review): `1 << cf.get()` can overflow for cf >= 32 — presumably cf
    // is bounded upstream; confirm.
    let expected_len = (r.get() * ((1 << cf.get()) + 2)).try_into().unwrap();
    let [mut buffer_set0, mut buffer_set1] = buffer_sets
        .get_disjoint_mut([0..expected_len, expected_len..(expected_len * 2)])
        .expect("buffer_sets is not large enough, at least 2 * r * ((1 << cf) + 2) elements are required");

    let mut nonce_generator = nonce_generator.into_iter();

    let mut current_nonce = nonce_generator.next()?;
    let mut current_hmac_state = current_nonce.create_pbkdf2_hmac_sha256_state();
    // Accumulates the salt output of every finished chunk of the current nonce.
    let mut output_hmac_state = current_hmac_state.clone();

    // prologue of the global pipeline - hydrate the leading buffer set
    current_hmac_state.emit_scatter(
        salt,
        buffer_set0
            .ro_mix_input_buffer(r)
            // SAFETY (assumed): each chunk spans exactly one
            // `Align64<Block<U1>>` worth of elements and the backing buffer is
            // 64-byte aligned — presumed upheld by `ro_mix_input_buffer`;
            // confirm against the `RoMix` implementation.
            .chunks_exact_mut(core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>())
            .map(|chunk| unsafe {
                chunk
                    .as_mut_ptr()
                    .cast::<Align64<crate::fixed_r::Block<U1>>>()
                    .as_mut()
                    .unwrap()
            }),
    );
    buffer_set0.ro_mix_front(r, cf);

    loop {
        // complete the current chunk except the last RoMixBack to join it with the first RoMixFront of the next chunk
        for chunk_idx in 1..p.get() {
            // Each chunk reads a distinct, chunk-indexed window of the PBKDF2
            // output stream.
            current_hmac_state.emit_scatter_offset(
                salt,
                buffer_set1
                    .ro_mix_input_buffer(r)
                    // SAFETY (assumed): same block-sized, aligned chunking as
                    // in the prologue above.
                    .chunks_exact_mut(core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>())
                    .map(|chunk| unsafe {
                        chunk
                            .as_mut_ptr()
                            .cast::<Align64<crate::fixed_r::Block<U1>>>()
                            .as_mut()
                            .unwrap()
                    }),
                chunk_idx * 4 * r.get(),
            );

            let salt = buffer_set0.ro_mix_interleaved(&mut buffer_set1, r, cf);

            // SAFETY (assumed): the returned salt region is a whole number of
            // properly aligned `Align64<Block<U1>>` blocks; confirm against
            // `ro_mix_interleaved`.
            output_hmac_state.ingest_salt(unsafe {
                core::slice::from_raw_parts(
                    salt.as_ptr().cast::<Align64<crate::fixed_r::Block<U1>>>(),
                    salt.len() / core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>(),
                )
            });

            (buffer_set0, buffer_set1) = (buffer_set1, buffer_set0);
        }

        // figure out the next nonce and hmac state
        let (salt, new_state) = if let Some(next_nonce) = nonce_generator.next() {
            // Overlap across nonces: hydrate the successor's first chunk while
            // the current nonce's last chunk finishes its back half.
            let new_hmac_state = next_nonce.create_pbkdf2_hmac_sha256_state();
            new_hmac_state.emit_scatter(
                salt,
                buffer_set1
                    .ro_mix_input_buffer(r)
                    // SAFETY (assumed): same block-sized, aligned chunking as
                    // in the prologue above.
                    .chunks_exact_mut(core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>())
                    .map(|chunk| unsafe {
                        chunk
                            .as_mut_ptr()
                            .cast::<Align64<crate::fixed_r::Block<U1>>>()
                            .as_mut()
                            .unwrap()
                    }),
            );

            (
                buffer_set0.ro_mix_interleaved(&mut buffer_set1, r, cf),
                Some((next_nonce, new_hmac_state)),
            )
        } else {
            // No successor: finish the current nonce's last chunk on its own.
            (buffer_set0.ro_mix_back(r, cf), None)
        };

        // check the output of the current chunk
        let mut tmp_output = [0u8; 8];

        output_hmac_state.partial_gather(
            // SAFETY (assumed): same block-sized, aligned chunking as above,
            // shared (read-only) view this time.
            salt.chunks_exact(core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>())
                .map(|block| unsafe {
                    block
                        .as_ptr()
                        .cast::<Align64<crate::fixed_r::Block<U1>>>()
                        .as_ref()
                        .unwrap()
                }),
            offset,
            &mut tmp_output,
        );
        let t = u64::from_be_bytes(tmp_output) & mask.get();

        // `OP` is a const generic, so all non-matching arms are dead code.
        if match OP {
            CMP_EQ => t == target,
            CMP_LT => t < target,
            CMP_GT => t > target,
            CMP_LE => t <= target,
            CMP_GE => t >= target,
            _ => unreachable!(),
        } {
            unlikely();
            // Fold the final chunk's salt in so the returned state can emit the
            // full scrypt output.
            // SAFETY (assumed): same whole-blocks, aligned view as above.
            unsafe {
                output_hmac_state.ingest_salt(core::slice::from_raw_parts(
                    salt.as_ptr().cast::<Align64<crate::fixed_r::Block<U1>>>(),
                    salt.len() / core::mem::size_of::<Align64<crate::fixed_r::Block<U1>>>(),
                ));
            }
            return Some((current_nonce, output_hmac_state));
        }

        let Some((next_nonce, new_hmac_state)) = new_state else {
            return None;
        };

        // rearrange variables for the next iteration
        {
            current_nonce = next_nonce;
            current_hmac_state = new_hmac_state;
            output_hmac_state = current_hmac_state.clone();

            (buffer_set0, buffer_set1) = (buffer_set1, buffer_set0);
        }
    }
}
432
433#[cfg(test)]
434mod tests {
435    use generic_array::typenum::{U1, U2, U3, U4, U8, U16};
436
437    use super::*;
438
    /// Known-answer test: slides a candidate-nonce window around the known
    /// winning nonce and checks that both the compile-time-R (`test_static`)
    /// and runtime-R (`test`) searches find the winner exactly when it lies
    /// inside the window, and that they agree on the resulting HMAC state.
    #[test]
    fn test_pow_kat() {
        // Hex-encoded difficulty target (CMP_LE against the masked prefix).
        let target = "0002";

        let cf = NonZeroU8::new(3).unwrap();
        let r = NonZeroU32::new(8).unwrap();
        let p = NonZeroU32::new(1).unwrap();

        let mut target_u64 = 0u64;
        let mut target_mask = 0u64;

        // Parse the hex target into a (value, mask) pair, one nibble at a time.
        for nibble in target.as_bytes().iter() {
            let addend = match nibble {
                b'0'..=b'9' => nibble - b'0',
                b'A'..=b'F' => nibble - b'A' + 10,
                b'a'..=b'f' => nibble - b'a' + 10,
                _ => panic!("invalid nibble: {}", nibble),
            } as u64;

            target_u64 <<= 4;
            target_u64 |= addend;
            target_mask <<= 4;
            target_mask |= 15;
        }

        // Left-align value and mask within the 64-bit comparison window.
        target_u64 <<= (16 - target.len()) * 4;
        target_mask <<= (16 - target.len()) * 4;

        // Known winning nonce for this salt/target (little-endian bytes).
        let expected_nonce = u64::from_le_bytes([0x11, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]);

        let mut buffer_sets = vec![
            Align64::<crate::fixed_r::Block<U1>>::default();
            2 * r.get() as usize * p.get() as usize * ((1 << cf.get()) + 2)
        ];
        // Sweep start offsets and window lengths so the winner falls inside,
        // outside, and on the edges of the searched range.
        for offset in -5..=5 {
            for len in 0..=8 {
                let includes_nonce = ((expected_nonce as i64 + offset)
                    ..(expected_nonce as i64 + offset + len as i64))
                    .contains(&(expected_nonce as i64));

                let static_result = test_static::<CMP_LE, _, U8, _>(
                    [
                        &mut *crate::fixed_r::BufferSet::new_boxed(cf),
                        &mut *crate::fixed_r::BufferSet::new_boxed(cf),
                    ],
                    NonZeroU32::new(1).unwrap(),
                    &[0x29, 0x39, 0x66, 0x3c, 0x6f, 0x46, 0x15, 0xc3],
                    NonZeroU64::new(target_mask).unwrap(),
                    target_u64,
                    28 / 2, // byte offset 14 into the derived output
                    ((expected_nonce as i64 + offset)..)
                        .map(|i| i as u64)
                        .take(len),
                );

                let dynamic_result = test::<CMP_LE, _>(
                    &mut buffer_sets,
                    cf,
                    r,
                    p,
                    &[0x29, 0x39, 0x66, 0x3c, 0x6f, 0x46, 0x15, 0xc3],
                    NonZeroU64::new(target_mask).unwrap(),
                    target_u64,
                    28 / 2, // byte offset 14 into the derived output
                    ((expected_nonce as i64 + offset)..)
                        .map(|i| i as u64)
                        .take(len),
                );

                // A window that excludes the winner must find nothing.
                if !includes_nonce {
                    assert!(static_result.is_none(), "static_result is not none");
                    assert!(dynamic_result.is_none(), "dynamic_result is not none");
                    continue;
                }

                let (nonce, hmac_state_static) = static_result.unwrap();

                assert_eq!(nonce, expected_nonce);

                let (nonce, hmac_state_dynamic) = dynamic_result.unwrap();

                assert_eq!(nonce, expected_nonce);

                // Both code paths must derive the identical HMAC state.
                assert_eq!(hmac_state_static, hmac_state_dynamic);
            }
        }
    }
526
527    #[test]
528    fn test_pow_high_p() {
529        let target = "002";
530        const SALT: &[u8] = &[0x29, 0x39, 0x66, 0x3c, 0x6f, 0x46, 0x15, 0xc3];
531        for p in 1..=6 {
532            let cf = NonZeroU8::new(3).unwrap();
533            let r = NonZeroU32::new(8).unwrap();
534            let p = NonZeroU32::new(p).unwrap();
535            let params = scrypt::Params::new(cf.get(), r.get(), p.get(), 16).unwrap();
536
537            let mut buffer_sets =
538                vec![
539                    Align64::<crate::fixed_r::Block<U1>>::default();
540                    2 * r.get() as usize * p.get() as usize * ((1 << cf.get()) + 2)
541                ];
542
543            let mut target_u64 = 0u64;
544            let mut target_mask = 0u64;
545
546            for nibble in target.as_bytes().iter() {
547                let addend = match nibble {
548                    b'0'..=b'9' => nibble - b'0',
549                    b'A'..=b'F' => nibble - b'A' + 10,
550                    b'a'..=b'f' => nibble - b'a' + 10,
551                    _ => panic!("invalid nibble: {}", nibble),
552                } as u64;
553
554                target_u64 <<= 4;
555                target_u64 |= addend;
556                target_mask <<= 4;
557                target_mask |= 15;
558            }
559
560            let expected_iterations = target_mask.div_ceil(target_u64 + 1);
561
562            target_u64 <<= (16 - target.len()) * 4;
563            target_mask <<= (16 - target.len()) * 4;
564
565            let (nonce, hmac_state) = test::<CMP_LE, _>(
566                &mut buffer_sets,
567                cf,
568                r,
569                p,
570                SALT,
571                NonZeroU64::new(target_mask).unwrap(),
572                target_u64,
573                0,
574                0..expected_iterations * 100,
575            )
576            .unwrap();
577
578            let mut expected_output = [0u8; 16];
579
580            scrypt::scrypt(&nonce.to_le_bytes(), SALT, &params, &mut expected_output).unwrap();
581
582            let mut output = [0u8; 16];
583            hmac_state.emit(&mut output);
584
585            assert_eq!(output, expected_output);
586            assert!(
587                u64::from_be_bytes(output[0..8].try_into().unwrap()) & target_mask
588                    <= u64::from_be_bytes([0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
589            );
590        }
591    }
592
    /// Generic consistency harness: for several targets and p in 1..=3, the
    /// compile-time-R (`test_static`) and runtime-R (`test`) searches must
    /// find the same nonce and derive the same HMAC state. Instantiated per R
    /// by the `test_pow_consistency_r*` wrappers below.
    fn test_pow_consistency<R: ArrayLength + NonZero>() {
        for target in ["03", "007", "0070"] {
            let cf = NonZeroU8::new(3).unwrap();

            let mut target_u64 = 0u64;
            let mut target_mask = 0u64;

            // Parse the hex target into a (value, mask) pair, one nibble at a time.
            for nibble in target.as_bytes().iter() {
                let addend = match nibble {
                    b'0'..=b'9' => nibble - b'0',
                    b'A'..=b'F' => nibble - b'A' + 10,
                    b'a'..=b'f' => nibble - b'a' + 10,
                    _ => panic!("invalid nibble: {}", nibble),
                } as u64;

                target_u64 <<= 4;
                target_u64 |= addend;
                target_mask <<= 4;
                target_mask |= 15;
            }

            // Expected number of attempts for a uniformly distributed output.
            let expected_iterations = target_mask.div_ceil(target_u64 + 1);

            // Left-align value and mask within the 64-bit comparison window.
            target_u64 <<= (16 - target.len()) * 4;
            target_mask <<= (16 - target.len()) * 4;

            // `test`'s buffer requirement does not depend on p, so one
            // allocation serves every p below.
            let mut buffer_sets = vec![
                Align64::<crate::fixed_r::Block<U1>>::default();
                2 * R::USIZE * 1 as usize * ((1 << cf.get()) + 2)
            ];
            for p in 1..=3 {
                let static_result = test_static::<CMP_LE, _, R, _>(
                    [
                        &mut *crate::fixed_r::BufferSet::new_boxed(cf),
                        &mut *crate::fixed_r::BufferSet::new_boxed(cf),
                    ],
                    NonZeroU32::new(p).unwrap(),
                    &[0x29, 0x39, 0x66, 0x3c, 0x6f, 0x46, 0x15, 0xc3],
                    NonZeroU64::new(target_mask).unwrap(),
                    target_u64,
                    28 / 2, // byte offset 14 into the derived output
                    0..expected_iterations * 100,
                );

                let dynamic_result = test::<CMP_LE, _>(
                    &mut buffer_sets,
                    cf,
                    R::U32.try_into().unwrap(),
                    p.try_into().unwrap(),
                    &[0x29, 0x39, 0x66, 0x3c, 0x6f, 0x46, 0x15, 0xc3],
                    NonZeroU64::new(target_mask).unwrap(),
                    target_u64,
                    28 / 2, // byte offset 14 into the derived output
                    0..expected_iterations * 100,
                );

                let (nonce_static, hmac_state_static) = static_result.unwrap();

                let (nonce_dynamic, hmac_state_dynamic) = dynamic_result.unwrap();

                // Both code paths must agree on the winner and its state.
                assert_eq!(nonce_static, nonce_dynamic);
                assert_eq!(hmac_state_static, hmac_state_dynamic);
            }
        }
    }
658
    // Instantiate the R-generic consistency check for each supported
    // compile-time block size R.

    #[test]
    fn test_pow_consistency_r1() {
        test_pow_consistency::<U1>();
    }

    #[test]
    fn test_pow_consistency_r2() {
        test_pow_consistency::<U2>();
    }

    #[test]
    fn test_pow_consistency_r3() {
        test_pow_consistency::<U3>();
    }

    #[test]
    fn test_pow_consistency_r4() {
        test_pow_consistency::<U4>();
    }

    #[test]
    fn test_pow_consistency_r8() {
        test_pow_consistency::<U8>();
    }

    #[test]
    fn test_pow_consistency_r16() {
        test_pow_consistency::<U16>();
    }
688}