embassy_stm32/cryp/
mod.rs

//! Crypto Accelerator (CRYP)
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
use core::cmp::min;
use core::marker::PhantomData;

use embassy_hal_internal::{Peri, PeripheralType};
use embassy_sync::waitqueue::AtomicWaker;

use crate::dma::{ChannelAndRequest, TransferOptions};
use crate::interrupt::typelevel::Interrupt;
use crate::mode::{Async, Blocking, Mode};
use crate::{interrupt, pac, peripherals, rcc};

const DES_BLOCK_SIZE: usize = 8; // 64 bits
const AES_BLOCK_SIZE: usize = 16; // 128 bits

static CRYP_WAKER: AtomicWaker = AtomicWaker::new();

/// CRYP interrupt handler.
pub struct InterruptHandler<T: Instance> {
    _phantom: PhantomData<T>,
}

impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> {
    unsafe fn on_interrupt() {
        let bits = T::regs().misr().read();
        if bits.inmis() {
            T::regs().imscr().modify(|w| w.set_inim(false));
            CRYP_WAKER.wake();
        }
        if bits.outmis() {
            T::regs().imscr().modify(|w| w.set_outim(false));
            CRYP_WAKER.wake();
        }
    }
}

/// This trait encapsulates all cipher-specific behavior.
pub trait Cipher<'c> {
    /// Processing block size. Determined by the processor and the algorithm.
    const BLOCK_SIZE: usize;

    /// Indicates whether the cipher requires the application to provide padding.
    /// If `true`, no partial blocks will be accepted (a panic will occur).
    const REQUIRES_PADDING: bool = false;

    /// Returns the symmetric key.
    fn key(&self) -> &[u8];

    /// Returns the initialization vector.
    fn iv(&self) -> &[u8];

    /// Sets the processor algorithm mode according to the associated cipher.
    fn set_algomode(&self, p: pac::cryp::Cryp);

    /// Performs any key preparation within the processor, if necessary.
    fn prepare_key(&self, _p: pac::cryp::Cryp) {}

    /// Performs any cipher-specific initialization.
    fn init_phase_blocking<T: Instance, M: Mode>(&self, _p: pac::cryp::Cryp, _cryp: &Cryp<T, M>) {}

    /// Performs any cipher-specific initialization.
    async fn init_phase<T: Instance>(&self, _p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, Async>) {}

    /// Called prior to processing the last data block for cipher-specific operations.
    fn pre_final(&self, _p: pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] {
        [0; 4]
    }

    /// Called after processing the last data block for cipher-specific operations.
    fn post_final_blocking<T: Instance, M: Mode>(
        &self,
        _p: pac::cryp::Cryp,
        _cryp: &Cryp<T, M>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    ) {
    }

    /// Called after processing the last data block for cipher-specific operations.
    async fn post_final<T: Instance>(
        &self,
        _p: pac::cryp::Cryp,
        _cryp: &mut Cryp<'_, T, Async>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    ) {
    }

    /// Returns the AAD header block as required by the cipher.
    fn get_header_block(&self) -> &[u8] {
        &[]
    }
}

/// This trait enables restriction of ciphers to specific key sizes.
pub trait CipherSized {}

/// This trait enables restriction of initialization vectors to sizes compatible with a cipher mode.
pub trait IVSized {}

/// This trait enables restriction of a header phase to authenticated ciphers only.
pub trait CipherAuthenticated<const TAG_SIZE: usize> {
    /// Defines the authentication tag size.
    const TAG_SIZE: usize = TAG_SIZE;
}

/// TDES-ECB Cipher Mode
pub struct TdesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesEcb<'c, KEY_SIZE> {
    /// Constructs a new TDES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(0));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(0));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for TdesEcb<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesEcb<'c, { 168 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for TdesEcb<'c, KEY_SIZE> {}

/// TDES-CBC Cipher Mode
pub struct TdesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesCbc<'c, KEY_SIZE> {
    /// Constructs a new TDES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        Self { key, iv }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(1));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for TdesCbc<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesCbc<'c, { 168 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for TdesCbc<'c, KEY_SIZE> {}

/// DES-ECB Cipher Mode
pub struct DesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesEcb<'c, KEY_SIZE> {
    /// Constructs a new DES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for DesEcb<'c, { 56 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for DesEcb<'c, KEY_SIZE> {}

/// DES-CBC Cipher Mode
pub struct DesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesCbc<'c, KEY_SIZE> {
    /// Constructs a new DES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        Self { key, iv }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(3));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(3));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for DesCbc<'c, { 56 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for DesCbc<'c, KEY_SIZE> {}

/// AES-ECB Cipher Mode
pub struct AesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesEcb<'c, KEY_SIZE> {
    /// Constructs a new AES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesEcb<'c, KEY_SIZE> {}

/// AES-CBC Cipher Mode
pub struct AesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCbc<'c, KEY_SIZE> {
    /// Constructs a new AES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        Self { key, iv }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(5));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(5));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesCbc<'c, KEY_SIZE> {}

/// AES-CTR Cipher Mode
pub struct AesCtr<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCtr<'c, KEY_SIZE> {
    /// Constructs a new AES-CTR cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        Self { key, iv }
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(6));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(6));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-GCM Cipher Mode
pub struct AesGcm<'c, const KEY_SIZE: usize> {
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> {
    /// Constructs a new AES-GCM cipher for a cryptographic operation.
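    ///
    /// # Example
    ///
    /// A minimal sketch of constructing the cipher; the key and nonce values
    /// are placeholders, not test vectors:
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0; 16]; // AES-128; 24- and 32-byte keys also work
    /// let nonce: [u8; 12] = [0; 12]; // 96-bit nonce, must be unique per key
    /// let cipher = AesGcm::new(&key, &nonce);
    /// ```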
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gcm = Self { key, iv: [0; 16] };
        new_gcm.iv[..12].copy_from_slice(iv);
        new_gcm.iv[15] = 2;
        new_gcm
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, M: Mode>(&self, p: pac::cryp::Cryp, _cryp: &Cryp<T, M>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance>(&self, p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, Async>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, M: Mode>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, M>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, Async>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            let read = Cryp::<T, Async>::read_bytes(cryp.outdma.as_mut().unwrap(), Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, Async>::write_bytes(cryp.indma.as_mut().unwrap(), Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;

            int_data.copy_from_slice(&out_data);
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-GMAC Cipher Mode
pub struct AesGmac<'c, const KEY_SIZE: usize> {
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> {
    /// Constructs a new AES-GMAC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gmac = Self { key, iv: [0; 16] };
        new_gmac.iv[..12].copy_from_slice(iv);
        new_gmac.iv[15] = 2;
        new_gmac
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, M: Mode>(&self, p: pac::cryp::Cryp, _cryp: &Cryp<T, M>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance>(&self, p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, Async>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, M: Mode>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, M>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, Async>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            let read = Cryp::<T, Async>::read_bytes(cryp.outdma.as_mut().unwrap(), Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, Async>::write_bytes(cryp.indma.as_mut().unwrap(), Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-CCM Cipher Mode
pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> {
    key: &'c [u8; KEY_SIZE],
    aad_header: [u8; 6],
    aad_header_len: usize,
    block0: [u8; 16],
    ctr: [u8; 16],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> {
    /// Constructs a new AES-CCM cipher for a cryptographic operation.
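    ///
    /// # Example
    ///
    /// A sketch with placeholder values. The `aad_len` and `payload_len`
    /// arguments must match the data later passed to the AAD and payload
    /// phases, since they are encoded into the CCM first block (B0):
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0; 16];
    /// let iv: [u8; 13] = [0; 13]; // CCM nonces of 7..=13 bytes are accepted
    /// // Key size 16, tag size 8, IV size 13:
    /// let cipher: AesCcm<16, 8, 13> = AesCcm::new(&key, &iv, aad.len(), payload.len());
    /// ```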
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self {
        let mut aad_header: [u8; 6] = [0; 6];
        let mut aad_header_len = 0;
        let mut block0: [u8; 16] = [0; 16];
        if aad_len != 0 {
            if aad_len < 65280 {
                aad_header[0] = (aad_len >> 8) as u8 & 0xFF;
                aad_header[1] = aad_len as u8 & 0xFF;
                aad_header_len = 2;
            } else {
                aad_header[0] = 0xFF;
                aad_header[1] = 0xFE;
                let aad_len_bytes: [u8; 4] = (aad_len as u32).to_be_bytes();
                aad_header[2] = aad_len_bytes[0];
                aad_header[3] = aad_len_bytes[1];
                aad_header[4] = aad_len_bytes[2];
                aad_header[5] = aad_len_bytes[3];
                aad_header_len = 6;
            }
        }
        let total_aad_len = aad_header_len + aad_len;
        let mut aad_padding_len = 16 - (total_aad_len % 16);
        if aad_padding_len == 16 {
            aad_padding_len = 0;
        }
        aad_header_len += aad_padding_len;
        let total_aad_len_padded = aad_header_len + aad_len;
        if total_aad_len_padded > 0 {
            block0[0] = 0x40;
        }
        block0[0] |= ((((TAG_SIZE as u8) - 2) >> 1) & 0x07) << 3;
        block0[0] |= ((15 - (iv.len() as u8)) - 1) & 0x07;
        block0[1..1 + iv.len()].copy_from_slice(iv);
        let payload_len_bytes: [u8; 4] = (payload_len as u32).to_be_bytes();
        if iv.len() <= 11 {
            block0[12] = payload_len_bytes[0];
        } else if payload_len_bytes[0] > 0 {
            panic!("Message is too large for given IV size.");
        }
        if iv.len() <= 12 {
            block0[13] = payload_len_bytes[1];
        } else if payload_len_bytes[1] > 0 {
            panic!("Message is too large for given IV size.");
        }
        block0[14] = payload_len_bytes[2];
        block0[15] = payload_len_bytes[3];
        let mut ctr: [u8; 16] = [0; 16];
        ctr[0] = block0[0] & 0x07;
        ctr[1..1 + iv.len()].copy_from_slice(&block0[1..1 + iv.len()]);
        ctr[15] = 0x01;

        Self {
            key,
            aad_header,
            aad_header_len,
            block0,
            ctr,
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c>
    for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE>
{
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.ctr.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(1));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, M: Mode>(&self, p: pac::cryp::Cryp, cryp: &Cryp<T, M>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));

        cryp.write_bytes_blocking(Self::BLOCK_SIZE, &self.block0);

        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance>(&self, p: pac::cryp::Cryp, cryp: &mut Cryp<'_, T, Async>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));

        Cryp::<T, Async>::write_bytes(cryp.indma.as_mut().unwrap(), Self::BLOCK_SIZE, &self.block0).await;

        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    fn get_header_block(&self) -> &[u8] {
        &self.aad_header[0..self.aad_header_len]
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special CCM partial block process.
        let mut temp1 = [0; 4];
        if dir == Direction::Decrypt {
            p.cr().modify(|w| w.set_crypen(false));
            let iv1temp = p.init(1).ivrr().read();
            temp1[0] = p.csgcmccmr(0).read().swap_bytes();
            temp1[1] = p.csgcmccmr(1).read().swap_bytes();
            temp1[2] = p.csgcmccmr(2).read().swap_bytes();
            temp1[3] = p.csgcmccmr(3).read().swap_bytes();
            p.init(1).ivrr().write_value(iv1temp);
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            p.cr().modify(|w| w.set_crypen(true));
        }
        temp1
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special CCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, M: Mode>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, M>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        temp1: [u32; 4],
        padding_mask: [u8; 16],
    ) {
        if dir == Direction::Decrypt {
            // Handle special CCM partial block process.
            let mut temp2 = [0; 4];
            temp2[0] = p.csgcmccmr(0).read().swap_bytes();
            temp2[1] = p.csgcmccmr(1).read().swap_bytes();
            temp2[2] = p.csgcmccmr(2).read().swap_bytes();
            temp2[3] = p.csgcmccmr(3).read().swap_bytes();
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_gcm_ccmph(3));
            // Header phase
            p.cr().modify(|w| w.set_gcm_ccmph(1));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            let mut in_data: [u32; 4] = [0; 4];
            for i in 0..in_data.len() {
                let mut int_bytes: [u8; 4] = [0; 4];
                int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
                let int_word = u32::from_le_bytes(int_bytes);
                in_data[i] = int_word;
                in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
            }
            cryp.write_words_blocking(Self::BLOCK_SIZE, &in_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, Async>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        temp1: [u32; 4],
        padding_mask: [u8; 16],
    ) {
        if dir == Direction::Decrypt {
            // Handle special CCM partial block process.
            let mut temp2 = [0; 4];
            temp2[0] = p.csgcmccmr(0).read().swap_bytes();
            temp2[1] = p.csgcmccmr(1).read().swap_bytes();
            temp2[2] = p.csgcmccmr(2).read().swap_bytes();
            temp2[3] = p.csgcmccmr(3).read().swap_bytes();
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_gcm_ccmph(3));
            // Header phase
            p.cr().modify(|w| w.set_gcm_ccmph(1));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] &= padding_mask[i];
            }
            let mut in_data: [u32; 4] = [0; 4];
            for i in 0..in_data.len() {
                let mut int_bytes: [u8; 4] = [0; 4];
                int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
                let int_word = u32::from_le_bytes(int_bytes);
                in_data[i] = int_word;
                in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
            }
            Cryp::<T, Async>::write_words(cryp.indma.as_mut().unwrap(), Self::BLOCK_SIZE, &in_data).await;
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 11> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {}

#[allow(dead_code)]
/// Holds the state information for a cipher operation.
/// Allows suspending/resuming of cipher operations.
pub struct Context<'c, C: Cipher<'c> + CipherSized> {
    phantom_data: PhantomData<&'c C>,
    cipher: &'c C,
    dir: Direction,
    last_block_processed: bool,
    header_processed: bool,
    aad_complete: bool,
    cr: u32,
    iv: [u32; 4],
    csgcmccm: [u32; 8],
    csgcm: [u32; 8],
    header_len: u64,
    payload_len: u64,
    aad_buffer: [u8; 16],
    aad_buffer_len: usize,
}

/// Selects whether the crypto processor operates in encryption or decryption mode.
#[derive(PartialEq, Clone, Copy)]
pub enum Direction {
    /// Encryption mode
    Encrypt,
    /// Decryption mode
    Decrypt,
}

/// Crypto Accelerator Driver
pub struct Cryp<'d, T: Instance, M: Mode> {
    _peripheral: Peri<'d, T>,
    _phantom: PhantomData<M>,
    indma: Option<ChannelAndRequest<'d>>,
    outdma: Option<ChannelAndRequest<'d>>,
}

impl<'d, T: Instance> Cryp<'d, T, Blocking> {
    /// Create a new CRYP driver in blocking mode.
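    ///
    /// # Example
    ///
    /// A minimal sketch; the peripheral and interrupt names are illustrative
    /// and depend on the selected chip:
    ///
    /// ```ignore
    /// use embassy_stm32::cryp::{Cryp, InterruptHandler};
    /// use embassy_stm32::{bind_interrupts, peripherals};
    ///
    /// bind_interrupts!(struct Irqs {
    ///     CRYP => InterruptHandler<peripherals::CRYP>;
    /// });
    ///
    /// let p = embassy_stm32::init(Default::default());
    /// let cryp = Cryp::new_blocking(p.CRYP, Irqs);
    /// ```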
    pub fn new_blocking(
        peri: Peri<'d, T>,
        _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
    ) -> Self {
        rcc::enable_and_reset::<T>();
        let instance = Self {
            _peripheral: peri,
            _phantom: PhantomData,
            indma: None,
            outdma: None,
        };

        T::Interrupt::unpend();
        unsafe { T::Interrupt::enable() };

        instance
    }
}

impl<'d, T: Instance, M: Mode> Cryp<'d, T, M> {
    /// Start a new encrypt or decrypt operation for the given cipher.
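    ///
    /// # Example
    ///
    /// A sketch of starting a blocking AES-CBC encryption; the key and IV
    /// values are placeholders:
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0; 16];
    /// let iv: [u8; 16] = [0; 16];
    /// let cipher = AesCbc::new(&key, &iv);
    /// let mut ctx = cryp.start_blocking(&cipher, Direction::Encrypt);
    /// ```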
    pub fn start_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &self,
        cipher: &'c C,
        dir: Direction,
    ) -> Context<'c, C> {
        let mut ctx: Context<'c, C> = Context {
            dir,
            last_block_processed: false,
            cr: 0,
            iv: [0; 4],
            csgcmccm: [0; 8],
            csgcm: [0; 8],
            aad_complete: false,
            header_len: 0,
            payload_len: 0,
            cipher,
            phantom_data: PhantomData,
            header_processed: false,
            aad_buffer: [0; 16],
            aad_buffer_len: 0,
        };

        T::regs().cr().modify(|w| w.set_crypen(false));

        let key = ctx.cipher.key();

        if key.len() == (128 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(0));
        } else if key.len() == (192 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(1));
        } else if key.len() == (256 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(2));
        }

        self.load_key(key);

        // Set data type to 8-bit. This will match software implementations.
        T::regs().cr().modify(|w| w.set_datatype(2));

        ctx.cipher.prepare_key(T::regs());

        ctx.cipher.set_algomode(T::regs());

        // Set encrypt/decrypt
        if dir == Direction::Encrypt {
            T::regs().cr().modify(|w| w.set_algodir(false));
        } else {
            T::regs().cr().modify(|w| w.set_algodir(true));
        }

        // Load the IV into the registers.
        let iv = ctx.cipher.iv();
        let mut full_iv: [u8; 16] = [0; 16];
        full_iv[0..iv.len()].copy_from_slice(iv);
        let mut iv_idx = 0;
        let mut iv_word: [u8; 4] = [0; 4];
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));

        // Flush in/out FIFOs
        T::regs().cr().modify(|w| w.fflush());

        ctx.cipher.init_phase_blocking(T::regs(), self);

        self.store_context(&mut ctx);

        ctx
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Controls the header phase of cipher processing.
    /// This function is only valid for authenticated ciphers including GCM, CCM, and GMAC.
    /// All additional associated data (AAD) must be supplied to this function prior to starting the payload phase with `payload_blocking`.
    /// The AAD must be supplied in multiples of the block size (128 bits for AES, 64 bits for DES), except when supplying the last block.
    /// When supplying the last block of AAD, `last_aad_block` must be `true`.
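    ///
    /// # Example
    ///
    /// A sketch of supplying two full AES blocks of AAD in a single call
    /// (`ctx` comes from `start_blocking` with an authenticated cipher):
    ///
    /// ```ignore
    /// let aad: [u8; 32] = [0; 32];
    /// cryp.aad_blocking(&mut ctx, &aad, true);
    /// ```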
    pub fn aad_blocking<
        'c,
        const TAG_SIZE: usize,
        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
    >(
        &self,
        ctx: &mut Context<'c, C>,
        aad: &[u8],
        last_aad_block: bool,
    ) {
        self.load_context(ctx);

        // Perform checks for correctness.
        if ctx.aad_complete {
            panic!("Cannot update AAD after starting payload!")
        }

        ctx.header_len += aad.len() as u64;

        // Header phase
        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
        T::regs().cr().modify(|w| w.set_crypen(true));

        // First write the header B1 block if not yet written.
        if !ctx.header_processed {
            ctx.header_processed = true;
            let header = ctx.cipher.get_header_block();
            ctx.aad_buffer[0..header.len()].copy_from_slice(header);
            ctx.aad_buffer_len += header.len();
        }

        // Fill the header block to make a full block.
        let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
        ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
        ctx.aad_buffer_len += len_to_copy;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        let mut aad_len_remaining = aad.len() - len_to_copy;

        if ctx.aad_buffer_len < C::BLOCK_SIZE {
            // The buffer isn't full and this is the last buffer, so process it as is (already padded).
            if last_aad_block {
                self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
                // Block until input FIFO is empty.
                while !T::regs().sr().read().ifem() {}

                // Switch to payload phase.
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
            } else {
                // Just return because we don't yet have a full block to process.
                return;
            }
        } else {
            // Load the full block from the buffer.
            self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
            // Block until input FIFO is empty.
            while !T::regs().sr().read().ifem() {}
        }

        // Handle a partial block that is passed in.
        ctx.aad_buffer_len = 0;
        let leftovers = aad_len_remaining % C::BLOCK_SIZE;
        ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
        ctx.aad_buffer_len += leftovers;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        aad_len_remaining -= leftovers;
        assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);

        // Load full data blocks into core.
        let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
        let start_index = len_to_copy;
        let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
        self.write_bytes_blocking(C::BLOCK_SIZE, &aad[start_index..end_index]);

        if last_aad_block {
            if leftovers > 0 {
                self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
            }
            // Switch to payload phase.
            ctx.aad_complete = true;
            T::regs().cr().modify(|w| w.set_crypen(false));
            T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
            T::regs().cr().modify(|w| w.fflush());
        }

        self.store_context(ctx);
    }

    /// Performs encryption/decryption on the provided context.
    /// The context determines the algorithm, mode, and state of the crypto accelerator.
    /// When the last piece of data is supplied, `last_block` should be `true`.
    /// This function panics under various mismatches of parameters.
    /// The output buffer must be at least as long as the input buffer.
    /// Data must be a multiple of the block size (128 bits for AES, 64 bits for DES) for CBC and ECB modes.
    /// Padding or ciphertext stealing must be managed by the application for these modes.
    /// Data must also be a multiple of the block size unless `last_block` is `true`.
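    ///
    /// # Example
    ///
    /// A sketch of a single-shot encryption; the buffers are placeholders and
    /// the ciphertext buffer matches the plaintext length:
    ///
    /// ```ignore
    /// let plaintext: [u8; 32] = [0; 32];
    /// let mut ciphertext: [u8; 32] = [0; 32];
    /// cryp.payload_blocking(&mut ctx, &plaintext, &mut ciphertext, true);
    /// ```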
    pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &self,
        ctx: &mut Context<'c, C>,
        input: &[u8],
        output: &mut [u8],
        last_block: bool,
    ) {
        self.load_context(ctx);

        let last_block_remainder = input.len() % C::BLOCK_SIZE;

        // Perform checks for correctness.
        if !ctx.aad_complete && ctx.header_len > 0 {
            panic!("Additional associated data must be processed first!");
        } else if !ctx.aad_complete {
            #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
            {
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
                T::regs().cr().modify(|w| w.set_crypen(true));
            }
        }
        if ctx.last_block_processed {
            panic!("The last block has already been processed!");
        }
        if input.len() > output.len() {
            panic!("Output buffer length must be at least the input length.");
        }
        if !last_block {
            if last_block_remainder != 0 {
                panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
            }
        }
        if C::REQUIRES_PADDING {
            if last_block_remainder != 0 {
                panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
            }
        }
        if last_block {
            ctx.last_block_processed = true;
        }

        // Load data into core, block by block.
        let num_full_blocks = input.len() / C::BLOCK_SIZE;
        for block in 0..num_full_blocks {
            let index = block * C::BLOCK_SIZE;
            // Write block in
            self.write_bytes_blocking(C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
            // Read block out
            self.read_bytes_blocking(C::BLOCK_SIZE, &mut output[index..index + C::BLOCK_SIZE]);
        }

        // Handle the final block, which is incomplete.
        if last_block_remainder > 0 {
            let padding_len = C::BLOCK_SIZE - last_block_remainder;
            let temp1 = ctx.cipher.pre_final(T::regs(), ctx.dir, padding_len);

            let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
            let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
            last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
            self.write_bytes_blocking(C::BLOCK_SIZE, &last_block);
            self.read_bytes_blocking(C::BLOCK_SIZE, &mut intermediate_data);

            // Handle the last block depending on mode.
            let output_len = output.len();
            output[output_len - last_block_remainder..output_len]
                .copy_from_slice(&intermediate_data[0..last_block_remainder]);

            let mut mask: [u8; 16] = [0; 16];
            mask[..last_block_remainder].fill(0xFF);
            ctx.cipher
                .post_final_blocking(T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask);
        }

        ctx.payload_len += input.len() as u64;

        self.store_context(ctx);
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
    /// Called after all data has been encrypted/decrypted by `payload_blocking`.
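    ///
    /// # Example
    ///
    /// A sketch of retrieving a 16-byte GCM tag once the payload phase is
    /// complete:
    ///
    /// ```ignore
    /// let tag: [u8; 16] = cryp.finish_blocking(ctx);
    /// ```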
    pub fn finish_blocking<
        'c,
        const TAG_SIZE: usize,
        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
    >(
        &self,
        mut ctx: Context<'c, C>,
    ) -> [u8; TAG_SIZE] {
        self.load_context(&mut ctx);

        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
        T::regs().cr().modify(|w| w.set_crypen(true));

        let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
        let headerlen2: u32 = (ctx.header_len * 8) as u32;
        let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
        let payloadlen2: u32 = (ctx.payload_len * 8) as u32;

        #[cfg(cryp_v2)]
        let footer: [u32; 4] = [
            headerlen1.swap_bytes(),
            headerlen2.swap_bytes(),
            payloadlen1.swap_bytes(),
            payloadlen2.swap_bytes(),
        ];
        #[cfg(any(cryp_v3, cryp_v4))]
        let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];

        self.write_words_blocking(C::BLOCK_SIZE, &footer);

        while !T::regs().sr().read().ofne() {}

        let mut full_tag: [u8; 16] = [0; 16];
        self.read_bytes_blocking(C::BLOCK_SIZE, &mut full_tag);
        let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
        tag.copy_from_slice(&full_tag[0..TAG_SIZE]);

        T::regs().cr().modify(|w| w.set_crypen(false));

        tag
    }

    fn load_key(&self, key: &[u8]) {
        // Load the key into the registers.
        let mut keyidx = 0;
        let mut keyword: [u8; 4] = [0; 4];
        let keylen = key.len() * 8;
        if keylen > 192 {
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword));
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword));
        }
        if keylen > 128 {
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword));
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword));
        }
        if keylen > 64 {
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword));
            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
            keyidx += 4;
            T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword));
        }
        keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
        keyidx += 4;
        T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword));
        keyword = [0; 4];
        keyword[0..key.len() - keyidx].copy_from_slice(&key[keyidx..key.len()]);
        T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword));
    }

    fn store_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &mut Context<'c, C>) {
        // Wait for data block processing to finish.
        while !T::regs().sr().read().ifem() {}
        while T::regs().sr().read().ofne() {}
        while T::regs().sr().read().busy() {}

        // Disable crypto processor.
        T::regs().cr().modify(|w| w.set_crypen(false));

        // Save the peripheral state.
        ctx.cr = T::regs().cr().read().0;
        ctx.iv[0] = T::regs().init(0).ivlr().read();
        ctx.iv[1] = T::regs().init(0).ivrr().read();
        ctx.iv[2] = T::regs().init(1).ivlr().read();
        ctx.iv[3] = T::regs().init(1).ivrr().read();

        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        for i in 0..8 {
            ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read();
            ctx.csgcm[i] = T::regs().csgcmr(i).read();
        }
    }

    fn load_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &Context<'c, C>) {
        // Reload state registers.
        T::regs().cr().write(|w| w.0 = ctx.cr);
        T::regs().init(0).ivlr().write_value(ctx.iv[0]);
        T::regs().init(0).ivrr().write_value(ctx.iv[1]);
        T::regs().init(1).ivlr().write_value(ctx.iv[2]);
        T::regs().init(1).ivrr().write_value(ctx.iv[3]);

        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        for i in 0..8 {
            T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]);
            T::regs().csgcmr(i).write_value(ctx.csgcm[i]);
        }
        self.load_key(ctx.cipher.key());

        // Prepare key if applicable.
        ctx.cipher.prepare_key(T::regs());
        T::regs().cr().write(|w| w.0 = ctx.cr);

        // Enable crypto processor.
        T::regs().cr().modify(|w| w.set_crypen(true));
    }

    fn write_bytes_blocking(&self, block_size: usize, blocks: &[u8]) {
        // Ensure input is a multiple of block size.
        assert_eq!(blocks.len() % block_size, 0);
        let mut index = 0;
        let end_index = blocks.len();
        while index < end_index {
            let mut in_word: [u8; 4] = [0; 4];
            in_word.copy_from_slice(&blocks[index..index + 4]);
            T::regs().din().write_value(u32::from_ne_bytes(in_word));
            index += 4;
            if index % block_size == 0 {
                // Block until input FIFO is empty.
                while !T::regs().sr().read().ifem() {}
            }
        }
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    fn write_words_blocking(&self, block_size: usize, blocks: &[u32]) {
        assert_eq!((blocks.len() * 4) % block_size, 0);
        let mut byte_counter: usize = 0;
        for word in blocks {
            T::regs().din().write_value(*word);
            byte_counter += 4;
            if byte_counter % block_size == 0 {
                // Block until input FIFO is empty.
                while !T::regs().sr().read().ifem() {}
            }
        }
    }

    fn read_bytes_blocking(&self, block_size: usize, blocks: &mut [u8]) {
        // Block until there is output to read.
        while !T::regs().sr().read().ofne() {}
        // Ensure output is a multiple of block size.
        assert_eq!(blocks.len() % block_size, 0);
        // Read block out
        let mut index = 0;
        let end_index = blocks.len();
        while index < end_index {
            let out_word: u32 = T::regs().dout().read();
            blocks[index..index + 4].copy_from_slice(u32::to_ne_bytes(out_word).as_slice());
            index += 4;
        }
    }
}

impl<'d, T: Instance> Cryp<'d, T, Async> {
    /// Create a new CRYP driver.
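    ///
    /// # Example
    ///
    /// A sketch of constructing the async driver; the DMA channel names are
    /// illustrative and chip-specific:
    ///
    /// ```ignore
    /// let cryp = Cryp::new(p.CRYP, p.DMA2_CH6, p.DMA2_CH5, Irqs);
    /// ```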
    pub fn new(
        peri: Peri<'d, T>,
        indma: Peri<'d, impl DmaIn<T>>,
        outdma: Peri<'d, impl DmaOut<T>>,
        _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
    ) -> Self {
        rcc::enable_and_reset::<T>();
        let instance = Self {
            _peripheral: peri,
            _phantom: PhantomData,
            indma: new_dma!(indma),
            outdma: new_dma!(outdma),
        };

        T::Interrupt::unpend();
        unsafe { T::Interrupt::enable() };

        instance
    }

    /// Start a new encrypt or decrypt operation for the given cipher.
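    ///
    /// # Example
    ///
    /// A sketch assuming the `AesGcm` cipher from this module; the key and IV
    /// sizes must satisfy the `CipherSized`/`IVSized` bounds:
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0; 16];
    /// let iv: [u8; 12] = [0; 12];
    /// let cipher = AesGcm::new(&key, &iv);
    /// let mut ctx = cryp.start(&cipher, Direction::Encrypt).await;
    /// ```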
    pub async fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &mut self,
        cipher: &'c C,
        dir: Direction,
    ) -> Context<'c, C> {
        let mut ctx: Context<'c, C> = Context {
            dir,
            last_block_processed: false,
            cr: 0,
            iv: [0; 4],
            csgcmccm: [0; 8],
            csgcm: [0; 8],
            aad_complete: false,
            header_len: 0,
            payload_len: 0,
            cipher,
            phantom_data: PhantomData,
            header_processed: false,
            aad_buffer: [0; 16],
            aad_buffer_len: 0,
        };

        T::regs().cr().modify(|w| w.set_crypen(false));

        let key = ctx.cipher.key();

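        // Select the AES key size. The KEYSIZE field is only relevant for AES;
        // `CipherSized` guarantees a supported key length.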
        if key.len() == (128 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(0));
        } else if key.len() == (192 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(1));
        } else if key.len() == (256 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(2));
        }

        self.load_key(key);

        // Set data type to 8-bit. This will match software implementations.
        T::regs().cr().modify(|w| w.set_datatype(2));

        ctx.cipher.prepare_key(T::regs());

        ctx.cipher.set_algomode(T::regs());

        // Set encrypt/decrypt.
        if dir == Direction::Encrypt {
            T::regs().cr().modify(|w| w.set_algodir(false));
        } else {
            T::regs().cr().modify(|w| w.set_algodir(true));
        }

        // Load the IV into the registers.
        let iv = ctx.cipher.iv();
        let mut full_iv: [u8; 16] = [0; 16];
        full_iv[0..iv.len()].copy_from_slice(iv);
        let mut iv_idx = 0;
        let mut iv_word: [u8; 4] = [0; 4];
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));

        // Flush in/out FIFOs.
        T::regs().cr().modify(|w| w.fflush());

        ctx.cipher.init_phase(T::regs(), self).await;

        self.store_context(&mut ctx);

        ctx
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Controls the header phase of cipher processing.
    /// This function is only valid for authenticated ciphers, including GCM, CCM, and GMAC.
    /// All additional authenticated data (AAD) must be supplied to this function prior to starting the payload phase with `payload`.
    /// The AAD must be supplied in multiples of the block size (128 bits for AES, 64 bits for DES), except when supplying the last block.
    /// When supplying the last block of AAD, `last_aad_block` must be `true`.
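    ///
    /// # Example
    ///
    /// A sketch feeding 20 bytes of AAD in two chunks; only the final call passes `true`:
    ///
    /// ```ignore
    /// cryp.aad(&mut ctx, &aad[..16], false).await;
    /// cryp.aad(&mut ctx, &aad[16..], true).await;
    /// ```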
    pub async fn aad<
        'c,
        const TAG_SIZE: usize,
        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
    >(
        &mut self,
        ctx: &mut Context<'c, C>,
        aad: &[u8],
        last_aad_block: bool,
    ) {
        self.load_context(ctx);

        // Perform checks for correctness.
        if ctx.aad_complete {
            panic!("Cannot update AAD after starting payload!");
        }

        ctx.header_len += aad.len() as u64;

        // Header phase.
        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
        T::regs().cr().modify(|w| w.set_crypen(true));

        // First write the header B1 block if not yet written.
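        // (Ciphers without a header block, like GCM, return an empty slice here;
        // CCM uses this to emit its encoded AAD length.)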
        if !ctx.header_processed {
            ctx.header_processed = true;
            let header = ctx.cipher.get_header_block();
            ctx.aad_buffer[0..header.len()].copy_from_slice(header);
            ctx.aad_buffer_len += header.len();
        }

        // Fill the header block to make a full block.
        let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
        ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
        ctx.aad_buffer_len += len_to_copy;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        let mut aad_len_remaining = aad.len() - len_to_copy;

        if ctx.aad_buffer_len < C::BLOCK_SIZE {
            // The buffer isn't full and this is the last buffer, so process it as is (already padded).
            if last_aad_block {
                Self::write_bytes(self.indma.as_mut().unwrap(), C::BLOCK_SIZE, &ctx.aad_buffer).await;
                assert!(T::regs().sr().read().ifem());

                // Switch to payload phase.
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
            } else {
                // Just return because we don't yet have a full block to process.
                return;
            }
        } else {
            // Load the full block from the buffer.
            Self::write_bytes(self.indma.as_mut().unwrap(), C::BLOCK_SIZE, &ctx.aad_buffer).await;
            assert!(T::regs().sr().read().ifem());
        }

        // Handle a partial block that is passed in.
        ctx.aad_buffer_len = 0;
        let leftovers = aad_len_remaining % C::BLOCK_SIZE;
        ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..]);
        ctx.aad_buffer_len += leftovers;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        aad_len_remaining -= leftovers;
        assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);

        // Load full data blocks into core.
        let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
        let start_index = len_to_copy;
        let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
        Self::write_bytes(
            self.indma.as_mut().unwrap(),
            C::BLOCK_SIZE,
            &aad[start_index..end_index],
        )
        .await;

        if last_aad_block {
            if leftovers > 0 {
                Self::write_bytes(self.indma.as_mut().unwrap(), C::BLOCK_SIZE, &ctx.aad_buffer).await;
                assert!(T::regs().sr().read().ifem());
            }
            // Switch to payload phase.
            ctx.aad_complete = true;
            T::regs().cr().modify(|w| w.set_crypen(false));
            T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
            T::regs().cr().modify(|w| w.fflush());
        }

        self.store_context(ctx);
    }

    /// Performs encryption/decryption on the provided context.
    /// The context determines the algorithm, mode, and state of the crypto accelerator.
    /// When the last piece of data is supplied, `last_block` must be `true`.
    /// This function panics if these requirements are violated:
    /// the output buffer must be at least as long as the input buffer;
    /// for CBC and ECB modes, data must be a multiple of the block size (128 bits for AES, 64 bits for DES),
    /// and padding or ciphertext stealing must be managed by the application;
    /// for other modes, data must be a multiple of the block size unless `last_block` is `true`.
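    ///
    /// # Example
    ///
    /// A sketch encrypting a 32-byte buffer in a single call:
    ///
    /// ```ignore
    /// let plaintext = [0u8; 32];
    /// let mut ciphertext = [0u8; 32];
    /// cryp.payload(&mut ctx, &plaintext, &mut ciphertext, true).await;
    /// ```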
    pub async fn payload<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &mut self,
        ctx: &mut Context<'c, C>,
        input: &[u8],
        output: &mut [u8],
        last_block: bool,
    ) {
        self.load_context(ctx);

        let last_block_remainder = input.len() % C::BLOCK_SIZE;

        // Perform checks for correctness.
        if !ctx.aad_complete && ctx.header_len > 0 {
            panic!("Additional authenticated data must be processed first!");
        } else if !ctx.aad_complete {
            #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
            {
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
                T::regs().cr().modify(|w| w.set_crypen(true));
            }
        }
        if ctx.last_block_processed {
            panic!("The last block has already been processed!");
        }
        if input.len() > output.len() {
            panic!("Output buffer must be at least as long as the input buffer.");
        }
        if !last_block && last_block_remainder != 0 {
            panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
        }
        if C::REQUIRES_PADDING && last_block_remainder != 0 {
            panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
        }
        if last_block {
            ctx.last_block_processed = true;
        }

        // Load data into core, block by block.
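        // The input and output DMA transfers are joined so the output FIFO is
        // drained while the input FIFO is fed, preventing either side stalling.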
        let num_full_blocks = input.len() / C::BLOCK_SIZE;
        for block in 0..num_full_blocks {
            let index = block * C::BLOCK_SIZE;
            // Read block out.
            let read = Self::read_bytes(
                self.outdma.as_mut().unwrap(),
                C::BLOCK_SIZE,
                &mut output[index..index + C::BLOCK_SIZE],
            );
            // Write block in.
            let write = Self::write_bytes(
                self.indma.as_mut().unwrap(),
                C::BLOCK_SIZE,
                &input[index..index + C::BLOCK_SIZE],
            );
            embassy_futures::join::join(read, write).await;
        }

        // Handle the final block, which is incomplete.
        if last_block_remainder > 0 {
            let padding_len = C::BLOCK_SIZE - last_block_remainder;
            let temp1 = ctx.cipher.pre_final(T::regs(), ctx.dir, padding_len);

            let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
            let mut padded_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
            padded_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..]);
            let read = Self::read_bytes(self.outdma.as_mut().unwrap(), C::BLOCK_SIZE, &mut intermediate_data);
            let write = Self::write_bytes(self.indma.as_mut().unwrap(), C::BLOCK_SIZE, &padded_block);
            embassy_futures::join::join(read, write).await;

            // Handle the last block depending on mode.
            let output_len = output.len();
            output[output_len - last_block_remainder..output_len]
                .copy_from_slice(&intermediate_data[0..last_block_remainder]);

            let mut mask: [u8; 16] = [0; 16];
            mask[..last_block_remainder].fill(0xFF);
            ctx.cipher
                .post_final(T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask)
                .await;
        }

        ctx.payload_len += input.len() as u64;

        self.store_context(ctx);
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
    /// Called after all data has been encrypted/decrypted by `payload`.
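    ///
    /// # Example
    ///
    /// A sketch retrieving a 16-byte GCM tag; the context is consumed:
    ///
    /// ```ignore
    /// let tag: [u8; 16] = cryp.finish(ctx).await;
    /// ```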
    pub async fn finish<
        'c,
        const TAG_SIZE: usize,
        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
    >(
        &mut self,
        mut ctx: Context<'c, C>,
    ) -> [u8; TAG_SIZE] {
        self.load_context(&mut ctx);

        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
        T::regs().cr().modify(|w| w.set_crypen(true));

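        // For GCM, the final block is the AAD and payload lengths expressed in
        // bits as 64-bit values: len(A) || len(C) per NIST SP 800-38D.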
        let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
        let headerlen2: u32 = (ctx.header_len * 8) as u32;
        let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
        let payloadlen2: u32 = (ctx.payload_len * 8) as u32;

        #[cfg(cryp_v2)]
        let footer: [u32; 4] = [
            headerlen1.swap_bytes(),
            headerlen2.swap_bytes(),
            payloadlen1.swap_bytes(),
            payloadlen2.swap_bytes(),
        ];
        #[cfg(any(cryp_v3, cryp_v4))]
        let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];

        let write = Self::write_words(self.indma.as_mut().unwrap(), C::BLOCK_SIZE, &footer);

        let mut full_tag: [u8; 16] = [0; 16];
        let read = Self::read_bytes(self.outdma.as_mut().unwrap(), C::BLOCK_SIZE, &mut full_tag);

        embassy_futures::join::join(read, write).await;

        let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
        tag.copy_from_slice(&full_tag[0..TAG_SIZE]);

        T::regs().cr().modify(|w| w.set_crypen(false));

        tag
    }

    async fn write_bytes(dma: &mut ChannelAndRequest<'d>, block_size: usize, blocks: &[u8]) {
        if blocks.is_empty() {
            return;
        }
        // Ensure input is a multiple of block size.
        assert_eq!(blocks.len() % block_size, 0);
        // Configure DMA to transfer input to crypto core.
        let dst_ptr: *mut u32 = T::regs().din().as_ptr();
        let options = TransferOptions {
            #[cfg(not(gpdma))]
            priority: crate::dma::Priority::High,
            ..Default::default()
        };
        let dma_transfer = unsafe { dma.write_raw(blocks, dst_ptr, options) };
        T::regs().dmacr().modify(|w| w.set_dien(true));
        // Wait for the transfer to complete.
        dma_transfer.await;
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    async fn write_words(dma: &mut ChannelAndRequest<'d>, block_size: usize, blocks: &[u32]) {
        if blocks.is_empty() {
            return;
        }
        // Ensure input is a multiple of block size.
        assert_eq!((blocks.len() * 4) % block_size, 0);
        // Configure DMA to transfer input to crypto core.
        let dst_ptr: *mut u32 = T::regs().din().as_ptr();
        let options = TransferOptions {
            #[cfg(not(gpdma))]
            priority: crate::dma::Priority::High,
            ..Default::default()
        };
        let dma_transfer = unsafe { dma.write_raw(blocks, dst_ptr, options) };
        T::regs().dmacr().modify(|w| w.set_dien(true));
        // Wait for the transfer to complete.
        dma_transfer.await;
    }

    async fn read_bytes(dma: &mut ChannelAndRequest<'d>, block_size: usize, blocks: &mut [u8]) {
        if blocks.is_empty() {
            return;
        }
        // Ensure the output buffer is a multiple of the block size.
        assert_eq!(blocks.len() % block_size, 0);
        // Configure DMA to get output from crypto core.
        let src_ptr = T::regs().dout().as_ptr();
        let options = TransferOptions {
            #[cfg(not(gpdma))]
            priority: crate::dma::Priority::VeryHigh,
            ..Default::default()
        };
        let dma_transfer = unsafe { dma.read_raw(src_ptr, blocks, options) };
        T::regs().dmacr().modify(|w| w.set_doen(true));
        // Wait for the transfer to complete.
        dma_transfer.await;
    }
}

trait SealedInstance {
    fn regs() -> pac::cryp::Cryp;
}

/// CRYP instance trait.
#[allow(private_bounds)]
pub trait Instance: SealedInstance + PeripheralType + crate::rcc::RccPeripheral + 'static + Send {
    /// Interrupt for this CRYP instance.
    type Interrupt: interrupt::typelevel::Interrupt;
}

foreach_interrupt!(
    ($inst:ident, cryp, CRYP, GLOBAL, $irq:ident) => {
        impl Instance for peripherals::$inst {
            type Interrupt = crate::interrupt::typelevel::$irq;
        }

        impl SealedInstance for peripherals::$inst {
            fn regs() -> crate::pac::cryp::Cryp {
                crate::pac::$inst
            }
        }
    };
);

dma_trait!(DmaIn, Instance);
dma_trait!(DmaOut, Instance);