embassy_stm32/hash/mod.rs

//! Hash generator (HASH)
use core::cmp::min;
#[cfg(hash_v2)]
use core::future::poll_fn;
use core::marker::PhantomData;
#[cfg(hash_v2)]
use core::ptr;
#[cfg(hash_v2)]
use core::task::Poll;

use embassy_hal_internal::PeripheralType;
use embassy_sync::waitqueue::AtomicWaker;
use stm32_metapac::hash::regs::*;

#[cfg(hash_v2)]
use crate::dma::ChannelAndRequest;
use crate::interrupt::typelevel::Interrupt;
#[cfg(hash_v2)]
use crate::mode::Async;
use crate::mode::{Blocking, Mode};
use crate::peripherals::HASH;
use crate::{interrupt, pac, peripherals, rcc, Peri};

#[cfg(hash_v1)]
const NUM_CONTEXT_REGS: usize = 51;
#[cfg(hash_v3)]
const NUM_CONTEXT_REGS: usize = 103;
#[cfg(any(hash_v2, hash_v4))]
const NUM_CONTEXT_REGS: usize = 54;

const HASH_BUFFER_LEN: usize = 132;
const DIGEST_BLOCK_SIZE: usize = 128;

static HASH_WAKER: AtomicWaker = AtomicWaker::new();

/// HASH interrupt handler.
pub struct InterruptHandler<T: Instance> {
    _phantom: PhantomData<T>,
}

impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> {
    unsafe fn on_interrupt() {
        let bits = T::regs().sr().read();
        if bits.dinis() {
            T::regs().imr().modify(|reg| reg.set_dinie(false));
            HASH_WAKER.wake();
        }
        if bits.dcis() {
            T::regs().imr().modify(|reg| reg.set_dcie(false));
            HASH_WAKER.wake();
        }
    }
}

/// Hash algorithm selection
#[derive(Clone, Copy, PartialEq)]
pub enum Algorithm {
    /// SHA-1 Algorithm
    SHA1 = 0,

    #[cfg(any(hash_v1, hash_v2, hash_v4))]
    /// MD5 Algorithm
    MD5 = 1,

    /// SHA-224 Algorithm
    SHA224 = 2,

    /// SHA-256 Algorithm
    SHA256 = 3,

    #[cfg(hash_v3)]
    /// SHA-384 Algorithm
    SHA384 = 12,

    #[cfg(hash_v3)]
    /// SHA-512/224 Algorithm
    SHA512_224 = 13,

    #[cfg(hash_v3)]
    /// SHA-512/256 Algorithm
    SHA512_256 = 14,

    #[cfg(hash_v3)]
    /// SHA-512 Algorithm
    SHA512 = 15,
}

/// Input data width selection
#[repr(u8)]
#[derive(Clone, Copy)]
pub enum DataType {
    /// 32-bit data, no data is swapped.
    Width32 = 0,
    /// 16-bit data, each half-word is swapped.
    Width16 = 1,
    /// 8-bit data, all bytes are swapped.
    Width8 = 2,
    /// 1-bit data, all bits are swapped.
    Width1 = 3,
}

/// Stores the state of the HASH peripheral for suspending/resuming
/// digest calculation.
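///
/// # Example
///
/// A sketch of how contexts could be interleaved to compute two digests with
/// one peripheral (`hasher` is assumed to be an already-constructed [`Hash`]
/// driver; the digest lengths shown are for SHA-256 and SHA-1):
///
/// ```rust,ignore
/// let mut ctx_a = hasher.start(Algorithm::SHA256, DataType::Width8, None);
/// let mut ctx_b = hasher.start(Algorithm::SHA1, DataType::Width8, None);
/// // Each update restores the saved state first, so the two digests can be
/// // advanced in any order.
/// hasher.update_blocking(&mut ctx_a, b"first message");
/// hasher.update_blocking(&mut ctx_b, b"second message");
/// let mut digest_a = [0u8; 32];
/// let mut digest_b = [0u8; 20];
/// hasher.finish_blocking(ctx_a, &mut digest_a);
/// hasher.finish_blocking(ctx_b, &mut digest_b);
/// ```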
#[derive(Clone)]
pub struct Context<'c> {
    first_word_sent: bool,
    key_sent: bool,
    buffer: [u8; HASH_BUFFER_LEN],
    buflen: usize,
    algo: Algorithm,
    format: DataType,
    imr: u32,
    str: u32,
    cr: u32,
    csr: [u32; NUM_CONTEXT_REGS],
    key: HmacKey<'c>,
}

type HmacKey<'k> = Option<&'k [u8]>;

/// HASH driver.
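///
/// # Example
///
/// A minimal blocking sketch. The interrupt name (`HASH_RNG` here) and the
/// digest length depend on the target chip, so treat the names below as
/// placeholders:
///
/// ```rust,ignore
/// use embassy_stm32::hash::{Algorithm, DataType, Hash, InterruptHandler};
/// use embassy_stm32::{bind_interrupts, peripherals};
///
/// bind_interrupts!(struct Irqs {
///     HASH_RNG => InterruptHandler<peripherals::HASH>;
/// });
///
/// let p = embassy_stm32::init(Default::default());
/// let mut hasher = Hash::new_blocking(p.HASH, Irqs);
/// let mut ctx = hasher.start(Algorithm::SHA256, DataType::Width8, None);
/// hasher.update_blocking(&mut ctx, b"hello world");
/// let mut digest = [0u8; 32];
/// hasher.finish_blocking(ctx, &mut digest);
/// ```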
pub struct Hash<'d, T: Instance, M: Mode> {
    _peripheral: Peri<'d, T>,
    _phantom: PhantomData<M>,
    #[cfg(hash_v2)]
    dma: Option<ChannelAndRequest<'d>>,
}

impl<'d, T: Instance> Hash<'d, T, Blocking> {
    /// Instantiates, resets, and enables the HASH peripheral.
    pub fn new_blocking(
        peripheral: Peri<'d, T>,
        _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
    ) -> Self {
        rcc::enable_and_reset::<HASH>();
        let instance = Self {
            _peripheral: peripheral,
            _phantom: PhantomData,
            #[cfg(hash_v2)]
            dma: None,
        };

        T::Interrupt::unpend();
        unsafe { T::Interrupt::enable() };

        instance
    }
}

impl<'d, T: Instance, M: Mode> Hash<'d, T, M> {
    /// Starts computation of a new hash and returns the saved peripheral state.
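    ///
    /// Passing `Some(key)` selects HMAC mode. A short sketch, assuming an
    /// already-constructed driver named `hasher`:
    ///
    /// ```rust,ignore
    /// let key: &[u8] = b"my secret key";
    /// let mut ctx = hasher.start(Algorithm::SHA256, DataType::Width8, Some(key));
    /// ```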
    pub fn start<'c>(&mut self, algorithm: Algorithm, format: DataType, key: HmacKey<'c>) -> Context<'c> {
        // Define a context for this new computation.
        let mut ctx = Context {
            first_word_sent: false,
            key_sent: false,
            buffer: [0; HASH_BUFFER_LEN],
            buflen: 0,
            algo: algorithm,
            format,
            imr: 0,
            str: 0,
            cr: 0,
            csr: [0; NUM_CONTEXT_REGS],
            key,
        };

        // Set the data type in the peripheral.
        T::regs().cr().modify(|w| w.set_datatype(ctx.format as u8));

        // Select the algorithm.
        #[cfg(hash_v1)]
        if ctx.algo == Algorithm::MD5 {
            T::regs().cr().modify(|w| w.set_algo(true));
        }

        #[cfg(hash_v2)]
        {
            // Select the algorithm.
            let mut algo0 = false;
            let mut algo1 = false;
            if ctx.algo == Algorithm::MD5 || ctx.algo == Algorithm::SHA256 {
                algo0 = true;
            }
            if ctx.algo == Algorithm::SHA224 || ctx.algo == Algorithm::SHA256 {
                algo1 = true;
            }
            T::regs().cr().modify(|w| w.set_algo0(algo0));
            T::regs().cr().modify(|w| w.set_algo1(algo1));
        }

        #[cfg(any(hash_v3, hash_v4))]
        T::regs().cr().modify(|w| w.set_algo(ctx.algo as u8));

        // Configure HMAC mode if a key is provided.
        if let Some(key) = ctx.key {
            T::regs().cr().modify(|w| w.set_mode(true));
            if key.len() > 64 {
                T::regs().cr().modify(|w| w.set_lkey(true));
            }
        } else {
            T::regs().cr().modify(|w| w.set_mode(false));
        }

        T::regs().cr().modify(|w| w.set_init(true));

        // Store and return the state of the peripheral.
        self.store_context(&mut ctx);
        ctx
    }

    /// Restores the peripheral state using the given context,
    /// then updates the state with the provided data.
    /// Peripheral state is saved upon return.
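    ///
    /// Splitting the input across calls yields the same digest as a single
    /// update. A sketch, assuming an already-constructed driver named `hasher`:
    ///
    /// ```rust,ignore
    /// let mut ctx = hasher.start(Algorithm::SHA256, DataType::Width8, None);
    /// hasher.update_blocking(&mut ctx, b"hello ");
    /// hasher.update_blocking(&mut ctx, b"world");
    /// let mut digest = [0u8; 32];
    /// hasher.finish_blocking(ctx, &mut digest);
    /// ```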
    pub fn update_blocking<'c>(&mut self, ctx: &mut Context<'c>, input: &[u8]) {
        // Restore the peripheral state.
        self.load_context(ctx);

        // Load the HMAC key if provided.
        if !ctx.key_sent {
            if let Some(key) = ctx.key {
                self.accumulate_blocking(key);
                T::regs().str().write(|w| w.set_dcal(true));
                // Block waiting for digest.
                while !T::regs().sr().read().dinis() {}
            }
            ctx.key_sent = true;
        }

        let mut data_waiting = input.len() + ctx.buflen;
        if data_waiting < DIGEST_BLOCK_SIZE || (data_waiting < ctx.buffer.len() && !ctx.first_word_sent) {
            // There isn't enough data to digest a block, so append it to the buffer.
            ctx.buffer[ctx.buflen..ctx.buflen + input.len()].copy_from_slice(input);
            ctx.buflen += input.len();
            self.store_context(ctx);
            return;
        }

        let mut ilen_remaining = input.len();
        let mut input_start = 0;

        // Handle first block.
        if !ctx.first_word_sent {
            let empty_len = ctx.buffer.len() - ctx.buflen;
            let copy_len = min(empty_len, ilen_remaining);
            // Fill the buffer.
            if copy_len > 0 {
                ctx.buffer[ctx.buflen..ctx.buflen + copy_len].copy_from_slice(&input[0..copy_len]);
                ctx.buflen += copy_len;
                ilen_remaining -= copy_len;
                input_start += copy_len;
            }
            self.accumulate_blocking(ctx.buffer.as_slice());
            data_waiting -= ctx.buflen;
            ctx.buflen = 0;
            ctx.first_word_sent = true;
        }

        if data_waiting < DIGEST_BLOCK_SIZE {
            // There isn't enough data remaining to process another block, so store it.
            ctx.buffer[0..ilen_remaining].copy_from_slice(&input[input_start..input_start + ilen_remaining]);
            ctx.buflen += ilen_remaining;
        } else {
            // First ingest the data in the buffer.
            let empty_len = DIGEST_BLOCK_SIZE - ctx.buflen;
            if empty_len > 0 {
                let copy_len = min(empty_len, ilen_remaining);
                ctx.buffer[ctx.buflen..ctx.buflen + copy_len]
                    .copy_from_slice(&input[input_start..input_start + copy_len]);
                ctx.buflen += copy_len;
                ilen_remaining -= copy_len;
                input_start += copy_len;
            }
            self.accumulate_blocking(&ctx.buffer[0..DIGEST_BLOCK_SIZE]);
            ctx.buflen = 0;

            // Move any extra data to the now-empty buffer.
            let leftovers = ilen_remaining % 64;
            if leftovers > 0 {
                ctx.buffer[0..leftovers].copy_from_slice(&input[input.len() - leftovers..input.len()]);
                ctx.buflen += leftovers;
                ilen_remaining -= leftovers;
            }

            // Hash the remaining data.
            self.accumulate_blocking(&input[input_start..input_start + ilen_remaining]);
        }

        // Save the peripheral context.
        self.store_context(ctx);
    }

    /// Computes a digest for the given context.
    /// The digest buffer must be large enough to accommodate a digest for the selected algorithm.
    /// The largest returned digest is 64 bytes, for SHA-512.
    /// Panics if the supplied digest buffer is too short.
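    ///
    /// For example, SHA-256 produces a 32-byte digest (a sketch, assuming a
    /// driver `hasher` and a context `ctx` started with `Algorithm::SHA256`):
    ///
    /// ```rust,ignore
    /// let mut digest = [0u8; 32];
    /// let len = hasher.finish_blocking(ctx, &mut digest);
    /// assert_eq!(len, 32);
    /// ```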
    pub fn finish_blocking<'c>(&mut self, mut ctx: Context<'c>, digest: &mut [u8]) -> usize {
        // Restore the peripheral state.
        self.load_context(&ctx);

        // Hash the leftover bytes, if any.
        self.accumulate_blocking(&ctx.buffer[0..ctx.buflen]);
        ctx.buflen = 0;

        // Start the digest calculation.
        T::regs().str().write(|w| w.set_dcal(true));

        // Load the HMAC key if provided.
        if let Some(key) = ctx.key {
            while !T::regs().sr().read().dinis() {}
            self.accumulate_blocking(key);
            T::regs().str().write(|w| w.set_dcal(true));
        }

        // Block until digest computation is complete.
        while !T::regs().sr().read().dcis() {}

        // Return the digest.
        let digest_words = match ctx.algo {
            Algorithm::SHA1 => 5,
            #[cfg(any(hash_v1, hash_v2, hash_v4))]
            Algorithm::MD5 => 4,
            Algorithm::SHA224 => 7,
            Algorithm::SHA256 => 8,
            #[cfg(hash_v3)]
            Algorithm::SHA384 => 12,
            #[cfg(hash_v3)]
            Algorithm::SHA512_224 => 7,
            #[cfg(hash_v3)]
            Algorithm::SHA512_256 => 8,
            #[cfg(hash_v3)]
            Algorithm::SHA512 => 16,
        };

        let digest_len_bytes = digest_words * 4;
        // Panics if the supplied digest buffer is too short.
        if digest.len() < digest_len_bytes {
            panic!("Digest buffer must be at least {} bytes long.", digest_len_bytes);
        }

        let mut i = 0;
        while i < digest_words {
            let word = T::regs().hr(i).read();
            digest[(i * 4)..((i * 4) + 4)].copy_from_slice(word.to_be_bytes().as_slice());
            i += 1;
        }
        digest_len_bytes
    }

    /// Push data into the hash core.
    fn accumulate_blocking(&mut self, input: &[u8]) {
        // Set the number of valid bits.
        let num_valid_bits: u8 = (8 * (input.len() % 4)) as u8;
        T::regs().str().modify(|w| w.set_nblw(num_valid_bits));

        let mut chunks = input.chunks_exact(4);
        for chunk in &mut chunks {
            T::regs()
                .din()
                .write_value(u32::from_ne_bytes(chunk.try_into().unwrap()));
        }
        let rem = chunks.remainder();
        if !rem.is_empty() {
            let mut word: [u8; 4] = [0; 4];
            word[0..rem.len()].copy_from_slice(rem);
            T::regs().din().write_value(u32::from_ne_bytes(word));
        }
    }

    /// Save the peripheral state to a context.
    fn store_context<'c>(&mut self, ctx: &mut Context<'c>) {
        // Block until the data input register is ready (DINIS set).
        while !T::regs().sr().read().dinis() {}

        // Store peripheral context.
        ctx.imr = T::regs().imr().read().0;
        ctx.str = T::regs().str().read().0;
        ctx.cr = T::regs().cr().read().0;
        let mut i = 0;
        while i < NUM_CONTEXT_REGS {
            ctx.csr[i] = T::regs().csr(i).read();
            i += 1;
        }
    }

    /// Restore the peripheral state from a context.
    fn load_context(&mut self, ctx: &Context) {
        // Restore the peripheral state from the context.
        T::regs().imr().write_value(Imr { 0: ctx.imr });
        T::regs().str().write_value(Str { 0: ctx.str });
        T::regs().cr().write_value(Cr { 0: ctx.cr });
        T::regs().cr().modify(|w| w.set_init(true));
        let mut i = 0;
        while i < NUM_CONTEXT_REGS {
            T::regs().csr(i).write_value(ctx.csr[i]);
            i += 1;
        }
    }
}

#[cfg(hash_v2)]
impl<'d, T: Instance> Hash<'d, T, Async> {
    /// Instantiates, resets, and enables the HASH peripheral.
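    ///
    /// # Example
    ///
    /// A sketch of DMA-driven async usage. The interrupt and DMA channel names
    /// (`HASH_RNG` and `DMA2_CH7` here) are placeholders that depend on the
    /// target chip:
    ///
    /// ```rust,ignore
    /// use embassy_stm32::hash::{self, Algorithm, DataType, Hash};
    /// use embassy_stm32::{bind_interrupts, peripherals};
    ///
    /// bind_interrupts!(struct Irqs {
    ///     HASH_RNG => hash::InterruptHandler<peripherals::HASH>;
    /// });
    ///
    /// let p = embassy_stm32::init(Default::default());
    /// let mut hasher = Hash::new(p.HASH, p.DMA2_CH7, Irqs);
    /// let mut ctx = hasher.start(Algorithm::SHA256, DataType::Width8, None);
    /// hasher.update(&mut ctx, b"hello world").await;
    /// let mut digest = [0u8; 32];
    /// hasher.finish(ctx, &mut digest).await;
    /// ```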
    pub fn new(
        peripheral: Peri<'d, T>,
        dma: Peri<'d, impl Dma<T>>,
        _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
    ) -> Self {
        rcc::enable_and_reset::<HASH>();
        let instance = Self {
            _peripheral: peripheral,
            _phantom: PhantomData,
            dma: new_dma!(dma),
        };

        T::Interrupt::unpend();
        unsafe { T::Interrupt::enable() };

        instance
    }

    /// Restores the peripheral state using the given context,
    /// then updates the state with the provided data.
    /// Peripheral state is saved upon return.
    pub async fn update(&mut self, ctx: &mut Context<'_>, input: &[u8]) {
        // Restore the peripheral state.
        self.load_context(ctx);

        // Load the HMAC key if provided.
        if !ctx.key_sent {
            if let Some(key) = ctx.key {
                self.accumulate(key).await;
            }
            ctx.key_sent = true;
        }

        let data_waiting = input.len() + ctx.buflen;
        if data_waiting < DIGEST_BLOCK_SIZE {
            // There isn't enough data to digest a block, so append it to the buffer.
            ctx.buffer[ctx.buflen..ctx.buflen + input.len()].copy_from_slice(input);
            ctx.buflen += input.len();
            self.store_context(ctx);
            return;
        }

        // Enable multiple DMA transfers.
        T::regs().cr().modify(|w| w.set_mdmat(true));

        let mut ilen_remaining = input.len();
        let mut input_start = 0;

        // First ingest the data in the buffer.
        let empty_len = DIGEST_BLOCK_SIZE - ctx.buflen;
        if empty_len > 0 {
            let copy_len = min(empty_len, ilen_remaining);
            ctx.buffer[ctx.buflen..ctx.buflen + copy_len].copy_from_slice(&input[input_start..input_start + copy_len]);
            ctx.buflen += copy_len;
            ilen_remaining -= copy_len;
            input_start += copy_len;
        }
        self.accumulate(&ctx.buffer[..DIGEST_BLOCK_SIZE]).await;
        ctx.buflen = 0;

        // Move any extra data to the now-empty buffer.
        let leftovers = ilen_remaining % DIGEST_BLOCK_SIZE;
        if leftovers > 0 {
            assert!(ilen_remaining >= leftovers);
            ctx.buffer[0..leftovers].copy_from_slice(&input[input.len() - leftovers..input.len()]);
            ctx.buflen += leftovers;
            ilen_remaining -= leftovers;
        } else {
            ctx.buffer[0..DIGEST_BLOCK_SIZE]
                .copy_from_slice(&input[input.len() - DIGEST_BLOCK_SIZE..input.len()]);
            ctx.buflen += DIGEST_BLOCK_SIZE;
            ilen_remaining -= DIGEST_BLOCK_SIZE;
        }

        // Hash the remaining data.
        self.accumulate(&input[input_start..input_start + ilen_remaining]).await;

        // Save the peripheral context.
        self.store_context(ctx);
    }

    /// Computes a digest for the given context.
    /// The digest buffer must be large enough to accommodate a digest for the selected algorithm.
    /// The largest returned digest is 64 bytes, for SHA-512.
    /// Panics if the supplied digest buffer is too short.
    pub async fn finish<'c>(&mut self, mut ctx: Context<'c>, digest: &mut [u8]) -> usize {
        // Restore the peripheral state.
        self.load_context(&ctx);

        // Must be cleared prior to the last DMA transfer.
        T::regs().cr().modify(|w| w.set_mdmat(false));

        // Hash the leftover bytes, if any.
        self.accumulate(&ctx.buffer[0..ctx.buflen]).await;
        ctx.buflen = 0;

        // Load the HMAC key if provided.
        if let Some(key) = ctx.key {
            self.accumulate(key).await;
        }

        // Wait for completion.
        poll_fn(|cx| {
            // Check if already done.
            let bits = T::regs().sr().read();
            if bits.dcis() {
                return Poll::Ready(());
            }
            // Register waker, then enable interrupts.
            HASH_WAKER.register(cx.waker());
            T::regs().imr().modify(|reg| reg.set_dcie(true));
            // Check for completion again to avoid a missed wakeup.
            let bits = T::regs().sr().read();
            if bits.dcis() {
                Poll::Ready(())
            } else {
                Poll::Pending
            }
        })
        .await;

        // Return the digest.
        let digest_words = match ctx.algo {
            Algorithm::SHA1 => 5,
            #[cfg(any(hash_v1, hash_v2, hash_v4))]
            Algorithm::MD5 => 4,
            Algorithm::SHA224 => 7,
            Algorithm::SHA256 => 8,
            #[cfg(hash_v3)]
            Algorithm::SHA384 => 12,
            #[cfg(hash_v3)]
            Algorithm::SHA512_224 => 7,
            #[cfg(hash_v3)]
            Algorithm::SHA512_256 => 8,
            #[cfg(hash_v3)]
            Algorithm::SHA512 => 16,
        };

        let digest_len_bytes = digest_words * 4;
        // Panics if the supplied digest buffer is too short.
        if digest.len() < digest_len_bytes {
            panic!("Digest buffer must be at least {} bytes long.", digest_len_bytes);
        }

        let mut i = 0;
        while i < digest_words {
            let word = T::regs().hr(i).read();
            digest[(i * 4)..((i * 4) + 4)].copy_from_slice(word.to_be_bytes().as_slice());
            i += 1;
        }
        digest_len_bytes
    }

    /// Push data into the hash core.
    async fn accumulate(&mut self, input: &[u8]) {
        // Ignore an input length of 0.
        if input.is_empty() {
            return;
        }

        // Set the number of valid bits.
        let num_valid_bits: u8 = (8 * (input.len() % 4)) as u8;
        T::regs().str().modify(|w| w.set_nblw(num_valid_bits));

        // Configure DMA to transfer input to hash core.
        let dst_ptr: *mut u32 = T::regs().din().as_ptr();
        let mut num_words = input.len() / 4;
        if input.len() % 4 > 0 {
            num_words += 1;
        }
        let src_ptr: *const [u8] = ptr::slice_from_raw_parts(input.as_ptr().cast(), num_words);

        let dma = self.dma.as_mut().unwrap();
        let dma_transfer = unsafe { dma.write_raw(src_ptr, dst_ptr as *mut u32, Default::default()) };
        T::regs().cr().modify(|w| w.set_dmae(true));

        // Wait for the transfer to complete.
        dma_transfer.await;
    }
}

trait SealedInstance {
    fn regs() -> pac::hash::Hash;
}

/// HASH instance trait.
#[allow(private_bounds)]
pub trait Instance: SealedInstance + PeripheralType + crate::rcc::RccPeripheral + 'static + Send {
    /// Interrupt for this HASH instance.
    type Interrupt: interrupt::typelevel::Interrupt;
}

foreach_interrupt!(
    ($inst:ident, hash, HASH, GLOBAL, $irq:ident) => {
        impl Instance for peripherals::$inst {
            type Interrupt = crate::interrupt::typelevel::$irq;
        }

        impl SealedInstance for peripherals::$inst {
            fn regs() -> crate::pac::hash::Hash {
                crate::pac::$inst
            }
        }
    };
);

dma_trait!(Dma, Instance);