chksum_hash_sha2_224/lib.rs

//! This crate provides an implementation of the SHA-2 224 hash function based on [FIPS PUB 180-4: Secure Hash Standard](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf).
//!
//! # Setup
//!
//! To use this crate, add the following entry to your `Cargo.toml` file in the `dependencies` section:
//!
//! ```toml
//! [dependencies]
//! chksum-hash-sha2-224 = "0.0.1"
//! ```
//!
//! Alternatively, you can use the [`cargo add`](https://doc.rust-lang.org/cargo/commands/cargo-add.html) subcommand:
//!
//! ```sh
//! cargo add chksum-hash-sha2-224
//! ```
//!
//! # Batch Processing
//!
//! The digest of known-size data can be calculated with the [`hash`] function.
//!
//! ```rust
//! use chksum_hash_sha2_224 as sha2_224;
//!
//! let digest = sha2_224::hash("example data");
//! assert_eq!(
//!     digest.to_hex_lowercase(),
//!     "90382cbfda2656313ad61fd74b32ddfa4bcc118f660bd4fba9228ced"
//! );
//! ```
//!
//! # Stream Processing
//!
//! The digest of data streams can be calculated chunk-by-chunk with a consumer created by calling the [`default`] function.
//!
//! ```rust
//! // Import all necessary items
//! # use std::io;
//! # use std::path::PathBuf;
//! use std::fs::File;
//! use std::io::Read;
//!
//! use chksum_hash_sha2_224 as sha2_224;
//!
//! # fn wrapper(path: PathBuf) -> io::Result<()> {
//! // Create a hash instance
//! let mut hash = sha2_224::default();
//!
//! // Open a file and create a buffer for incoming data
//! let mut file = File::open(path)?;
//! let mut buffer = vec![0; 64];
//!
//! // Iterate chunk by chunk
//! while let Ok(count) = file.read(&mut buffer) {
//!     // EOF reached, exit loop
//!     if count == 0 {
//!         break;
//!     }
//!
//!     // Update the hash with data
//!     hash.update(&buffer[..count]);
//! }
//!
//! // Calculate the digest
//! let digest = hash.digest();
//! // Convert the digest to hex and compare
//! assert_eq!(
//!     digest.to_hex_lowercase(),
//!     "90382cbfda2656313ad61fd74b32ddfa4bcc118f660bd4fba9228ced"
//! );
//! # Ok(())
//! # }
//! ```
//!
//! # Internal Buffering
//!
//! Because incoming data chunks can have arbitrary sizes, an internal buffer is used.
//!
//! The buffer is at least as large as one hash block, since data is processed one block at a time.
//!
//! To avoid buffering overhead, ensure the length of each processed chunk is a multiple of the block size.
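//!
//! The following is a minimal sketch of block-aligned processing; feeding whole blocks keeps the internal buffer empty, and the chunked digest matches the batch digest.
//!
//! ```rust
//! use chksum_hash_sha2_224 as sha2_224;
//!
//! // Two blocks worth of zero bytes, fed one block at a time
//! let data = vec![0u8; 2 * sha2_224::BLOCK_LENGTH_BYTES];
//!
//! let mut hash = sha2_224::default();
//! for chunk in data.chunks(sha2_224::BLOCK_LENGTH_BYTES) {
//!     hash.update(chunk);
//! }
//!
//! // The result is the same as hashing the data in one go
//! assert_eq!(
//!     hash.digest().to_hex_lowercase(),
//!     sha2_224::hash(&data).to_hex_lowercase()
//! );
//! ```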
//!
//! # Input Type
//!
//! Anything that implements `AsRef<[u8]>` can be passed as input.
//!
//! ```rust
//! use chksum_hash_sha2_224 as sha2_224;
//!
//! let digest = sha2_224::default()
//!     .update("str")
//!     .update(b"bytes")
//!     .update([0x75, 0x38])
//!     .digest();
//! assert_eq!(
//!     digest.to_hex_lowercase(),
//!     "af6ee2ebec203dbcc06e946e693bdd154dfde44aaccc978508d3ac50"
//! );
//! ```
//!
//! Since [`Digest`] implements `AsRef<[u8]>`, digests can be chained to calculate the hash of a hash digest.
//!
//! ```rust
//! use chksum_hash_sha2_224 as sha2_224;
//!
//! let digest = sha2_224::hash(b"example data");
//! let digest = sha2_224::hash(digest);
//! assert_eq!(
//!     digest.to_hex_lowercase(),
//!     "46c601153de95e6eff06f5a9da3d81fac2c51d23930f8117ec3e36a2"
//! );
//! ```
//!
//! # License
//!
//! This crate is licensed under the MIT License.

#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![forbid(unsafe_code)]

pub mod block;
pub mod digest;
pub mod state;

use chksum_hash_core as core;

use crate::block::Block;
#[doc(inline)]
pub use crate::block::LENGTH_BYTES as BLOCK_LENGTH_BYTES;
#[doc(inline)]
pub use crate::digest::{Digest, LENGTH_BYTES as DIGEST_LENGTH_BYTES};
#[doc(inline)]
pub use crate::state::State;

/// Creates a new hash.
///
/// # Example
///
/// ```rust
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::new().digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
/// );
///
/// let digest = sha2_224::new().update("data").digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "f4739673acc03c424343b452787ee23dd62999a8a9f14f4250995769"
/// );
/// ```
#[must_use]
pub fn new() -> Update {
    Update::new()
}

/// Creates a default hash.
///
/// # Example
///
/// ```rust
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::default().digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
/// );
///
/// let digest = sha2_224::default().update("data").digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "f4739673acc03c424343b452787ee23dd62999a8a9f14f4250995769"
/// );
/// ```
#[must_use]
pub fn default() -> Update {
    core::default()
}

/// Computes the hash of the given input.
///
/// # Example
///
/// ```rust
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::hash("data");
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "f4739673acc03c424343b452787ee23dd62999a8a9f14f4250995769"
/// );
/// ```
pub fn hash(data: impl AsRef<[u8]>) -> Digest {
    core::hash::<Update>(data)
}

/// A hash state containing an internal buffer that can handle an unknown amount of input data.
///
/// # Example
///
/// ```rust
/// use chksum_hash_sha2_224 as sha2_224;
///
/// // Create a new hash instance
/// let mut hash = sha2_224::Update::new();
///
/// // Fill with data
/// hash.update("data");
///
/// // Finalize and create a digest
/// let digest = hash.finalize().digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "f4739673acc03c424343b452787ee23dd62999a8a9f14f4250995769"
/// );
///
/// // Reset to default values
/// hash.reset();
///
/// // Produce a hash digest using internal finalization
/// let digest = hash.digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
/// );
/// ```
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "fuzzing", derive(arbitrary::Arbitrary))]
pub struct Update {
    state: State,
    unprocessed: Vec<u8>,
    processed: usize,
}

impl Update {
    /// Creates a new hash.
    #[must_use]
    pub fn new() -> Self {
        let state = state::new();
        let unprocessed = Vec::with_capacity(BLOCK_LENGTH_BYTES);
        let processed = 0;
        Self {
            state,
            unprocessed,
            processed,
        }
    }

    /// Updates the internal state with input data.
    ///
    /// # Performance issues
    ///
    /// To achieve maximum performance, the length of incoming data parts should be a multiple of the block length.
    ///
    /// In any other case, an internal buffer is used, which can degrade performance.
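    ///
    /// # Example
    ///
    /// A small sketch: splitting the input into parts yields the same digest as passing it at once (the expected value is the digest of `"data"`).
    ///
    /// ```rust
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let mut hash = sha2_224::Update::new();
    /// hash.update("da").update("ta");
    /// assert_eq!(
    ///     hash.digest().to_hex_lowercase(),
    ///     "f4739673acc03c424343b452787ee23dd62999a8a9f14f4250995769"
    /// );
    /// ```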
    pub fn update<T>(&mut self, data: T) -> &mut Self
    where
        T: AsRef<[u8]>,
    {
        let data = data.as_ref();

        // The `chunks_exact` method doesn't drain the original vector, so buffered blocks are drained manually here
        for _ in 0..(self.unprocessed.len() / BLOCK_LENGTH_BYTES) {
            let block = {
                let chunk = self.unprocessed.drain(..BLOCK_LENGTH_BYTES);
                let chunk = chunk.as_slice();
                Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into()
            };
            self.state = self.state.update(block);
            self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
        }

        if self.unprocessed.is_empty() {
            // The internal buffer is empty, so incoming data can be processed without buffering.
            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            if !remainder.is_empty() {
                self.unprocessed.extend(remainder);
            }
        } else if (self.unprocessed.len() + data.len()) < BLOCK_LENGTH_BYTES {
            // Not enough data even for one block.
            self.unprocessed.extend(data);
        } else {
            // Create the first block from the buffered data, and the second (and every subsequent) block from the incoming data.
            let unprocessed = self.unprocessed.len() % BLOCK_LENGTH_BYTES;
            let missing = BLOCK_LENGTH_BYTES - unprocessed;
            let (fillment, data) = data.split_at(missing);
            let block = {
                let mut block = [0u8; BLOCK_LENGTH_BYTES];
                let (first_part, second_part) = block.split_at_mut(self.unprocessed.len());
                first_part.copy_from_slice(self.unprocessed.drain(..self.unprocessed.len()).as_slice());
                second_part[..missing].copy_from_slice(fillment);
                block
            };
            let mut chunks = block.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            assert!(remainder.is_empty(), "chunks remainder must be empty");

            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            self.unprocessed.extend(remainder);
        }

        self
    }

    /// Applies padding and produces the finalized state.
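    ///
    /// # Example
    ///
    /// A minimal sketch: finalizing an empty hash yields the digest of empty data.
    ///
    /// ```rust
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let hash = sha2_224::Update::new();
    /// let digest = hash.finalize().digest();
    /// assert_eq!(
    ///     digest.to_hex_lowercase(),
    ///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    /// );
    /// ```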
    #[must_use]
    pub fn finalize(&self) -> Finalize {
        let mut state = self.state;
        let mut processed = self.processed;
        let unprocessed = {
            let mut chunks = self.unprocessed.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                state = state.update(block);
                processed = processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            chunks.remainder()
        };

        let length = {
            let length = unprocessed.len().wrapping_add(processed) as u64;
            let length = length.wrapping_mul(8); // convert the length in bytes into the length in bits
            length.to_be_bytes()
        };

        if (unprocessed.len() + 1 + length.len()) <= BLOCK_LENGTH_BYTES {
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES];
                padding[..unprocessed.len()].copy_from_slice(&unprocessed[..unprocessed.len()]);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);
        } else {
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES * 2];
                padding[..unprocessed.len()].copy_from_slice(&unprocessed[..unprocessed.len()]);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES * 2 - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..BLOCK_LENGTH_BYTES];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);

            let block = {
                let block = &padding[BLOCK_LENGTH_BYTES..];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);
        }

        Finalize { state }
    }

    /// Resets the internal state to default values.
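    ///
    /// # Example
    ///
    /// A short sketch: after a reset, the hash produces the digest of empty data again.
    ///
    /// ```rust
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let mut hash = sha2_224::Update::new();
    /// hash.update("data");
    /// hash.reset();
    /// assert_eq!(
    ///     hash.digest().to_hex_lowercase(),
    ///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    /// );
    /// ```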
    pub fn reset(&mut self) -> &mut Self {
        self.state = self.state.reset();
        self.unprocessed.clear();
        self.processed = 0;
        self
    }

    /// Produces the hash digest using internal finalization.
    #[must_use]
    pub fn digest(&self) -> Digest {
        self.finalize().digest()
    }
}

impl core::Update for Update {
    type Digest = Digest;
    type Finalize = Finalize;

    fn update(&mut self, data: impl AsRef<[u8]>) {
        self.update(data);
    }

    fn finalize(&self) -> Self::Finalize {
        self.finalize()
    }

    fn reset(&mut self) {
        self.reset();
    }
}

impl Default for Update {
    fn default() -> Self {
        Self::new()
    }
}

/// A finalized hash state.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Finalize {
    state: State,
}

impl Finalize {
    /// Creates and returns the hash digest.
    #[must_use]
    #[rustfmt::skip]
    pub fn digest(&self) -> Digest {
        // SHA-2 224 truncates the output: only the first seven 32-bit state words are used, the final word is dropped
        let State { a, b, c, d, e, f, g, .. } = self.state;
        let [a, b, c, d, e, f, g] = [
            a.to_be_bytes(),
            b.to_be_bytes(),
            c.to_be_bytes(),
            d.to_be_bytes(),
            e.to_be_bytes(),
            f.to_be_bytes(),
            g.to_be_bytes(),
        ];
        Digest::new([
            a[0], a[1], a[2], a[3],
            b[0], b[1], b[2], b[3],
            c[0], c[1], c[2], c[3],
            d[0], d[1], d[2], d[3],
            e[0], e[1], e[2], e[3],
            f[0], f[1], f[2], f[3],
            g[0], g[1], g[2], g[3],
        ])
    }

    /// Resets the finalized hash back to an in-progress state with default values.
    #[must_use]
    pub fn reset(&self) -> Update {
        Update::new()
    }
}

impl core::Finalize for Finalize {
    type Digest = Digest;
    type Update = Update;

    fn digest(&self) -> Self::Digest {
        self.digest()
    }

    fn reset(&self) -> Self::Update {
        self.reset()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let digest = default().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");

        let digest = new().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");
    }

    #[test]
    fn reset() {
        let digest = new().update("data").reset().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");

        let digest = new().update("data").finalize().reset().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");
    }

    #[test]
    fn hello_world() {
        let digest = new().update("Hello World").digest().to_hex_lowercase();
        assert_eq!(digest, "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047");

        let digest = new()
            .update("Hello")
            .update(" ")
            .update("World")
            .digest()
            .to_hex_lowercase();
        assert_eq!(digest, "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047");
    }

    #[test]
    fn rust_book() {
        let phrase = "Welcome to The Rust Programming Language, an introductory book about Rust. The Rust programming \
                      language helps you write faster, more reliable software. High-level ergonomics and low-level \
                      control are often at odds in programming language design; Rust challenges that conflict. \
                      Through balancing powerful technical capacity and a great developer experience, Rust gives you \
                      the option to control low-level details (such as memory usage) without all the hassle \
                      traditionally associated with such control.";

        let digest = hash(phrase).to_hex_lowercase();
        assert_eq!(digest, "ed123a70f9bf57341c91260608e68ce2b483da4f5000a7db32d4e1cb");
    }

    #[test]
    fn zeroes() {
        let data = vec![0u8; 64];

        let digest = new().update(&data[..60]).digest().to_hex_lowercase();
        assert_eq!(digest, "3fe5b353056d4b16fce534d8de0651b38283d7ffc5b974d8b16346fe");

        let digest = new()
            .update(&data[..60])
            .update(&data[60..])
            .digest()
            .to_hex_lowercase();
        assert_eq!(digest, "750d81a39c18d3ce27ff3e5ece30b0088f12d8fd0450fe435326294b");
    }
}