chksum_hash_sha2_224/lib.rs
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
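//! An implementation of the SHA-2 224 hash function.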
#![forbid(unsafe_code)]

pub mod block;
pub mod digest;
pub mod state;

use chksum_hash_core as core;

use crate::block::Block;
#[doc(inline)]
pub use crate::block::LENGTH_BYTES as BLOCK_LENGTH_BYTES;
#[doc(inline)]
pub use crate::digest::{Digest, LENGTH_BYTES as DIGEST_LENGTH_BYTES};
#[doc(inline)]
pub use crate::state::State;

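/// Creates a new hash instance.
///
/// # Example
///
/// A minimal sketch; it assumes the crate is imported as `chksum_hash_sha2_224`
/// and reuses the expected digest from the `hello_world` test below.
///
/// ```rust
/// // Assumes this crate is available as `chksum_hash_sha2_224`.
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::new().update("Hello World").digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047"
/// );
/// ```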
#[must_use]
pub fn new() -> Update {
    Update::new()
}

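/// Creates a default hash instance.
///
/// # Example
///
/// A minimal sketch under the same import assumption; the expected digest is
/// the empty-input value from the `empty` test below.
///
/// ```rust
/// // Assumes this crate is available as `chksum_hash_sha2_224`.
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::default().digest();
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
/// );
/// ```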
#[must_use]
pub fn default() -> Update {
    core::default()
}

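/// Computes the digest of the given data in one call.
///
/// # Example
///
/// A minimal sketch, assuming the `chksum_hash_sha2_224` import path; the
/// expected digest matches the `hello_world` test below.
///
/// ```rust
/// // Assumes this crate is available as `chksum_hash_sha2_224`.
/// use chksum_hash_sha2_224 as sha2_224;
///
/// let digest = sha2_224::hash("Hello World");
/// assert_eq!(
///     digest.to_hex_lowercase(),
///     "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047"
/// );
/// ```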
pub fn hash(data: impl AsRef<[u8]>) -> Digest {
    core::hash::<Update>(data)
}

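/// An in-progress hash computation.
///
/// It owns the current internal [`State`], a buffer of bytes that do not yet
/// fill a whole block, and a counter of bytes processed so far.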
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "fuzzing", derive(arbitrary::Arbitrary))]
pub struct Update {
    state: State,
    unprocessed: Vec<u8>,
    processed: usize,
}

impl Update {
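    /// Creates a new hash state with the initial SHA-2 224 state and an empty buffer.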
    #[must_use]
    pub fn new() -> Self {
        let state = state::new();
        let unprocessed = Vec::with_capacity(BLOCK_LENGTH_BYTES);
        let processed = 0;
        Self {
            state,
            unprocessed,
            processed,
        }
    }

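    /// Processes incoming data, buffering any trailing bytes that do not fill
    /// a whole block.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `chksum_hash_sha2_224` import path; data
    /// may be fed in arbitrary pieces, as in the `hello_world` test below.
    ///
    /// ```rust
    /// // Assumes this crate is available as `chksum_hash_sha2_224`.
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let digest = sha2_224::new()
    ///     .update("Hello")
    ///     .update(" ")
    ///     .update("World")
    ///     .digest();
    /// assert_eq!(
    ///     digest.to_hex_lowercase(),
    ///     "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047"
    /// );
    /// ```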
    pub fn update<T>(&mut self, data: T) -> &mut Self
    where
        T: AsRef<[u8]>,
    {
        let data = data.as_ref();

        // Process any full blocks already sitting in the internal buffer.
        for _ in 0..(self.unprocessed.len() / BLOCK_LENGTH_BYTES) {
            let block = {
                let chunk = self.unprocessed.drain(..BLOCK_LENGTH_BYTES);
                let chunk = chunk.as_slice();
                Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into()
            };
            self.state = self.state.update(block);
            self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
        }

        if self.unprocessed.is_empty() {
            // The buffer is empty, so incoming data can be processed as is.
            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            if !remainder.is_empty() {
                self.unprocessed.extend(remainder);
            }
        } else if (self.unprocessed.len() + data.len()) < BLOCK_LENGTH_BYTES {
            // Not enough bytes for a full block yet; just buffer the data.
            self.unprocessed.extend(data);
        } else {
            // Fill the buffered bytes up to a full block, process it, then
            // continue with the rest of the incoming data.
            let unprocessed = self.unprocessed.len() % BLOCK_LENGTH_BYTES;
            let missing = BLOCK_LENGTH_BYTES - unprocessed;
            let (fillment, data) = data.split_at(missing);
            let block = {
                let mut block = [0u8; BLOCK_LENGTH_BYTES];
                let (first_part, second_part) = block.split_at_mut(self.unprocessed.len());
                first_part.copy_from_slice(self.unprocessed.drain(..self.unprocessed.len()).as_slice());
                second_part[..missing].copy_from_slice(fillment);
                block
            };
            let mut chunks = block.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            assert!(remainder.is_empty(), "chunks remainder must be empty");

            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            self.unprocessed.extend(remainder);
        }

        self
    }

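    /// Produces the finalized state by applying padding and the message
    /// length, without consuming the updater.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `chksum_hash_sha2_224` import path; the
    /// expected digest matches the `hello_world` test below.
    ///
    /// ```rust
    /// // Assumes this crate is available as `chksum_hash_sha2_224`.
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let digest = sha2_224::new()
    ///     .update("Hello World")
    ///     .finalize()
    ///     .digest();
    /// assert_eq!(
    ///     digest.to_hex_lowercase(),
    ///     "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047"
    /// );
    /// ```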
    #[must_use]
    pub fn finalize(&self) -> Finalize {
        let mut state = self.state;
        let mut processed = self.processed;
        let unprocessed = {
            // Process any full blocks still sitting in the buffer.
            let mut chunks = self.unprocessed.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                state = state.update(block);
                processed = processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            chunks.remainder()
        };

        // The message length in bits, encoded as a big-endian 64-bit integer.
        let length = {
            let length = unprocessed.len().wrapping_add(processed) as u64;
            let length = length.wrapping_mul(8);
            length.to_be_bytes()
        };

        if (unprocessed.len() + 1 + length.len()) <= BLOCK_LENGTH_BYTES {
            // The padding fits into a single block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES];
                padding[..unprocessed.len()].copy_from_slice(&unprocessed[..unprocessed.len()]);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);
        } else {
            // The padding spills over into a second block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES * 2];
                padding[..unprocessed.len()].copy_from_slice(&unprocessed[..unprocessed.len()]);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES * 2 - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..BLOCK_LENGTH_BYTES];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);

            let block = {
                let block = &padding[BLOCK_LENGTH_BYTES..];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);
        }

        Finalize { state }
    }

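    /// Resets the state to its initial value, discarding any buffered data.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `chksum_hash_sha2_224` import path; after
    /// a reset the digest equals the empty-input value from the tests below.
    ///
    /// ```rust
    /// // Assumes this crate is available as `chksum_hash_sha2_224`.
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let digest = sha2_224::new().update("data").reset().digest();
    /// assert_eq!(
    ///     digest.to_hex_lowercase(),
    ///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    /// );
    /// ```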
    pub fn reset(&mut self) -> &mut Self {
        self.state = self.state.reset();
        self.unprocessed.clear();
        self.processed = 0;
        self
    }

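    /// Produces the digest directly; shorthand for `self.finalize().digest()`.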
    #[must_use]
    pub fn digest(&self) -> Digest {
        self.finalize().digest()
    }
}

impl core::Update for Update {
    type Digest = Digest;
    type Finalize = Finalize;

    fn update(&mut self, data: impl AsRef<[u8]>) {
        self.update(data);
    }

    fn finalize(&self) -> Self::Finalize {
        self.finalize()
    }

    fn reset(&mut self) {
        self.reset();
    }
}

impl Default for Update {
    fn default() -> Self {
        Self::new()
    }
}

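/// A finalized hash state from which the [`Digest`] can be extracted.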
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Finalize {
    state: State,
}

impl Finalize {
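    /// Produces the digest from the finalized state.
    ///
    /// SHA-2 224 truncates the internal state to its first seven 32-bit words
    /// (`a` through `g`), dropping `h`.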
    #[must_use]
    #[rustfmt::skip]
    pub fn digest(&self) -> Digest {
        let State { a, b, c, d, e, f, g, .. } = self.state;
        let [a, b, c, d, e, f, g] = [
            a.to_be_bytes(),
            b.to_be_bytes(),
            c.to_be_bytes(),
            d.to_be_bytes(),
            e.to_be_bytes(),
            f.to_be_bytes(),
            g.to_be_bytes(),
        ];
        Digest::new([
            a[0], a[1], a[2], a[3],
            b[0], b[1], b[2], b[3],
            c[0], c[1], c[2], c[3],
            d[0], d[1], d[2], d[3],
            e[0], e[1], e[2], e[3],
            f[0], f[1], f[2], f[3],
            g[0], g[1], g[2], g[3],
        ])
    }

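    /// Resets the computation, returning a fresh [`Update`].
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the `chksum_hash_sha2_224` import path; the
    /// expected digest is the empty-input value from the `reset` test below.
    ///
    /// ```rust
    /// // Assumes this crate is available as `chksum_hash_sha2_224`.
    /// use chksum_hash_sha2_224 as sha2_224;
    ///
    /// let digest = sha2_224::new()
    ///     .update("data")
    ///     .finalize()
    ///     .reset()
    ///     .digest();
    /// assert_eq!(
    ///     digest.to_hex_lowercase(),
    ///     "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    /// );
    /// ```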
    #[must_use]
    pub fn reset(&self) -> Update {
        Update::new()
    }
}

impl core::Finalize for Finalize {
    type Digest = Digest;
    type Update = Update;

    fn digest(&self) -> Self::Digest {
        self.digest()
    }

    fn reset(&self) -> Self::Update {
        self.reset()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let digest = default().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");

        let digest = new().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");
    }

    #[test]
    fn reset() {
        let digest = new().update("data").reset().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");

        let digest = new().update("data").finalize().reset().digest().to_hex_lowercase();
        assert_eq!(digest, "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f");
    }

    #[test]
    fn hello_world() {
        let digest = new().update("Hello World").digest().to_hex_lowercase();
        assert_eq!(digest, "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047");

        let digest = new()
            .update("Hello")
            .update(" ")
            .update("World")
            .digest()
            .to_hex_lowercase();
        assert_eq!(digest, "c4890faffdb0105d991a461e668e276685401b02eab1ef4372795047");
    }

    #[test]
    fn rust_book() {
        let phrase = "Welcome to The Rust Programming Language, an introductory book about Rust. The Rust programming \
                      language helps you write faster, more reliable software. High-level ergonomics and low-level \
                      control are often at odds in programming language design; Rust challenges that conflict. \
                      Through balancing powerful technical capacity and a great developer experience, Rust gives you \
                      the option to control low-level details (such as memory usage) without all the hassle \
                      traditionally associated with such control.";

        let digest = hash(phrase).to_hex_lowercase();
        assert_eq!(digest, "ed123a70f9bf57341c91260608e68ce2b483da4f5000a7db32d4e1cb");
    }

    #[test]
    fn zeroes() {
        let data = vec![0u8; 64];

        let digest = new().update(&data[..60]).digest().to_hex_lowercase();
        assert_eq!(digest, "3fe5b353056d4b16fce534d8de0651b38283d7ffc5b974d8b16346fe");

        let digest = new()
            .update(&data[..60])
            .update(&data[60..])
            .digest()
            .to_hex_lowercase();
        assert_eq!(digest, "750d81a39c18d3ce27ff3e5ece30b0088f12d8fd0450fe435326294b");
    }
}