#![cfg_attr(docsrs, feature(doc_auto_cfg))]
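//! Core implementation of the SHA-2 384 hash function, exposing a streaming
//! [`Update`] state, a [`Finalize`] step, and the convenience functions
//! [`new`], [`default`], and [`hash`].
//!
//! # Example
//!
//! A minimal usage sketch; the crate path `chksum_hash_sha2_384` is an
//! assumption and may differ from the name this crate is published under,
//! so the snippet is not compiled as a doctest. The expected digest is taken
//! from the unit tests below.
//!
//! ```rust,ignore
//! use chksum_hash_sha2_384 as sha2_384;
//!
//! let digest = sha2_384::new()
//!     .update("Hello World")
//!     .digest()
//!     .to_hex_lowercase();
//! assert_eq!(
//!     digest,
//!     "99514329186b2f6ae4a1329e7ee6c610a729636335174ac6b740f9028396fcc803d0e93863a7c3d90f86beee782f4f3f"
//! );
//! ```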
#![forbid(unsafe_code)]

pub mod block;
pub mod digest;
pub mod state;

use chksum_hash_core as core;

use crate::block::Block;
#[doc(inline)]
pub use crate::block::LENGTH_BYTES as BLOCK_LENGTH_BYTES;
#[doc(inline)]
pub use crate::digest::{Digest, LENGTH_BYTES as DIGEST_LENGTH_BYTES};
#[doc(inline)]
pub use crate::state::State;

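/// Creates a new hash instance.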
#[must_use]
pub fn new() -> Update {
    Update::new()
}

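/// Creates a default hash instance.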
#[must_use]
pub fn default() -> Update {
    core::default()
}

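/// Computes the hash digest of the given data in a single call.
///
/// # Example
///
/// A sketch of the call shape; as in the crate-level example, the crate path
/// `chksum_hash_sha2_384` is assumed, so the snippet is not compiled as a
/// doctest. The expected digest is taken from the unit tests below.
///
/// ```rust,ignore
/// use chksum_hash_sha2_384 as sha2_384;
///
/// let digest = sha2_384::hash("Hello World").to_hex_lowercase();
/// assert_eq!(
///     digest,
///     "99514329186b2f6ae4a1329e7ee6c610a729636335174ac6b740f9028396fcc803d0e93863a7c3d90f86beee782f4f3f"
/// );
/// ```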
pub fn hash(data: impl AsRef<[u8]>) -> Digest {
    core::hash::<Update>(data)
}

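/// An in-progress hash computation.
///
/// Incoming data is buffered in `unprocessed` until a full block of
/// [`BLOCK_LENGTH_BYTES`] bytes is available, then compressed into `state`;
/// `processed` tracks how many bytes have been compressed so far.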
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "fuzzing", derive(arbitrary::Arbitrary))]
pub struct Update {
    state: State,
    unprocessed: Vec<u8>,
    processed: usize,
}

impl Update {
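    /// Creates a new hash state with an empty block buffer.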
    #[must_use]
    pub fn new() -> Self {
        let state = state::new();
        let unprocessed = Vec::with_capacity(BLOCK_LENGTH_BYTES);
        let processed = 0;
        Self {
            state,
            unprocessed,
            processed,
        }
    }

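    /// Processes incoming data, compressing every complete block and
    /// buffering any remainder until more data arrives.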
    pub fn update(&mut self, data: impl AsRef<[u8]>) -> &mut Self {
        let data = data.as_ref();

        // Compress any full blocks already sitting in the internal buffer.
        for _ in 0..(self.unprocessed.len() / BLOCK_LENGTH_BYTES) {
            let block = {
                let chunk = self.unprocessed.drain(..BLOCK_LENGTH_BYTES);
                let chunk = chunk.as_slice();
                Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into()
            };
            self.state = self.state.update(block);
            self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
        }

        if self.unprocessed.is_empty() {
            // Nothing is buffered: compress the incoming data block by block
            // and buffer the remainder.
            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            if !remainder.is_empty() {
                self.unprocessed.extend(remainder);
            }
        } else if (self.unprocessed.len() + data.len()) < BLOCK_LENGTH_BYTES {
            // Still not enough data for a full block: just buffer it.
            self.unprocessed.extend(data);
        } else {
            // Top up the buffered bytes to a full block, compress it, then
            // continue with the rest of the incoming data.
            let unprocessed = self.unprocessed.len() % BLOCK_LENGTH_BYTES;
            let missing = BLOCK_LENGTH_BYTES - unprocessed;
            let (fillment, data) = data.split_at(missing);
            let block = {
                let mut block = [0u8; BLOCK_LENGTH_BYTES];
                let (first_part, second_part) = block.split_at_mut(self.unprocessed.len());
                first_part.copy_from_slice(self.unprocessed.drain(..self.unprocessed.len()).as_slice());
                second_part[..missing].copy_from_slice(fillment);
                block
            };
            let mut chunks = block.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            assert!(remainder.is_empty(), "chunks remainder must be empty");

            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            self.unprocessed.extend(remainder);
        }

        self
    }

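    /// Applies SHA-2 padding (a `0x80` byte, zero fill, and the 128-bit
    /// big-endian message length in bits) to the buffered data and returns
    /// the finalized state.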
    #[must_use]
    pub fn finalize(&self) -> Finalize {
        let mut state = self.state;
        let mut processed = self.processed;
        let unprocessed = {
            // Compress any full blocks left in the buffer before padding.
            let mut chunks = self.unprocessed.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be the exact size of a block")
                    .into();
                state = state.update(block);
                processed = processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            chunks.remainder()
        };

        // Total message length in bits, encoded as a 128-bit big-endian value.
        let length = {
            let length = unprocessed.len().wrapping_add(processed) as u128;
            let length = length.wrapping_mul(8);
            length.to_be_bytes()
        };

        if (unprocessed.len() + 1 + length.len()) <= BLOCK_LENGTH_BYTES {
            // The remaining data, the 0x80 marker, and the length all fit
            // into a single padding block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES];
                padding[..unprocessed.len()].copy_from_slice(unprocessed);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);
        } else {
            // Otherwise the padding spills over into a second block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES * 2];
                padding[..unprocessed.len()].copy_from_slice(unprocessed);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES * 2 - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..BLOCK_LENGTH_BYTES];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);

            let block = {
                let block = &padding[BLOCK_LENGTH_BYTES..];
                Block::try_from(block)
                    .expect("padding length must be the exact size of a block")
                    .into()
            };
            state = state.update(block);
        }

        Finalize { state }
    }

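    /// Resets the hash state to its initial value, discarding any buffered
    /// data.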
    pub fn reset(&mut self) -> &mut Self {
        self.state = self.state.reset();
        self.unprocessed.clear();
        self.processed = 0;
        self
    }

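    /// Convenience shortcut for `self.finalize().digest()`.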
    #[must_use]
    pub fn digest(&self) -> Digest {
        self.finalize().digest()
    }
}

impl core::Update for Update {
    type Digest = Digest;
    type Finalize = Finalize;

    fn update(&mut self, data: impl AsRef<[u8]>) {
        self.update(data);
    }

    fn finalize(&self) -> Self::Finalize {
        self.finalize()
    }

    fn reset(&mut self) {
        self.reset();
    }
}

impl Default for Update {
    fn default() -> Self {
        Self::new()
    }
}

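/// A finalized hash state, ready to produce the final [`Digest`].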
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Finalize {
    state: State,
}

impl Finalize {
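    /// Produces the 384-bit digest from the first six 64-bit state words,
    /// serialized in big-endian byte order.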
    #[must_use]
    #[rustfmt::skip]
    pub fn digest(&self) -> Digest {
        let State { a, b, c, d, e, f, .. } = self.state;
        let [a, b, c, d, e, f] = [
            a.to_be_bytes(),
            b.to_be_bytes(),
            c.to_be_bytes(),
            d.to_be_bytes(),
            e.to_be_bytes(),
            f.to_be_bytes(),
        ];
        Digest::new([
            a[0], a[1], a[2], a[3],
            a[4], a[5], a[6], a[7],
            b[0], b[1], b[2], b[3],
            b[4], b[5], b[6], b[7],
            c[0], c[1], c[2], c[3],
            c[4], c[5], c[6], c[7],
            d[0], d[1], d[2], d[3],
            d[4], d[5], d[6], d[7],
            e[0], e[1], e[2], e[3],
            e[4], e[5], e[6], e[7],
            f[0], f[1], f[2], f[3],
            f[4], f[5], f[6], f[7],
        ])
    }

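    /// Returns a fresh [`Update`] instance, discarding the finalized state.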
    #[must_use]
    pub fn reset(&self) -> Update {
        Update::new()
    }
}

impl core::Finalize for Finalize {
    type Digest = Digest;
    type Update = Update;

    fn digest(&self) -> Self::Digest {
        self.digest()
    }

    fn reset(&self) -> Self::Update {
        self.reset()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let digest = default().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
        );

        let digest = new().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
        );
    }

    #[test]
    fn reset() {
        let digest = new().update("data").reset().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
        );

        let digest = new().update("data").finalize().reset().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
        );
    }

    #[test]
    fn hello_world() {
        let digest = new().update("Hello World").digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "99514329186b2f6ae4a1329e7ee6c610a729636335174ac6b740f9028396fcc803d0e93863a7c3d90f86beee782f4f3f"
        );

        let digest = new()
            .update("Hello")
            .update(" ")
            .update("World")
            .digest()
            .to_hex_lowercase();
        assert_eq!(
            digest,
            "99514329186b2f6ae4a1329e7ee6c610a729636335174ac6b740f9028396fcc803d0e93863a7c3d90f86beee782f4f3f"
        );
    }

    #[test]
    fn rust_book() {
        let phrase = "Welcome to The Rust Programming Language, an introductory book about Rust. The Rust programming \
                      language helps you write faster, more reliable software. High-level ergonomics and low-level \
                      control are often at odds in programming language design; Rust challenges that conflict. \
                      Through balancing powerful technical capacity and a great developer experience, Rust gives you \
                      the option to control low-level details (such as memory usage) without all the hassle \
                      traditionally associated with such control.";

        let digest = hash(phrase).to_hex_lowercase();
        assert_eq!(
            digest,
            "219a81f21396aa67175bb507a6ddfb238c725c5aa61e99edf89bcfd9f119c2b00ac0614249eff0b1d41a7e98b9f9278c"
        );
    }

    #[test]
    fn zeroes() {
        let data = vec![0u8; 128];

        let digest = new().update(&data[..120]).digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "7212d895f4250ce1daa72e9e0caaef7132aed2e965885c55376818e45470de06fb6ebf7349c62fd342043f18010e46ac"
        );

        let digest = new()
            .update(&data[..120])
            .update(&data[120..])
            .digest()
            .to_hex_lowercase();
        assert_eq!(
            digest,
            "f809b88323411f24a6f152e5e9d9d1b5466b77e0f3c7550f8b242c31b6e7b99bcb45bdecb6124bc23283db3b9fc4f5b3"
        );
    }
}