#![cfg_attr(docsrs, feature(doc_auto_cfg))]
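//! An implementation of the SHA-2 256 hash function with an incremental,
//! buffered API ([`new`], [`Update::update`], [`Update::digest`]) and a
//! one-shot [`hash`] helper.
//!
//! A minimal usage sketch (imports are omitted and the crate path is assumed;
//! the digest values below come from this module's own tests):
//!
//! ```ignore
//! // One-shot hashing.
//! let digest = hash("Hello World").to_hex_lowercase();
//! assert_eq!(
//!     digest,
//!     "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e"
//! );
//!
//! // Incremental hashing of the same data.
//! let digest = new()
//!     .update("Hello")
//!     .update(" ")
//!     .update("World")
//!     .digest()
//!     .to_hex_lowercase();
//! assert_eq!(
//!     digest,
//!     "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e"
//! );
//! ```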
#![forbid(unsafe_code)]

pub mod block;
pub mod digest;
pub mod state;

use chksum_hash_core as core;

use crate::block::Block;
#[doc(inline)]
pub use crate::block::LENGTH_BYTES as BLOCK_LENGTH_BYTES;
#[doc(inline)]
pub use crate::digest::{Digest, LENGTH_BYTES as DIGEST_LENGTH_BYTES};
#[doc(inline)]
pub use crate::state::State;

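/// Creates a new [`Update`] hash instance.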
#[must_use]
pub fn new() -> Update {
    Update::new()
}

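/// Creates a default [`Update`] hash instance, delegating to the generic
/// `core::default` helper.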
#[must_use]
pub fn default() -> Update {
    core::default()
}

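/// Computes the hash of the given data in one shot.
///
/// A minimal usage sketch (imports are omitted and the crate path is assumed;
/// the digest value is the well-known SHA-2 256 digest of empty input, also
/// used in this module's tests):
///
/// ```ignore
/// let digest = hash("").to_hex_lowercase();
/// assert_eq!(
///     digest,
///     "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
/// );
/// ```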
pub fn hash(data: impl AsRef<[u8]>) -> Digest {
    core::hash::<Update>(data)
}

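/// An in-progress hash computation.
///
/// Incoming bytes are buffered in `unprocessed` until a full block of
/// [`BLOCK_LENGTH_BYTES`] is available; full blocks are compressed into
/// `state`, and `processed` counts the bytes consumed so far.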
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "fuzzing", derive(arbitrary::Arbitrary))]
pub struct Update {
    state: State,
    unprocessed: Vec<u8>,
    processed: usize,
}

impl Update {
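    /// Creates a new hash instance with a fresh state and an empty buffer.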
    #[must_use]
    pub fn new() -> Self {
        let state = state::new();
        let unprocessed = Vec::with_capacity(BLOCK_LENGTH_BYTES);
        let processed = 0;
        Self {
            state,
            unprocessed,
            processed,
        }
    }

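    /// Processes incoming data.
    ///
    /// Full blocks are compressed immediately; a trailing fragment shorter
    /// than [`BLOCK_LENGTH_BYTES`] is buffered until more data arrives or the
    /// hash is finalized.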
    pub fn update(&mut self, data: impl AsRef<[u8]>) -> &mut Self {
        let data = data.as_ref();

        // Compress any full blocks already sitting in the internal buffer.
        for _ in 0..(self.unprocessed.len() / BLOCK_LENGTH_BYTES) {
            let block = {
                let chunk = self.unprocessed.drain(..BLOCK_LENGTH_BYTES);
                let chunk = chunk.as_slice();
                Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into()
            };
            self.state = self.state.update(block);
            self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
        }

        if self.unprocessed.is_empty() {
            // Nothing is buffered, so incoming data can be processed block by block.
            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            if !remainder.is_empty() {
                self.unprocessed.extend(remainder);
            }
        } else if (self.unprocessed.len() + data.len()) < BLOCK_LENGTH_BYTES {
            // Not enough data for a full block yet, keep buffering.
            self.unprocessed.extend(data);
        } else {
            // Fill the buffered partial block with the beginning of the incoming
            // data, compress it, then process the rest of the data as usual.
            let unprocessed = self.unprocessed.len() % BLOCK_LENGTH_BYTES;
            let missing = BLOCK_LENGTH_BYTES - unprocessed;
            let (fillment, data) = data.split_at(missing);
            let block = {
                let mut block = [0u8; BLOCK_LENGTH_BYTES];
                let (first_part, second_part) = block.split_at_mut(self.unprocessed.len());
                first_part.copy_from_slice(self.unprocessed.drain(..self.unprocessed.len()).as_slice());
                second_part[..missing].copy_from_slice(fillment);
                block
            };
            let mut chunks = block.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            assert!(remainder.is_empty(), "chunks remainder must be empty");

            let mut chunks = data.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                self.state = self.state.update(block);
                self.processed = self.processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            let remainder = chunks.remainder();
            self.unprocessed.extend(remainder);
        }

        self
    }

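    /// Produces a finalized state by padding any buffered data.
    ///
    /// The padding consists of a single `0x80` byte, as many zero bytes as
    /// needed, and the total message length in bits encoded as a 64-bit
    /// big-endian integer, so that the padded length is a multiple of the
    /// block size.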
    #[must_use]
    pub fn finalize(&self) -> Finalize {
        let mut state = self.state;
        let mut processed = self.processed;
        let unprocessed = {
            // Compress any full blocks left in the buffer before padding.
            let mut chunks = self.unprocessed.chunks_exact(BLOCK_LENGTH_BYTES);
            for chunk in chunks.by_ref() {
                let block = Block::try_from(chunk)
                    .expect("chunk length must be exact size as block")
                    .into();
                state = state.update(block);
                processed = processed.wrapping_add(BLOCK_LENGTH_BYTES);
            }
            chunks.remainder()
        };

        // Total message length in bits, encoded as a 64-bit big-endian integer.
        let length = {
            let length = unprocessed.len().wrapping_add(processed) as u64;
            let length = length.wrapping_mul(8);
            length.to_be_bytes()
        };

        if (unprocessed.len() + 1 + length.len()) <= BLOCK_LENGTH_BYTES {
            // The remaining data, the 0x80 marker and the length fit into one block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES];
                padding[..unprocessed.len()].copy_from_slice(unprocessed);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);
        } else {
            // The padding spills over into a second block.
            let padding = {
                let mut padding = [0u8; BLOCK_LENGTH_BYTES * 2];
                padding[..unprocessed.len()].copy_from_slice(unprocessed);
                padding[unprocessed.len()] = 0x80;
                padding[(BLOCK_LENGTH_BYTES * 2 - length.len())..].copy_from_slice(&length);
                padding
            };

            let block = {
                let block = &padding[..BLOCK_LENGTH_BYTES];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);

            let block = {
                let block = &padding[BLOCK_LENGTH_BYTES..];
                Block::try_from(block)
                    .expect("padding length must be exact size as block")
                    .into()
            };
            state = state.update(block);
        }

        Finalize { state }
    }

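    /// Resets the instance to its initial state and clears the internal buffer.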
    pub fn reset(&mut self) -> &mut Self {
        self.state = self.state.reset();
        self.unprocessed.clear();
        self.processed = 0;
        self
    }

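    /// Produces the final digest.
    ///
    /// This is a shortcut for `self.finalize().digest()`; the instance itself
    /// is not consumed and can keep receiving data afterwards.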
    #[must_use]
    pub fn digest(&self) -> Digest {
        self.finalize().digest()
    }
}

impl core::Update for Update {
    type Digest = Digest;
    type Finalize = Finalize;

    fn update(&mut self, data: impl AsRef<[u8]>) {
        self.update(data);
    }

    fn finalize(&self) -> Self::Finalize {
        self.finalize()
    }

    fn reset(&mut self) {
        self.reset();
    }
}

impl Default for Update {
    fn default() -> Self {
        Self::new()
    }
}

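/// A finalized hash state from which the digest can be read.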
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Finalize {
    state: State,
}

impl Finalize {
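    /// Produces the digest by serializing the eight 32-bit state words as
    /// big-endian bytes.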
    #[must_use]
    #[rustfmt::skip]
    pub fn digest(&self) -> Digest {
        let State { a, b, c, d, e, f, g, h } = self.state;
        let [a, b, c, d, e, f, g, h] = [
            a.to_be_bytes(),
            b.to_be_bytes(),
            c.to_be_bytes(),
            d.to_be_bytes(),
            e.to_be_bytes(),
            f.to_be_bytes(),
            g.to_be_bytes(),
            h.to_be_bytes(),
        ];
        Digest::new([
            a[0], a[1], a[2], a[3],
            b[0], b[1], b[2], b[3],
            c[0], c[1], c[2], c[3],
            d[0], d[1], d[2], d[3],
            e[0], e[1], e[2], e[3],
            f[0], f[1], f[2], f[3],
            g[0], g[1], g[2], g[3],
            h[0], h[1], h[2], h[3],
        ])
    }

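    /// Resets to a fresh [`Update`] instance.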
    #[must_use]
    pub fn reset(&self) -> Update {
        Update::new()
    }
}

impl core::Finalize for Finalize {
    type Digest = Digest;
    type Update = Update;

    fn digest(&self) -> Self::Digest {
        self.digest()
    }

    fn reset(&self) -> Self::Update {
        self.reset()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let digest = default().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );

        let digest = new().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );
    }

    #[test]
    fn reset() {
        let digest = new().update("data").reset().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );

        let digest = new().update("data").finalize().reset().digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );
    }

    #[test]
    fn hello_world() {
        let digest = new().update("Hello World").digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e"
        );

        let digest = new()
            .update("Hello")
            .update(" ")
            .update("World")
            .digest()
            .to_hex_lowercase();
        assert_eq!(
            digest,
            "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e"
        );
    }

    #[test]
    fn rust_book() {
        let phrase = "Welcome to The Rust Programming Language, an introductory book about Rust. The Rust programming \
                      language helps you write faster, more reliable software. High-level ergonomics and low-level \
                      control are often at odds in programming language design; Rust challenges that conflict. \
                      Through balancing powerful technical capacity and a great developer experience, Rust gives you \
                      the option to control low-level details (such as memory usage) without all the hassle \
                      traditionally associated with such control.";

        let digest = hash(phrase).to_hex_lowercase();
        assert_eq!(
            digest,
            "b2de5395f39bf32376693a9cdccc13da1d705d0eb9e9ec8c566a91f604fcc942"
        );
    }

    #[test]
    fn zeroes() {
        let data = vec![0u8; 64];

        let digest = new().update(&data[..60]).digest().to_hex_lowercase();
        assert_eq!(
            digest,
            "5dcc1b5872dd9ff1c234501f1fefda01f664164e1583c3e1bb3dbea47588ab31"
        );

        let digest = new()
            .update(&data[..60])
            .update(&data[60..])
            .digest()
            .to_hex_lowercase();
        assert_eq!(
            digest,
            "f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b"
        );
    }
}