// axhash_core/hash/hasher_impl.rs

use super::AxHasher;

use crate::bytes::hash_bytes_core;
use crate::constants::SECRET;
use crate::math::{avalanche, folded_multiply};

use core::hash::Hasher;
9impl Hasher for AxHasher {
10 #[inline(always)]
11 fn finish(&self) -> u64 {
12 let mut hasher = self.clone();
14 hasher.flush_sponge();
15 avalanche(hasher.acc)
16 }
17
18 #[inline(always)]
19 fn write(&mut self, bytes: &[u8]) {
20 self.flush_sponge();
21 self.acc = hash_bytes_core(bytes, self.acc);
22 }
23
24 #[inline(always)]
25 fn write_u8(&mut self, i: u8) {
26 self.push_num(i, 8);
27 }
28
29 #[inline(always)]
30 fn write_u16(&mut self, i: u16) {
31 self.push_num(i, 16);
32 }
33
34 #[inline(always)]
35 fn write_u32(&mut self, i: u32) {
36 self.push_num(i, 32);
37 }
38
39 #[inline(always)]
40 fn write_u64(&mut self, i: u64) {
41 self.push_num(i, 64);
42 }
43
44 #[inline(always)]
45 fn write_u128(&mut self, i: u128) {
46 self.flush_sponge();
47 let lo = i as u64;
48 let hi = (i >> 64) as u64;
49 self.acc = folded_multiply(lo ^ self.acc, hi ^ SECRET[1]);
50 }
51
52 #[inline(always)]
53 fn write_usize(&mut self, i: usize) {
54 #[cfg(target_pointer_width = "32")]
55 self.write_u32(i as u32);
56 #[cfg(target_pointer_width = "64")]
57 self.write_u64(i as u64);
58 }
59
60 #[inline(always)]
61 fn write_i8(&mut self, i: i8) {
62 self.write_u8(i as u8);
63 }
64
65 #[inline(always)]
66 fn write_i16(&mut self, i: i16) {
67 self.write_u16(i as u16);
68 }
69
70 #[inline(always)]
71 fn write_i32(&mut self, i: i32) {
72 self.write_u32(i as u32);
73 }
74
75 #[inline(always)]
76 fn write_i64(&mut self, i: i64) {
77 self.write_u64(i as u64);
78 }
79
80 #[inline(always)]
81 fn write_i128(&mut self, i: i128) {
82 self.write_u128(i as u128);
83 }
84
85 #[inline(always)]
86 fn write_isize(&mut self, i: isize) {
87 self.write_usize(i as usize);
88 }
89}