density_rs/algorithms/lion/
lion.rs1use crate::algorithms::PLAIN_FLAG;
2use crate::codec::codec::Codec;
3use crate::codec::decoder::Decoder;
4use crate::codec::quad_encoder::QuadEncoder;
5use crate::errors::decode_error::DecodeError;
6use crate::errors::encode_error::EncodeError;
7use crate::io::read_buffer::ReadBuffer;
8use crate::io::read_signature::ReadSignature;
9use crate::io::write_buffer::WriteBuffer;
10use crate::io::write_signature::WriteSignature;
11use crate::{BIT_SIZE_U16, BIT_SIZE_U32, BYTE_SIZE_U16, BYTE_SIZE_U32};
12use std::slice::{from_raw_parts, from_raw_parts_mut};
13
// Width of the hash used to index both lookup tables (16 bits -> 65 536 entries each).
pub(crate) const LION_HASH_BITS: usize = BIT_SIZE_U16;
// Multiplier for the multiplicative hash: hash = (quad * MULT) >> (32 - LION_HASH_BITS).
pub(crate) const LION_HASH_MULTIPLIER: u32 = 0x9D6EF916;


// Every encoded unit is announced by a 3-bit flag in the signature stream.
pub(crate) const FLAG_SIZE_BITS: u8 = 3;
// Flags 0x1..=0x5: the quad matched prediction slot A..E for the previous hash
// (A is the most recently recorded prediction).
pub(crate) const PREDICTED_A_FLAG: u64 = 0x1;
pub(crate) const PREDICTED_B_FLAG: u64 = 0x2;
pub(crate) const PREDICTED_C_FLAG: u64 = 0x3;
pub(crate) const PREDICTED_D_FLAG: u64 = 0x4;
pub(crate) const PREDICTED_E_FLAG: u64 = 0x5;
// The quad matched the primary (A) / secondary (B) dictionary slot for its own hash.
pub(crate) const MAP_A_FLAG: u64 = 0x6;
pub(crate) const MAP_B_FLAG: u64 = 0x7;
// Mask and width used when reading flags back out of the signature on decode.
pub(crate) const DECODE_FLAG_MASK: u64 = 0x7;
pub(crate) const DECODE_FLAG_MASK_BITS: u8 = 3;
28
/// Mutable codec state shared by the encode and decode paths.
pub struct State {
    // Hash of the most recently processed quad; indexes `prediction_map`
    // when the next quad is handled.
    pub(crate) last_hash: u16,
    // Dictionary: for each 16-bit hash, the two most recent quads seen with that hash.
    pub(crate) chunk_map: Vec<ChunkData>,
    // For each 16-bit hash, the five quads most recently seen *after* a quad
    // with that hash (a small per-context prediction queue).
    pub(crate) prediction_map: Vec<PredictionData>,
}
34
/// Dictionary entry: the two most recent quads that hashed to this slot
/// (`chunk_a` is the most recent, `chunk_b` the one before it).
#[derive(Copy, Clone)]
pub struct ChunkData {
    pub(crate) chunk_a: u32,
    pub(crate) chunk_b: u32,
}
40
/// Prediction queue entry: the five quads most recently observed after the
/// owning hash, ordered newest (`next_a`) to oldest (`next_e`).
#[derive(Copy, Clone)]
pub struct PredictionData {
    pub(crate) next_a: u32,
    pub(crate) next_b: u32,
    pub(crate) next_c: u32,
    pub(crate) next_d: u32,
    pub(crate) next_e: u32,
}
49
50#[inline(always)]
51fn shift_predictions(prediction_data: &mut PredictionData, quad: u32) {
52 prediction_data.next_e = prediction_data.next_d;
53 prediction_data.next_d = prediction_data.next_c;
54 prediction_data.next_c = prediction_data.next_b;
55 prediction_data.next_b = prediction_data.next_a;
56 prediction_data.next_a = quad;
57}
58
/// LION codec: a 3-bit-flag scheme combining a two-way dictionary
/// (`chunk_map`) with a five-deep per-hash prediction queue (`prediction_map`).
pub struct Lion {
    pub state: State,
}
62
63impl Lion {
64 pub fn new() -> Self {
65 Lion {
66 state: State {
67 last_hash: 0,
68 chunk_map: vec![ChunkData { chunk_a: 0, chunk_b: 0 }; 1 << LION_HASH_BITS],
69 prediction_map: vec![PredictionData { next_a: 0, next_b: 0, next_c: 0, next_d: 0, next_e: 0 }; 1 << LION_HASH_BITS],
70 },
71 }
72 }
73
74 pub fn encode(input: &[u8], output: &mut [u8]) -> Result<usize, EncodeError> {
75 let mut lion = Lion::new();
76 lion.encode(input, output)
77 }
78
79 pub fn decode(input: &[u8], output: &mut [u8]) -> Result<usize, DecodeError> {
80 let mut lion = Lion::new();
81 lion.decode(input, output)
82 }
83
84 #[inline(always)]
85 fn decode_plain(&mut self, in_buffer: &mut ReadBuffer) -> (u16, u32) {
86 let quad = in_buffer.read_u32_le();
87 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
88
89 let chunk_data = &mut self.state.chunk_map[hash as usize];
90 chunk_data.chunk_b = chunk_data.chunk_a;
91 chunk_data.chunk_a = quad;
92
93 shift_predictions(&mut self.state.prediction_map[self.state.last_hash as usize], quad);
94
95 (hash, quad)
96 }
97
98 #[inline(always)]
99 fn decode_map_a(&mut self, in_buffer: &mut ReadBuffer) -> (u16, u32) {
100 let hash = in_buffer.read_u16_le();
101
102 let chunk_data = &mut self.state.chunk_map[hash as usize];
103 let quad = chunk_data.chunk_a;
104
105 shift_predictions(&mut self.state.prediction_map[self.state.last_hash as usize], quad);
106
107 (hash, quad)
108 }
109
110 #[inline(always)]
111 fn decode_map_b(&mut self, in_buffer: &mut ReadBuffer) -> (u16, u32) {
112 let hash = in_buffer.read_u16_le();
113
114 let chunk_data = &mut self.state.chunk_map[hash as usize];
115 let quad = chunk_data.chunk_b;
116
117 chunk_data.chunk_b = chunk_data.chunk_a;
118 chunk_data.chunk_a = quad;
119
120 shift_predictions(&mut self.state.prediction_map[self.state.last_hash as usize], quad);
121
122 (hash, quad)
123 }
124
125 #[inline(always)]
126 fn decode_predicted_a(&mut self) -> (u16, u32) {
127 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
128 let quad = prediction_data.next_a;
129 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
130
131 (hash, quad)
132 }
133
134 #[inline(always)]
135 fn decode_predicted_b(&mut self) -> (u16, u32) {
136 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
137 let quad = prediction_data.next_b;
138 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
139
140 prediction_data.next_b = prediction_data.next_a;
141 prediction_data.next_a = quad;
142
143 (hash, quad)
144 }
145
146 #[inline(always)]
147 fn decode_predicted_c(&mut self) -> (u16, u32) {
148 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
149 let quad = prediction_data.next_c;
150 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
151
152 prediction_data.next_c = prediction_data.next_b;
153 prediction_data.next_b = prediction_data.next_a;
154 prediction_data.next_a = quad;
155
156 (hash, quad)
157 }
158
159 #[inline(always)]
160 fn decode_predicted_d(&mut self) -> (u16, u32) {
161 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
162 let quad = prediction_data.next_d;
163 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
164
165 prediction_data.next_d = prediction_data.next_c;
166 prediction_data.next_c = prediction_data.next_b;
167 prediction_data.next_b = prediction_data.next_a;
168 prediction_data.next_a = quad;
169
170 (hash, quad)
171 }
172
173 #[inline(always)]
174 fn decode_predicted_e(&mut self) -> (u16, u32) {
175 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
176 let quad = prediction_data.next_e;
177 let hash = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
178
179 prediction_data.next_e = prediction_data.next_d;
180 prediction_data.next_d = prediction_data.next_c;
181 prediction_data.next_c = prediction_data.next_b;
182 prediction_data.next_b = prediction_data.next_a;
183 prediction_data.next_a = quad;
184
185 (hash, quad)
186 }
187
188 #[inline(always)]
189 fn update_last_hash(&mut self, hash_u16: u16) {
190 self.state.last_hash = hash_u16;
191 }
192
193 #[unsafe(no_mangle)]
194 pub extern "C" fn lion_encode(input: *const u8, input_size: usize, output: *mut u8, output_size: usize) -> usize {
195 unsafe { Self::encode(from_raw_parts(input, input_size), from_raw_parts_mut(output, output_size)).unwrap_or(0) }
196 }
197
198 #[unsafe(no_mangle)]
199 pub extern "C" fn lion_decode(input: *const u8, input_size: usize, output: *mut u8, output_size: usize) -> usize {
200 unsafe { Self::decode(from_raw_parts(input, input_size), from_raw_parts_mut(output, output_size)).unwrap_or(0) }
201 }
202
203 #[unsafe(no_mangle)]
204 pub extern "C" fn lion_safe_encode_buffer_size(size: usize) -> usize {
205 Self::safe_encode_buffer_size(size)
206 }
207}
208
209impl QuadEncoder for Lion {
210 #[inline(always)]
211 fn encode_quad(&mut self, quad: u32, out_buffer: &mut WriteBuffer, signature: &mut WriteSignature) {
212 let hash_u16 = (quad.wrapping_mul(LION_HASH_MULTIPLIER) >> (BIT_SIZE_U32 - LION_HASH_BITS)) as u16;
213 let prediction_data = &mut self.state.prediction_map[self.state.last_hash as usize];
214 let chunk_data = &mut self.state.chunk_map[hash_u16 as usize];
215
216 if prediction_data.next_a != quad {
217 if prediction_data.next_b != quad {
218 if prediction_data.next_c != quad {
219 if prediction_data.next_d != quad {
220 if prediction_data.next_e != quad {
221 if chunk_data.chunk_a != quad {
222 if chunk_data.chunk_b != quad {
223 signature.push_bits(PLAIN_FLAG, FLAG_SIZE_BITS); out_buffer.push(&quad.to_le_bytes());
225 } else {
226 signature.push_bits(MAP_B_FLAG, FLAG_SIZE_BITS); out_buffer.push(&hash_u16.to_le_bytes());
228 }
229 chunk_data.chunk_b = chunk_data.chunk_a;
230 chunk_data.chunk_a = quad;
231
232 shift_predictions(prediction_data, quad);
233 } else {
234 signature.push_bits(MAP_A_FLAG, FLAG_SIZE_BITS); out_buffer.push(&hash_u16.to_le_bytes());
236
237 shift_predictions(prediction_data, quad);
238 }
239 } else {
240 signature.push_bits(PREDICTED_E_FLAG, FLAG_SIZE_BITS); shift_predictions(prediction_data, quad);
243 }
244 } else {
245 signature.push_bits(PREDICTED_D_FLAG, FLAG_SIZE_BITS); prediction_data.next_d = prediction_data.next_c;
248 prediction_data.next_c = prediction_data.next_b;
249 prediction_data.next_b = prediction_data.next_a;
250 prediction_data.next_a = quad;
251 }
252 } else {
253 signature.push_bits(PREDICTED_C_FLAG, FLAG_SIZE_BITS); prediction_data.next_c = prediction_data.next_b;
256 prediction_data.next_b = prediction_data.next_a;
257 prediction_data.next_a = quad;
258 }
259 } else {
260 signature.push_bits(PREDICTED_B_FLAG, FLAG_SIZE_BITS); prediction_data.next_b = prediction_data.next_a;
263 prediction_data.next_a = quad;
264 }
265 } else {
266 signature.push_bits(PREDICTED_A_FLAG, FLAG_SIZE_BITS); }
268
269 self.update_last_hash(hash_u16);
270 }
271}
272
273impl Decoder for Lion {
274 #[inline(always)]
275 fn decode_unit(&mut self, in_buffer: &mut ReadBuffer, signature: &mut ReadSignature, out_buffer: &mut WriteBuffer) {
276 let (hash, quad) = match signature.read_bits(DECODE_FLAG_MASK, DECODE_FLAG_MASK_BITS) {
277 PREDICTED_B_FLAG => { self.decode_predicted_b() }
278 PREDICTED_C_FLAG => { self.decode_predicted_c() }
279 PREDICTED_D_FLAG => { self.decode_predicted_d() }
280 PREDICTED_E_FLAG => { self.decode_predicted_e() }
281 MAP_A_FLAG => { self.decode_map_a(in_buffer) }
282 MAP_B_FLAG => { self.decode_map_b(in_buffer) }
283 PLAIN_FLAG => { self.decode_plain(in_buffer) }
284 _ => { self.decode_predicted_a() }
285 };
286 self.state.last_hash = hash;
287 out_buffer.push(&quad.to_le_bytes());
288 }
289
290 #[inline(always)]
291 fn decode_partial_unit(&mut self, in_buffer: &mut ReadBuffer, signature: &mut ReadSignature, out_buffer: &mut WriteBuffer) -> bool {
292 let (hash, quad) = match signature.read_bits(DECODE_FLAG_MASK, DECODE_FLAG_MASK_BITS) {
293 PLAIN_FLAG => {
294 match in_buffer.remaining() {
295 0 => { return true; }
296 1..=3 => {
297 out_buffer.push(in_buffer.read(in_buffer.remaining()));
298 return true;
299 }
300 _ => { self.decode_plain(in_buffer) }
301 }
302 }
303 PREDICTED_B_FLAG => { self.decode_predicted_b() }
304 PREDICTED_C_FLAG => { self.decode_predicted_c() }
305 PREDICTED_D_FLAG => { self.decode_predicted_d() }
306 PREDICTED_E_FLAG => { self.decode_predicted_e() }
307 MAP_A_FLAG => { self.decode_map_a(in_buffer) }
308 MAP_B_FLAG => { self.decode_map_b(in_buffer) }
309 _ => { self.decode_predicted_a() }
310 };
311 self.state.last_hash = hash;
312 out_buffer.push(&quad.to_le_bytes());
313 false
314 }
315}
316
317impl Codec for Lion {
318 #[inline(always)]
319 fn block_size() -> usize { BYTE_SIZE_U32 * (Self::signature_significant_bytes() << 3) / FLAG_SIZE_BITS as usize }
320
321 #[inline(always)]
322 fn decode_unit_size() -> usize { 4 }
323
324 #[inline(always)]
325 fn signature_significant_bytes() -> usize { 6 }
326
327 fn clear_state(&mut self) {
328 self.state.last_hash = 0;
329 self.state.chunk_map.fill(ChunkData { chunk_a: 0, chunk_b: 0 });
330 self.state.prediction_map.fill(PredictionData { next_a: 0, next_b: 0, next_c: 0, next_d: 0, next_e: 0 });
331 }
332
333 #[inline(always)]
334 fn write_signature(out_buffer: &mut WriteBuffer, signature: &mut WriteSignature) {
335 out_buffer.write_at(signature.pos, &signature.value.to_le_bytes()[0..Self::signature_significant_bytes()]);
336 }
337
338 #[inline(always)]
339 fn read_signature(in_buffer: &mut ReadBuffer) -> ReadSignature {
340 match in_buffer.remaining() {
341 0..=7 => {
342 let bytes = [in_buffer.read(Self::signature_significant_bytes()), &[0, 0]].concat();
343 ReadSignature::new(u64::from_le_bytes(<&[u8] as TryInto<[u8; size_of::<u64>()]>>::try_into(&bytes).unwrap()))
344 }
345 _ => {
346 let bytes = in_buffer.read_u64_le();
347 in_buffer.rewind(BYTE_SIZE_U16);
348 ReadSignature::new(bytes & 0x0000ffffffffffff_u64)
349 }
350 }
351 }
352}