// tetsy_reference_trie/lib.rs

1// Copyright 2017, 2018 Parity Technologies
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7//     http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15//! Reference implementation of a streamer.
16
17use std::fmt;
18use std::iter::once;
19use std::marker::PhantomData;
20use std::ops::Range;
21use tetsy_scale_codec::{Decode, Input, Output, Encode, Compact, Error as CodecError};
22use tetsy_trie_root::Hasher;
23
24use tetsy_trie_db::{
25	node::{NibbleSlicePlan, NodePlan, NodeHandlePlan},
26	triedbmut::ChildReference,
27	DBValue,
28	trie_visit,
29	TrieBuilder,
30	TrieRoot,
31	Partial,
32};
33use std::borrow::Borrow;
34use tetsy_keccak_hasher::KeccakHasher;
35
36use tetsy_trie_db::{
37	nibble_ops, NodeCodec,
38	Trie, TrieConfiguration,
39	TrieLayout, TrieMut,
40};
41pub use tetsy_trie_root::TrieStream;
/// Convenience re-export of the decoded trie node type.
pub mod node {
	pub use tetsy_trie_db::node::Node;
}
45
/// Trie layout using extension nodes.
pub struct ExtensionLayout;

impl TrieLayout for ExtensionLayout {
	// Radix-16 trie with extension nodes; empty values are not allowed.
	const USE_EXTENSION: bool = true;
	const ALLOW_EMPTY: bool = false;
	type Hash = KeccakHasher;
	type Codec = ReferenceNodeCodec<KeccakHasher>;
}

impl TrieConfiguration for ExtensionLayout { }
57
/// Trie layout without extension nodes, allowing
/// generic hasher.
pub struct GenericNoExtensionLayout<H>(PhantomData<H>);

impl<H: Hasher> TrieLayout for GenericNoExtensionLayout<H> {
	// No-extension (nibbled-branch) variant; the hasher is a type parameter.
	const USE_EXTENSION: bool = false;
	const ALLOW_EMPTY: bool = false;
	type Hash = H;
	type Codec = ReferenceNodeCodecNoExt<H>;
}
68
/// Trie that allows empty values
pub struct AllowEmptyLayout;

impl TrieLayout for AllowEmptyLayout {
	// Same codec as `ExtensionLayout`, but empty values may be stored.
	const USE_EXTENSION: bool = true;
	const ALLOW_EMPTY: bool = true;
	type Hash = KeccakHasher;
	type Codec = ReferenceNodeCodec<KeccakHasher>;
}

impl<H: Hasher> TrieConfiguration for GenericNoExtensionLayout<H> { }

/// Trie layout without extension nodes.
pub type NoExtensionLayout = GenericNoExtensionLayout<tetsy_keccak_hasher::KeccakHasher>;
83
84/// Children bitmap codec for radix 16 trie.
85pub struct Bitmap(u16);
86
87const BITMAP_LENGTH: usize = 2;
88
89impl Bitmap {
90
91	fn decode(data: &[u8]) -> Result<Self, CodecError> {
92		Ok(u16::decode(&mut &data[..])
93			.map(|v| Bitmap(v))?)
94	}
95
96	fn value_at(&self, i: usize) -> bool {
97		self.0 & (1u16 << i) != 0
98	}
99
100	fn encode<I: Iterator<Item = bool>>(has_children: I , output: &mut [u8]) {
101		let mut bitmap: u16 = 0;
102		let mut cursor: u16 = 1;
103		for v in has_children {
104			if v { bitmap |= cursor }
105			cursor <<= 1;
106		}
107		output[0] = (bitmap % 256) as u8;
108		output[1] = (bitmap / 256) as u8;
109	}
110}
111
/// Read-only trie over the extension-node layout.
pub type RefTrieDB<'a> = tetsy_trie_db::TrieDB<'a, ExtensionLayout>;
/// Read-only trie over the no-extension layout.
pub type RefTrieDBNoExt<'a> = tetsy_trie_db::TrieDB<'a, NoExtensionLayout>;
/// Mutable trie over the extension-node layout.
pub type RefTrieDBMut<'a> = tetsy_trie_db::TrieDBMut<'a, ExtensionLayout>;
/// Mutable trie over the no-extension layout.
pub type RefTrieDBMutNoExt<'a> = tetsy_trie_db::TrieDBMut<'a, NoExtensionLayout>;
/// Mutable trie over the layout that accepts empty values.
pub type RefTrieDBMutAllowEmpty<'a> = tetsy_trie_db::TrieDBMut<'a, AllowEmptyLayout>;
/// `FatDB` over the extension-node layout.
pub type RefFatDB<'a> = tetsy_trie_db::FatDB<'a, ExtensionLayout>;
/// Mutable `FatDB` over the extension-node layout.
pub type RefFatDBMut<'a> = tetsy_trie_db::FatDBMut<'a, ExtensionLayout>;
/// `SecTrieDB` over the extension-node layout.
pub type RefSecTrieDB<'a> = tetsy_trie_db::SecTrieDB<'a, ExtensionLayout>;
/// Mutable `SecTrieDB` over the extension-node layout.
pub type RefSecTrieDBMut<'a> = tetsy_trie_db::SecTrieDBMut<'a, ExtensionLayout>;
/// Lookup helper over the extension-node layout.
pub type RefLookup<'a, Q> = tetsy_trie_db::Lookup<'a, ExtensionLayout, Q>;
/// Lookup helper over the no-extension layout.
pub type RefLookupNoExt<'a, Q> = tetsy_trie_db::Lookup<'a, NoExtensionLayout, Q>;
123
/// Compute the trie root (extension-node variant) of the given key/value pairs.
pub fn tetsy_reference_trie_root<I, A, B>(input: I) -> <KeccakHasher as Hasher>::Out where
	I: IntoIterator<Item = (A, B)>,
	A: AsRef<[u8]> + Ord + fmt::Debug,
	B: AsRef<[u8]> + fmt::Debug,
{
	tetsy_trie_root::tetsy_trie_root::<KeccakHasher, ReferenceTrieStream, _, _, _>(input)
}

/// Like `tetsy_reference_trie_root`, but returns the root node encoding
/// itself rather than its hash.
fn tetsy_reference_trie_root_unhashed<I, A, B>(input: I) -> Vec<u8> where
	I: IntoIterator<Item = (A, B)>,
	A: AsRef<[u8]> + Ord + fmt::Debug,
	B: AsRef<[u8]> + fmt::Debug,
{
	tetsy_trie_root::unhashed_trie::<KeccakHasher, ReferenceTrieStream, _, _, _>(input)
}

/// Compute the trie root (no-extension variant) of the given key/value pairs.
pub fn tetsy_reference_trie_root_no_extension<I, A, B>(input: I) -> <KeccakHasher as Hasher>::Out where
	I: IntoIterator<Item = (A, B)>,
	A: AsRef<[u8]> + Ord + fmt::Debug,
	B: AsRef<[u8]> + fmt::Debug,
{
	tetsy_trie_root::tetsy_trie_root_no_extension::<KeccakHasher, ReferenceTrieStreamNoExt, _, _, _>(input)
}

/// Like `tetsy_reference_trie_root_no_extension`, but returns the root node
/// encoding itself rather than its hash.
fn tetsy_reference_trie_root_unhashed_no_extension<I, A, B>(input: I) -> Vec<u8> where
	I: IntoIterator<Item = (A, B)>,
	A: AsRef<[u8]> + Ord + fmt::Debug,
	B: AsRef<[u8]> + fmt::Debug,
{
	tetsy_trie_root::unhashed_trie_no_extension::<KeccakHasher, ReferenceTrieStreamNoExt, _, _, _>(input)
}
155
// Constants for the codec/stream with extension nodes. The node type is
// carried in the first byte: 0 = empty, 1..=127 = leaf (offset + nibble
// count), 128..=253 = extension (offset + nibble count), 254/255 = branch
// without/with value.
const EMPTY_TRIE: u8 = 0;
const LEAF_NODE_OFFSET: u8 = 1;
const EXTENSION_NODE_OFFSET: u8 = 128;
const BRANCH_NODE_NO_VALUE: u8 = 254;
const BRANCH_NODE_WITH_VALUE: u8 = 255;
// Upper bounds (exclusive) on the nibble count a leaf/extension header can carry.
const LEAF_NODE_OVER: u8 = EXTENSION_NODE_OFFSET - LEAF_NODE_OFFSET;
const EXTENSION_NODE_OVER: u8 = BRANCH_NODE_NO_VALUE - EXTENSION_NODE_OFFSET;
// Last header byte values of the leaf/extension ranges.
const LEAF_NODE_LAST: u8 = EXTENSION_NODE_OFFSET - 1;
const EXTENSION_NODE_LAST: u8 = BRANCH_NODE_NO_VALUE - 1;

// Constants used with the no-extension trie codec.
const EMPTY_TRIE_NO_EXT: u8 = 0;
// Cap on the nibble count representable by the variable-length size encoding.
const NIBBLE_SIZE_BOUND_NO_EXT: usize = u16::max_value() as usize;
// Node type lives in the two high bits of the first header byte.
const LEAF_PREFIX_MASK_NO_EXT: u8 = 0b_01 << 6;
const BRANCH_WITHOUT_MASK_NO_EXT: u8 = 0b_10 << 6;
const BRANCH_WITH_MASK_NO_EXT: u8 = 0b_11 << 6;
172
/// Create a leaf/extension node, encoding a number of nibbles.
/// The nibble count is folded into the single header byte, so it must stay
/// below `EXTENSION_NODE_OVER` (126); this is only checked with a
/// `debug_assert`, so release builds silently produce a corrupt header for
/// longer inputs.
fn fuse_nibbles_node<'a>(nibbles: &'a [u8], leaf: bool) -> impl Iterator<Item = u8> + 'a {
	debug_assert!(
		nibbles.len() < LEAF_NODE_OVER.min(EXTENSION_NODE_OVER) as usize,
		"nibbles length too long. what kind of size of key are you trying to include in the trie!?!"
	);
	// Header byte: node-type offset plus nibble count.
	let first_byte = if leaf {
		LEAF_NODE_OFFSET
	} else {
		EXTENSION_NODE_OFFSET
	} + nibbles.len() as u8;

	// An odd nibble count emits its first nibble alone; the rest are packed
	// two per byte, high nibble first.
	once(first_byte)
		.chain(if nibbles.len() % 2 == 1 { Some(nibbles[0]) } else { None })
		.chain(nibbles[nibbles.len() % 2..].chunks(2).map(|ch| ch[0] << 4 | ch[1]))
}
191
/// Node kinds of the no-extension codec; selects the header prefix bits.
enum NodeKindNoExt {
	Leaf,
	BranchNoValue,
	BranchWithValue,
}
197
198/// Create a leaf or branch node header followed by its encoded partial nibbles.
199fn fuse_nibbles_node_no_extension<'a>(
200	nibbles: &'a [u8],
201	kind: NodeKindNoExt,
202) -> impl Iterator<Item = u8> + 'a {
203	let size = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, nibbles.len());
204
205	let iter_start = match kind {
206		NodeKindNoExt::Leaf => size_and_prefix_iterator(size, LEAF_PREFIX_MASK_NO_EXT),
207		NodeKindNoExt::BranchNoValue => size_and_prefix_iterator(size, BRANCH_WITHOUT_MASK_NO_EXT),
208		NodeKindNoExt::BranchWithValue => size_and_prefix_iterator(size, BRANCH_WITH_MASK_NO_EXT),
209	};
210	iter_start
211		.chain(if nibbles.len() % 2 == 1 { Some(nibbles[0]) } else { None })
212		.chain(nibbles[nibbles.len() % 2..].chunks(2).map(|ch| ch[0] << 4 | ch[1]))
213}
214
215/// Encoding of branch header and children bitmap (for trie stream radix 16).
216/// For stream variant with extension.
217fn branch_node(has_value: bool, has_children: impl Iterator<Item = bool>) -> [u8; 3] {
218	let mut result = [0, 0, 0];
219	branch_node_buffered(has_value, has_children, &mut result[..]);
220	result
221}
222
223/// Encoding of branch header and children bitmap for any radix.
224/// For codec/stream variant with extension.
225fn branch_node_buffered<I: Iterator<Item = bool>>(
226	has_value: bool,
227	has_children: I,
228	output: &mut[u8],
229) {
230	let first = if has_value {
231		BRANCH_NODE_WITH_VALUE
232	} else {
233		BRANCH_NODE_NO_VALUE
234	};
235	output[0] = first;
236	Bitmap::encode(has_children, &mut output[1..]);
237}
238
/// Encoding of children bitmap (for trie stream radix 16).
/// For stream variant without extension.
/// Returns the bitmap as `(low byte, high byte)`.
fn branch_node_bit_mask(has_children: impl Iterator<Item = bool>) -> (u8, u8) {
	let mut bits: u16 = 0;
	for (i, present) in has_children.enumerate() {
		if present {
			// Flags beyond the 16th contribute nothing (shift saturates to zero).
			bits |= 1u16.checked_shl(i as u32).unwrap_or(0);
		}
	}
	((bits & 0xff) as u8, (bits >> 8) as u8)
}
250
251/// Reference implementation of a `TrieStream` with extension nodes.
252#[derive(Default, Clone)]
253pub struct ReferenceTrieStream {
254	buffer: Vec<u8>
255}
256
257impl TrieStream for ReferenceTrieStream {
258	fn new() -> Self {
259		ReferenceTrieStream {
260			buffer: Vec::new()
261		}
262	}
263
264	fn append_empty_data(&mut self) {
265		self.buffer.push(EMPTY_TRIE);
266	}
267
268	fn append_leaf(&mut self, key: &[u8], value: &[u8]) {
269		self.buffer.extend(fuse_nibbles_node(key, true));
270		value.encode_to(&mut self.buffer);
271	}
272
273	fn begin_branch(
274		&mut self,
275		maybe_key: Option<&[u8]>,
276		maybe_value: Option<&[u8]>,
277		has_children: impl Iterator<Item = bool>,
278	) {
279		self.buffer.extend(&branch_node(maybe_value.is_some(), has_children));
280		if let Some(partial) = maybe_key {
281			// should not happen
282			self.buffer.extend(fuse_nibbles_node(partial, false));
283		}
284		if let Some(value) = maybe_value {
285			value.encode_to(&mut self.buffer);
286		}
287	}
288
289	fn append_extension(&mut self, key: &[u8]) {
290		self.buffer.extend(fuse_nibbles_node(key, false));
291	}
292
293	fn append_substream<H: Hasher>(&mut self, other: Self) {
294		let data = other.out();
295		match data.len() {
296			0..=31 => data.encode_to(&mut self.buffer),
297			_ => H::hash(&data).as_ref().encode_to(&mut self.buffer),
298		}
299	}
300
301	fn out(self) -> Vec<u8> { self.buffer }
302}
303
304/// Reference implementation of a `TrieStream` without extension.
305#[derive(Default, Clone)]
306pub struct ReferenceTrieStreamNoExt {
307	buffer: Vec<u8>
308}
309
310impl TrieStream for ReferenceTrieStreamNoExt {
311	fn new() -> Self {
312		ReferenceTrieStreamNoExt {
313			buffer: Vec::new()
314		}
315	}
316
317	fn append_empty_data(&mut self) {
318		self.buffer.push(EMPTY_TRIE_NO_EXT);
319	}
320
321	fn append_leaf(&mut self, key: &[u8], value: &[u8]) {
322		self.buffer.extend(fuse_nibbles_node_no_extension(key, NodeKindNoExt::Leaf));
323		value.encode_to(&mut self.buffer);
324	}
325
326	fn begin_branch(
327		&mut self,
328		maybe_key: Option<&[u8]>,
329		maybe_value: Option<&[u8]>,
330		has_children: impl Iterator<Item = bool>
331	) {
332		if let Some(partial) = maybe_key {
333			if maybe_value.is_some() {
334				self.buffer.extend(
335					fuse_nibbles_node_no_extension(partial, NodeKindNoExt::BranchWithValue)
336				);
337			} else {
338				self.buffer.extend(
339					fuse_nibbles_node_no_extension(partial, NodeKindNoExt::BranchNoValue)
340				);
341			}
342			let bitmap = branch_node_bit_mask(has_children);
343			self.buffer.extend([bitmap.0, bitmap.1].iter());
344		} else {
345			// should not happen
346			self.buffer.extend(&branch_node(maybe_value.is_some(), has_children));
347		}
348		if let Some(value) = maybe_value {
349			value.encode_to(&mut self.buffer);
350		}
351	}
352
353	fn append_extension(&mut self, _key: &[u8]) {
354		// should not happen
355	}
356
357	fn append_substream<H: Hasher>(&mut self, other: Self) {
358		let data = other.out();
359		match data.len() {
360			0..=31 => data.encode_to(&mut self.buffer),
361			_ => H::hash(&data).as_ref().encode_to(&mut self.buffer),
362		}
363	}
364
365	fn out(self) -> Vec<u8> { self.buffer }
366}
367
/// A node header.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum NodeHeader {
	Null,
	// `true` when the branch carries a value.
	Branch(bool),
	// Payload is the nibble count of the partial key.
	Extension(usize),
	// Payload is the nibble count of the partial key.
	Leaf(usize),
}

/// A node header no extension.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum NodeHeaderNoExt {
	Null,
	// `(has_value, nibble count of partial key)`.
	Branch(bool, usize),
	// Payload is the nibble count of the partial key.
	Leaf(usize),
}
384
385impl Encode for NodeHeader {
386	fn encode_to<T: Output + ?Sized>(&self, output: &mut T) {
387		match self {
388			NodeHeader::Null => output.push_byte(EMPTY_TRIE),
389			NodeHeader::Branch(true) => output.push_byte(BRANCH_NODE_WITH_VALUE),
390			NodeHeader::Branch(false) => output.push_byte(BRANCH_NODE_NO_VALUE),
391			NodeHeader::Leaf(nibble_count) =>
392				output.push_byte(LEAF_NODE_OFFSET + *nibble_count as u8),
393			NodeHeader::Extension(nibble_count) =>
394				output.push_byte(EXTENSION_NODE_OFFSET + *nibble_count as u8),
395		}
396	}
397}
398
/// Build an iterator over the header bytes for a no-extension node:
/// `prefix` (the two high type bits) combined with the nibble-count `size`.
/// Sizes up to 62 fit entirely in the first byte; larger sizes emit
/// `prefix | 63` followed by continuation bytes (the inverse of
/// `decode_size`).
fn size_and_prefix_iterator(size: usize, prefix: u8) -> impl Iterator<Item = u8> {
	// Cap the size to what the encoding can represent.
	let size = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, size);

	let l1 = std::cmp::min(62, size);
	let (first_byte, mut rem) = if size == l1 {
		(once(prefix + l1 as u8), 0)
	} else {
		(once(prefix + 63), size - l1)
	};
	// Continuation bytes: 255 means "add 255 and keep reading"; a final
	// byte `n < 255` contributes `n + 1` (see `decode_size`).
	let next_bytes = move || {
		if rem > 0 {
			if rem < 256 {
				let result = rem - 1;
				rem = 0;
				Some(result as u8)
			} else {
				rem = rem.saturating_sub(255);
				Some(255)
			}
		} else {
			None
		}
	};
	first_byte.chain(::std::iter::from_fn(next_bytes))
}
426
427fn encode_size_and_prefix(size: usize, prefix: u8, out: &mut (impl Output + ?Sized)) {
428	for b in size_and_prefix_iterator(size, prefix) {
429		out.push_byte(b)
430	}
431}
432
/// Decode the nibble count from a no-extension node header, mirroring
/// `size_and_prefix_iterator`. `first` is the header byte; its low six
/// bits hold the size (`255u8 >> 2` is 63, and `>>` binds tighter than
/// `&`, so this masks with 0b0011_1111). The value 63 marks a multi-byte
/// size continued in `input`.
fn decode_size<I: Input>(first: u8, input: &mut I) -> Result<usize, CodecError> {
	let mut result = (first & 255u8 >> 2) as usize;
	if result < 63 {
		return Ok(result);
	}
	result -= 1;
	while result <= NIBBLE_SIZE_BOUND_NO_EXT {
		let n = input.read_byte()? as usize;
		if n < 255 {
			// A terminating byte `n` contributes `n + 1` to the total.
			return Ok(result + n + 1);
		}
		// 255 means: add 255 and keep reading.
		result += 255;
	}
	Err("Size limit reached for a nibble slice".into())
}
448
449impl Encode for NodeHeaderNoExt {
450	fn encode_to<T: Output + ?Sized>(&self, output: &mut T) {
451		match self {
452			NodeHeaderNoExt::Null => output.push_byte(EMPTY_TRIE_NO_EXT),
453			NodeHeaderNoExt::Branch(true, nibble_count)	=>
454				encode_size_and_prefix(*nibble_count, BRANCH_WITH_MASK_NO_EXT, output),
455			NodeHeaderNoExt::Branch(false, nibble_count) =>
456				encode_size_and_prefix(*nibble_count, BRANCH_WITHOUT_MASK_NO_EXT, output),
457			NodeHeaderNoExt::Leaf(nibble_count) =>
458				encode_size_and_prefix(*nibble_count, LEAF_PREFIX_MASK_NO_EXT, output),
459		}
460	}
461}
462
impl Decode for NodeHeader {
	fn decode<I: Input>(input: &mut I) -> Result<Self, CodecError> {
		// The header byte partitions the whole `u8` range — 0 = empty,
		// 254/255 = branch, 1..=127 = leaf, 128..=253 = extension — so the
		// match is exhaustive and needs no error arm.
		Ok(match input.read_byte()? {
			EMPTY_TRIE => NodeHeader::Null,
			BRANCH_NODE_NO_VALUE => NodeHeader::Branch(false),
			BRANCH_NODE_WITH_VALUE => NodeHeader::Branch(true),
			i @ LEAF_NODE_OFFSET ..= LEAF_NODE_LAST =>
				NodeHeader::Leaf((i - LEAF_NODE_OFFSET) as usize),
			i @ EXTENSION_NODE_OFFSET ..= EXTENSION_NODE_LAST =>
				NodeHeader::Extension((i - EXTENSION_NODE_OFFSET) as usize),
		})
	}
}
476
477impl Decode for NodeHeaderNoExt {
478	fn decode<I: Input>(input: &mut I) -> Result<Self, CodecError> {
479		let i = input.read_byte()?;
480		if i == EMPTY_TRIE_NO_EXT {
481			return Ok(NodeHeaderNoExt::Null);
482		}
483		match i & (0b11 << 6) {
484			LEAF_PREFIX_MASK_NO_EXT =>
485				Ok(NodeHeaderNoExt::Leaf(decode_size(i, input)?)),
486			BRANCH_WITHOUT_MASK_NO_EXT =>
487				Ok(NodeHeaderNoExt::Branch(false, decode_size(i, input)?)),
488			BRANCH_WITH_MASK_NO_EXT =>
489				Ok(NodeHeaderNoExt::Branch(true, decode_size(i, input)?)),
490			// do not allow any special encoding
491			_ => Err("Unknown type of node".into()),
492		}
493	}
494}
495
/// Simple reference implementation of a `NodeCodec`.
/// Variant with extension nodes; see `ExtensionLayout`.
#[derive(Default, Clone)]
pub struct ReferenceNodeCodec<H>(PhantomData<H>);

/// Simple reference implementation of a `NodeCodec`.
/// Even if implementation follows initial specification of
/// https://github.com/w3f/polkadot-re-spec/issues/8, this may
/// not follow it in the future, it is mainly the testing codec without extension node.
#[derive(Default, Clone)]
pub struct ReferenceNodeCodecNoExt<H>(PhantomData<H>);
506
507fn partial_to_key(partial: Partial, offset: u8, over: u8) -> Vec<u8> {
508	let number_nibble_encoded = (partial.0).0 as usize;
509	let nibble_count = partial.1.len() * nibble_ops::NIBBLE_PER_BYTE + number_nibble_encoded;
510	assert!(nibble_count < over as usize);
511	let mut output = vec![offset + nibble_count as u8];
512	if number_nibble_encoded > 0 {
513		output.push(nibble_ops::pad_right((partial.0).1));
514	}
515	output.extend_from_slice(&partial.1[..]);
516	output
517}
518
/// Encode a partial key given as a byte iterator, behind a one-byte header
/// carrying `offset` plus `nibble_count` (which must stay below `over`).
fn partial_from_iterator_to_key<I: Iterator<Item = u8>>(
	partial: I,
	nibble_count: usize,
	offset: u8,
	over: u8,
) -> Vec<u8> {
	assert!(nibble_count < over as usize);
	// Header byte followed by the already-packed partial bytes.
	once(offset + nibble_count as u8).chain(partial).collect()
}
531
532fn partial_from_iterator_encode<I: Iterator<Item = u8>>(
533	partial: I,
534	nibble_count: usize,
535	node_kind: NodeKindNoExt,
536) -> Vec<u8> {
537	let nibble_count = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, nibble_count);
538
539	let mut output = Vec::with_capacity(3 + (nibble_count / nibble_ops::NIBBLE_PER_BYTE));
540	match node_kind {
541		NodeKindNoExt::Leaf =>
542			NodeHeaderNoExt::Leaf(nibble_count).encode_to(&mut output),
543		NodeKindNoExt::BranchWithValue =>
544			NodeHeaderNoExt::Branch(true, nibble_count).encode_to(&mut output),
545		NodeKindNoExt::BranchNoValue =>
546			NodeHeaderNoExt::Branch(false, nibble_count).encode_to(&mut output),
547	};
548	output.extend(partial);
549	output
550}
551
552fn partial_encode(partial: Partial, node_kind: NodeKindNoExt) -> Vec<u8> {
553	let number_nibble_encoded = (partial.0).0 as usize;
554	let nibble_count = partial.1.len() * nibble_ops::NIBBLE_PER_BYTE + number_nibble_encoded;
555
556	let nibble_count = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, nibble_count);
557
558	let mut output = Vec::with_capacity(3 + partial.1.len());
559	match node_kind {
560		NodeKindNoExt::Leaf =>
561			NodeHeaderNoExt::Leaf(nibble_count).encode_to(&mut output),
562		NodeKindNoExt::BranchWithValue =>
563			NodeHeaderNoExt::Branch(true, nibble_count).encode_to(&mut output),
564		NodeKindNoExt::BranchNoValue =>
565			NodeHeaderNoExt::Branch(false, nibble_count).encode_to(&mut output),
566	};
567	if number_nibble_encoded > 0 {
568		output.push(nibble_ops::pad_right((partial.0).1));
569	}
570	output.extend_from_slice(&partial.1[..]);
571	output
572}
573
574struct ByteSliceInput<'a> {
575	data: &'a [u8],
576	offset: usize,
577}
578
579impl<'a> ByteSliceInput<'a> {
580	fn new(data: &'a [u8]) -> Self {
581		ByteSliceInput {
582			data,
583			offset: 0,
584		}
585	}
586
587	fn take(&mut self, count: usize) -> Result<Range<usize>, CodecError> {
588		if self.offset + count > self.data.len() {
589			return Err("out of data".into());
590		}
591
592		let range = self.offset..(self.offset + count);
593		self.offset += count;
594		Ok(range)
595	}
596}
597
598impl<'a> Input for ByteSliceInput<'a> {
599	fn remaining_len(&mut self) -> Result<Option<usize>, CodecError> {
600		let remaining = if self.offset <= self.data.len() {
601			Some(self.data.len() - self.offset)
602		} else {
603			None
604		};
605		Ok(remaining)
606	}
607
608	fn read(&mut self, into: &mut [u8]) -> Result<(), CodecError> {
609		let range = self.take(into.len())?;
610		into.copy_from_slice(&self.data[range]);
611		Ok(())
612	}
613
614	fn read_byte(&mut self) -> Result<u8, CodecError> {
615		if self.offset + 1 > self.data.len() {
616			return Err("out of data".into());
617		}
618
619		let byte = self.data[self.offset];
620		self.offset += 1;
621		Ok(byte)
622	}
623}
624
// NOTE: what we'd really like here is:
// `impl<H: Hasher> NodeCodec<H> for RlpNodeCodec<H> where <KeccakHasher as Hasher>::Out: Decodable`
// but due to the current limitations of Rust const evaluation we can't do
// `const HASHED_NULL_NODE: <KeccakHasher as Hasher>::Out = <KeccakHasher as Hasher>::Out( … … )`.
// Perhaps one day soon?
impl<H: Hasher> NodeCodec for ReferenceNodeCodec<H> {
	type Error = CodecError;
	type HashOut = H::Out;

	/// Hash of the encoded empty node; used as the root of an empty trie.
	fn hashed_null_node() -> <H as Hasher>::Out {
		H::hash(<Self as NodeCodec>::empty_node())
	}

	/// Parse `data` into a `NodePlan` holding ranges into `data` (no copies).
	fn decode_plan(data: &[u8]) -> ::std::result::Result<NodePlan, Self::Error> {
		let mut input = ByteSliceInput::new(data);
		match NodeHeader::decode(&mut input)? {
			NodeHeader::Null => Ok(NodePlan::Empty),
			NodeHeader::Branch(has_value) => {
				// Children bitmap follows the header byte directly.
				let bitmap_range = input.take(BITMAP_LENGTH)?;
				let bitmap = Bitmap::decode(&data[bitmap_range])?;

				let value = if has_value {
					// Value is prefixed by a SCALE compact length.
					let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
					Some(input.take(count)?)
				} else {
					None
				};
				let mut children = [
					None, None, None, None, None, None, None, None,
					None, None, None, None, None, None, None, None,
				];
				for i in 0..nibble_ops::NIBBLE_LENGTH {
					if bitmap.value_at(i) {
						let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
						let range = input.take(count)?;
						// A child of exactly hash length is a hash reference;
						// anything shorter is an inline node.
						children[i] = Some(if count == H::LENGTH {
							NodeHandlePlan::Hash(range)
						} else {
							NodeHandlePlan::Inline(range)
						});
					}
				}
				Ok(NodePlan::Branch { value, children })
			}
			NodeHeader::Extension(nibble_count) => {
				// Partial key: nibbles packed two per byte, rounded up.
				let partial = input.take(
					(nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE
				)?;
				let partial_padding = nibble_ops::number_padding(nibble_count);
				let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
				let range = input.take(count)?;
				let child = if count == H::LENGTH {
					NodeHandlePlan::Hash(range)
				} else {
					NodeHandlePlan::Inline(range)
				};
				Ok(NodePlan::Extension {
					partial: NibbleSlicePlan::new(partial, partial_padding),
					child
				})
			}
			NodeHeader::Leaf(nibble_count) => {
				let partial = input.take(
					(nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE
				)?;
				let partial_padding = nibble_ops::number_padding(nibble_count);
				let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
				let value = input.take(count)?;
				Ok(NodePlan::Leaf {
					partial: NibbleSlicePlan::new(partial, partial_padding),
					value,
				})
			}
		}
	}

	fn is_empty_node(data: &[u8]) -> bool {
		data == <Self as NodeCodec>::empty_node()
	}

	fn empty_node() -> &'static[u8] {
		&[EMPTY_TRIE]
	}

	/// Encode a leaf: header + packed partial, then the length-prefixed value.
	fn leaf_node(partial: Partial, value: &[u8]) -> Vec<u8> {
		let mut output = partial_to_key(partial, LEAF_NODE_OFFSET, LEAF_NODE_OVER);
		value.encode_to(&mut output);
		output
	}

	/// Encode an extension: header + packed partial, then the child
	/// reference (hash or inline), length-prefixed.
	fn extension_node(
		partial: impl Iterator<Item = u8>,
		number_nibble: usize,
		child: ChildReference<Self::HashOut>,
	) -> Vec<u8> {
		let mut output = partial_from_iterator_to_key(
			partial,
			number_nibble,
			EXTENSION_NODE_OFFSET,
			EXTENSION_NODE_OVER,
		);
		match child {
			ChildReference::Hash(h) => h.as_ref().encode_to(&mut output),
			ChildReference::Inline(inline_data, len) =>
				(&AsRef::<[u8]>::as_ref(&inline_data)[..len]).encode_to(&mut output),
		};
		output
	}

	/// Encode a branch: header + bitmap (patched in at the end), the
	/// optional value, then each present child, all length-prefixed.
	fn branch_node(
		children: impl Iterator<Item = impl Borrow<Option<ChildReference<Self::HashOut>>>>,
		maybe_value: Option<&[u8]>,
	) -> Vec<u8> {
		// Reserve header + bitmap space up front; they can only be filled
		// once the lazy `has_children` iterator below has been consumed.
		let mut output = vec![0; BITMAP_LENGTH + 1];
		let mut prefix: [u8; 3] = [0; 3];
		let have_value = if let Some(value) = maybe_value {
			value.encode_to(&mut output);
			true
		} else {
			false
		};
		// Consuming this iterator also appends each child to `output`.
		let has_children = children.map(|maybe_child| match maybe_child.borrow() {
			Some(ChildReference::Hash(h)) => {
				h.as_ref().encode_to(&mut output);
				true
			}
			&Some(ChildReference::Inline(inline_data, len)) => {
				inline_data.as_ref()[..len].encode_to(&mut output);
				true
			}
			None => false,
		});
		branch_node_buffered(have_value, has_children, prefix.as_mut());
		output[0..BITMAP_LENGTH + 1].copy_from_slice(prefix.as_ref());
		output
	}

	/// Not used by the extension-node layout.
	fn branch_node_nibbled(
		_partial:	impl Iterator<Item = u8>,
		_number_nibble: usize,
		_children: impl Iterator<Item = impl Borrow<Option<ChildReference<Self::HashOut>>>>,
		_maybe_value: Option<&[u8]>) -> Vec<u8> {
		unreachable!()
	}

}
771
impl<H: Hasher> NodeCodec for ReferenceNodeCodecNoExt<H> {
	type Error = CodecError;
	type HashOut = <H as Hasher>::Out;

	/// Hash of the encoded empty node; used as the root of an empty trie.
	fn hashed_null_node() -> <H as Hasher>::Out {
		H::hash(<Self as NodeCodec>::empty_node())
	}

	/// Parse `data` into a `NodePlan` holding ranges into `data` (no copies).
	fn decode_plan(data: &[u8]) -> ::std::result::Result<NodePlan, Self::Error> {
		let mut input = ByteSliceInput::new(data);
		match NodeHeaderNoExt::decode(&mut input)? {
			NodeHeaderNoExt::Null => Ok(NodePlan::Empty),
			NodeHeaderNoExt::Branch(has_value, nibble_count) => {
				let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0;
				// check that the padding is valid (if any)
				if padding && nibble_ops::pad_left(data[input.offset]) != 0 {
					return Err(CodecError::from("Bad format"));
				}
				// Partial key: nibbles packed two per byte, rounded up.
				let partial = input.take(
					(nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE
				)?;
				let partial_padding = nibble_ops::number_padding(nibble_count);
				let bitmap_range = input.take(BITMAP_LENGTH)?;
				let bitmap = Bitmap::decode(&data[bitmap_range])?;
				let value = if has_value {
					// Value is prefixed by a SCALE compact length.
					let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
					Some(input.take(count)?)
				} else {
					None
				};
				let mut children = [
					None, None, None, None, None, None, None, None,
					None, None, None, None, None, None, None, None,
				];
				for i in 0..nibble_ops::NIBBLE_LENGTH {
					if bitmap.value_at(i) {
						let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
						let range = input.take(count)?;
						// A child of exactly hash length is a hash reference;
						// anything shorter is an inline node.
						children[i] = Some(if count == H::LENGTH {
							NodeHandlePlan::Hash(range)
						} else {
							NodeHandlePlan::Inline(range)
						});
					}
				}
				Ok(NodePlan::NibbledBranch {
					partial: NibbleSlicePlan::new(partial, partial_padding),
					value,
					children,
				})
			}
			NodeHeaderNoExt::Leaf(nibble_count) => {
				let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0;
				// check that the padding is valid (if any)
				if padding && nibble_ops::pad_left(data[input.offset]) != 0 {
					return Err(CodecError::from("Bad format"));
				}
				let partial = input.take(
					(nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE
				)?;
				let partial_padding = nibble_ops::number_padding(nibble_count);
				let count = <Compact<u32>>::decode(&mut input)?.0 as usize;
				let value = input.take(count)?;
				Ok(NodePlan::Leaf {
					partial: NibbleSlicePlan::new(partial, partial_padding),
					value,
				})
			}
		}
	}

	fn is_empty_node(data: &[u8]) -> bool {
		data == <Self as NodeCodec>::empty_node()
	}

	fn empty_node() -> &'static [u8] {
		&[EMPTY_TRIE_NO_EXT]
	}

	/// Encode a leaf: header + packed partial, then the length-prefixed value.
	fn leaf_node(partial: Partial, value: &[u8]) -> Vec<u8> {
		let mut output = partial_encode(partial, NodeKindNoExt::Leaf);
		value.encode_to(&mut output);
		output
	}

	/// Extension nodes do not exist in this layout.
	fn extension_node(
		_partial: impl Iterator<Item = u8>,
		_nbnibble: usize,
		_child: ChildReference<<H as Hasher>::Out>,
	) -> Vec<u8> {
		unreachable!()
	}

	/// Plain (un-nibbled) branches do not exist in this layout.
	fn branch_node(
		_children: impl Iterator<Item = impl Borrow<Option<ChildReference<<H as Hasher>::Out>>>>,
		_maybe_value: Option<&[u8]>,
	) -> Vec<u8> {
		unreachable!()
	}

	/// Encode a nibbled branch: header with partial, children bitmap
	/// (patched in after the children are written), the optional value,
	/// then each present child, all length-prefixed.
	fn branch_node_nibbled(
		partial: impl Iterator<Item = u8>,
		number_nibble: usize,
		children: impl Iterator<Item = impl Borrow<Option<ChildReference<Self::HashOut>>>>,
		maybe_value: Option<&[u8]>,
	) -> Vec<u8> {
		let mut output = if maybe_value.is_some() {
			partial_from_iterator_encode(
				partial,
				number_nibble,
				NodeKindNoExt::BranchWithValue,
			)
		} else {
			partial_from_iterator_encode(
				partial,
				number_nibble,
				NodeKindNoExt::BranchNoValue,
			)
		};
		// Reserve the bitmap slot; it is filled once the children iterator
		// below has been consumed.
		let bitmap_index = output.len();
		let mut bitmap: [u8; BITMAP_LENGTH] = [0; BITMAP_LENGTH];
		(0..BITMAP_LENGTH).for_each(|_| output.push(0));
		if let Some(value) = maybe_value {
			value.encode_to(&mut output);
		};
		// Consuming the mapped iterator also appends each child to `output`.
		Bitmap::encode(children.map(|maybe_child| match maybe_child.borrow() {
			Some(ChildReference::Hash(h)) => {
				h.as_ref().encode_to(&mut output);
				true
			}
			&Some(ChildReference::Inline(inline_data, len)) => {
				inline_data.as_ref()[..len].encode_to(&mut output);
				true
			}
			None => false,
		}), bitmap.as_mut());
		output[bitmap_index..bitmap_index + BITMAP_LENGTH]
			.copy_from_slice(&bitmap.as_ref()[..BITMAP_LENGTH]);
		output
	}

}
914
/// Compare trie builder and in memory trie.
///
/// Builds the same data twice — via `trie_visit` into `hashdb` and via
/// `TrieDBMut` inserts into `memdb` — then asserts both roots and both
/// database contents are equal. On a root mismatch, both tries are printed
/// before the assertion fires.
pub fn compare_implementations<X : tetsy_hash_db::HashDB<KeccakHasher, DBValue> + Eq> (
	data: Vec<(Vec<u8>, Vec<u8>)>,
	mut memdb: X,
	mut hashdb: X,
) {
	let root_new = {
		let mut cb = TrieBuilder::new(&mut hashdb);
		trie_visit::<ExtensionLayout, _, _, _, _>(data.clone().into_iter(), &mut cb);
		cb.root.unwrap_or(Default::default())
	};
	let root = {
		let mut root = Default::default();
		let mut t = RefTrieDBMut::new(&mut memdb, &mut root);
		for i in 0..data.len() {
			t.insert(&data[i].0[..], &data[i].1[..]).unwrap();
		}
		t.commit();
		*t.root()
	};
	if root_new != root {
		// Dump both tries to stdout to aid debugging the mismatch.
		{
			let db : &dyn tetsy_hash_db::HashDB<_, _> = &hashdb;
			let t = RefTrieDB::new(&db, &root_new).unwrap();
			println!("{:?}", t);
			for a in t.iter().unwrap() {
				println!("a:{:x?}", a);
			}
		}
		{
			let db : &dyn tetsy_hash_db::HashDB<_, _> = &memdb;
			let t = RefTrieDB::new(&db, &root).unwrap();
			println!("{:?}", t);
			for a in t.iter().unwrap() {
				println!("a:{:x?}", a);
			}
		}
	}

	assert_eq!(root, root_new);
	// compare db content for key fuzzing
	assert!(memdb == hashdb);
}
958
/// Compare trie builder and trie root implementations.
///
/// Asserts that `trie_visit` with a `TrieRoot` callback computes the same
/// root as inserting the data through `TrieDBMut`.
pub fn compare_root(
	data: Vec<(Vec<u8>, Vec<u8>)>,
	mut memdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
) {
	let root_new = {
		let mut cb = TrieRoot::<KeccakHasher, _>::default();
		trie_visit::<ExtensionLayout, _, _, _, _>(data.clone().into_iter(), &mut cb);
		cb.root.unwrap_or(Default::default())
	};
	let root = {
		let mut root = Default::default();
		let mut t = RefTrieDBMut::new(&mut memdb, &mut root);
		for i in 0..data.len() {
			t.insert(&data[i].0[..], &data[i].1[..]).unwrap();
		}
		*t.root()
	};

	assert_eq!(root, root_new);
}
980
981/// Compare trie builder and trie root unhashed implementations.
982pub fn compare_unhashed(
983	data: Vec<(Vec<u8>, Vec<u8>)>,
984) {
985	let root_new = {
986		let mut cb = tetsy_trie_db::TrieRootUnhashed::<KeccakHasher>::default();
987		trie_visit::<ExtensionLayout, _, _, _, _>(data.clone().into_iter(), &mut cb);
988		cb.root.unwrap_or(Default::default())
989	};
990	let root = tetsy_reference_trie_root_unhashed(data);
991
992	assert_eq!(root, root_new);
993}
994
995/// Compare trie builder and trie root unhashed implementations.
996/// This uses the variant without extension nodes.
997pub fn compare_unhashed_no_extension(
998	data: Vec<(Vec<u8>, Vec<u8>)>,
999) {
1000	let root_new = {
1001		let mut cb = tetsy_trie_db::TrieRootUnhashed::<KeccakHasher>::default();
1002		trie_visit::<NoExtensionLayout, _, _, _, _>(data.clone().into_iter(), &mut cb);
1003		cb.root.unwrap_or(Default::default())
1004	};
1005	let root = tetsy_reference_trie_root_unhashed_no_extension(data);
1006
1007	assert_eq!(root, root_new);
1008}
1009
1010/// Trie builder root calculation utility.
1011pub fn calc_root<I, A, B>(
1012	data: I,
1013) -> <KeccakHasher as Hasher>::Out
1014	where
1015		I: IntoIterator<Item = (A, B)>,
1016		A: AsRef<[u8]> + Ord + fmt::Debug,
1017		B: AsRef<[u8]> + fmt::Debug,
1018{
1019	let mut cb = TrieRoot::<KeccakHasher, _>::default();
1020	trie_visit::<ExtensionLayout, _, _, _, _>(data.into_iter(), &mut cb);
1021	cb.root.unwrap_or(Default::default())
1022}
1023
1024/// Trie builder root calculation utility.
1025/// This uses the variant without extension nodes.
1026pub fn calc_root_no_extension<I, A, B>(
1027	data: I,
1028) -> <KeccakHasher as Hasher>::Out
1029	where
1030		I: IntoIterator<Item = (A, B)>,
1031		A: AsRef<[u8]> + Ord + fmt::Debug,
1032		B: AsRef<[u8]> + fmt::Debug,
1033{
1034	let mut cb = TrieRoot::<KeccakHasher, _>::default();
1035	tetsy_trie_db::trie_visit::<NoExtensionLayout, _, _, _, _>(data.into_iter(), &mut cb);
1036	cb.root.unwrap_or(Default::default())
1037}
1038
1039/// Trie builder trie building utility.
1040pub fn calc_root_build<I, A, B, DB>(
1041	data: I,
1042	hashdb: &mut DB
1043) -> <KeccakHasher as Hasher>::Out
1044	where
1045		I: IntoIterator<Item = (A, B)>,
1046		A: AsRef<[u8]> + Ord + fmt::Debug,
1047		B: AsRef<[u8]> + fmt::Debug,
1048		DB: tetsy_hash_db::HashDB<KeccakHasher, DBValue>
1049{
1050	let mut cb = TrieBuilder::new(hashdb);
1051	trie_visit::<ExtensionLayout, _, _, _, _>(data.into_iter(), &mut cb);
1052	cb.root.unwrap_or(Default::default())
1053}
1054
1055/// Trie builder trie building utility.
1056/// This uses the variant without extension nodes.
1057pub fn calc_root_build_no_extension<I, A, B, DB>(
1058	data: I,
1059	hashdb: &mut DB,
1060) -> <KeccakHasher as Hasher>::Out
1061	where
1062		I: IntoIterator<Item = (A, B)>,
1063		A: AsRef<[u8]> + Ord + fmt::Debug,
1064		B: AsRef<[u8]> + fmt::Debug,
1065		DB: tetsy_hash_db::HashDB<KeccakHasher, DBValue>
1066{
1067	let mut cb = TrieBuilder::new(hashdb);
1068	tetsy_trie_db::trie_visit::<NoExtensionLayout, _, _, _, _>(data.into_iter(), &mut cb);
1069	cb.root.unwrap_or(Default::default())
1070}
1071
1072/// Compare trie builder and in memory trie.
1073/// This uses the variant without extension nodes.
1074pub fn compare_implementations_no_extension(
1075	data: Vec<(Vec<u8>, Vec<u8>)>,
1076	mut memdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
1077	mut hashdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
1078) {
1079	let root_new = {
1080		let mut cb = TrieBuilder::new(&mut hashdb);
1081		trie_visit::<NoExtensionLayout, _, _, _, _>(data.clone().into_iter(), &mut cb);
1082		cb.root.unwrap_or(Default::default())
1083	};
1084	let root = {
1085		let mut root = Default::default();
1086		let mut t = RefTrieDBMutNoExt::new(&mut memdb, &mut root);
1087		for i in 0..data.len() {
1088			t.insert(&data[i].0[..], &data[i].1[..]).unwrap();
1089		}
1090		*t.root()
1091	};
1092
1093	if root != root_new {
1094		{
1095			let db : &dyn tetsy_hash_db::HashDB<_, _> = &memdb;
1096			let t = RefTrieDBNoExt::new(&db, &root).unwrap();
1097			println!("{:?}", t);
1098			for a in t.iter().unwrap() {
1099				println!("a:{:?}", a);
1100			}
1101		}
1102		{
1103			let db : &dyn tetsy_hash_db::HashDB<_, _> = &hashdb;
1104			let t = RefTrieDBNoExt::new(&db, &root_new).unwrap();
1105			println!("{:?}", t);
1106			for a in t.iter().unwrap() {
1107				println!("a:{:?}", a);
1108			}
1109		}
1110	}
1111
1112	assert_eq!(root, root_new);
1113}
1114
1115/// `compare_implementations_no_extension` for unordered input (tetsy_trie_root does
1116/// ordering before running when trie_build expect correct ordering).
1117pub fn compare_implementations_no_extension_unordered(
1118	data: Vec<(Vec<u8>, Vec<u8>)>,
1119	mut memdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
1120	mut hashdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
1121) {
1122	let mut b_map = std::collections::btree_map::BTreeMap::new();
1123	let root = {
1124		let mut root = Default::default();
1125		let mut t = RefTrieDBMutNoExt::new(&mut memdb, &mut root);
1126		for i in 0..data.len() {
1127			t.insert(&data[i].0[..], &data[i].1[..]).unwrap();
1128			b_map.insert(data[i].0.clone(), data[i].1.clone());
1129		}
1130		*t.root()
1131	};
1132	let root_new = {
1133		let mut cb = TrieBuilder::new(&mut hashdb);
1134		trie_visit::<NoExtensionLayout, _, _, _, _>(b_map.into_iter(), &mut cb);
1135		cb.root.unwrap_or(Default::default())
1136	};
1137
1138	if root != root_new {
1139		{
1140			let db : &dyn tetsy_hash_db::HashDB<_, _> = &memdb;
1141			let t = RefTrieDBNoExt::new(&db, &root).unwrap();
1142			println!("{:?}", t);
1143			for a in t.iter().unwrap() {
1144				println!("a:{:?}", a);
1145			}
1146		}
1147		{
1148			let db : &dyn tetsy_hash_db::HashDB<_, _> = &hashdb;
1149			let t = RefTrieDBNoExt::new(&db, &root_new).unwrap();
1150			println!("{:?}", t);
1151			for a in t.iter().unwrap() {
1152				println!("a:{:?}", a);
1153			}
1154		}
1155	}
1156
1157	assert_eq!(root, root_new);
1158}
1159
1160/// Testing utility that uses some periodic removal over
1161/// its input test data.
1162pub fn compare_no_extension_insert_remove(
1163	data: Vec<(bool, Vec<u8>, Vec<u8>)>,
1164	mut memdb: impl tetsy_hash_db::HashDB<KeccakHasher, DBValue>,
1165) {
1166	let mut data2 = std::collections::BTreeMap::new();
1167	let mut root = Default::default();
1168	let mut a = 0;
1169	{
1170		let mut t = RefTrieDBMutNoExt::new(&mut memdb, &mut root);
1171		t.commit();
1172	}
1173	while a < data.len() {
1174		// new triemut every 3 element
1175		root = {
1176			let mut t = RefTrieDBMutNoExt::from_existing(&mut memdb, &mut root).unwrap();
1177			for _ in 0..3 {
1178				if data[a].0 {
1179					// remove
1180					t.remove(&data[a].1[..]).unwrap();
1181					data2.remove(&data[a].1[..]);
1182				} else {
1183					// add
1184					t.insert(&data[a].1[..], &data[a].2[..]).unwrap();
1185					data2.insert(&data[a].1[..], &data[a].2[..]);
1186				}
1187
1188				a += 1;
1189				if a == data.len() {
1190					break;
1191				}
1192			}
1193			t.commit();
1194			*t.root()
1195		};
1196	}
1197	let mut t = RefTrieDBMutNoExt::from_existing(&mut memdb, &mut root).unwrap();
1198	// we are testing the RefTrie code here so we do not sort or check uniqueness
1199	// before.
1200	assert_eq!(*t.root(), calc_root_no_extension(data2));
1201}
1202
#[cfg(test)]
mod tests {
	use super::*;
	use tetsy_trie_db::node::Node;

	#[test]
	fn test_encoding_simple_trie() {
		let prefixes = [
			LEAF_PREFIX_MASK_NO_EXT,
			BRANCH_WITHOUT_MASK_NO_EXT,
			BRANCH_WITH_MASK_NO_EXT,
		];
		for prefix in prefixes.iter() {
			// Exercise small sizes plus the values around the encoding bound.
			let sizes = (0..1000)
				.chain(NIBBLE_SIZE_BOUND_NO_EXT - 2..NIBBLE_SIZE_BOUND_NO_EXT + 2);
			for size in sizes {
				let mut encoded = Vec::new();
				encode_size_and_prefix(size, *prefix, &mut encoded);
				let input = &mut &encoded[..];
				let head = input.read_byte().unwrap();
				// The two high bits of the first byte carry the prefix.
				assert_eq!(head & (0b11 << 6), *prefix);
				let decoded = decode_size(head, input);
				// Sizes are capped at the bound on encode.
				assert_eq!(decoded, Ok(std::cmp::min(size, NIBBLE_SIZE_BOUND_NO_EXT)));
			}
		}
	}

	#[test]
	fn too_big_nibble_length() {
		// + 1 for 0 added byte of nibble encode
		let input = vec![0u8; (NIBBLE_SIZE_BOUND_NO_EXT as usize + 1) / 2 + 1];
		let encoded = <ReferenceNodeCodecNoExt<KeccakHasher> as NodeCodec>
		::leaf_node(((0, 0), &input), &[1]);
		let decoded = <ReferenceNodeCodecNoExt<KeccakHasher> as NodeCodec>
		::decode(&encoded).unwrap();
		// An over-long partial must still decode to a leaf node.
		let partial = match decoded {
			Node::Leaf(slice, _) => Some(slice),
			_ => None,
		};
		assert!(partial.is_some());
	}

	#[test]
	fn size_encode_limit_values() {
		// Sizes around the single-byte limit (63) and each length-extension step.
		let sizes = [0, 1, 62, 63, 64, 317, 318, 319, 572, 573, 574];
		let expected: [&[u8]; 11] = [
			&[0],
			&[1],
			&[0x3e],
			&[0x3f, 0],
			&[0x3f, 1],
			&[0x3f, 0xfe],
			&[0x3f, 0xff, 0],
			&[0x3f, 0xff, 1],
			&[0x3f, 0xff, 0xfe],
			&[0x3f, 0xff, 0xff, 0],
			&[0x3f, 0xff, 0xff, 1],
		];
		for (size, enc) in sizes.iter().zip(expected.iter()) {
			let mut buf = Vec::new();
			encode_size_and_prefix(*size, 0, &mut buf);
			assert_eq!(buf, *enc);
			// Round-trip: first byte carries the prefix/size head, rest is extension.
			let decoded = decode_size(enc[0], &mut &enc[1..]);
			assert_eq!(decoded, Ok(*size));
		}
	}
}