bao/
lib.rs

1//! [Repo](https://github.com/oconnor663/bao) —
2//! [Crate](https://crates.io/crates/bao) —
3//! [Spec](https://github.com/oconnor663/bao/blob/master/docs/spec.md)
4//!
5//! Bao is an implementation of BLAKE3 verified streaming. For more about how
6//! verified streaming works and what the Bao format looks like, see the
7//! [project README](https://github.com/oconnor663/bao) and the [full
8//! specification](https://github.com/oconnor663/bao/blob/master/docs/spec.md).
9//!
10//! # Example
11//!
12//! ```
13//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
14//! use std::io::prelude::*;
15//!
16//! // Encode some example bytes.
17//! let input = b"some input";
18//! let (encoded, hash) = bao::encode::encode(input);
19//!
20//! // Decode them with one of the all-at-once functions.
21//! let decoded_at_once = bao::decode::decode(&encoded, &hash)?;
22//!
23//! // Also decode them incrementally.
24//! let mut decoded_incrementally = Vec::new();
25//! let mut decoder = bao::decode::Decoder::new(&*encoded, &hash);
26//! decoder.read_to_end(&mut decoded_incrementally)?;
27//!
28//! // Assert that we got the same results both times.
29//! assert_eq!(decoded_at_once, decoded_incrementally);
30//!
31//! // Flipping a bit in encoding will cause a decoding error.
32//! let mut bad_encoded = encoded.clone();
33//! let last_index = bad_encoded.len() - 1;
34//! bad_encoded[last_index] ^= 1;
35//! let err = bao::decode::decode(&bad_encoded, &hash).unwrap_err();
36//! assert_eq!(std::io::ErrorKind::InvalidData, err.kind());
37//! # Ok(())
38//! # }
39//! ```
40
41#![forbid(unsafe_code)]
42
43pub mod decode;
44pub mod encode;
45
46pub use blake3::Hash;
47
48use std::mem;
49
/// The size of a `Hash`, 32 bytes.
pub const HASH_SIZE: usize = 32;
// A parent node in the encoding is two concatenated child hashes.
pub(crate) const PARENT_SIZE: usize = 2 * HASH_SIZE;
// The encoding's length header: a u64, serialized little-endian (see
// `encode_len`/`decode_len` below).
pub(crate) const HEADER_SIZE: usize = 8;
// The leaf size of the tree, 1 KiB chunks.
pub(crate) const CHUNK_SIZE: usize = 1024;
pub(crate) const MAX_DEPTH: usize = 54; // 2^54 * CHUNK_SIZE = 2^64

/// An array of `HASH_SIZE` bytes. This will be a wrapper type in a future version.
// NOTE: despite the doc comment above, this is PARENT_SIZE (2 * HASH_SIZE)
// bytes — one concatenated pair of child hashes, not a single hash.
pub(crate) type ParentNode = [u8; 2 * HASH_SIZE];
59
60pub(crate) fn encode_len(len: u64) -> [u8; HEADER_SIZE] {
61    debug_assert_eq!(mem::size_of_val(&len), HEADER_SIZE);
62    len.to_le_bytes()
63}
64
65pub(crate) fn decode_len(bytes: &[u8; HEADER_SIZE]) -> u64 {
66    u64::from_le_bytes(*bytes)
67}
68
// The root node is hashed differently from interior nodes: BLAKE3 finalizes
// the root with a dedicated ROOT domain flag. That domain separation means a
// root hash can never collide with an interior chaining value, or with the
// root hash of a tree over different input. (An earlier version of this
// comment described the BLAKE2-era design — a length suffix plus the BLAKE2
// final-node flag — which no longer applies now that hashing is BLAKE3; see
// the `blake3` re-export above.)
#[derive(Clone, Copy, Debug)]
pub(crate) enum Finalization {
    NotRoot,
    Root,
}

impl Finalization {
    /// Returns true only for the `Root` variant.
    fn is_root(self) -> bool {
        matches!(self, Self::Root)
    }
}
87
// Internal constants exposed for the crate's benchmark suite. Hidden from
// rustdoc because this module is not part of the supported public API.
#[doc(hidden)]
pub mod benchmarks {
    // Re-declared as a new `pub` const (rather than `pub use`) because the
    // crate-level CHUNK_SIZE is pub(crate) and cannot be re-exported publicly.
    pub const CHUNK_SIZE: usize = super::CHUNK_SIZE;
}
92
#[cfg(test)]
pub(crate) mod test {
    use super::*;

    // Input lengths that exercise the interesting tree shapes: the empty
    // input, tiny sub-chunk inputs, and the boundary on either side of
    // several chunk-count multiples (1, 2, 3, 4, 8, and 16 chunks).
    #[rustfmt::skip]
    pub const TEST_CASES: &[usize] = &[
        // shorter than one chunk
        0, 1, 10,
        // around the 1-chunk boundary
        CHUNK_SIZE - 1, CHUNK_SIZE, CHUNK_SIZE + 1,
        // around the 2-chunk boundary
        2 * CHUNK_SIZE - 1, 2 * CHUNK_SIZE, 2 * CHUNK_SIZE + 1,
        // around the 3-chunk boundary
        3 * CHUNK_SIZE - 1, 3 * CHUNK_SIZE, 3 * CHUNK_SIZE + 1,
        // around the 4-chunk boundary
        4 * CHUNK_SIZE - 1, 4 * CHUNK_SIZE, 4 * CHUNK_SIZE + 1,
        // around the 8-chunk boundary
        8 * CHUNK_SIZE - 1, 8 * CHUNK_SIZE, 8 * CHUNK_SIZE + 1,
        // around the 16-chunk boundary
        16 * CHUNK_SIZE - 1, 16 * CHUNK_SIZE, 16 * CHUNK_SIZE + 1,
    ];
}
121}