sapio_bitcoin/util/
bip158.rs

1// Rust Bitcoin Library
2// Written in 2019 by
3//   The rust-bitcoin developers
4//
5// To the extent possible under law, the author(s) have dedicated all
6// copyright and related and neighboring rights to this software to
7// the public domain worldwide. This software is distributed without
8// any warranty.
9//
10// You should have received a copy of the CC0 Public Domain Dedication
11// along with this software.
12// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
13//
14
15// This module was largely copied from https://github.com/rust-bitcoin/murmel/blob/master/src/blockfilter.rs
16// on 11. June 2019 which is licensed under Apache, that file specifically
17// was written entirely by Tamas Blummer, who is re-licensing its contents here as CC0.
18
19//! BIP158 Compact Block Filters for light clients.
20//!
21//! This module implements a structure for compact filters on block data, for
22//! use in the BIP 157 light client protocol. The filter construction proposed
23//! is an alternative to Bloom filters, as used in BIP 37, that minimizes filter
24//! size by using Golomb-Rice coding for compression.
25//!
26//! ## Example
27//!
28//! ```ignore
29//! fn get_script_for_coin(coin: &OutPoint) -> Result<Script, BlockFilterError> {
30//!   // get utxo ...
31//! }
32//!
33//! // create a block filter for a block (server side)
34//! let filter = BlockFilter::new_script_filter(&block, get_script_for_coin)?;
35//!
36//! // or create a filter from known raw data
37//! let filter = BlockFilter::new(content);
38//!
39//! // read and evaluate a filter
40//!
41//! let query: Iterator<Item=Script> = // .. some scripts you care about
42//! if filter.match_any(&block_hash, &mut query.map(|s| s.as_bytes())) {
43//!   // get this block
44//! }
45//!  ```
46//!
47
48use prelude::*;
49
50use io::{self as io, Cursor};
51use core::fmt::{self, Display, Formatter};
52use core::cmp::{self, Ordering};
53
54use hashes::{Hash, siphash24};
55use hash_types::{BlockHash, FilterHash, FilterHeader};
56
57use blockdata::block::Block;
58use blockdata::script::Script;
59use blockdata::transaction::OutPoint;
60use consensus::{Decodable, Encodable};
61use consensus::encode::VarInt;
62use util::endian;
63
/// Golomb encoding parameter as in BIP-158, see also https://gist.github.com/sipa/576d5f09c3b86c3b1b75598d799fc845
const P: u8 = 19;
/// BIP-158 false-positive rate parameter: element hashes are mapped into
/// the range [0, n * M) before Golomb-Rice coding.
const M: u64 = 784931;
67
/// Errors that can occur while building or querying a block filter.
#[derive(Debug)]
pub enum Error {
    /// The UTXO spent by this outpoint could not be resolved, so the
    /// script filter (which covers spent scripts) cannot be computed.
    UtxoMissing(OutPoint),
    /// An I/O error occurred while reading or writing the binary
    /// serialization of the filter.
    Io(io::Error),
}
76
// `std::error::Error` only exists with the `std` feature; the default
// trait methods suffice because `Display` and `Debug` are implemented below.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl ::std::error::Error for Error {}
80
81impl Display for Error {
82    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
83        match *self {
84            Error::UtxoMissing(ref coin) => write!(f, "unresolved UTXO {}", coin),
85            Error::Io(ref io) => write!(f, "{}", io)
86        }
87    }
88}
89
90impl From<io::Error> for Error {
91    fn from(io: io::Error) -> Self {
92        Error::Io(io)
93    }
94}
95
96
/// a computed or read block filter
///
/// Holds only the raw Golomb-Rice coded bytes; the siphash keys used for
/// matching are re-derived from the block hash at query time.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BlockFilter {
    /// Golomb encoded filter
    pub content: Vec<u8>
}
103
104impl FilterHash {
105    /// compute the filter header from a filter hash and previous filter header
106    pub fn filter_header(&self, previous_filter_header: &FilterHeader) -> FilterHeader {
107        let mut header_data = [0u8; 64];
108        header_data[0..32].copy_from_slice(&self[..]);
109        header_data[32..64].copy_from_slice(&previous_filter_header[..]);
110        FilterHeader::hash(&header_data)
111    }
112}
113
114impl BlockFilter {
115    /// compute this filter's id in a chain of filters
116    pub fn filter_header(&self, previous_filter_header: &FilterHeader) -> FilterHeader {
117        let filter_hash = FilterHash::hash(self.content.as_slice());
118        filter_hash.filter_header(previous_filter_header)
119    }
120
121    /// create a new filter from pre-computed data
122    pub fn new (content: &[u8]) -> BlockFilter {
123        BlockFilter { content: content.to_vec() }
124    }
125
126    /// Compute a SCRIPT_FILTER that contains spent and output scripts
127    pub fn new_script_filter<M>(block: &Block, script_for_coin: M) -> Result<BlockFilter, Error>
128        where M: Fn(&OutPoint) -> Result<Script, Error> {
129        let mut out = Vec::new();
130        {
131            let mut writer = BlockFilterWriter::new(&mut out, block);
132            writer.add_output_scripts();
133            writer.add_input_scripts(script_for_coin)?;
134            writer.finish()?;
135        }
136        Ok(BlockFilter { content: out })
137    }
138
139    /// match any query pattern
140    pub fn match_any(&self, block_hash: &BlockHash, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
141        let filter_reader = BlockFilterReader::new(block_hash);
142        filter_reader.match_any(&mut Cursor::new(self.content.as_slice()), query)
143    }
144
145    /// match all query pattern
146    pub fn match_all(&self, block_hash: &BlockHash, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
147        let filter_reader = BlockFilterReader::new(block_hash);
148        filter_reader.match_all(&mut Cursor::new(self.content.as_slice()), query)
149    }
150}
151
/// Compiles and writes a block filter
///
/// Wraps a [`GCSFilterWriter`] keyed with the block's own hash, as BIP-158
/// requires.
pub struct BlockFilterWriter<'a> {
    // block whose scripts are being added to the filter
    block: &'a Block,
    // underlying Golomb-coded-set writer
    writer: GCSFilterWriter<'a>,
}
157
158impl<'a> BlockFilterWriter<'a> {
159    /// Create a block filter writer
160    pub fn new(writer: &'a mut dyn io::Write, block: &'a Block) -> BlockFilterWriter<'a> {
161        let block_hash_as_int = block.block_hash().into_inner();
162        let k0 = endian::slice_to_u64_le(&block_hash_as_int[0..8]);
163        let k1 = endian::slice_to_u64_le(&block_hash_as_int[8..16]);
164        let writer = GCSFilterWriter::new(writer, k0, k1, M, P);
165        BlockFilterWriter { block, writer }
166    }
167
168    /// Add output scripts of the block - excluding OP_RETURN scripts
169    pub fn add_output_scripts(&mut self) {
170        for transaction in &self.block.txdata {
171            for output in &transaction.output {
172                if !output.script_pubkey.is_op_return() {
173                    self.add_element(output.script_pubkey.as_bytes());
174                }
175            }
176        }
177    }
178
179    /// Add consumed output scripts of a block to filter
180    pub fn add_input_scripts<M>(&mut self, script_for_coin: M) -> Result<(), Error>
181        where M: Fn(&OutPoint) -> Result<Script, Error> {
182        for script in self.block.txdata.iter()
183            .skip(1) // skip coinbase
184            .flat_map(|t| t.input.iter().map(|i| &i.previous_output))
185            .map(script_for_coin) {
186            match script {
187                Ok(script) => self.add_element(script.as_bytes()),
188                Err(e) => return Err(e)
189            }
190        }
191        Ok(())
192    }
193
194    /// Add arbitrary element to a filter
195    pub fn add_element(&mut self, data: &[u8]) {
196        self.writer.add_element(data);
197    }
198
199    /// Write block filter
200    pub fn finish(&mut self) -> Result<usize, io::Error> {
201        self.writer.finish()
202    }
203}
204
205
/// Reads and interpret a block filter
///
/// Wraps a [`GCSFilterReader`] keyed with the block's hash, matching the
/// keying used by [`BlockFilterWriter`].
pub struct BlockFilterReader {
    // underlying Golomb-coded-set reader
    reader: GCSFilterReader
}
210
211impl BlockFilterReader {
212    /// Create a block filter reader
213    pub fn new(block_hash: &BlockHash) -> BlockFilterReader {
214        let block_hash_as_int = block_hash.into_inner();
215        let k0 = endian::slice_to_u64_le(&block_hash_as_int[0..8]);
216        let k1 = endian::slice_to_u64_le(&block_hash_as_int[8..16]);
217        BlockFilterReader { reader: GCSFilterReader::new(k0, k1, M, P) }
218    }
219
220    /// match any query pattern
221    pub fn match_any(&self, reader: &mut dyn io::Read, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
222        self.reader.match_any(reader, query)
223    }
224
225    /// match all query pattern
226    pub fn match_all(&self, reader: &mut dyn io::Read, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
227        self.reader.match_all(reader, query)
228    }
229}
230
231
/// Golomb-Rice encoded filter reader
pub struct GCSFilterReader {
    // hashing parameters (siphash keys and Rice parameter p)
    filter: GCSFilter,
    // range-mapping modulus; hashes are mapped into [0, n * m)
    m: u64
}
237
impl GCSFilterReader {
    /// Create a new filter reader with specific seed to siphash
    pub fn new(k0: u64, k1: u64, m: u64, p: u8) -> GCSFilterReader {
        GCSFilterReader { filter: GCSFilter::new(k0, k1, p), m }
    }

    /// match any query pattern
    ///
    /// Decodes the filter in a single pass, merging it against the sorted
    /// query hashes; returns true on the first common value.
    pub fn match_any(&self, reader: &mut dyn io::Read, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
        let mut decoder = reader;
        // an unreadable length prefix is treated as an empty filter
        let n_elements: VarInt = Decodable::consensus_decode(&mut decoder).unwrap_or(VarInt(0));
        let reader = &mut decoder;
        // map hashes to [0, n_elements << grp]
        let nm = n_elements.0 * self.m;
        let mut mapped = query.map(|e| map_to_range(self.filter.hash(e), nm)).collect::<Vec<_>>();
        // sort
        mapped.sort_unstable();
        if mapped.is_empty() {
            // an empty query matches vacuously
            return Ok(true);
        }
        if n_elements.0 == 0 {
            // non-empty query against an empty filter can never match
            return Ok(false);
        }

        // find first match in two sorted arrays in one read pass
        let mut reader = BitStreamReader::new(reader);
        // filter values are delta-coded, so accumulate while decoding
        let mut data = self.filter.golomb_rice_decode(&mut reader)?;
        let mut remaining = n_elements.0 - 1;
        for p in mapped {
            loop {
                match data.cmp(&p) {
                    Ordering::Equal => return Ok(true),
                    Ordering::Less => {
                        // advance the filter side of the merge
                        if remaining > 0 {
                            data += self.filter.golomb_rice_decode(&mut reader)?;
                            remaining -= 1;
                        } else {
                            return Ok(false);
                        }
                    }
                    // advance the query side of the merge
                    Ordering::Greater => break,
                }
            }
        }
        Ok(false)
    }

    /// match all query pattern
    ///
    /// Same single-pass merge as `match_any`, but every (deduplicated)
    /// query hash must be found in the filter.
    pub fn match_all(&self, reader: &mut dyn io::Read, query: &mut dyn Iterator<Item=&[u8]>) -> Result<bool, Error> {
        let mut decoder = reader;
        // an unreadable length prefix is treated as an empty filter
        let n_elements: VarInt = Decodable::consensus_decode(&mut decoder).unwrap_or(VarInt(0));
        let reader = &mut decoder;
        // map hashes to [0, n_elements << grp]
        let nm = n_elements.0 * self.m;
        let mut mapped = query.map(|e| map_to_range(self.filter.hash(e), nm)).collect::<Vec<_>>();
        // sort
        mapped.sort_unstable();
        mapped.dedup();
        if mapped.is_empty() {
            // an empty query is trivially fully contained
            return Ok(true);
        }
        if n_elements.0 == 0 {
            return Ok(false);
        }

        // figure if all mapped are there in one read pass
        let mut reader = BitStreamReader::new(reader);
        // filter values are delta-coded, so accumulate while decoding
        let mut data = self.filter.golomb_rice_decode(&mut reader)?;
        let mut remaining = n_elements.0 - 1;
        for p in mapped {
            loop {
                match data.cmp(&p) {
                    // found this query value; move to the next one
                    Ordering::Equal => break,
                    Ordering::Less => {
                        if remaining > 0 {
                            data += self.filter.golomb_rice_decode(&mut reader)?;
                            remaining -= 1;
                        } else {
                            return Ok(false);
                        }
                    },
                    // filter overshot the query value: it is absent
                    Ordering::Greater => return Ok(false),
                }
            }
        }
        Ok(true)
    }
}
325
// Fast uniform reduction of a 64-bit hash into [0, nm) via the
// multiply-then-take-high-bits trick (Lemire reduction), avoiding a modulo.
fn map_to_range(hash: u64, nm: u64) -> u64 {
    let wide = u128::from(hash) * u128::from(nm);
    (wide >> 64) as u64
}
330
/// Golomb-Rice encoded filter writer
pub struct GCSFilterWriter<'a> {
    // hashing parameters (siphash keys and Rice parameter p)
    filter: GCSFilter,
    // sink receiving the serialized filter
    writer: &'a mut dyn io::Write,
    // collected elements; a set, so duplicates collapse before encoding
    elements: HashSet<Vec<u8>>,
    // range-mapping modulus; hashes are mapped into [0, n * m)
    m: u64
}
338
339impl<'a> GCSFilterWriter<'a> {
340    /// Create a new GCS writer wrapping a generic writer, with specific seed to siphash
341    pub fn new(writer: &'a mut dyn io::Write, k0: u64, k1: u64, m: u64, p: u8) -> GCSFilterWriter<'a> {
342        GCSFilterWriter {
343            filter: GCSFilter::new(k0, k1, p),
344            writer,
345            elements: HashSet::new(),
346            m
347        }
348    }
349
350    /// Add some data to the filter
351    pub fn add_element(&mut self, element: &[u8]) {
352        if !element.is_empty() {
353            self.elements.insert(element.to_vec());
354        }
355    }
356
357    /// write the filter to the wrapped writer
358    pub fn finish(&mut self) -> Result<usize, io::Error> {
359        let nm = self.elements.len() as u64 * self.m;
360
361        // map hashes to [0, n_elements * M)
362        let mut mapped: Vec<_> = self.elements.iter()
363            .map(|e| map_to_range(self.filter.hash(e.as_slice()), nm)).collect();
364        mapped.sort_unstable();
365
366        // write number of elements as varint
367        let mut encoder = Vec::new();
368        VarInt(mapped.len() as u64).consensus_encode(&mut encoder).expect("in-memory writers don't error");
369        let mut wrote = self.writer.write(encoder.as_slice())?;
370
371        // write out deltas of sorted values into a Golonb-Rice coded bit stream
372        let mut writer = BitStreamWriter::new(self.writer);
373        let mut last = 0;
374        for data in mapped {
375            wrote += self.filter.golomb_rice_encode(&mut writer, data - last)?;
376            last = data;
377        }
378        wrote += writer.flush()?;
379        Ok(wrote)
380    }
381}
382
/// Golomb Coded Set Filter
struct GCSFilter {
    k0: u64, // sip hash key
    k1: u64, // sip hash key
    p: u8    // Rice parameter: remainders are encoded in p bits
}
389
390impl GCSFilter {
391    /// Create a new filter
392    fn new(k0: u64, k1: u64, p: u8) -> GCSFilter {
393        GCSFilter { k0, k1, p }
394    }
395
396    /// Golomb-Rice encode a number n to a bit stream (Parameter 2^k)
397    fn golomb_rice_encode(&self, writer: &mut BitStreamWriter, n: u64) -> Result<usize, io::Error> {
398        let mut wrote = 0;
399        let mut q = n >> self.p;
400        while q > 0 {
401            let nbits = cmp::min(q, 64);
402            wrote += writer.write(!0u64, nbits as u8)?;
403            q -= nbits;
404        }
405        wrote += writer.write(0, 1)?;
406        wrote += writer.write(n, self.p)?;
407        Ok(wrote)
408    }
409
410    /// Golomb-Rice decode a number from a bit stream (Parameter 2^k)
411    fn golomb_rice_decode(&self, reader: &mut BitStreamReader) -> Result<u64, io::Error> {
412        let mut q = 0u64;
413        while reader.read(1)? == 1 {
414            q += 1;
415        }
416        let r = reader.read(self.p)?;
417        Ok((q << self.p) + r)
418    }
419
420    /// Hash an arbitrary slice with siphash using parameters of this filter
421    fn hash(&self, element: &[u8]) -> u64 {
422        siphash24::Hash::hash_to_u64_with_keys(self.k0, self.k1, element)
423    }
424}
425
/// Bitwise stream reader
pub struct BitStreamReader<'a> {
    // single-byte read buffer
    buffer: [u8; 1],
    // number of bits of `buffer` already consumed; 8 means "refill needed"
    offset: u8,
    // underlying byte source
    reader: &'a mut dyn io::Read,
}
432
433impl<'a> BitStreamReader<'a> {
434    /// Create a new BitStreamReader that reads bitwise from a given reader
435    pub fn new(reader: &'a mut dyn io::Read) -> BitStreamReader {
436        BitStreamReader {
437            buffer: [0u8],
438            reader,
439            offset: 8,
440        }
441    }
442
443    /// Read nbit bits
444    pub fn read(&mut self, mut nbits: u8) -> Result<u64, io::Error> {
445        if nbits > 64 {
446            return Err(io::Error::new(io::ErrorKind::Other, "can not read more than 64 bits at once"));
447        }
448        let mut data = 0u64;
449        while nbits > 0 {
450            if self.offset == 8 {
451                self.reader.read_exact(&mut self.buffer)?;
452                self.offset = 0;
453            }
454            let bits = cmp::min(8 - self.offset, nbits);
455            data <<= bits;
456            data |= ((self.buffer[0] << self.offset) >> (8 - bits)) as u64;
457            self.offset += bits;
458            nbits -= bits;
459        }
460        Ok(data)
461    }
462}
463
/// Bitwise stream writer
pub struct BitStreamWriter<'a> {
    // single-byte accumulation buffer
    buffer: [u8; 1],
    // number of bits of `buffer` already filled (0..=8)
    offset: u8,
    // underlying byte sink
    writer: &'a mut dyn io::Write,
}
470
471impl<'a> BitStreamWriter<'a> {
472    /// Create a new BitStreamWriter that writes bitwise to a given writer
473    pub fn new(writer: &'a mut dyn io::Write) -> BitStreamWriter {
474        BitStreamWriter {
475            buffer: [0u8],
476            writer,
477            offset: 0,
478        }
479    }
480
481    /// Write nbits bits from data
482    pub fn write(&mut self, data: u64, mut nbits: u8) -> Result<usize, io::Error> {
483        if nbits > 64 {
484            return Err(io::Error::new(io::ErrorKind::Other, "can not write more than 64 bits at once"));
485        }
486        let mut wrote = 0;
487        while nbits > 0 {
488            let bits = cmp::min(8 - self.offset, nbits);
489            self.buffer[0] |= ((data << (64 - nbits)) >> (64 - 8 + self.offset)) as u8;
490            self.offset += bits;
491            nbits -= bits;
492            if self.offset == 8 {
493                wrote += self.flush()?;
494            }
495        }
496        Ok(wrote)
497    }
498
499    /// flush bits not yet written
500    pub fn flush(&mut self) -> Result<usize, io::Error> {
501        if self.offset > 0 {
502            self.writer.write_all(&self.buffer)?;
503            self.buffer[0] = 0u8;
504            self.offset = 0;
505            Ok(1)
506        } else {
507            Ok(0)
508        }
509    }
510}
511
#[cfg(test)]
mod test {
    use io::Cursor;

    use hash_types::BlockHash;
    use hashes::hex::FromHex;

    use super::*;

    extern crate serde_json;
    use self::serde_json::Value;

    use consensus::encode::deserialize;
    use std::collections::HashMap;

    // End-to-end check against the upstream BIP-158 test vectors: build a
    // filter per block, compare content and chained headers, then exercise
    // match_any/match_all against the spent scripts.
    #[test]
    fn test_blockfilters() {

        // test vectors from: https://github.com/jimpo/bitcoin/blob/c7efb652f3543b001b4dd22186a354605b14f47e/src/test/data/blockfilters.json
        let data = include_str!("../../test_data/blockfilters.json");

        let testdata = serde_json::from_str::<Value>(data).unwrap().as_array().unwrap().clone();
        // first row of the fixture is a header row, hence skip(1)
        for t in testdata.iter().skip(1) {
            let block_hash = BlockHash::from_hex(&t.get(1).unwrap().as_str().unwrap()).unwrap();
            let block: Block = deserialize(&Vec::from_hex(&t.get(2).unwrap().as_str().unwrap()).unwrap()).unwrap();
            assert_eq!(block.block_hash(), block_hash);
            let scripts = t.get(3).unwrap().as_array().unwrap();
            let previous_filter_header = FilterHeader::from_hex(&t.get(4).unwrap().as_str().unwrap()).unwrap();
            let filter_content = Vec::from_hex(&t.get(5).unwrap().as_str().unwrap()).unwrap();
            let filter_header = FilterHeader::from_hex(&t.get(6).unwrap().as_str().unwrap()).unwrap();

            // reconstruct the outpoint -> spent-script lookup the filter
            // builder needs; fixture scripts are listed in input order
            let mut txmap = HashMap::new();
            let mut si = scripts.iter();
            for tx in block.txdata.iter().skip(1) {
                for input in tx.input.iter() {
                    txmap.insert(input.previous_output.clone(), Script::from(Vec::from_hex(si.next().unwrap().as_str().unwrap()).unwrap()));
                }
            }

            let filter = BlockFilter::new_script_filter(&block,
                                        |o| if let Some(s) = txmap.get(o) {
                                            Ok(s.clone())
                                        } else {
                                            Err(Error::UtxoMissing(o.clone()))
                                        }).unwrap();

            let test_filter = BlockFilter::new(filter_content.as_slice());

            // computed filter must match the fixture byte-for-byte
            assert_eq!(test_filter.content, filter.content);

            let block_hash = &block.block_hash();
            // every non-empty spent script must be found by match_all
            assert!(filter.match_all(block_hash, &mut txmap.iter()
                .filter_map(|(_, s)| if !s.is_empty() { Some(s.as_bytes()) } else { None })).unwrap());

            // and each one individually by match_any
            for (_, script) in &txmap {
                let query = vec![script];
                if !script.is_empty () {
                    assert!(filter.match_any(&block_hash, &mut query.iter()
                        .map(|s| s.as_bytes())).unwrap());
                }
            }

            // header chaining must reproduce the fixture header
            assert_eq!(filter_header, filter.filter_header(&previous_filter_header));
        }
    }

    // Round-trips a hand-built GCS filter: all inserted patterns must match,
    // absent patterns must not (match_any and match_all variants).
    #[test]
    fn test_filter() {
        let mut patterns = HashSet::new();

        patterns.insert(Vec::from_hex("000000").unwrap());
        patterns.insert(Vec::from_hex("111111").unwrap());
        patterns.insert(Vec::from_hex("222222").unwrap());
        patterns.insert(Vec::from_hex("333333").unwrap());
        patterns.insert(Vec::from_hex("444444").unwrap());
        patterns.insert(Vec::from_hex("555555").unwrap());
        patterns.insert(Vec::from_hex("666666").unwrap());
        patterns.insert(Vec::from_hex("777777").unwrap());
        patterns.insert(Vec::from_hex("888888").unwrap());
        patterns.insert(Vec::from_hex("999999").unwrap());
        patterns.insert(Vec::from_hex("aaaaaa").unwrap());
        patterns.insert(Vec::from_hex("bbbbbb").unwrap());
        patterns.insert(Vec::from_hex("cccccc").unwrap());
        patterns.insert(Vec::from_hex("dddddd").unwrap());
        patterns.insert(Vec::from_hex("eeeeee").unwrap());
        patterns.insert(Vec::from_hex("ffffff").unwrap());

        let mut out = Vec::new();
        {
            let mut writer = GCSFilterWriter::new(&mut out, 0, 0, M, P);
            for p in &patterns {
                writer.add_element(p.as_slice());
            }
            writer.finish().unwrap();
        }

        let bytes = out;

        {
            // one present element is enough for match_any
            let mut query = Vec::new();
            query.push(Vec::from_hex("abcdef").unwrap());
            query.push(Vec::from_hex("eeeeee").unwrap());

            let reader = GCSFilterReader::new(0, 0, M, P);
            let mut input = Cursor::new(bytes.clone());
            assert!(reader.match_any(&mut input, &mut query.iter().map(|v| v.as_slice())).unwrap());
        }
        {
            // no present elements: match_any must fail
            let mut query = Vec::new();
            query.push(Vec::from_hex("abcdef").unwrap());
            query.push(Vec::from_hex("123456").unwrap());

            let reader = GCSFilterReader::new(0, 0, M, P);
            let mut input = Cursor::new(bytes.clone());
            assert!(!reader.match_any(&mut input, &mut query.iter().map(|v| v.as_slice())).unwrap());
        }
        {
            // the full pattern set must satisfy match_all
            let reader = GCSFilterReader::new(0, 0, M, P);
            let mut query = Vec::new();
            for p in &patterns {
                query.push(p.clone());
            }
            let mut input = Cursor::new(bytes.clone());
            assert!(reader.match_all(&mut input, &mut query.iter().map(|v| v.as_slice())).unwrap());
        }
        {
            // one absent element breaks match_all
            let reader = GCSFilterReader::new(0, 0, M, P);
            let mut query = Vec::new();
            for p in &patterns {
                query.push(p.clone());
            }
            query.push(Vec::from_hex("abcdef").unwrap());
            let mut input = Cursor::new(bytes.clone());
            assert!(!reader.match_all(&mut input, &mut query.iter().map(|v| v.as_slice())).unwrap());
        }
    }

    // Round-trips the raw bit stream writer/reader, checking the exact bit
    // layout and that reading past the end errors.
    #[test]
    fn test_bit_stream() {
        let mut out = Vec::new();
        {
            let mut writer = BitStreamWriter::new(&mut out);
            writer.write(0, 1).unwrap(); // 0
            writer.write(2, 2).unwrap(); // 10
            writer.write(6, 3).unwrap(); // 110
            writer.write(11, 4).unwrap(); // 1011
            writer.write(1, 5).unwrap(); // 00001
            writer.write(32, 6).unwrap(); // 100000
            writer.write(7, 7).unwrap(); // 0000111
            writer.flush().unwrap();
        }
        let bytes = out;
        assert_eq!("01011010110000110000000001110000", format!("{:08b}{:08b}{:08b}{:08b}", bytes[0], bytes[1], bytes[2], bytes[3]));
        {
            let mut input = Cursor::new(bytes);
            let mut reader = BitStreamReader::new(&mut input);
            assert_eq!(reader.read(1).unwrap(), 0);
            assert_eq!(reader.read(2).unwrap(), 2);
            assert_eq!(reader.read(3).unwrap(), 6);
            assert_eq!(reader.read(4).unwrap(), 11);
            assert_eq!(reader.read(5).unwrap(), 1);
            assert_eq!(reader.read(6).unwrap(), 32);
            assert_eq!(reader.read(7).unwrap(), 7);
            // 4 bits remained
            assert!(reader.read(5).is_err());
        }
    }
}