use crate::tokens::{Token, TokenPacker, Tokenizer};
use anyhow::{Error, Result};
use std::convert::From;
use std::fmt;
use std::hash::Hash;
/// A single octet — the smallest possible token unit.
#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Byte(u8);

impl fmt::Display for Byte {
    /// Renders the byte as a zero-padded hex literal, e.g. `0x0a` or `0xff`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:#04x}", self.0)
    }
}
impl Token for Byte {
    type Tokenizer = ByteTokenizer;
    type Packer = BytePacker;

    /// A `Byte` always occupies exactly eight bits.
    fn bit_count(&self) -> usize {
        u8::BITS as usize
    }
}
impl From<u8> for Byte {
fn from(data: u8) -> Self {
Self(data)
}
}
/// Tokenizer that emits each byte of the input stream as a [`Byte`] token.
pub struct ByteTokenizer;
impl Tokenizer for ByteTokenizer {
    type T = Byte;
    type Iter<R: std::io::Read> = ByteIter<R>;
    // Construction only wraps the reader; bytes are pulled lazily, one at a
    // time, as the returned iterator is advanced. This cannot fail, but the
    // trait signature is fallible, hence the unconditional `Ok`.
    fn tokenize<R: std::io::Read>(r: R) -> Result<Self::Iter<R>> {
        Ok(ByteIter(r))
    }
}
/// Streaming iterator over a reader, yielding one [`Byte`] per input byte.
#[derive(Clone, Debug)]
pub struct ByteIter<R: std::io::Read>(R);

impl<R: std::io::Read> std::iter::Iterator for ByteIter<R> {
    type Item = Result<Byte>;

    /// Returns the next byte, `None` at EOF, or the underlying I/O error.
    ///
    /// Transient `ErrorKind::Interrupted` reads are retried transparently,
    /// matching the behavior of `std::io::Bytes` — previously they were
    /// surfaced as fatal errors mid-stream.
    fn next(&mut self) -> Option<Self::Item> {
        let mut buf = [0u8; 1];
        loop {
            return match self.0.read(&mut buf) {
                Ok(0) => None,
                Ok(1) => Some(Ok(Byte(buf[0]))),
                // The `Read` contract forbids reporting more bytes than the
                // buffer holds; hitting this means a broken `Read` impl.
                Ok(n) => panic!("read {} bytes in 1 byte buffer", n),
                Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,
                Err(e) => Some(Err(Error::new(e))),
            };
        }
    }
}
/// Packer that writes each [`Byte`] token back out as a single raw octet.
#[derive(Clone, Debug, Default)]
pub struct BytePacker;
impl TokenPacker for BytePacker {
    type T = Byte;

    /// Writes every token from `i` to `w` as a single raw byte, then flushes.
    ///
    /// Writes are one byte per call; pass a buffered writer for large
    /// streams to avoid a syscall per byte.
    ///
    /// # Errors
    /// Returns the first I/O error encountered while writing or flushing.
    fn pack<I, W: std::io::Write>(i: I, mut w: W) -> Result<()>
    where
        I: std::iter::Iterator<Item = Self::T>,
    {
        for b in i {
            // `?` converts `std::io::Error` into `anyhow::Error` via `From`;
            // no manual re-wrapping with `Error::new` is needed.
            w.write_all(&[b.0])?;
        }
        w.flush()?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;

    const TEXT: &str = "
Ah! well a-day! what evil looks
Had I from old and young!
Instead of the cross, the Albatross
About my neck was hung.
";

    /// Tokenizing a string into bytes and packing them back out must
    /// reproduce the original input exactly.
    #[test]
    fn roundtrip() {
        let mut reader = Cursor::new(TEXT);
        let tokens = ByteTokenizer::tokenize(&mut reader)
            .unwrap()
            .map(|t| t.unwrap());
        let mut sink = Cursor::new(Vec::new());
        BytePacker::pack(tokens, &mut sink).unwrap();
        let roundtripped = std::str::from_utf8(sink.get_ref()).unwrap();
        assert_eq!(roundtripped, TEXT);
    }
}