Struct jomini::binary::TokenReader
source · pub struct TokenReader<R> { /* private fields */ }
Expand description
Lexer that works over a Read implementation
Example of computing the max nesting depth using a TokenReader.
use jomini::binary::{TokenReader, Token};
let data = [0x2d, 0x28, 0x01, 0x00, 0x03, 0x00, 0x03, 0x00, 0x04, 0x00, 0x04, 0x00];
let mut reader = TokenReader::new(&data[..]);
let mut max_depth = 0;
let mut current_depth = 0;
while let Some(token) = reader.next()? {
match token {
Token::Open => {
current_depth += 1;
max_depth = max_depth.max(current_depth);
}
Token::Close => current_depth -= 1,
_ => {}
}
}
assert_eq!(max_depth, 2);
Unlike a BinaryTape, which will skip ghost objects, pair open and close tokens together, and recognize if a container is an object, array, or mixed – the tokens yielded from a TokenReader are not fully formed. This is a much more raw view of the data that can be used to construct higher level parsers, melters, and deserializers that operate over a stream of data.
TokenReader operates over a fixed size buffer, so using a BufRead affords no benefits. An error will be returned for tokens that are impossible to fit within the buffer (e.g., if provided with a 100 byte buffer but there is a binary string that is 101 bytes long).
Implementations§
source§impl TokenReader<()>
impl TokenReader<()>
sourcepub fn from_slice(data: &[u8]) -> TokenReader<&[u8]>
pub fn from_slice(data: &[u8]) -> TokenReader<&[u8]>
Read from a byte slice without memcpys
source§impl<R> TokenReader<R>where
R: Read,
impl<R> TokenReader<R>where
R: Read,
sourcepub fn position(&self) -> usize
pub fn position(&self) -> usize
Returns the byte position of the data stream that has been processed.
use jomini::binary::{TokenReader, Token};
let mut reader = TokenReader::new(&[0xd2, 0x28, 0xff][..]);
assert_eq!(reader.read().unwrap(), Token::Id(0x28d2));
assert_eq!(reader.position(), 2);
sourcepub fn read_bytes(&mut self, bytes: usize) -> Result<&[u8], ReaderError>
pub fn read_bytes(&mut self, bytes: usize) -> Result<&[u8], ReaderError>
Advance a given number of bytes and return them.
The internal buffer must be large enough to accommodate all bytes.
use jomini::binary::{TokenReader, LexError, ReaderErrorKind};
let mut reader = TokenReader::new(&b"EU4bin"[..]);
assert_eq!(reader.read_bytes(6).unwrap(), &b"EU4bin"[..]);
assert!(matches!(reader.read_bytes(1).unwrap_err().kind(), ReaderErrorKind::Lexer(LexError::Eof)));
sourcepub fn skip_container(&mut self) -> Result<(), ReaderError>
pub fn skip_container(&mut self) -> Result<(), ReaderError>
Advance through the containing block until the closing token is consumed
use jomini::binary::{TokenReader, Token};
let mut reader = TokenReader::new(&[
0xd2, 0x28, 0x01, 0x00, 0x03, 0x00, 0x03, 0x00,
0x04, 0x00, 0x04, 0x00, 0xff, 0xff
][..]);
assert_eq!(reader.read().unwrap(), Token::Id(0x28d2));
assert_eq!(reader.read().unwrap(), Token::Equal);
assert_eq!(reader.read().unwrap(), Token::Open);
assert!(reader.skip_container().is_ok());
assert_eq!(reader.read().unwrap(), Token::Id(0xffff));
sourcepub fn into_parts(self) -> (Box<[u8]>, R)
pub fn into_parts(self) -> (Box<[u8]>, R)
Consume the token reader and return the internal buffer and reader. This allows the buffer to be reused.
use jomini::binary::TokenReader;
let data = b"EU4bin";
let mut reader = TokenReader::new(&data[..]);
assert_eq!(reader.read_bytes(6).unwrap(), &data[..]);
let (buf, _) = reader.into_parts();
let data = b"HOI4bin";
let mut reader = TokenReader::builder().buffer(buf).build(&data[..]);
assert_eq!(reader.read_bytes(7).unwrap(), &data[..]);
sourcepub fn read(&mut self) -> Result<Token<'_>, ReaderError>
pub fn read(&mut self) -> Result<Token<'_>, ReaderError>
Read the next token in the stream. Will error if not enough data remains to decode a token.
use jomini::binary::{TokenReader, Token, ReaderErrorKind, LexError};
let mut reader = TokenReader::new(&[
0xd2, 0x28, 0x01, 0x00, 0x03, 0x00, 0x04, 0x00
][..]);
assert_eq!(reader.read().unwrap(), Token::Id(0x28d2));
assert_eq!(reader.read().unwrap(), Token::Equal);
assert_eq!(reader.read().unwrap(), Token::Open);
assert_eq!(reader.read().unwrap(), Token::Close);
assert!(matches!(reader.read().unwrap_err().kind(), ReaderErrorKind::Lexer(LexError::Eof)));
sourcepub fn next(&mut self) -> Result<Option<Token<'_>>, ReaderError>
pub fn next(&mut self) -> Result<Option<Token<'_>>, ReaderError>
Read a token, returning None when all the data has been consumed
use jomini::binary::{TokenReader, Token};
let mut reader = TokenReader::new(&[
0xd2, 0x28, 0x01, 0x00, 0x03, 0x00, 0x04, 0x00
][..]);
assert_eq!(reader.next().unwrap(), Some(Token::Id(0x28d2)));
assert_eq!(reader.next().unwrap(), Some(Token::Equal));
assert_eq!(reader.next().unwrap(), Some(Token::Open));
assert_eq!(reader.next().unwrap(), Some(Token::Close));
assert_eq!(reader.next().unwrap(), None);
source§impl TokenReader<()>
impl TokenReader<()>
sourcepub fn builder() -> TokenReaderBuilder
pub fn builder() -> TokenReaderBuilder
Initializes a default TokenReaderBuilder