Struct jomini::text::TokenReader

source ·
pub struct TokenReader<R> { /* private fields */ }
Expand description

Scan a Read implementation for text Tokens

Example of computing the max nesting depth using a TokenReader.

use jomini::text::{TokenReader, Token};
let data = b"foo={{ id=3 } {} { id = 4 }}";
let mut reader = TokenReader::new(&data[..]);
let mut max_depth = 0;
let mut current_depth = 0;
while let Some(token) = reader.next()? {
  match token {
    Token::Open => {
      current_depth += 1;
      max_depth = max_depth.max(current_depth);
    }
    Token::Close => current_depth -= 1,
    _ => {}
  }
}
assert_eq!(max_depth, 2);

Unlike a TextTape, which will skip ghost objects, pair open and close tokens together, and recognize if a container is an object, array, or mixed – the tokens yielded from a TokenReader are not fully formed. This is a much more raw view of the data that can be used to construct higher level parsers and deserializers that operate over a stream of data.

The TokenReader is considered experimental, as it uses a different parsing algorithm geared towards parsing large save files. Ergonomic equivalents for more esoteric game syntax (like parameter definitions) have not yet been finalized. Game files can still be parsed with the experimental APIs, but these APIs may change in the future based on feedback. Since the binary format is not used for game files, the binary::TokenReader is not considered experimental.

TokenReader operates over a fixed size buffer, so using a BufRead affords no benefits. An error will be returned for tokens that are impossible to fit within the buffer (e.g.: if provided with a 100 byte buffer but there is a string that is 101 bytes long).

Implementations§

source§

impl TokenReader<()>

source

pub fn from_slice(data: &[u8]) -> TokenReader<&[u8]>

Read from a byte slice without memcpy’s

source§

impl<R> TokenReader<R>
where R: Read,

source

pub fn new(reader: R) -> Self

Create a new text reader

source

pub fn position(&self) -> usize

Returns the byte position of the data stream that has been processed.

use jomini::{Scalar, text::{TokenReader, Token}};
let mut reader = TokenReader::new(&b"date=1444.11.11"[..]);
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"date")));
assert_eq!(reader.position(), 4);
source

pub fn read_bytes(&mut self, bytes: usize) -> Result<&[u8], ReaderError>

Advance a given number of bytes and return them.

The internal buffer must be large enough to accommodate all bytes.

use jomini::text::{TokenReader, ReaderErrorKind};
let mut reader = TokenReader::new(&b"EU4txt"[..]);
assert_eq!(reader.read_bytes(6).unwrap(), &b"EU4txt"[..]);
assert!(matches!(reader.read_bytes(1).unwrap_err().kind(), ReaderErrorKind::Eof));
source

pub fn skip_container(&mut self) -> Result<(), ReaderError>

Advance through the containing block until the closing token is consumed

use jomini::{Scalar, text::{TokenReader, Token, Operator}};
let mut reader = TokenReader::new(&b"foo={{bar={}}} qux=1"[..]);
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"foo")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Open);
assert!(reader.skip_container().is_ok());
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"qux")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"1")));
source

pub fn skip_unquoted_value(&mut self) -> Result<(), ReaderError>

Skip any trailing data associated with the unquoted value. Useful for skipping an unquoted value that may be serving as a header.

In the below example the rgb { 1 2 3 } will first be parsed as unquoted rgb, but the { 1 2 3 } needs to be skipped as well as it is tied to rgb.

use jomini::{Scalar, text::{TokenReader, Token, Operator}};
let mut reader = TokenReader::new(&b"color = rgb { 1 2 3 }  foo=bar"[..]);
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"color")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"rgb")));
assert!(reader.skip_unquoted_value().is_ok());
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"foo")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"bar")));
source

pub fn into_parts(self) -> (Box<[u8]>, R)

Consume the token reader and return the internal buffer and reader. This allows the buffer to be reused.

use jomini::text::{TokenReader};
let data = b"EU4txt";
let mut reader = TokenReader::new(&data[..]);
assert_eq!(reader.read_bytes(6).unwrap(), &data[..]);

let (buf, _) = reader.into_parts();
let data = b"HOI4txt";
let mut reader = TokenReader::builder().buffer(buf).build(&data[..]);
assert_eq!(reader.read_bytes(7).unwrap(), &data[..]);
source

pub fn read(&mut self) -> Result<Token<'_>, ReaderError>

Read the next token in the stream. Will error if not enough data remains to decode a token.

use jomini::{Scalar, text::{TokenReader, Token, ReaderErrorKind, Operator}};
let mut reader = TokenReader::new(&b"date=1444.11.11"[..]);
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"date")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"1444.11.11")));
assert!(matches!(reader.read().unwrap_err().kind(), ReaderErrorKind::Eof));
source

pub fn next(&mut self) -> Result<Option<Token<'_>>, ReaderError>

Read a token, returning none when all the data has been consumed

use jomini::{Scalar, text::{TokenReader, Token, Operator}};
let mut reader = TokenReader::new(&b"date=1444.11.11"[..]);
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"date")));
assert_eq!(reader.read().unwrap(), Token::Operator(Operator::Equal));
assert_eq!(reader.read().unwrap(), Token::Unquoted(Scalar::new(b"1444.11.11")));
assert_eq!(reader.next().unwrap(), None);
source§

impl TokenReader<()>

source

pub fn builder() -> TokenReaderBuilder

Initializes a default TokenReaderBuilder

Trait Implementations§

source§

impl<R: Debug> Debug for TokenReader<R>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<R> Freeze for TokenReader<R>
where R: Freeze,

§

impl<R> RefUnwindSafe for TokenReader<R>
where R: RefUnwindSafe,

§

impl<R> !Send for TokenReader<R>

§

impl<R> !Sync for TokenReader<R>

§

impl<R> Unpin for TokenReader<R>
where R: Unpin,

§

impl<R> UnwindSafe for TokenReader<R>
where R: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

source§

impl<T, U> Into<U> for T
where U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.