Trait vlqencoding::VLQDecodeAt

pub trait VLQDecodeAt<T> {
    fn read_vlq_at(&self, offset: usize) -> Result<(T, usize)>;
}
Required Methods

fn read_vlq_at(&self, offset: usize) -> Result<(T, usize)>
Read a VLQ byte array from the given offset and decode it to an integer.
Returns Ok((decoded_integer, bytes_read)) on success.

This is similar to VLQDecode::read_vlq, but operates on an immutable
AsRef<[u8]> instead of a mutable io::Read object.
Examples
use vlqencoding::VLQDecodeAt;
use std::io::ErrorKind;
let c = &[120u8, 211, 171, 202, 220, 84, 255];
let x: Result<(u8, _), _> = c.read_vlq_at(0);
assert_eq!(x.unwrap(), (120u8, 1));
let x: Result<(u64, _), _> = c.read_vlq_at(1);
assert_eq!(x.unwrap(), (22742734291u64, 5));
let x: Result<(u64, _), _> = c.read_vlq_at(6);
assert_eq!(x.unwrap_err().kind(), ::std::io::ErrorKind::InvalidData);
let x: Result<(u64, _), _> = c.read_vlq_at(7);
assert_eq!(x.unwrap_err().kind(), ::std::io::ErrorKind::InvalidData);