use std::{
borrow::Cow,
collections::{BTreeMap, HashMap},
fmt,
io::{self, prelude::*, Cursor},
sync::Arc,
};
use binrw::prelude::*;
use crc32fast::hash as crc32;
use enumset::{enum_set, EnumSet, EnumSetType};
use log::*;
use crate::{
io_utils::CountingCrcReader,
records::{self, op, Record},
Attachment, Channel, McapError, McapResult, Message, Schema, MAGIC,
};
/// Nonstandard reading options, e.g., to be more lenient with malformed files.
#[derive(EnumSetType, Debug)]
pub enum Options {
    /// Don't require the file to end with its magic bytes.
    /// (Useful when reading a file that's still being written.)
    IgnoreEndMagic,
}
/// Scans a mapped MCAP file from start to end, returning each record.
///
/// You probably want a higher-level iterator that descends into chunks;
/// this one returns chunk records whole, without reading their contents.
pub struct LinearReader<'a> {
    // Remaining unread bytes (magics already stripped off by the constructor).
    buf: &'a [u8],
    // Set once a record fails to parse; iteration stops permanently.
    malformed: bool,
}
impl<'a> LinearReader<'a> {
    /// Create a reader over the given file, validating the magic bytes at
    /// both ends.
    pub fn new(buf: &'a [u8]) -> McapResult<Self> {
        Self::new_with_options(buf, enum_set!())
    }

    /// Like [`LinearReader::new`], but with reader [`Options`].
    pub fn new_with_options(buf: &'a [u8], options: EnumSet<Options>) -> McapResult<Self> {
        let require_end_magic = !options.contains(Options::IgnoreEndMagic);
        let start_ok = buf.starts_with(MAGIC);
        let end_ok = buf.ends_with(MAGIC) && buf.len() >= 2 * MAGIC.len();
        if !start_ok || (require_end_magic && !end_ok) {
            return Err(McapError::BadMagic);
        }

        // Strip the leading magic, then the trailing one if present
        // (it may legitimately be absent under IgnoreEndMagic).
        let mut inner = &buf[MAGIC.len()..];
        if inner.ends_with(MAGIC) {
            inner = &inner[..inner.len() - MAGIC.len()];
        }
        Ok(Self::sans_magic(inner))
    }

    /// Create a reader over a byte range with no magic bytes at either end,
    /// e.g., the decompressed contents of a chunk.
    pub fn sans_magic(buf: &'a [u8]) -> Self {
        Self {
            buf,
            malformed: false,
        }
    }

    // How many bytes are left to read (used for data-section CRC accounting).
    fn bytes_remaining(&self) -> usize {
        self.buf.len()
    }
}
impl<'a> Iterator for LinearReader<'a> {
    type Item = McapResult<records::Record<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        // Stop at end of input, and never continue past a parse failure.
        if self.buf.is_empty() || self.malformed {
            return None;
        }

        match read_record_from_slice(&mut self.buf) {
            Ok(record) => Some(Ok(record)),
            Err(e) => {
                // Poison the reader so subsequent calls return None.
                self.malformed = true;
                Some(Err(e))
            }
        }
    }
}
/// Reads a single record (opcode byte, u64 length, then body) off the front
/// of `buf`, advancing `buf` past it.
///
/// Returns [`McapError::UnexpectedEof`] if the remaining bytes can't hold
/// the record prefix or the advertised body length.
fn read_record_from_slice<'a>(buf: &mut &'a [u8]) -> McapResult<records::Record<'a>> {
    // The prefix is a 1-byte opcode plus an 8-byte little-endian length.
    // We must have all 9 bytes before calling read_u8/read_u64: those
    // helpers index the slice directly and would panic on a short buffer.
    const PREFIX_LEN: usize = std::mem::size_of::<u8>() + std::mem::size_of::<u64>();
    if buf.len() < PREFIX_LEN {
        warn!("Malformed MCAP - not enough space for record + length!");
        return Err(McapError::UnexpectedEof);
    }

    let op = read_u8(buf);
    let len = read_u64(buf);

    if buf.len() < len as usize {
        warn!(
            "Malformed MCAP - record with length {len}, but only {} bytes remain",
            buf.len()
        );
        return Err(McapError::UnexpectedEof);
    }
    let body = &buf[..len as usize];
    debug!("slice: opcode {op:02X}, length {len}");
    let record = read_record(op, body)?;
    trace!(" {:?}", record);

    // Only consume the record once it parsed successfully.
    *buf = &buf[len as usize..];
    Ok(record)
}
fn read_record(op: u8, body: &[u8]) -> McapResult<records::Record<'_>> {
macro_rules! record {
($b:ident) => {{
let mut cur = Cursor::new($b);
let res = cur.read_le()?;
assert_eq!($b.len() as u64, cur.position());
res
}};
}
Ok(match op {
op::HEADER => Record::Header(record!(body)),
op::FOOTER => Record::Footer(record!(body)),
op::SCHEMA => {
let mut c = Cursor::new(body);
let header: records::SchemaHeader = c.read_le()?;
let data = Cow::Borrowed(&body[c.position() as usize..]);
if header.data_len != data.len() as u32 {
warn!(
"Schema {}'s data length doesn't match the total schema length",
header.name
);
}
Record::Schema { header, data }
}
op::CHANNEL => Record::Channel(record!(body)),
op::MESSAGE => {
let mut c = Cursor::new(body);
let header = c.read_le()?;
let data = Cow::Borrowed(&body[c.position() as usize..]);
Record::Message { header, data }
}
op::CHUNK => {
let mut c = Cursor::new(body);
let header: records::ChunkHeader = c.read_le()?;
let data = &body[c.position() as usize..];
if header.compressed_size != data.len() as u64 {
warn!("Chunk's compressed length doesn't match its header");
}
Record::Chunk { header, data }
}
op::MESSAGE_INDEX => Record::MessageIndex(record!(body)),
op::CHUNK_INDEX => Record::ChunkIndex(record!(body)),
op::ATTACHMENT => {
let mut c = Cursor::new(body);
let header: records::AttachmentHeader = c.read_le()?;
let data = &body[c.position() as usize..body.len() - 4];
if header.data_len != data.len() as u64 {
warn!(
"Attachment {}'s data length doesn't match the total schema length",
header.name
);
}
let crc = Cursor::new(&body[body.len() - 4..]).read_le()?;
if crc != 0 {
let calculated = crc32(&body[..body.len() - 4]);
if crc != calculated {
return Err(McapError::BadAttachmentCrc {
saved: crc,
calculated,
});
}
}
Record::Attachment { header, data }
}
op::ATTACHMENT_INDEX => Record::AttachmentIndex(record!(body)),
op::STATISTICS => Record::Statistics(record!(body)),
op::METADATA => Record::Metadata(record!(body)),
op::METADATA_INDEX => Record::MetadataIndex(record!(body)),
op::SUMMARY_OFFSET => Record::SummaryOffset(record!(body)),
op::END_OF_DATA => Record::EndOfData(record!(body)),
opcode => Record::Unknown {
opcode,
data: Cow::Borrowed(body),
},
})
}
/// How a chunk's records are produced, depending on its compression.
enum ChunkDecompressor<'a> {
    // Uncompressed chunk: read records straight out of the file slice.
    Null(LinearReader<'a>),
    // Compressed chunk: a CRC-counting decompression stream.
    // Becomes `None` once the stream is exhausted or has errored.
    Compressed(Option<CountingCrcReader<Box<dyn Read + Send + 'a>>>),
}
/// Streams records out of a [Chunk](Record::Chunk), decompressing as needed.
pub struct ChunkReader<'a> {
    // Kept for the uncompressed size and CRC checks during iteration.
    header: records::ChunkHeader,
    decompressor: ChunkDecompressor<'a>,
}
impl<'a> ChunkReader<'a> {
pub fn new(header: records::ChunkHeader, data: &'a [u8]) -> McapResult<Self> {
let decompressor = match header.compression.as_str() {
"zstd" => ChunkDecompressor::Compressed(Some(CountingCrcReader::new(Box::new(
zstd::Decoder::new(data)?,
)))),
"lz4" => ChunkDecompressor::Compressed(Some(CountingCrcReader::new(Box::new(
lz4::Decoder::new(data)?,
)))),
"" => {
if header.uncompressed_size != header.compressed_size {
warn!(
"Chunk is uncompressed, but claims different compress/uncompressed lengths"
);
}
if header.uncompressed_crc != 0 {
let calculated = crc32(data);
if header.uncompressed_crc != calculated {
return Err(McapError::BadChunkCrc {
saved: header.uncompressed_crc,
calculated,
});
}
}
ChunkDecompressor::Null(LinearReader::sans_magic(data))
}
wat => return Err(McapError::UnsupportedCompression(wat.to_string())),
};
Ok(Self {
header,
decompressor,
})
}
}
impl<'a> Iterator for ChunkReader<'a> {
    type Item = McapResult<records::Record<'a>>;

    /// Yields the next record in the chunk, or `None` once the stream ends.
    /// A read/parse error permanently ends iteration.
    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.decompressor {
            // Uncompressed chunks delegate straight to the slice reader
            // (which did its CRC check up front in ChunkReader::new).
            ChunkDecompressor::Null(r) => r.next(),
            ChunkDecompressor::Compressed(stream) => {
                // `None` means the stream is exhausted or already failed.
                if stream.is_none() {
                    return None;
                }
                let s = stream.as_mut().unwrap();
                let record = match read_record_from_chunk_stream(s) {
                    Ok(k) => k,
                    Err(e) => {
                        // Drop the stream so we never read past an error.
                        *stream = None; return Some(Err(e));
                    }
                };
                // Once we've consumed the advertised number of uncompressed
                // bytes, finalize the running CRC and compare it to the
                // header's (where 0 means "no CRC recorded").
                if s.position() >= self.header.uncompressed_size {
                    let calculated = stream.take().unwrap().finalize();
                    if self.header.uncompressed_crc != 0
                        && self.header.uncompressed_crc != calculated
                    {
                        return Some(Err(McapError::BadChunkCrc {
                            saved: self.header.uncompressed_crc,
                            calculated,
                        }));
                    }
                }
                Some(Ok(record))
            }
        }
    }
}
/// Reads one record out of a chunk's (presumably decompressed) byte stream.
///
/// Only schemas, channels, and messages may appear inside chunks; any other
/// opcode is [`McapError::UnexpectedChunkRecord`]. Unlike the slice reader,
/// payloads must be copied into owned buffers since the stream is transient.
fn read_record_from_chunk_stream<'a, R: Read>(r: &mut R) -> McapResult<records::Record<'a>> {
    use byteorder::{ReadBytesExt, LE};

    let op = r.read_u8()?;
    let len = r.read_u64::<LE>()?;
    debug!("chunk: opcode {op:02X}, length {len}");

    let record = match op {
        op::SCHEMA => {
            // Read the whole record, then split it into header + owned data.
            let mut record = Vec::new();
            r.take(len).read_to_end(&mut record)?;
            if len as usize != record.len() {
                return Err(McapError::UnexpectedEoc);
            }

            let mut c = Cursor::new(&record);
            let header: records::SchemaHeader = c.read_le()?;
            let header_end = c.position();
            let data = record.split_off(header_end as usize);
            if header.data_len as usize != data.len() {
                warn!(
                    "Schema {}'s data length doesn't match the total schema length",
                    header.name
                );
            }
            Record::Schema {
                header,
                data: Cow::Owned(data),
            }
        }
        op::CHANNEL => {
            let mut record = Vec::new();
            r.take(len).read_to_end(&mut record)?;
            if len as usize != record.len() {
                return Err(McapError::UnexpectedEoc);
            }

            let mut c = Cursor::new(&record);
            let channel: records::Channel = c.read_le()?;
            // A trailing remainder is suspicious but tolerated.
            if c.position() != record.len() as u64 {
                warn!(
                    "Channel {}'s length doesn't match its record length",
                    channel.topic
                );
            }
            Record::Channel(channel)
        }
        op::MESSAGE => {
            // Fixed-size message header:
            // channel_id (2) + sequence (4) + log_time (8) + publish_time (8).
            const HEADER_LEN: u64 = 22;
            // Guard the subtraction: a malformed record shorter than its own
            // header would otherwise underflow (panic in debug builds, or a
            // near-u64::MAX `take` in release).
            let data_len = len.checked_sub(HEADER_LEN).ok_or(McapError::UnexpectedEoc)?;

            let mut header_buf = Vec::new();
            r.take(HEADER_LEN).read_to_end(&mut header_buf)?;
            if header_buf.len() as u64 != HEADER_LEN {
                return Err(McapError::UnexpectedEoc);
            }
            let header: records::MessageHeader = Cursor::new(header_buf).read_le()?;

            // The rest of the record is the payload, copied so it can
            // outlive the decompression stream.
            let mut data = Vec::new();
            r.take(data_len).read_to_end(&mut data)?;
            if data.len() as u64 != data_len {
                return Err(McapError::UnexpectedEoc);
            }
            Record::Message {
                header,
                data: Cow::Owned(data),
            }
        }
        wut => return Err(McapError::UnexpectedChunkRecord(wut)),
    };
    trace!(" {:?}", record);
    Ok(record)
}
/// Like [`LinearReader`], but descends into chunks, yielding their contents
/// in place of the chunk records themselves.
pub struct ChunkFlattener<'a> {
    top_level: LinearReader<'a>,
    // `Some` while we're iterating inside a chunk record.
    dechunk: Option<ChunkReader<'a>>,
    // Set after any error; iteration stops permanently.
    malformed: bool,
}
impl<'a> ChunkFlattener<'a> {
    /// Create a flattening reader over the given file, validating both magics.
    pub fn new(buf: &'a [u8]) -> McapResult<Self> {
        Self::new_with_options(buf, enum_set!())
    }

    /// Like [`ChunkFlattener::new`], but with reader [`Options`].
    pub fn new_with_options(buf: &'a [u8], options: EnumSet<Options>) -> McapResult<Self> {
        LinearReader::new_with_options(buf, options).map(|top_level| Self {
            top_level,
            dechunk: None,
            malformed: false,
        })
    }

    // Bytes left in the top-level reader (chunk contents don't count).
    fn bytes_remaining(&self) -> usize {
        self.top_level.bytes_remaining()
    }
}
impl<'a> Iterator for ChunkFlattener<'a> {
    type Item = McapResult<records::Record<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.malformed {
            return None;
        }
        let n: Option<Self::Item> = loop {
            // If we're inside a chunk, drain it before reading on.
            if let Some(d) = &mut self.dechunk {
                match d.next() {
                    Some(d) => break Some(d),
                    None => self.dechunk = None,
                }
            }
            // Not inside a chunk (anymore): take the next top-level record.
            match self.top_level.next() {
                // Chunk records aren't yielded themselves; descend into them.
                Some(Ok(Record::Chunk { header, data })) => {
                    self.dechunk = match ChunkReader::new(header, data) {
                        Ok(d) => Some(d),
                        Err(e) => break Some(Err(e)),
                    };
                }
                // Everything else (record, error, or end) passes through.
                not_a_chunk => break not_a_chunk,
            }
        };
        // Any error — top-level or in-chunk — permanently ends iteration.
        if matches!(n, Some(Err(_))) {
            self.malformed = true;
        }
        n
    }
}
/// Collects schemas and channels as they stream by, erroring on conflicting
/// redefinitions, so messages can be joined to their channel by ID.
#[derive(Debug, Default)]
struct ChannelAccumulator<'a> {
    schemas: HashMap<u16, Arc<Schema<'a>>>,
    channels: HashMap<u16, Arc<Channel<'a>>>,
}
impl<'a> ChannelAccumulator<'a> {
    /// Record a schema, rejecting ID 0 and conflicting redefinitions.
    fn add_schema(&mut self, header: records::SchemaHeader, data: Cow<'a, [u8]>) -> McapResult<()> {
        if header.id == 0 {
            return Err(McapError::InvalidSchemaId);
        }

        let new_schema = Arc::new(Schema {
            name: header.name.clone(),
            encoding: header.encoding,
            data,
        });

        // Re-registering an identical schema is fine; a different one isn't.
        match self.schemas.insert(header.id, Arc::clone(&new_schema)) {
            Some(old) if old != new_schema => Err(McapError::ConflictingSchemas(header.name)),
            _ => Ok(()),
        }
    }

    /// Record a channel, resolving its schema (ID 0 means "no schema").
    fn add_channel(&mut self, chan: records::Channel) -> McapResult<()> {
        let schema = match chan.schema_id {
            0 => None,
            id => match self.schemas.get(&id) {
                Some(s) => Some(Arc::clone(s)),
                None => return Err(McapError::UnknownSchema(chan.topic, id)),
            },
        };

        let new_channel = Arc::new(Channel {
            topic: chan.topic.clone(),
            schema,
            message_encoding: chan.message_encoding,
            metadata: chan.metadata,
        });

        // Re-registering an identical channel is fine; a different one isn't.
        match self.channels.insert(chan.id, Arc::clone(&new_channel)) {
            Some(old) if old != new_channel => Err(McapError::ConflictingChannels(chan.topic)),
            _ => Ok(()),
        }
    }

    /// Look up a channel by ID, bumping its refcount.
    fn get(&self, chan_id: u16) -> Option<Arc<Channel<'a>>> {
        self.channels.get(&chan_id).map(Arc::clone)
    }
}
/// Reads all messages from the MCAP file — in the order they were written —
/// joining each with its channel and schema.
pub struct MessageStream<'a> {
    // Kept around (in addition to `records`) for the data-section CRC check.
    full_file: &'a [u8],
    records: ChunkFlattener<'a>,
    done: bool,
    channeler: ChannelAccumulator<'static>,
}
impl<'a> MessageStream<'a> {
    /// Create a message stream over the given file, validating both magics.
    pub fn new(buf: &'a [u8]) -> McapResult<Self> {
        Self::new_with_options(buf, enum_set!())
    }

    /// Like [`MessageStream::new`], but with reader [`Options`].
    pub fn new_with_options(buf: &'a [u8], options: EnumSet<Options>) -> McapResult<Self> {
        ChunkFlattener::new_with_options(buf, options).map(|records| Self {
            full_file: buf,
            records,
            done: false,
            channeler: ChannelAccumulator::default(),
        })
    }
}
impl<'a> Iterator for MessageStream<'a> {
    type Item = McapResult<Message<'static>>;

    /// Yields the next message, accumulating schemas and channels along the
    /// way so each `Message` carries an `Arc` to its channel.
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }
        let n = loop {
            // Pull records (chunks flattened) until we find a message,
            // an error, or the end of the data section.
            let record = match self.records.next() {
                Some(Ok(rec)) => rec,
                Some(Err(e)) => break Some(Err(e)),
                None => break None,
            };
            match record {
                Record::Schema { header, data } => {
                    // Take ownership of the bytes so the schema is 'static.
                    let data = Cow::Owned(data.into_owned());
                    if let Err(e) = self.channeler.add_schema(header, data) {
                        break Some(Err(e));
                    }
                }
                Record::Channel(chan) => {
                    if let Err(e) = self.channeler.add_channel(chan) {
                        break Some(Err(e));
                    }
                }
                Record::Message { header, data } => {
                    // Messages must reference a channel we've already seen.
                    let channel = match self.channeler.get(header.channel_id) {
                        Some(c) => c,
                        None => {
                            break Some(Err(McapError::UnknownChannel(
                                header.sequence,
                                header.channel_id,
                            )))
                        }
                    };
                    let m = Message {
                        channel,
                        sequence: header.sequence,
                        log_time: header.log_time,
                        publish_time: header.publish_time,
                        data: Cow::Owned(data.into_owned()),
                    };
                    break Some(Ok(m));
                }
                Record::EndOfData(end) => {
                    // CRC of 0 means "not calculated"; otherwise check the
                    // data section read so far.
                    // NOTE(review): this takes the CRC span to run from just
                    // after the leading magic through the end of this
                    // EndOfData record (bytes_remaining() counts from just
                    // past it) and to exclude both magics — confirm against
                    // the MCAP spec's data_section_crc definition.
                    if end.data_section_crc != 0 {
                        let data_section_len = (self.full_file.len() - MAGIC.len() * 2) - self.records.bytes_remaining();
                        let data_section =
                            &self.full_file[MAGIC.len()..MAGIC.len() + data_section_len];
                        let calculated = crc32(data_section);
                        if end.data_section_crc != calculated {
                            break Some(Err(McapError::BadDataCrc {
                                saved: end.data_section_crc,
                                calculated,
                            }));
                        }
                    }
                    // End of the data section: stop yielding messages.
                    break None; }
                _skip => {}
            };
        };
        // Anything but a successful message (error or end) ends the stream.
        if !matches!(n, Some(Ok(_))) {
            self.done = true;
        }
        n
    }
}
/// Size of the footer record on disk: summary_start (8) + summary_offset_start
/// (8) + summary_crc (4) = 20 bytes of body, plus the 8-byte record length
/// and 1-byte opcode.
const FOOTER_LEN: usize = 20 + 8 + 1;

/// Read the footer record from the end of an MCAP file.
pub fn footer(mcap: &[u8]) -> McapResult<records::Footer> {
    // The file must hold at least both magics and a footer record.
    if mcap.len() < MAGIC.len() * 2 + FOOTER_LEN {
        return Err(McapError::UnexpectedEof);
    }
    if !(mcap.starts_with(MAGIC) && mcap.ends_with(MAGIC)) {
        return Err(McapError::BadMagic);
    }

    // The footer sits immediately before the trailing magic.
    let footer_start = mcap.len() - MAGIC.len() - FOOTER_LEN;
    match LinearReader::sans_magic(&mcap[footer_start..]).next() {
        Some(Ok(Record::Footer(f))) => Ok(f),
        _ => Err(McapError::BadFooter),
    }
}
/// Indexes of an MCAP file, parsed from its (optional) summary section.
#[derive(Default, Eq, PartialEq)]
pub struct Summary<'a> {
    pub stats: Option<records::Statistics>,
    /// All channels, indexed by their ID.
    pub channels: HashMap<u16, Arc<Channel<'a>>>,
    /// All schemas, indexed by their ID.
    pub schemas: HashMap<u16, Arc<Schema<'a>>>,
    pub chunk_indexes: Vec<records::ChunkIndex>,
    pub attachment_indexes: Vec<records::AttachmentIndex>,
    pub metadata_indexes: Vec<records::MetadataIndex>,
}
impl fmt::Debug for Summary<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the hash maps as sorted maps so output is deterministic.
        let channels: BTreeMap<_, _> = self.channels.iter().collect();
        let schemas: BTreeMap<_, _> = self.schemas.iter().collect();
        f.debug_struct("Summary")
            .field("stats", &self.stats)
            .field("channels", &channels)
            .field("schemas", &schemas)
            .field("chunk_indexes", &self.chunk_indexes)
            .field("attachment_indexes", &self.attachment_indexes)
            .field("metadata_indexes", &self.metadata_indexes)
            .finish()
    }
}
impl<'a> Summary<'a> {
    /// Read the summary section of the given mcap, if it has one.
    pub fn read(mcap: &'a [u8]) -> McapResult<Option<Self>> {
        let foot = footer(mcap)?;

        // A summary start of 0 means the file has no summary section.
        if foot.summary_start == 0 {
            return Ok(None);
        }

        // A summary CRC of 0 means "not calculated"; otherwise it covers
        // everything from the start of the summary up to (but excluding) the
        // CRC itself, which is the last 4 bytes before the trailing magic.
        if foot.summary_crc != 0 {
            let calculated =
                crc32(&mcap[foot.summary_start as usize..mcap.len() - MAGIC.len() - 4]);
            if foot.summary_crc != calculated {
                return Err(McapError::BadSummaryCrc {
                    saved: foot.summary_crc,
                    calculated,
                });
            }
        }

        let mut summary = Summary::default();
        let mut channeler = ChannelAccumulator::default();

        // The summary runs until the summary offset section (if present),
        // or until the footer record otherwise.
        // Fixed: this previously computed `MAGIC.len() - FOOTER_LEN`
        // (8 - 29), a usize underflow; the footer begins
        // `MAGIC.len() + FOOTER_LEN` bytes before the end of the file.
        let summary_end = match foot.summary_offset_start {
            0 => mcap.len() - MAGIC.len() - FOOTER_LEN,
            sos => sos as usize,
        };
        let summary_buf = &mcap[foot.summary_start as usize..summary_end];

        for record in LinearReader::sans_magic(summary_buf) {
            match record? {
                Record::Statistics(s) => {
                    if summary.stats.is_some() {
                        warn!("Multiple statistics records found in summary");
                    }
                    summary.stats = Some(s);
                }
                Record::Schema { header, data } => channeler.add_schema(header, data)?,
                Record::Channel(c) => channeler.add_channel(c)?,
                Record::ChunkIndex(c) => summary.chunk_indexes.push(c),
                Record::AttachmentIndex(a) => summary.attachment_indexes.push(a),
                Record::MetadataIndex(i) => summary.metadata_indexes.push(i),
                _ => {}
            };
        }

        summary.schemas = channeler.schemas;
        summary.channels = channeler.channels;

        Ok(Some(summary))
    }

    /// Stream messages from the chunk with the given index.
    ///
    /// Channel IDs are resolved against this summary's channel map; an
    /// unknown ID yields `Err(McapError::UnknownChannel)` for that message.
    pub fn stream_chunk(
        &self,
        mcap: &'a [u8],
        index: &records::ChunkIndex,
    ) -> McapResult<impl Iterator<Item = McapResult<Message<'a>>> + '_> {
        let end = (index.chunk_start_offset + index.chunk_length) as usize;
        if mcap.len() < end {
            return Err(McapError::BadIndex);
        }

        // The index must point at exactly one chunk record.
        let mut reader = LinearReader::sans_magic(&mcap[index.chunk_start_offset as usize..end]);
        let (h, d) = match reader.next().ok_or(McapError::BadIndex)? {
            Ok(records::Record::Chunk { header, data }) => (header, data),
            Ok(_other_record) => return Err(McapError::BadIndex),
            Err(e) => return Err(e),
        };
        if reader.next().is_some() {
            return Err(McapError::BadIndex);
        }

        // Pull messages out of the chunk; schemas and channels inside it are
        // already known from the summary, so skip everything else.
        let messages = ChunkReader::new(h, d)?.filter_map(|record| match record {
            Ok(records::Record::Message { header, data }) => {
                let channel = match self.channels.get(&header.channel_id) {
                    Some(c) => c.clone(),
                    None => {
                        return Some(Err(McapError::UnknownChannel(
                            header.sequence,
                            header.channel_id,
                        )));
                    }
                };

                let m = Message {
                    channel,
                    sequence: header.sequence,
                    log_time: header.log_time,
                    publish_time: header.publish_time,
                    data,
                };
                Some(Ok(m))
            }
            Ok(_other_record) => None,
            Err(e) => Some(Err(e)),
        });

        Ok(messages)
    }

    /// Read the message indexes for the given chunk.
    ///
    /// Errors if the chunk has no message index offsets, if an index record
    /// disagrees with the offset table's channel ID, or if a channel is
    /// unknown to this summary.
    pub fn read_message_indexes(
        &self,
        mcap: &[u8],
        index: &records::ChunkIndex,
    ) -> McapResult<HashMap<Arc<Channel>, Vec<records::MessageIndexEntry>>> {
        if index.message_index_offsets.is_empty() {
            return Err(McapError::BadIndex);
        }

        let mut indexes = HashMap::new();

        for (channel_id, offset) in &index.message_index_offsets {
            let offset = *offset as usize;

            // A message index is at least 15 bytes:
            // opcode (1) + length (8) + channel ID (2) + array length (4).
            if mcap.len() < offset + 15 {
                return Err(McapError::BadIndex);
            }

            let mut reader = LinearReader::sans_magic(&mcap[offset..]);
            let index = match reader.next().ok_or(McapError::BadIndex)? {
                Ok(records::Record::MessageIndex(i)) => i,
                Ok(_other_record) => return Err(McapError::BadIndex),
                Err(e) => return Err(e),
            };

            // The offset table and the record must agree on the channel.
            if *channel_id != index.channel_id {
                return Err(McapError::BadIndex);
            }

            let channel = match self.channels.get(&index.channel_id) {
                Some(c) => c,
                None => {
                    // No message sequence number is available here; use 0.
                    return Err(McapError::UnknownChannel(
                        0, index.channel_id,
                    ));
                }
            };

            if indexes.insert(channel.clone(), index.records).is_some() {
                return Err(McapError::ConflictingChannels(channel.topic.clone()));
            }
        }

        Ok(indexes)
    }

    /// Seek to the message at the given index entry inside the given chunk.
    ///
    /// For compressed chunks this decompresses (and discards) everything up
    /// to the message's offset, so seeking each message of a chunk this way
    /// is quadratic — prefer [`Summary::stream_chunk`] for bulk reads.
    pub fn seek_message(
        &self,
        mcap: &'a [u8],
        index: &records::ChunkIndex,
        message: &records::MessageIndexEntry,
    ) -> McapResult<Message> {
        // Get the chunk (header + data) the index points at.
        let end = (index.chunk_start_offset + index.chunk_length) as usize;
        if mcap.len() < end {
            return Err(McapError::BadIndex);
        }

        let mut reader = LinearReader::sans_magic(&mcap[index.chunk_start_offset as usize..end]);
        let (h, d) = match reader.next().ok_or(McapError::BadIndex)? {
            Ok(records::Record::Chunk { header, data }) => (header, data),
            Ok(_other_record) => return Err(McapError::BadIndex),
            Err(e) => return Err(e),
        };
        if reader.next().is_some() {
            return Err(McapError::BadIndex);
        }

        // Advance to `message.offset` bytes into the uncompressed data.
        let mut chunk_reader = ChunkReader::new(h, d)?;
        match &mut chunk_reader.decompressor {
            ChunkDecompressor::Null(reader) => {
                // Uncompressed: skip whole records until the remaining byte
                // count matches the message's offset from the chunk start.
                while reader.bytes_remaining() as u64 > index.uncompressed_size - message.offset {
                    match reader.next() {
                        Some(Ok(_)) => {}
                        Some(Err(e)) => return Err(e),
                        None => return Err(McapError::BadIndex),
                    };
                }
                // Overshooting means the offset wasn't on a record boundary.
                if reader.bytes_remaining() as u64 != index.uncompressed_size - message.offset {
                    return Err(McapError::BadIndex);
                }
            }
            ChunkDecompressor::Compressed(maybe_read) => {
                // The stream is always Some until the whole chunk is read.
                let reader = maybe_read.as_mut().unwrap();
                // Decompress and discard everything before the message.
                io::copy(&mut reader.take(message.offset), &mut io::sink())?;
            }
        }

        // The next record should be exactly the message we were aimed at.
        match chunk_reader.next() {
            Some(Ok(records::Record::Message { header, data })) => {
                let channel = match self.channels.get(&header.channel_id) {
                    Some(c) => c.clone(),
                    None => {
                        return Err(McapError::UnknownChannel(
                            header.sequence,
                            header.channel_id,
                        ));
                    }
                };

                let m = Message {
                    channel,
                    sequence: header.sequence,
                    log_time: header.log_time,
                    publish_time: header.publish_time,
                    data,
                };
                Ok(m)
            }
            Some(Ok(_other_record)) => Err(McapError::BadIndex),
            Some(Err(e)) => Err(e),
            None => Err(McapError::BadIndex),
        }
    }
}
/// Read the attachment pointed at by the given index entry,
/// borrowing its data from the file.
pub fn attachment<'a>(
    mcap: &'a [u8],
    index: &records::AttachmentIndex,
) -> McapResult<Attachment<'a>> {
    let start = index.offset as usize;
    let end = (index.offset + index.length) as usize;
    if mcap.len() < end {
        return Err(McapError::BadIndex);
    }

    let mut span = LinearReader::sans_magic(&mcap[start..end]);
    let (header, data) = match span.next().ok_or(McapError::BadIndex)?? {
        records::Record::Attachment { header, data } => (header, data),
        _other_record => return Err(McapError::BadIndex),
    };
    // The indexed span must contain exactly one attachment record.
    if span.next().is_some() {
        return Err(McapError::BadIndex);
    }

    Ok(Attachment {
        log_time: header.log_time,
        create_time: header.create_time,
        name: header.name,
        content_type: header.content_type,
        data: Cow::Borrowed(data),
    })
}
/// Read the metadata record pointed at by the given index entry.
pub fn metadata(mcap: &[u8], index: &records::MetadataIndex) -> McapResult<records::Metadata> {
    let start = index.offset as usize;
    let end = (index.offset + index.length) as usize;
    if mcap.len() < end {
        return Err(McapError::BadIndex);
    }

    let mut span = LinearReader::sans_magic(&mcap[start..end]);
    let m = match span.next().ok_or(McapError::BadIndex)?? {
        records::Record::Metadata(m) => m,
        _other_record => return Err(McapError::BadIndex),
    };
    // The indexed span must contain exactly one metadata record.
    if span.next().is_some() {
        return Err(McapError::BadIndex);
    }

    Ok(m)
}
// Generates little-endian slice readers (`read_u8`, `read_u64` below) that
// pull one value off the front of a `&[u8]` and advance it — a slice-based
// analogue of `byteorder::ReadBytesExt`.
// NOTE(review): callers must guarantee at least `size_of::<$type>()` bytes
// remain; the slice index below panics otherwise.
macro_rules! reader {
    ($type:ty) => {
        paste::paste! {
            #[inline]
            fn [<read_ $type>](block: &mut &[u8]) -> $type {
                const SIZE: usize = std::mem::size_of::<$type>();
                let res = $type::from_le_bytes(
                    block[0..SIZE].try_into().unwrap()
                );
                *block = &block[SIZE..];
                res
            }
        }
    };
}
reader!(u8);
reader!(u64);
#[cfg(test)]
mod test {
    use super::*;

    // A file consisting of just the two magics (no records) is valid and empty.
    #[test]
    fn only_two_magics() {
        let two_magics = MAGIC.repeat(2);
        let mut reader = LinearReader::new(&two_magics).unwrap();
        assert!(reader.next().is_none());
    }

    // Without IgnoreEndMagic, a missing end magic is rejected outright.
    #[test]
    fn only_one_magic() {
        assert!(matches!(LinearReader::new(MAGIC), Err(McapError::BadMagic)));
    }

    // IgnoreEndMagic still accepts (and strips) a present end magic.
    #[test]
    fn only_two_magic_with_ignore_end_magic() {
        let two_magics = MAGIC.repeat(2);
        let mut reader =
            LinearReader::new_with_options(&two_magics, enum_set!(Options::IgnoreEndMagic))
                .unwrap();
        assert!(reader.next().is_none());
    }

    // With IgnoreEndMagic, a start-magic-only file is accepted and empty.
    #[test]
    fn only_one_magic_with_ignore_end_magic() {
        let mut reader =
            LinearReader::new_with_options(MAGIC, enum_set!(Options::IgnoreEndMagic)).unwrap();
        assert!(reader.next().is_none());
    }
}