use crate::io::*;
use crate::types::*;
use std::io::{BufRead, BufReader, Read, Seek, SeekFrom, Write};
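/// Reads FST waveform files.
///
/// The sketch below shows the intended call order; it is illustrative only (the file path
/// and the surrounding error handling are placeholders, not part of this crate):
///
/// ```ignore
/// use std::{fs::File, io::BufReader};
///
/// let file = BufReader::new(File::open("wave.fst")?);
/// let mut reader = FstReader::open(file)?;
/// let header = reader.get_header();
/// println!("time range: {} .. {}", header.start_time, header.end_time);
/// reader.read_hierarchy(|entry| {
///     // build an in-memory representation of the scopes and variables
/// })?;
/// reader.read_signals(&FstFilter::all(), |time, handle, value| {
///     // handle one value change
/// })?;
/// ```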
pub struct FstReader<R: BufRead + Seek> {
input: InputVariant<R>,
meta: MetaData,
}
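/// The input the reader actually works on: either the reader supplied by the user, or a
/// temporary file holding the decompressed payload of a gzip-wrapped FST file.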
enum InputVariant<R: BufRead + Seek> {
Original(R),
Uncompressed(BufReader<std::fs::File>),
}
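/// Restricts what [`FstReader::read_signals`] visits: an optional time window
/// (`start` / `end`) and an optional list of signal handles (`include`).
/// A `None` end defaults to the file's end time, a `None` include selects all signals.
///
/// Illustrative sketch of the constructors (the handles are placeholders):
///
/// ```ignore
/// // everything in the file
/// let all = FstFilter::all();
/// // only the time window from 100 to 200
/// let window = FstFilter::filter_time(100, 200);
/// // only the given signals, over the whole time range
/// let some = FstFilter::filter_signals(vec![handle_a, handle_b]);
/// ```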
pub struct FstFilter {
pub start: u64,
pub end: Option<u64>,
pub include: Option<Vec<FstSignalHandle>>,
}
impl FstFilter {
pub fn all() -> Self {
FstFilter {
start: 0,
end: None,
include: None,
}
}
pub fn new(start: u64, end: u64, signals: Vec<FstSignalHandle>) -> Self {
FstFilter {
start,
end: Some(end),
include: Some(signals),
}
}
pub fn filter_time(start: u64, end: u64) -> Self {
FstFilter {
start,
end: Some(end),
include: None,
}
}
pub fn filter_signals(signals: Vec<FstSignalHandle>) -> Self {
FstFilter {
start: 0,
end: None,
include: Some(signals),
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct FstHeader {
    /// time of the first sample
    pub start_time: u64,
    /// time of the last sample
    pub end_time: u64,
    /// number of variables declared in the hierarchy
    pub var_count: u64,
    /// highest signal handle in use (the number of unique signals)
    pub max_handle: u64,
    /// version string recorded by the writer
    pub version: String,
    /// date string recorded by the writer
    pub date: String,
    /// time resolution as a power-of-ten exponent in seconds (e.g. -9 for nanoseconds)
    pub timescale_exponent: i8,
}
impl<R: BufRead + Seek> FstReader<R> {
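    /// Opens an FST file: reads the header block and indexes the remaining sections
    /// without building the time table.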
pub fn open(input: R) -> Result<Self> {
Self::open_internal(input, false)
}
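    /// Like [`Self::open`], but additionally collects the time table of every data
    /// section so that it can later be queried with [`Self::get_time_table`].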
pub fn open_and_read_time_table(input: R) -> Result<Self> {
Self::open_internal(input, true)
}
fn open_internal(mut input: R, read_time_table: bool) -> Result<Self> {
let uncompressed_input = uncompress_gzip_wrapper(&mut input)?;
match uncompressed_input {
None => {
let mut header_reader = HeaderReader::new(input);
header_reader.read(read_time_table)?;
                let (input, meta) = header_reader.into_input_and_meta_data()?;
Ok(FstReader {
input: InputVariant::Original(input),
meta,
})
}
Some(uc) => {
let mut header_reader = HeaderReader::new(uc);
header_reader.read(read_time_table)?;
                let (uc2, meta) = header_reader.into_input_and_meta_data()?;
Ok(FstReader {
input: InputVariant::Uncompressed(uc2),
meta,
})
}
}
}
pub fn get_header(&self) -> FstHeader {
FstHeader {
start_time: self.meta.header.start_time,
end_time: self.meta.header.end_time,
var_count: self.meta.header.var_count,
max_handle: self.meta.header.max_var_id_code,
version: self.meta.header.version.clone(),
date: self.meta.header.date.clone(),
timescale_exponent: self.meta.header.timescale_exponent,
}
}
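    /// Returns the time table collected by [`Self::open_and_read_time_table`], or `None`
    /// if the reader was opened with [`Self::open`].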
    pub fn get_time_table(&self) -> Option<&[u64]> {
        self.meta.time_table.as_deref()
    }
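    /// Reads the hierarchy section and invokes `callback` once per hierarchy entry, in
    /// file order.
    ///
    /// Minimal illustrative sketch:
    ///
    /// ```ignore
    /// let mut count = 0usize;
    /// reader.read_hierarchy(|_entry| count += 1)?;
    /// println!("hierarchy entries: {count}");
    /// ```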
pub fn read_hierarchy(&mut self, callback: impl FnMut(FstHierarchyEntry)) -> Result<()> {
match &mut self.input {
InputVariant::Original(input) => read_hierarchy(input, &self.meta, callback),
InputVariant::Uncompressed(input) => read_hierarchy(input, &self.meta, callback),
}
}
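    /// Reads value changes from every data section that overlaps the filter's time window
    /// and invokes `callback` with `(time, handle, value)` for each change of an included
    /// signal.
    ///
    /// Minimal illustrative sketch:
    ///
    /// ```ignore
    /// reader.read_signals(&FstFilter::all(), |time, handle, value| match value {
    ///     FstSignalValue::String(bytes) => {
    ///         println!("{time} #{}: {}", handle.get_index(), String::from_utf8_lossy(bytes))
    ///     }
    ///     FstSignalValue::Real(r) => println!("{time} #{}: {r}", handle.get_index()),
    /// })?;
    /// ```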
pub fn read_signals(
&mut self,
filter: &FstFilter,
callback: impl FnMut(u64, FstSignalHandle, FstSignalValue),
) -> Result<()> {
let signal_count = self.meta.signals.len();
let signal_mask = if let Some(signals) = &filter.include {
let mut signal_mask = BitMask::repeat(false, signal_count);
for sig in signals {
let signal_idx = sig.get_index();
signal_mask.set(signal_idx, true);
}
signal_mask
} else {
BitMask::repeat(true, signal_count)
};
let data_filter = DataFilter {
start: filter.start,
end: filter.end.unwrap_or(self.meta.header.end_time),
signals: signal_mask,
};
match &mut self.input {
InputVariant::Original(input) => {
read_signals(input, &self.meta, &data_filter, callback)
}
InputVariant::Uncompressed(input) => {
read_signals(input, &self.meta, &data_filter, callback)
}
}
}
}
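/// A single value change handed to the [`FstReader::read_signals`] callback: bit-vector
/// and string values arrive as raw bytes, real-valued signals as an `f64`.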
pub enum FstSignalValue<'a> {
String(&'a [u8]),
Real(f64),
}
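/// Checks whether `input` looks like an FST file by walking its block structure (a block
/// type byte followed by a 64-bit section length). The reader is rewound to the start
/// before returning.
///
/// Minimal illustrative sketch (the path is a placeholder):
///
/// ```ignore
/// let mut file = std::fs::File::open("maybe.fst")?;
/// if is_fst_file(&mut file) {
///     // wrap the file in a BufReader and hand it to FstReader::open
/// }
/// ```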
pub fn is_fst_file(input: &mut (impl Read + Seek)) -> bool {
let is_fst = matches!(internal_check_fst_file(input), Ok(true));
let _ = input.seek(SeekFrom::Start(0));
is_fst
}
fn internal_check_fst_file(input: &mut (impl Read + Seek)) -> Result<bool> {
loop {
let _block_tpe = match read_block_tpe(input) {
Err(ReaderError::Io(_)) => {
break;
}
Err(other) => return Err(other),
Ok(tpe) => tpe,
};
let section_length = read_u64(input)?;
input.seek(SeekFrom::Current((section_length as i64) - 8))?;
}
Ok(true)
}
fn read_hierarchy(
input: &mut (impl Read + Seek),
meta: &MetaData,
mut callback: impl FnMut(FstHierarchyEntry),
) -> Result<()> {
input.seek(SeekFrom::Start(meta.hierarchy_offset))?;
let bytes = read_hierarchy_bytes(input, meta.hierarchy_compression)?;
let mut input = bytes.as_slice();
let mut handle_count = 0u32;
while let Some(entry) = read_hierarchy_entry(&mut input, &mut handle_count)? {
callback(entry);
}
Ok(())
}
fn read_signals(
input: &mut (impl Read + Seek),
meta: &MetaData,
filter: &DataFilter,
mut callback: impl FnMut(u64, FstSignalHandle, FstSignalValue),
) -> Result<()> {
let mut reader = DataReader {
input,
meta,
filter,
callback: &mut callback,
};
reader.read()
}
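/// If the file starts with a GZIP wrapper block, decompresses its payload into an
/// anonymous temporary file and returns a buffered reader over it; otherwise rewinds the
/// input and returns `None`.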
fn uncompress_gzip_wrapper(
input: &mut (impl Read + Seek),
) -> Result<Option<BufReader<std::fs::File>>> {
let block_tpe = read_block_tpe(input)?;
if block_tpe != BlockType::GZipWrapper {
input.seek(SeekFrom::Start(0))?;
Ok(None)
} else {
let section_length = read_u64(input)?;
let uncompress_length = read_u64(input)? as usize;
if section_length == 0 {
return Err(ReaderError::NotFinishedCompressing());
}
        // decompress the gzip payload into an anonymous temporary file in 32 KiB chunks
        let mut target = tempfile::tempfile()?;
        let mut decoder = flate2::read::GzDecoder::new(input);
        let mut buf = vec![0u8; 32768];
        let mut remaining = uncompress_length;
        while remaining > 0 {
            let read_len = std::cmp::min(buf.len(), remaining);
            remaining -= read_len;
            decoder.read_exact(&mut buf[..read_len])?;
            target.write_all(&buf[..read_len])?;
        }
target.seek(SeekFrom::Start(0))?;
let new_input = std::io::BufReader::new(target);
Ok(Some(new_input))
}
}
#[derive(Debug)]
struct MetaData {
header: Header,
signals: Vec<SignalInfo>,
#[allow(dead_code)]
blackouts: Vec<BlackoutData>,
data_sections: Vec<DataSectionInfo>,
float_endian: FloatingPointEndian,
hierarchy_compression: HierarchyCompression,
hierarchy_offset: u64,
time_table: Option<Vec<u64>>,
}
pub type Result<T> = std::result::Result<T, ReaderError>;
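/// Scans the block structure of the file once and collects the [`MetaData`] required for
/// all subsequent reads.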
struct HeaderReader<R: Read + Seek> {
input: R,
header: Option<Header>,
signals: Option<Vec<SignalInfo>>,
blackouts: Option<Vec<BlackoutData>>,
data_sections: Vec<DataSectionInfo>,
float_endian: FloatingPointEndian,
hierarchy: Option<(HierarchyCompression, u64)>,
time_table: Option<Vec<u64>>,
}
impl<R: Read + Seek> HeaderReader<R> {
fn new(input: R) -> Self {
HeaderReader {
input,
header: None,
signals: None,
blackouts: None,
data_sections: Vec::default(),
float_endian: FloatingPointEndian::Little,
hierarchy: None,
time_table: None,
}
}
fn read_data(&mut self, tpe: &BlockType) -> Result<()> {
let file_offset = self.input.stream_position()?;
let section_length = read_u64(&mut self.input)?;
let start_time = read_u64(&mut self.input)?;
let end_time = read_u64(&mut self.input)?;
        // when requested, collect this section's time table up front
        if let Some(table) = &mut self.time_table {
            let (_, mut time_chain) =
                read_time_chain(&mut self.input, file_offset, section_length)?;
            let is_first_section = table.is_empty();
            // the first section may carry an implicit sample at its start time (the frame)
            if is_first_section && time_chain[0] > start_time {
                table.push(start_time);
            }
            table.append(&mut time_chain);
            // seek back to just after the section header (length, start time, end time)
            self.input.seek(SeekFrom::Start(file_offset + 3 * 8))?;
        }
        // skip the remainder of the section
        self.skip(section_length, 3 * 8)?;
let kind = DataSectionKind::from_block_type(tpe).unwrap();
let info = DataSectionInfo {
file_offset,
start_time,
end_time,
kind,
};
self.data_sections.push(info);
Ok(())
}
fn skip(&mut self, section_length: u64, already_read: i64) -> Result<u64> {
Ok(self
.input
.seek(SeekFrom::Current((section_length as i64) - already_read))?)
}
fn read_hierarchy(&mut self, compression: HierarchyCompression) -> Result<()> {
let file_offset = self.input.stream_position()?;
let section_length = read_u64(&mut self.input)?;
self.skip(section_length, 8)?;
assert!(
self.hierarchy.is_none(),
"Only a single hierarchy block is expected!"
);
self.hierarchy = Some((compression, file_offset));
Ok(())
}
fn read(&mut self, read_time_table: bool) -> Result<()> {
if read_time_table {
self.time_table = Some(Vec::new());
}
loop {
let block_tpe = match read_block_tpe(&mut self.input) {
Err(ReaderError::Io(_)) => {
break;
}
Err(other) => return Err(other),
Ok(tpe) => tpe,
};
match block_tpe {
BlockType::Header => {
let (header, endian) = read_header(&mut self.input)?;
self.header = Some(header);
self.float_endian = endian;
}
                BlockType::VcData
                | BlockType::VcDataDynamicAlias
                | BlockType::VcDataDynamicAlias2 => self.read_data(&block_tpe)?,
BlockType::Blackout => {
self.blackouts = Some(read_blackout(&mut self.input)?);
}
BlockType::Geometry => {
self.signals = Some(read_geometry(&mut self.input)?);
}
BlockType::Hierarchy => self.read_hierarchy(HierarchyCompression::ZLib)?,
BlockType::HierarchyLZ4 => self.read_hierarchy(HierarchyCompression::Lz4)?,
BlockType::HierarchyLZ4Duo => self.read_hierarchy(HierarchyCompression::Lz4Duo)?,
BlockType::GZipWrapper => panic!("GZip Wrapper should have been handled earlier!"),
BlockType::Skip => {
let section_length = read_u64(&mut self.input)?;
self.skip(section_length, 8)?;
}
};
}
Ok(())
}
fn into_input_and_meta_data(mut self) -> Result<(R, MetaData)> {
        self.input.seek(SeekFrom::Start(0))?;
        // unwrap the hierarchy info once instead of once per field
        let (hierarchy_compression, hierarchy_offset) = self.hierarchy.unwrap();
        let meta = MetaData {
            header: self.header.unwrap(),
            signals: self.signals.unwrap(),
            blackouts: self.blackouts.unwrap_or_default(),
            data_sections: self.data_sections,
            float_endian: self.float_endian,
            hierarchy_compression,
            hierarchy_offset,
            time_table: self.time_table,
        };
Ok((self.input, meta))
}
}
struct DataReader<'a, R: Read + Seek, F: FnMut(u64, FstSignalHandle, FstSignalValue)> {
input: &'a mut R,
meta: &'a MetaData,
filter: &'a DataFilter,
callback: &'a mut F,
}
impl<'a, R: Read + Seek, F: FnMut(u64, FstSignalHandle, FstSignalValue)> DataReader<'a, R, F> {
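    // Decodes the value-change part of one data section. Each selected signal's packed
    // stream is unpacked into the shared buffer `mu`; `tc_head` and `scatter_pointer` then
    // form per-time-index linked lists so that every iteration of the time loop only
    // visits the signals that actually change at that time.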
fn read_value_changes(
&mut self,
section_kind: DataSectionKind,
section_start: u64,
section_length: u64,
time_section_length: u64,
time_chain: &[u64],
) -> Result<()> {
let (max_handle, _) = read_variant_u64(&mut self.input)?;
let vc_start = self.input.stream_position()?;
let packtpe = ValueChangePackType::from_u8(read_u8(&mut self.input)?);
let chain_len_offset = section_start + section_length - time_section_length - 8;
let (chain_table, chain_table_lengths) = read_chain_table(
&mut self.input,
chain_len_offset,
section_kind,
max_handle,
vc_start,
)?;
        // all selected signals' unpacked value-change streams, concatenated
        let mut mu: Vec<u8> = Vec::new();
        // per-signal read offset into `mu`
        let mut head_pointer: Vec<u32> = Vec::with_capacity(max_handle as usize);
        // per-signal number of stream bytes not yet consumed
        let mut length_remaining: Vec<u32> = Vec::with_capacity(max_handle as usize);
        // `tc_head[t]` heads a linked list (1-based signal indices, 0 = end) of signals
        // whose next change falls on time index `t`; `scatter_pointer[s]` is the next link
        let mut scatter_pointer = vec![0u32; max_handle as usize];
        let mut tc_head = vec![0u32; std::cmp::max(1, time_chain.len())];
for (signal_idx, (entry, length)) in chain_table
.iter()
.zip(chain_table_lengths.iter())
.take(max_handle as usize)
.enumerate()
{
            // only unpack signals that have value-change data and are selected by the filter
            if *entry != 0 && self.filter.signals.is_set(signal_idx) {
                self.input.seek(SeekFrom::Start(vc_start + entry))?;
                let mut bytes =
                    read_packed_signal_value_bytes(&mut self.input, *length, packtpe)?;
                let len = self.meta.signals[signal_idx].len();
                let tdelta = if len == 1 {
                    read_one_bit_signal_time_delta(&bytes, 0)?
                } else {
                    read_multi_bit_signal_time_delta(&bytes, 0)?
                };
                head_pointer.push(mu.len() as u32);
                length_remaining.push(bytes.len() as u32);
                mu.append(&mut bytes);
                // link the signal into the list for the time index of its first change
                scatter_pointer[signal_idx] = tc_head[tdelta];
                tc_head[tdelta] = signal_idx as u32 + 1;
            }
            // keep `head_pointer` / `length_remaining` indexable by signal index by pushing
            // dummy entries for signals that were skipped above
            if head_pointer.len() == signal_idx {
                head_pointer.push(1234);
                length_remaining.push(1234);
            }
}
        // walk the time table; at each time step, drain the linked list of signals whose
        // next value change is scheduled for this time index
        for (time_id, time) in time_chain.iter().enumerate() {
            if *time > self.filter.end {
                break;
            }
            while tc_head[time_id] != 0 {
                // pop one signal off the list for this time index
                let signal_id = (tc_head[time_id] - 1) as usize;
                let mut mu_slice = &mu.as_slice()[head_pointer[signal_id] as usize..];
                let (vli, skiplen) = read_variant_u32(&mut mu_slice)?;
                let signal_len = self.meta.signals[signal_id].len();
                let signal_handle = FstSignalHandle::from_index(signal_id);
                // decode one value change; `len` is the number of payload bytes consumed
                let len = match signal_len {
                    // single-bit signal: the value is encoded in the varint itself
                    1 => {
                        let value = one_bit_signal_value_to_char(vli);
                        let value_buf = [value];
                        (self.callback)(*time, signal_handle, FstSignalValue::String(&value_buf));
                        0
                    }
                    // variable-length signal: an explicit byte length precedes the payload
                    0 => {
let (len, skiplen2) = read_variant_u32(&mut mu_slice)?;
let value = read_bytes(&mut mu_slice, len as usize)?;
(self.callback)(*time, signal_handle, FstSignalValue::String(&value));
len + skiplen2
}
                    // fixed-width signal: either a digital bit vector or a real value
                    len => {
                        let signal_len = len as usize;
                        if !self.meta.signals[signal_id].is_real() {
                            // an even varint marks a bit-packed two-state value that must be
                            // expanded; an odd varint means one byte per bit
                            let (value, len) = if (vli & 1) == 0 {
                                let read_len = int_div_ceil(signal_len, 8);
let bytes = read_bytes(&mut mu_slice, read_len)?;
(
multi_bit_digital_signal_to_chars(&bytes, signal_len),
read_len as u32,
)
} else {
(read_bytes(&mut mu_slice, signal_len)?, len)
};
(self.callback)(*time, signal_handle, FstSignalValue::String(&value));
len
} else {
assert_eq!(vli & 1, 1, "TODO: implement support for rare packed case");
let value = read_f64(&mut mu_slice, self.meta.float_endian)?;
(self.callback)(*time, signal_handle, FstSignalValue::Real(value));
8
}
}
};
                // advance this signal's stream past the varint and payload just consumed
                let total_skiplen = skiplen + len;
                head_pointer[signal_id] += total_skiplen;
                length_remaining[signal_id] -= total_skiplen;
                // unlink the signal from the current time index ...
                tc_head[time_id] = scatter_pointer[signal_id];
                scatter_pointer[signal_id] = 0;
                // ... and, if it has more changes left, re-insert it at its next change time
                if length_remaining[signal_id] > 0 {
                    let tdelta = if signal_len == 1 {
                        read_one_bit_signal_time_delta(&mu, head_pointer[signal_id])?
                    } else {
                        read_multi_bit_signal_time_delta(&mu, head_pointer[signal_id])?
                    };
                    scatter_pointer[signal_id] = tc_head[time_id + tdelta];
                    tc_head[time_id + tdelta] = (signal_id + 1) as u32;
                }
}
}
Ok(())
}
fn read(&mut self) -> Result<()> {
let sections = self.meta.data_sections.clone();
let relevant_sections = sections
.iter()
.filter(|s| self.filter.end >= s.start_time && s.end_time >= self.filter.start);
for (sec_num, section) in relevant_sections.enumerate() {
self.input.seek(SeekFrom::Start(section.file_offset))?;
let section_length = read_u64(&mut self.input)?;
let start_time = read_u64(&mut self.input)?;
let end_time = read_u64(&mut self.input)?;
assert_eq!(start_time, section.start_time);
assert_eq!(end_time, section.end_time);
let is_first_section = sec_num == 0;
let (time_section_length, time_chain) =
read_time_chain(&mut self.input, section.file_offset, section_length)?;
            // the frame at the start of the first relevant section holds every signal's
            // value at the section start; emit it only if the first time entry comes later
            if is_first_section && time_chain[0] > start_time {
read_frame(
&mut self.input,
section.file_offset,
section_length,
&self.meta.signals,
&self.filter.signals,
self.meta.float_endian,
start_time,
self.callback,
)?;
} else {
skip_frame(&mut self.input, section.file_offset)?;
}
self.read_value_changes(
section.kind,
section.file_offset,
section_length,
time_section_length,
&time_chain,
)?;
}
Ok(())
}
}