fst_reader/io.rs

1// Copyright 2023 The Regents of the University of California
2// Copyright 2024 Cornell University
3// released under BSD 3-Clause License
4// author: Kevin Laeufer <laeufer@cornell.edu>
5// Contains basic read and write operations for FST files.
6
7use crate::types::*;
8use crate::FstSignalValue;
9use num_enum::{TryFromPrimitive, TryFromPrimitiveError};
10use std::cmp::Ordering;
11#[cfg(test)]
12use std::io::Write;
13use std::io::{Read, Seek, SeekFrom};
14use std::num::NonZeroU32;
15use thiserror::Error;
16
17#[derive(Debug, Error)]
18pub enum ReaderError {
19    #[error("failed to read a null terminated string because it exceeds the expected size of {0} bytes.\n{1}")]
20    CStringTooLong(usize, String),
21    #[error("failed to parse an enum table string: {0}\n{1}")]
22    EnumTableString(String, String),
23    #[error("failed to read leb128 integer, more than the expected {0} bits")]
24    Leb128(u32),
25    #[error("failed to parse an integer")]
26    ParseInt(#[from] std::num::ParseIntError),
27    #[error("failed to decompress with lz4")]
28    Lz4Decompress(#[from] lz4_flex::block::DecompressError),
29    #[error("failed to decompress with zlib")]
30    ZLibDecompress(#[from] miniz_oxide::inflate::DecompressError),
31    #[error("failed to parse a gzip header: {0}")]
32    GZipHeader(String),
33    #[error("failed to decode string")]
34    Utf8(#[from] std::str::Utf8Error),
35    #[error("failed to decode string")]
36    Utf8String(#[from] std::string::FromUtf8Error),
37    #[error("I/O operation failed")]
38    Io(#[from] std::io::Error),
39    #[error("The FST file is still being compressed into its final GZIP wrapper.")]
40    NotFinishedCompressing(),
41    #[error("Unexpected block type")]
42    BlockType(#[from] TryFromPrimitiveError<BlockType>),
43    #[error("Unexpected file type")]
44    FileType(#[from] TryFromPrimitiveError<FileType>),
45    #[error("Unexpected vhdl variable type")]
46    FstVhdlVarType(#[from] TryFromPrimitiveError<FstVhdlVarType>),
47    #[error("Unexpected vhdl data type")]
48    FstVhdlDataType(#[from] TryFromPrimitiveError<FstVhdlDataType>),
49    #[error("Unexpected variable type")]
50    FstVarType(#[from] TryFromPrimitiveError<FstVarType>),
51    #[error("Unexpected scope type")]
52    FstScopeType(#[from] TryFromPrimitiveError<FstScopeType>),
53    #[error("Unexpected variable direction")]
54    FstVarDirection(#[from] TryFromPrimitiveError<FstVarDirection>),
55    #[error("Unexpected attribute type")]
56    AttributeType(#[from] TryFromPrimitiveError<AttributeType>),
57    #[error("Unexpected misc attribute type")]
58    MiscType(#[from] TryFromPrimitiveError<MiscType>),
59}
60
61pub type ReadResult<T> = Result<T, ReaderError>;
62
63#[cfg(test)]
64pub type WriteResult<T> = Result<T, ReaderError>;
65
66//////////////// Primitives
67
68#[inline]
69pub(crate) fn read_variant_u32(input: &mut impl Read) -> ReadResult<(u32, u32)> {
70    let mut byte = [0u8; 1];
71    let mut res = 0u32;
72    // 32bit / 7bit = ~4.6
73    for ii in 0..5u32 {
74        input.read_exact(&mut byte)?;
75        let value = (byte[0] as u32) & 0x7f;
76        res |= value << (7 * ii);
77        if (byte[0] & 0x80) == 0 {
78            return Ok((res, ii + 1));
79        }
80    }
81    Err(ReaderError::Leb128(32))
82}
83
84#[inline]
85pub(crate) fn read_variant_i64(input: &mut impl Read) -> ReadResult<i64> {
86    let mut byte = [0u8; 1];
87    let mut res = 0u64;
88    // 64bit / 7bit = ~9.1
89    for ii in 0..10 {
90        input.read_exact(&mut byte)?;
91        let value = (byte[0] & 0x7f) as u64;
92        let shift_by = 7 * ii;
93        res |= value << shift_by;
94        if (byte[0] & 0x80) == 0 {
95            // sign extend
96            let sign_bit_set = (byte[0] & 0x40) != 0;
97            if (shift_by + 7) < u64::BITS && sign_bit_set {
98                res |= u64::MAX << (shift_by + 7);
99            }
100            return Ok(res as i64);
101        }
102    }
103    Err(ReaderError::Leb128(64))
104}
105
106#[inline]
107pub(crate) fn read_variant_u64(input: &mut impl Read) -> ReadResult<(u64, usize)> {
108    let mut byte = [0u8; 1];
109    let mut res = 0u64;
110    for ii in 0..10 {
111        // 64bit / 7bit = ~9.1
112        input.read_exact(&mut byte)?;
113        let value = (byte[0] as u64) & 0x7f;
114        res |= value << (7 * ii);
115        if (byte[0] & 0x80) == 0 {
116            return Ok((res, ii + 1));
117        }
118    }
119    Err(ReaderError::Leb128(64))
120}
121
122#[cfg(test)]
123#[inline]
124pub(crate) fn write_variant_u64(output: &mut impl Write, mut value: u64) -> WriteResult<usize> {
125    // often, the value is small
126    if value <= 0x7f {
127        let byte = [value as u8; 1];
128        output.write_all(&byte)?;
129        return Ok(1);
130    }
131
132    let mut bytes = Vec::with_capacity(10);
133    while value != 0 {
134        let next_value = value >> 7;
135        let mask: u8 = if next_value == 0 { 0 } else { 0x80 };
136        bytes.push((value & 0x7f) as u8 | mask);
137        value = next_value;
138    }
139    assert!(bytes.len() <= 10);
140    output.write_all(&bytes)?;
141    Ok(bytes.len())
142}
143
144#[cfg(test)]
145#[inline]
146pub(crate) fn write_variant_i64(output: &mut impl Write, mut value: i64) -> WriteResult<usize> {
147    // often, the value is small
148    if value <= 63 && value >= -64 {
149        let byte = [value as u8 & 0x7f; 1];
150        output.write_all(&byte)?;
151        return Ok(1);
152    }
153
154    // calculate the number of bits we need to represent
155    let bits = if value >= 0 {
156        64 - value.leading_zeros() + 1
157    } else {
158        64 - value.leading_ones() + 1
159    };
160    let num_bytes = bits.div_ceil(7) as usize;
161
162    let mut bytes = Vec::with_capacity(num_bytes);
163    for ii in 0..num_bytes {
164        let mark = if ii == num_bytes - 1 { 0 } else { 0x80 };
165        bytes.push((value & 0x7f) as u8 | mark);
166        value >>= 7;
167    }
168    output.write_all(&bytes)?;
169    Ok(bytes.len())
170}
171
172#[cfg(test)]
173#[inline]
174pub(crate) fn write_variant_u32(output: &mut impl Write, value: u32) -> WriteResult<usize> {
175    write_variant_u64(output, value as u64)
176}
177
178#[inline]
179pub(crate) fn read_u64(input: &mut impl Read) -> ReadResult<u64> {
180    let mut buf = [0u8; 8];
181    input.read_exact(&mut buf)?;
182    Ok(u64::from_be_bytes(buf))
183}
184
185#[cfg(test)]
186#[inline]
187pub(crate) fn write_u64(output: &mut impl Write, value: u64) -> WriteResult<()> {
188    let buf = value.to_be_bytes();
189    output.write_all(&buf)?;
190    Ok(())
191}
192
193#[inline]
194pub(crate) fn read_u8(input: &mut impl Read) -> ReadResult<u8> {
195    let mut buf = [0u8; 1];
196    input.read_exact(&mut buf)?;
197    Ok(buf[0])
198}
199
200#[cfg(test)]
201fn write_u8(output: &mut impl Write, value: u8) -> WriteResult<()> {
202    let buf = value.to_be_bytes();
203    output.write_all(&buf)?;
204    Ok(())
205}
206
207#[inline]
208pub(crate) fn read_i8(input: &mut impl Read) -> ReadResult<i8> {
209    let mut buf = [0u8; 1];
210    input.read_exact(&mut buf)?;
211    Ok(i8::from_be_bytes(buf))
212}
213
214#[cfg(test)]
215#[inline]
216fn write_i8(output: &mut impl Write, value: i8) -> WriteResult<()> {
217    let buf = value.to_be_bytes();
218    output.write_all(&buf)?;
219    Ok(())
220}
221
222pub(crate) fn read_c_str(input: &mut impl Read, max_len: usize) -> ReadResult<String> {
223    let mut bytes: Vec<u8> = Vec::with_capacity(32);
224    for _ in 0..max_len {
225        let byte = read_u8(input)?;
226        if byte == 0 {
227            return Ok(String::from_utf8(bytes)?);
228        } else {
229            bytes.push(byte);
230        }
231    }
232    Err(ReaderError::CStringTooLong(
233        max_len,
234        String::from_utf8_lossy(&bytes).to_string(),
235    ))
236}
237
238#[cfg(test)]
239fn write_c_str(output: &mut impl Write, value: &str) -> WriteResult<()> {
240    let bytes = value.as_bytes();
241    output.write_all(bytes)?;
242    write_u8(output, 0)?;
243    Ok(())
244}
245
246#[inline] // inline to specialize on length
247pub(crate) fn read_c_str_fixed_length(input: &mut impl Read, len: usize) -> ReadResult<String> {
248    let mut bytes = read_bytes(input, len)?;
249    let zero_index = bytes.iter().position(|b| *b == 0u8).unwrap_or(len - 1);
250    let str_len = zero_index;
251    bytes.truncate(str_len);
252    Ok(String::from_utf8(bytes)?)
253}
254
255#[cfg(test)]
257#[inline]
258fn write_c_str_fixed_length(
259    output: &mut impl Write,
260    value: &str,
261    max_len: usize,
262) -> WriteResult<()> {
263    let bytes = value.as_bytes();
264    if bytes.len() >= max_len {
265        todo!("Return error.")
266    }
267    output.write_all(bytes)?;
268    let zeros = vec![0u8; max_len - bytes.len()];
269    output.write_all(&zeros)?;
270    Ok(())
271}
272
273const RCV_STR: [u8; 8] = [b'x', b'z', b'h', b'u', b'w', b'l', b'-', b'?'];
274#[inline]
275pub(crate) fn one_bit_signal_value_to_char(vli: u32) -> u8 {
276    if (vli & 1) == 0 {
277        (((vli >> 1) & 1) as u8) | b'0'
278    } else {
279        RCV_STR[((vli >> 1) & 7) as usize]
280    }
281}
282
283/// Decodes a digital (1/0) signal. This is indicated by bit0 in vli being cleared.
284#[inline]
285pub(crate) fn multi_bit_digital_signal_to_chars(bytes: &[u8], len: usize, output: &mut Vec<u8>) {
286    output.resize(len, 0);
287    for (ii, out) in output.iter_mut().enumerate() {
288        let byte_id = ii / 8;
289        let bit_id = 7 - (ii & 7);
290        let bit = (bytes[byte_id] >> bit_id) & 1;
291        *out = bit | b'0';
292    }
293}
294
295pub(crate) fn read_one_bit_signal_time_delta(bytes: &[u8], offset: u32) -> ReadResult<usize> {
296    let mut slice = &bytes[(offset as usize)..];
297    let (vli, _) = read_variant_u32(&mut slice)?;
298    let shift_count = 2u32 << (vli & 1);
299    Ok((vli >> shift_count) as usize)
300}
301
302pub(crate) fn read_multi_bit_signal_time_delta(bytes: &[u8], offset: u32) -> ReadResult<usize> {
303    let mut slice = &bytes[(offset as usize)..];
304    let (vli, _) = read_variant_u32(&mut slice)?;
305    Ok((vli >> 1) as usize)
306}
307
308/// Reads ZLib compressed bytes.
309pub(crate) fn read_zlib_compressed_bytes(
310    input: &mut (impl Read + Seek),
311    uncompressed_length: u64,
312    compressed_length: u64,
313    allow_uncompressed: bool,
314) -> ReadResult<Vec<u8>> {
315    let bytes = if uncompressed_length == compressed_length && allow_uncompressed {
316        read_bytes(input, compressed_length as usize)?
317    } else {
318        let start = input.stream_position()?;
319
320        // read first byte to check which compression is used.
321        let first_byte = read_u8(input)?;
322        input.seek(SeekFrom::Start(start))?;
323        // for zlib compression, the first byte should be 0x78
324        let is_zlib = first_byte == 0x78;
325        debug_assert!(is_zlib, "expected a zlib compressed block!");
326
327        let compressed = read_bytes(input, compressed_length as usize)?;
328
329        miniz_oxide::inflate::decompress_to_vec_zlib_with_limit(
330            compressed.as_slice(),
331            uncompressed_length as usize,
332        )?
333    };
334    assert_eq!(bytes.len(), uncompressed_length as usize);
335    Ok(bytes)
336}
337
/// ZLib compresses bytes. If allow_uncompressed is true, the uncompressed bytes are
/// written instead whenever the compressed version turns out to be longer.
340#[cfg(test)]
341pub(crate) fn write_compressed_bytes(
342    output: &mut (impl Write + Seek),
343    bytes: &[u8],
344    compression_level: u8,
345    allow_uncompressed: bool,
346) -> WriteResult<usize> {
347    let compressed = miniz_oxide::deflate::compress_to_vec_zlib(bytes, compression_level);
348    if !allow_uncompressed || compressed.len() < bytes.len() {
349        output.write_all(compressed.as_slice())?;
350        Ok(compressed.len())
351    } else {
352        // it turns out that the compression was futile!
353        output.write_all(bytes)?;
354        Ok(bytes.len())
355    }
356}
357
358#[inline]
359pub(crate) fn read_bytes(input: &mut impl Read, len: usize) -> ReadResult<Vec<u8>> {
360    let mut buf: Vec<u8> = Vec::with_capacity(len);
361    input.take(len as u64).read_to_end(&mut buf)?;
362    Ok(buf)
363}
364
365pub(crate) fn read_block_tpe(input: &mut impl Read) -> ReadResult<BlockType> {
366    Ok(BlockType::try_from(read_u8(input)?)?)
367}
368
369pub(crate) fn determine_f64_endian(
370    input: &mut impl Read,
371    needle: f64,
372) -> ReadResult<FloatingPointEndian> {
373    let bytes = read_bytes(input, 8)?;
374    let mut byte_reader: &[u8] = &bytes;
375    let le = read_f64(&mut byte_reader, FloatingPointEndian::Little)?;
376    if le == needle {
377        return Ok(FloatingPointEndian::Little);
378    }
379    byte_reader = &bytes;
380    let be = read_f64(&mut byte_reader, FloatingPointEndian::Big)?;
381    if be == needle {
382        Ok(FloatingPointEndian::Big)
383    } else {
384        todo!("should not get here")
385    }
386}
387
388#[inline]
389pub(crate) fn read_f64(input: &mut impl Read, endian: FloatingPointEndian) -> ReadResult<f64> {
390    let mut buf = [0u8; 8];
391    input.read_exact(&mut buf)?;
392    match endian {
393        FloatingPointEndian::Little => Ok(f64::from_le_bytes(buf)),
394        FloatingPointEndian::Big => Ok(f64::from_be_bytes(buf)),
395    }
396}
397
398#[cfg(test)]
399#[inline]
400fn write_f64(output: &mut impl Write, value: f64) -> WriteResult<()> {
401    // for f64, we have the option to use either LE or BE, we just need to be consistent
402    let buf = value.to_le_bytes();
403    output.write_all(&buf)?;
404    Ok(())
405}
406
407fn read_lz4_compressed_bytes(
408    input: &mut impl Read,
409    uncompressed_length: usize,
410    compressed_length: usize,
411) -> ReadResult<Vec<u8>> {
412    let compressed = read_bytes(input, compressed_length)?;
413    let bytes = lz4_flex::decompress(&compressed, uncompressed_length)?;
414    Ok(bytes)
415}
416
417//////////////// Header
418
419const HEADER_LENGTH: u64 = 329;
420const HEADER_VERSION_MAX_LEN: usize = 128;
421const HEADER_DATE_MAX_LEN: usize = 119;
422pub(crate) fn read_header(input: &mut impl Read) -> ReadResult<(Header, FloatingPointEndian)> {
423    let section_length = read_u64(input)?;
424    assert_eq!(section_length, HEADER_LENGTH);
425    let start_time = read_u64(input)?;
426    let end_time = read_u64(input)?;
427    let float_endian = determine_f64_endian(input, DOUBLE_ENDIAN_TEST)?;
428    let memory_used_by_writer = read_u64(input)?;
429    let scope_count = read_u64(input)?;
430    let var_count = read_u64(input)?;
431    let max_var_id_code = read_u64(input)?;
432    let vc_section_count = read_u64(input)?;
433    let timescale_exponent = read_i8(input)?;
434    let version = read_c_str_fixed_length(input, HEADER_VERSION_MAX_LEN)?;
435    // this size was reduced compared to what is documented in block_format.txt
436    let date = read_c_str_fixed_length(input, HEADER_DATE_MAX_LEN)?;
437    let file_type = FileType::try_from(read_u8(input)?)?;
438    let time_zero = read_u64(input)?;
439
440    let header = Header {
441        start_time,
442        end_time,
443        memory_used_by_writer,
444        scope_count,
445        var_count,
446        max_var_id_code,
447        vc_section_count,
448        timescale_exponent,
449        version,
450        date,
451        file_type,
452        time_zero,
453    };
454    Ok((header, float_endian))
455}
456
457#[cfg(test)]
458pub(crate) fn write_header(output: &mut impl Write, header: &Header) -> WriteResult<()> {
459    write_u64(output, HEADER_LENGTH)?;
460    write_u64(output, header.start_time)?;
461    write_u64(output, header.end_time)?;
462    write_f64(output, DOUBLE_ENDIAN_TEST)?;
463    write_u64(output, header.memory_used_by_writer)?;
464    write_u64(output, header.scope_count)?;
465    write_u64(output, header.var_count)?;
466    write_u64(output, header.max_var_id_code)?;
467    write_u64(output, header.vc_section_count)?;
468    write_i8(output, header.timescale_exponent)?;
469    write_c_str_fixed_length(output, &header.version, HEADER_VERSION_MAX_LEN)?;
470    write_c_str_fixed_length(output, &header.date, HEADER_DATE_MAX_LEN)?;
471    write_u8(output, header.file_type as u8)?;
472    write_u64(output, header.time_zero)?;
473    Ok(())
474}
475
476//////////////// Geometry
477
478pub(crate) fn read_geometry(input: &mut (impl Read + Seek)) -> ReadResult<Vec<SignalInfo>> {
479    let section_length = read_u64(input)?;
480    let uncompressed_length = read_u64(input)?;
481    let max_handle = read_u64(input)?;
482    let compressed_length = section_length - 3 * 8;
483
484    let bytes = read_zlib_compressed_bytes(input, uncompressed_length, compressed_length, true)?;
485
486    let mut signals: Vec<SignalInfo> = Vec::with_capacity(max_handle as usize);
487    let mut byte_reader: &[u8] = &bytes;
488
489    for _ii in 0..max_handle {
490        let (value, _) = read_variant_u32(&mut byte_reader)?;
491        signals.push(SignalInfo::from_file_format(value));
492    }
493    Ok(signals)
494}
495
496#[cfg(test)]
497pub(crate) fn write_geometry(
498    output: &mut (impl Write + Seek),
499    signals: &Vec<SignalInfo>,
500    compression: u8,
501) -> WriteResult<()> {
502    // remember start to fix the section length afterwards
503    let start = output.stream_position()?;
504    write_u64(output, 0)?; // dummy section length
505
506    // write uncompressed signal info
507    let mut bytes: Vec<u8> = Vec::with_capacity(signals.len() * 2);
508    for signal in signals {
509        write_variant_u64(&mut bytes, signal.to_file_format() as u64)?;
510    }
511    let uncompressed_length = bytes.len() as u64;
512    write_u64(output, uncompressed_length)?;
513    let max_handle = signals.len() as u64;
514    write_u64(output, max_handle)?;
515
516    // compress signals
517    let compressed_len = write_compressed_bytes(output, &bytes, compression, true)? as u64;
518
519    // fix section length
520    let section_length = compressed_len + 3 * 8;
521    let end = output.stream_position()?;
522    output.seek(SeekFrom::Start(start))?;
523    write_u64(output, section_length)?;
524    output.seek(SeekFrom::Start(end))?;
525
526    Ok(())
527}
528
529//////////////// Blackout
530
531pub(crate) fn read_blackout(input: &mut (impl Read + Seek)) -> ReadResult<Vec<BlackoutData>> {
532    // remember start for later sanity check
533    let start = input.stream_position()?;
534    let section_length = read_u64(input)?;
535    let (num_blackouts, _) = read_variant_u32(input)?;
536    let mut blackouts = Vec::with_capacity(num_blackouts as usize);
537    let mut current_blackout = 0u64;
538    for _ in 0..num_blackouts {
539        let activity = read_u8(input)? != 0;
540        let (delta, _) = read_variant_u64(input)?;
541        current_blackout += delta;
542        let bo = BlackoutData {
543            time: current_blackout,
544            contains_activity: activity,
545        };
546        blackouts.push(bo);
547    }
548    let end = input.stream_position()?;
549    assert_eq!(start + section_length, end);
550    Ok(blackouts)
551}
552
553#[cfg(test)]
554pub(crate) fn write_blackout(
555    output: &mut (impl Write + Seek),
556    blackouts: &[BlackoutData],
557) -> WriteResult<()> {
558    // remember start to fix the section length afterwards
559    let start = output.stream_position()?;
560    write_u64(output, 0)?; // dummy section length
561
562    let num_blackouts = blackouts.len() as u32;
563    write_variant_u32(output, num_blackouts)?;
564
565    let mut last_blackout = 0u64;
566    for blackout in blackouts {
567        let activity_byte = if blackout.contains_activity { 1 } else { 0 };
568        write_u8(output, activity_byte)?;
569        let delta = blackout.time - last_blackout;
570        last_blackout = blackout.time;
571        write_variant_u64(output, delta)?;
572    }
573
574    // fix section length
575    let end = output.stream_position()?;
576    output.seek(SeekFrom::Start(start))?;
577    write_u64(output, end - start)?;
578    output.seek(SeekFrom::Start(end))?;
579
580    Ok(())
581}
582
583//////////////// Hierarchy
584#[cfg(test)]
585const HIERARCHY_GZIP_COMPRESSION_LEVEL: u8 = 4;
586
/// Uncompresses zlib compressed bytes that are prefixed with a gzip header.
588fn read_gzip_compressed_bytes(
589    input: &mut impl Read,
590    uncompressed_len: usize,
591    compressed_len: usize,
592) -> ReadResult<Vec<u8>> {
593    read_gzip_header(input)?;
594    // we do not care about other header bytes
595    let data = read_bytes(input, compressed_len - 10)?;
596    let uncompressed =
597        miniz_oxide::inflate::decompress_to_vec_with_limit(data.as_slice(), uncompressed_len)?;
598    debug_assert_eq!(uncompressed.len(), uncompressed_len);
599    Ok(uncompressed)
600}
601
602pub(crate) fn read_gzip_header(input: &mut impl Read) -> ReadResult<()> {
603    let header = read_bytes(input, 10)?;
604    let correct_magic = header[0] == 0x1f && header[1] == 0x8b;
605    if !correct_magic {
606        return Err(ReaderError::GZipHeader(format!(
607            "expected magic bytes (0x1f, 0x8b) got {header:x?}"
608        )));
609    }
610    let is_deflate_compressed = header[2] == 8;
611    if !is_deflate_compressed {
612        return Err(ReaderError::GZipHeader(format!(
613            "expected deflate compression (8) got {:x?}",
614            header[2]
615        )));
616    }
617    let flag = header[3];
618    if flag != 0 {
619        return Err(ReaderError::GZipHeader(format!(
620            "TODO currently extra flags are not supported {flag}"
621        )));
622    }
623    Ok(())
624}
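
// Sketch of the 10-byte gzip header validated above: two magic bytes, the deflate
// method id (8), a flag byte, a 4-byte mtime, XFL and OS. The byte values are illustrative.
#[cfg(test)]
#[test]
fn gzip_header_sketch() {
    let ok = [0x1fu8, 0x8b, 8, 0, 0, 0, 0, 0, 0, 255];
    read_gzip_header(&mut ok.as_slice()).unwrap();
    // a header with the wrong magic bytes is rejected
    let bad = [0x00u8, 0x8b, 8, 0, 0, 0, 0, 0, 0, 255];
    assert!(read_gzip_header(&mut bad.as_slice()).is_err());
}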
625
626pub(crate) fn read_hierarchy_bytes(
627    input: &mut (impl Read + Seek),
628    compression: HierarchyCompression,
629) -> ReadResult<Vec<u8>> {
630    let section_length = read_u64(input)? as usize;
631    let uncompressed_length = read_u64(input)? as usize;
632    let compressed_length = section_length - 2 * 8;
633
634    let bytes = match compression {
635        HierarchyCompression::ZLib => {
636            read_gzip_compressed_bytes(input, uncompressed_length, compressed_length)?
637        }
638        HierarchyCompression::Lz4 => {
639            read_lz4_compressed_bytes(input, uncompressed_length, compressed_length)?
640        }
641        HierarchyCompression::Lz4Duo => {
642            // the length after the _first_ decompression
643            let (len, skiplen) = read_variant_u64(input)?;
644            let lvl1_len = len as usize;
645            let lvl1 = read_lz4_compressed_bytes(input, lvl1_len, compressed_length - skiplen)?;
646            let mut lvl1_reader = lvl1.as_slice();
647            read_lz4_compressed_bytes(&mut lvl1_reader, uncompressed_length, lvl1_len)?
648        }
649    };
650    assert_eq!(bytes.len(), uncompressed_length);
651    Ok(bytes)
652}
653
654#[cfg(test)]
655const GZIP_HEADER: [u8; 10] = [
656    0x1f, 0x8b, // magic bytes
657    8,    // using deflate
658    0,    // no flags
659    0, 0, 0, 0,   // timestamp = 0
660    0,   // compression level (does not really matter)
661    255, // OS set to 255 by default
662];
663
/// Writes zlib compressed bytes prefixed with a gzip header.
665#[cfg(test)]
666pub(crate) fn write_gzip_compressed_bytes(
667    output: &mut impl Write,
668    bytes: &[u8],
669    compression_level: u8,
) -> WriteResult<()> {
671    output.write_all(GZIP_HEADER.as_slice())?;
672    let compressed = miniz_oxide::deflate::compress_to_vec(bytes, compression_level);
673    output.write_all(compressed.as_slice())?;
674    Ok(())
675}
676
677#[cfg(test)]
678pub(crate) fn write_hierarchy_bytes(
679    output: &mut (impl Write + Seek),
680    compression: HierarchyCompression,
681    bytes: &[u8],
682) -> WriteResult<()> {
683    // remember start to fix the section length afterwards
684    let start = output.stream_position()?;
685    write_u64(output, 0)?; // dummy section length
686    let uncompressed_length = bytes.len() as u64;
687    write_u64(output, uncompressed_length)?;
688
689    match compression {
690        HierarchyCompression::ZLib => {
691            write_gzip_compressed_bytes(output, bytes, HIERARCHY_GZIP_COMPRESSION_LEVEL)?;
692        }
693        HierarchyCompression::Lz4 => {
694            let compressed = lz4_flex::compress(bytes);
695            output.write_all(&compressed)?;
696        }
697        HierarchyCompression::Lz4Duo => {
698            let compressed_lvl1 = lz4_flex::compress(bytes);
699            let lvl1_len = compressed_lvl1.len() as u64;
700            write_variant_u64(output, lvl1_len)?;
701            let compressed_lvl2 = lz4_flex::compress(&compressed_lvl1);
702            output.write_all(&compressed_lvl2)?;
703        }
704    };
705
706    // fix section length
707    let end = output.stream_position()?;
708    output.seek(SeekFrom::Start(start))?;
709    write_u64(output, end - start)?;
710    output.seek(SeekFrom::Start(end))?;
711    Ok(())
712}
713
714fn enum_table_from_string(value: String, handle: u64) -> ReadResult<FstHierarchyEntry> {
715    let parts: Vec<&str> = value.split(' ').collect();
716    if parts.len() < 2 {
717        return Err(ReaderError::EnumTableString(
718            "not enough spaces".to_string(),
719            value,
720        ));
721    }
722    let name = parts[0].to_string();
723    let element_count = parts[1].parse::<usize>()?;
724    let expected_part_len = element_count * 2;
725    if parts.len() - 2 != expected_part_len {
726        return Err(ReaderError::EnumTableString(
727            format!(
728                "expected {} parts got {}",
729                expected_part_len,
730                parts.len() - 2
731            ),
732            value,
733        ));
734    }
735    let mut mapping = Vec::with_capacity(element_count);
736    for ii in 0..element_count {
737        let name = parts[2 + ii].to_string();
738        let value = parts[2 + element_count + ii].to_string();
739        mapping.push((value, name));
740    }
741    // TODO: deal with correct de-escaping
742    Ok(FstHierarchyEntry::EnumTable {
743        name,
744        handle,
745        mapping,
746    })
747}
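
// Sketch of the enum table string layout parsed above:
// "<name> <element count> <names...> <values...>", where the mapping pairs up as
// (value, name). The table "state 2 IDLE RUN 00 01" is made up for illustration.
#[cfg(test)]
#[test]
fn enum_table_from_string_sketch() {
    let entry = enum_table_from_string("state 2 IDLE RUN 00 01".to_string(), 7).unwrap();
    match entry {
        FstHierarchyEntry::EnumTable { name, handle, mapping } => {
            assert_eq!(name, "state");
            assert_eq!(handle, 7);
            assert_eq!(
                mapping,
                vec![
                    ("00".to_string(), "IDLE".to_string()),
                    ("01".to_string(), "RUN".to_string())
                ]
            );
        }
        _ => panic!("expected an enum table"),
    }
}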
748
749#[cfg(test)]
750fn enum_table_to_string(name: &str, mapping: &[(String, String)]) -> String {
751    let mut out = String::with_capacity(name.len() + mapping.len() * 32 + 32);
752    out.push_str(name);
753    out.push(' ');
754    out.push_str(&format!("{}", mapping.len()));
755    for (_value, name) in mapping {
756        out.push(' ');
757        out.push_str(name);
758    }
759    for (value, _name) in mapping {
760        out.push(' ');
761        out.push_str(value);
762    }
763    out
764}
765
766const FST_SUP_VAR_DATA_TYPE_BITS: u32 = 10;
767const FST_SUP_VAR_DATA_TYPE_MASK: u64 = (1 << FST_SUP_VAR_DATA_TYPE_BITS) - 1;
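
// Sketch of the bit packing defined by the two constants above: the VHDL variable
// type is stored in the bits above bit 10 and the data type in the low 10 bits.
// The packed values here are arbitrary illustrations.
#[cfg(test)]
#[test]
fn sup_var_arg_packing_sketch() {
    let arg = (3u64 << FST_SUP_VAR_DATA_TYPE_BITS) | 5;
    assert_eq!(arg >> FST_SUP_VAR_DATA_TYPE_BITS, 3);
    assert_eq!(arg & FST_SUP_VAR_DATA_TYPE_MASK, 5);
}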
768
769fn parse_misc_attribute(
770    name: String,
771    tpe: MiscType,
772    arg: u64,
773    arg2: Option<u64>,
774) -> ReadResult<FstHierarchyEntry> {
775    let res = match tpe {
776        MiscType::Comment => FstHierarchyEntry::Comment { string: name },
777        MiscType::EnvVar => todo!("EnvVar Attribute"), // fstWriterSetEnvVar()
778        MiscType::SupVar => {
779            // This attribute supplies VHDL specific information and is used by GHDL
780            let var_type = (arg >> FST_SUP_VAR_DATA_TYPE_BITS) as u8;
781            let data_type = (arg & FST_SUP_VAR_DATA_TYPE_MASK) as u8;
782            FstHierarchyEntry::VhdlVarInfo {
783                type_name: name,
784                var_type: FstVhdlVarType::try_from_primitive(var_type)?,
785                data_type: FstVhdlDataType::try_from_primitive(data_type)?,
786            }
787        }
788        MiscType::PathName => FstHierarchyEntry::PathName { name, id: arg },
789        MiscType::SourceStem => FstHierarchyEntry::SourceStem {
790            is_instantiation: false,
791            path_id: arg2.unwrap(),
792            line: arg,
793        },
794        MiscType::SourceInstantiationStem => FstHierarchyEntry::SourceStem {
795            is_instantiation: true,
796            path_id: arg2.unwrap(),
797            line: arg,
798        },
799        MiscType::ValueList => todo!("ValueList Attribute"), // fstWriterSetValueList()
800        MiscType::EnumTable => {
801            if name.is_empty() {
802                FstHierarchyEntry::EnumTableRef { handle: arg }
803            } else {
804                enum_table_from_string(name, arg)?
805            }
806        }
807        MiscType::Unknown => todo!("unknown Attribute"),
808    };
809    Ok(res)
810}
811
812fn read_hierarchy_attribute_arg2_encoded_as_name(input: &mut impl Read) -> ReadResult<u64> {
813    let (value, _) = read_variant_u64(input)?;
814    let end_byte = read_u8(input)?;
815    assert_eq!(end_byte, 0, "expected to be zero terminated!");
816    Ok(value)
817}
818
819const HIERARCHY_TPE_VCD_SCOPE: u8 = 254;
820const HIERARCHY_TPE_VCD_UP_SCOPE: u8 = 255;
821const HIERARCHY_TPE_VCD_ATTRIBUTE_BEGIN: u8 = 252;
822const HIERARCHY_TPE_VCD_ATTRIBUTE_END: u8 = 253;
823
824pub(crate) fn read_hierarchy_entry(
825    input: &mut impl Read,
826    handle_count: &mut u32,
827) -> ReadResult<Option<FstHierarchyEntry>> {
828    let entry_tpe = match read_u8(input) {
829        Ok(tpe) => tpe,
830        Err(_) => return Ok(None),
831    };
832    let entry = match entry_tpe {
833        HIERARCHY_TPE_VCD_SCOPE => {
834            // VcdScope (ScopeType)
835            let tpe = FstScopeType::try_from_primitive(read_u8(input)?)?;
836            let name = read_c_str(input, HIERARCHY_NAME_MAX_SIZE)?;
837            let component = read_c_str(input, HIERARCHY_NAME_MAX_SIZE)?;
838            FstHierarchyEntry::Scope {
839                tpe,
840                name,
841                component,
842            }
843        }
844        0..=29 => {
845            // VcdEvent ... SvShortReal (VariableType)
846            let tpe = FstVarType::try_from_primitive(entry_tpe)?;
847            let direction = FstVarDirection::try_from_primitive(read_u8(input)?)?;
848            let name = read_c_str(input, HIERARCHY_NAME_MAX_SIZE)?;
849            let (raw_length, _) = read_variant_u32(input)?;
850            let length = if tpe == FstVarType::Port {
851                // remove delimiting spaces and adjust signal size
852                (raw_length - 2) / 3
853            } else {
854                raw_length
855            };
856            let (alias, _) = read_variant_u32(input)?;
857            let (is_alias, handle) = if alias == 0 {
858                *handle_count += 1;
859                (false, FstSignalHandle::new(*handle_count))
860            } else {
861                (true, FstSignalHandle::new(alias))
862            };
863            FstHierarchyEntry::Var {
864                tpe,
865                direction,
866                name,
867                length,
868                handle,
869                is_alias,
870            }
871        }
872        HIERARCHY_TPE_VCD_UP_SCOPE => {
873            // VcdUpScope (ScopeType)
874            FstHierarchyEntry::UpScope
875        }
876        HIERARCHY_TPE_VCD_ATTRIBUTE_BEGIN => {
877            let tpe = AttributeType::try_from_primitive(read_u8(input)?)?;
878            let subtype = MiscType::try_from_primitive(read_u8(input)?)?;
879            match tpe {
880                AttributeType::Misc => {
881                    let (name, arg2) = match subtype {
882                        MiscType::SourceStem | MiscType::SourceInstantiationStem => {
883                            let arg2 = read_hierarchy_attribute_arg2_encoded_as_name(input)?;
884                            ("".to_string(), Some(arg2))
885                        }
886                        _ => {
887                            let name = read_c_str(input, HIERARCHY_ATTRIBUTE_MAX_SIZE)?;
888                            (name, None)
889                        }
890                    };
891                    let (arg, _) = read_variant_u64(input)?;
892                    parse_misc_attribute(name, subtype, arg, arg2)?
893                }
894                AttributeType::Array => todo!("ARRAY attributes"),
895                AttributeType::Enum => todo!("ENUM attributes"),
896                AttributeType::Pack => todo!("PACK attributes"),
897            }
898        }
899        HIERARCHY_TPE_VCD_ATTRIBUTE_END => {
900            // GenAttributeEnd (ScopeType)
901            FstHierarchyEntry::AttributeEnd
902        }
903
904        other => todo!("Deal with hierarchy entry of type: {other}"),
905    };
906
907    Ok(Some(entry))
908}
909
910#[cfg(test)]
911fn write_hierarchy_attribute(
912    output: &mut impl Write,
913    tpe: AttributeType,
914    subtype: MiscType,
915    name: &str,
916    arg: u64,
917    arg2: Option<u64>,
918) -> WriteResult<()> {
919    write_u8(output, HIERARCHY_TPE_VCD_ATTRIBUTE_BEGIN)?;
920    write_u8(output, tpe as u8)?;
921    write_u8(output, subtype as u8)?;
922    let raw_name_bytes = match arg2 {
923        None => {
924            assert!(name.len() <= HIERARCHY_ATTRIBUTE_MAX_SIZE);
925            name.to_string().into_bytes()
926        }
927        Some(value) => {
928            assert!(name.is_empty(), "cannot have a name + an arg2!");
929            let mut buf = vec![0u8; 10];
930            let mut buf_writer: &mut [u8] = buf.as_mut();
931            let len = write_variant_u64(&mut buf_writer, value)?;
932            buf.truncate(len);
933            buf
934        }
935    };
936    output.write_all(&raw_name_bytes)?;
937    write_u8(output, 0)?; // zero terminate string/variant
938    write_variant_u64(output, arg)?;
939    Ok(())
940}
941
942#[cfg(test)]
943pub(crate) fn write_hierarchy_entry(
944    output: &mut impl Write,
945    handle_count: &mut u32,
946    entry: &FstHierarchyEntry,
947) -> WriteResult<()> {
948    match entry {
949        FstHierarchyEntry::Scope {
950            tpe,
951            name,
952            component,
953        } => {
954            write_u8(output, HIERARCHY_TPE_VCD_SCOPE)?;
955            write_u8(output, *tpe as u8)?;
956            assert!(name.len() <= HIERARCHY_NAME_MAX_SIZE);
957            write_c_str(output, name)?;
958            assert!(component.len() <= HIERARCHY_NAME_MAX_SIZE);
959            write_c_str(output, component)?;
960        }
961        FstHierarchyEntry::UpScope => {
962            write_u8(output, HIERARCHY_TPE_VCD_UP_SCOPE)?;
963        }
964        FstHierarchyEntry::Var {
965            tpe,
966            direction,
967            name,
968            length,
969            handle,
970            is_alias,
971        } => {
972            write_u8(output, *tpe as u8)?;
973            write_u8(output, *direction as u8)?;
974            assert!(name.len() <= HIERARCHY_NAME_MAX_SIZE);
975            write_c_str(output, name)?;
976            let raw_length = if *tpe == FstVarType::Port {
977                3 * (*length) + 2
978            } else {
979                *length
980            };
981            write_variant_u32(output, raw_length)?;
982            if *is_alias {
983                write_variant_u32(output, handle.get_raw())?;
984            } else {
985                // sanity check handle
986                assert_eq!(handle.get_index(), *handle_count as usize);
987                *handle_count += 1;
988                // write no-alias
989                write_variant_u32(output, 0)?;
990            }
991        }
992        FstHierarchyEntry::PathName { name, id } => write_hierarchy_attribute(
993            output,
994            AttributeType::Misc,
995            MiscType::PathName,
996            name,
997            *id,
998            None,
999        )?,
1000        FstHierarchyEntry::SourceStem {
1001            is_instantiation,
1002            path_id,
1003            line,
1004        } => {
1005            let subtpe = if *is_instantiation {
1006                MiscType::SourceInstantiationStem
1007            } else {
1008                MiscType::SourceStem
1009            };
1010            write_hierarchy_attribute(
1011                output,
1012                AttributeType::Misc,
1013                subtpe,
1014                "",
1015                *line,
1016                Some(*path_id),
1017            )?
1018        }
1019        FstHierarchyEntry::Comment { string } => write_hierarchy_attribute(
1020            output,
1021            AttributeType::Misc,
1022            MiscType::Comment,
1023            string,
1024            0,
1025            None,
1026        )?,
1027        FstHierarchyEntry::EnumTable {
1028            name,
1029            handle,
1030            mapping,
1031        } => {
1032            let table_str = enum_table_to_string(name, mapping);
1033            write_hierarchy_attribute(
1034                output,
1035                AttributeType::Misc,
1036                MiscType::EnumTable,
1037                &table_str,
1038                *handle,
1039                None,
1040            )?
1041        }
1042        FstHierarchyEntry::EnumTableRef { handle } => write_hierarchy_attribute(
1043            output,
1044            AttributeType::Misc,
1045            MiscType::EnumTable,
1046            "",
1047            *handle,
1048            None,
1049        )?,
1050        FstHierarchyEntry::VhdlVarInfo {
1051            type_name,
1052            var_type,
1053            data_type,
1054        } => {
1055            let arg = ((*var_type as u64) << FST_SUP_VAR_DATA_TYPE_BITS) | (*data_type as u64);
1056            write_hierarchy_attribute(
1057                output,
1058                AttributeType::Misc,
1059                MiscType::SupVar,
1060                type_name,
1061                arg,
1062                None,
1063            )?;
1064        }
1065        FstHierarchyEntry::AttributeEnd => {
1066            write_u8(output, HIERARCHY_TPE_VCD_ATTRIBUTE_END)?;
1067        }
1068    }
1069
1070    Ok(())
1071}
1072
//////////////// Value Change Data
1074
1075pub(crate) fn read_packed_signal_value_bytes(
1076    input: &mut (impl Read + Seek),
1077    len: u32,
1078    tpe: ValueChangePackType,
1079) -> ReadResult<Vec<u8>> {
1080    let (value, skiplen) = read_variant_u32(input)?;
1081    if value != 0 {
1082        let uncompressed_length = value as u64;
1083        let uncompressed: Vec<u8> = match tpe {
1084            ValueChangePackType::Lz4 => {
1085                let compressed_length = (len - skiplen) as u64;
1086                read_lz4_compressed_bytes(
1087                    input,
1088                    uncompressed_length as usize,
1089                    compressed_length as usize,
1090                )?
1091            }
1092            ValueChangePackType::FastLz => {
1093                let compressed_length = (len - skiplen) as u64;
1094                crate::fastlz::decompress(
1095                    input,
1096                    compressed_length as usize,
1097                    uncompressed_length as usize,
1098                )?
1099            }
1100            ValueChangePackType::Zlib => {
1101                let compressed_length = len as u64;
1102                // Important: for signals, we do not skip decompression,
1103                // even if the compressed and uncompressed length are the same
1104                read_zlib_compressed_bytes(input, uncompressed_length, compressed_length, false)?
1105            }
1106        };
1107        Ok(uncompressed)
1108    } else {
1109        let dest_length = len - skiplen;
1110        let bytes = read_bytes(input, dest_length as usize)?;
1111        Ok(bytes)
1112    }
1113}
1114
1115pub(crate) fn read_time_table(
1116    input: &mut (impl Read + Seek),
1117    section_start: u64,
1118    section_length: u64,
1119) -> ReadResult<(u64, Vec<u64>)> {
1120    // the time block meta data is in the last 24 bytes at the end of the section
1121    input.seek(SeekFrom::Start(section_start + section_length - 3 * 8))?;
1122    let uncompressed_length = read_u64(input)?;
1123    let compressed_length = read_u64(input)?;
1124    let number_of_items = read_u64(input)?;
1125    assert!(compressed_length <= section_length);
1126
1127    // now that we know how long the block actually is, we can go back to it
1128    input.seek(SeekFrom::Current(-(3 * 8) - (compressed_length as i64)))?;
1129    let bytes = read_zlib_compressed_bytes(input, uncompressed_length, compressed_length, true)?;
1130    let mut byte_reader: &[u8] = &bytes;
1131    let mut time_table: Vec<u64> = Vec::with_capacity(number_of_items as usize);
1132    let mut time_val: u64 = 0; // running time counter
1133
1134    for _ in 0..number_of_items {
1135        let (value, _) = read_variant_u64(&mut byte_reader)?;
1136        time_val += value;
1137        time_table.push(time_val);
1138    }
1139
1140    let time_section_length = compressed_length + 3 * 8;
1141    Ok((time_section_length, time_table))
1142}
1143
1144#[cfg(test)]
1145pub(crate) fn write_time_table(
1146    output: &mut (impl Write + Seek),
1147    compression: Option<u8>,
1148    table: &[u64],
1149) -> WriteResult<()> {
1150    // delta compress
1151    let num_entries = table.len();
1152    let table = delta_compress_time_table(table)?;
1153    // write data
1154    let (uncompressed_len, compressed_len) = match compression {
1155        Some(comp) => {
1156            let compressed = miniz_oxide::deflate::compress_to_vec_zlib(table.as_slice(), comp);
1157            // is compression worth it?
1158            if compressed.len() < table.len() {
1159                output.write_all(compressed.as_slice())?;
1160                (table.len(), compressed.len())
1161            } else {
1162                // it is more space efficient to stick with the uncompressed version
1163                output.write_all(table.as_slice())?;
1164                (table.len(), table.len())
1165            }
1166        }
1167        None => {
1168            output.write_all(table.as_slice())?;
1169            (table.len(), table.len())
1170        }
1171    };
1172    write_u64(output, uncompressed_len as u64)?;
1173    write_u64(output, compressed_len as u64)?;
1174    write_u64(output, num_entries as u64)?;
1175
1176    Ok(())
1177}
1178
1179#[cfg(test)]
1180#[inline]
1181fn delta_compress_time_table(table: &[u64]) -> WriteResult<Vec<u8>> {
1182    let mut output = vec![];
1183    let mut prev_time = 0u64;
1184    for time in table {
1185        let delta = *time - prev_time;
1186        prev_time = *time;
1187        write_variant_u64(&mut output, delta)?;
1188    }
1189    Ok(output)
1190}
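
// Round-trip sketch of the time table encoding: write_time_table stores varint deltas
// (optionally zlib compressed) followed by the three length/count words that
// read_time_table uses to locate and prefix-sum them again. The times are illustrative.
#[cfg(test)]
#[test]
fn time_table_roundtrip_sketch() {
    let times = [0u64, 5, 5, 42];
    let mut buf = std::io::Cursor::new(Vec::new());
    write_time_table(&mut buf, None, &times).unwrap();
    let section_length = buf.stream_position().unwrap();
    let (_, read_back) = read_time_table(&mut buf, 0, section_length).unwrap();
    assert_eq!(read_back, times.to_vec());
}
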
1191#[allow(clippy::too_many_arguments)]
1192#[inline]
1193pub(crate) fn read_frame(
1194    input: &mut (impl Read + Seek),
1195    section_start: u64,
1196    section_length: u64,
1197    signals: &[SignalInfo],
1198    signal_filter: &BitMask,
1199    float_endian: FloatingPointEndian,
1200    start_time: u64,
1201    callback: &mut impl FnMut(u64, FstSignalHandle, FstSignalValue),
1202) -> ReadResult<()> {
1203    // we skip the section header (section_length, start_time, end_time, ???)
1204    input.seek(SeekFrom::Start(section_start + 4 * 8))?;
1205    let (uncompressed_length, _) = read_variant_u64(input)?;
1206    let (compressed_length, _) = read_variant_u64(input)?;
1207    let (max_handle, _) = read_variant_u64(input)?;
1208    assert!(compressed_length <= section_length);
1209    let bytes_vec =
1210        read_zlib_compressed_bytes(input, uncompressed_length, compressed_length, true)?;
1211    let mut bytes = std::io::Cursor::new(bytes_vec);
1212
1213    assert_eq!(signals.len(), max_handle as usize);
1214    for (idx, signal) in signals.iter().enumerate() {
1215        let signal_length = signal.len();
1216        if signal_filter.is_set(idx) {
1217            let handle = FstSignalHandle::from_index(idx);
1218            match signal_length {
1219                0 => {} // ignore since variable-length records have no initial value
1220                len => {
1221                    if !signal.is_real() {
1222                        let value = read_bytes(&mut bytes, len as usize)?;
1223                        callback(start_time, handle, FstSignalValue::String(&value));
1224                    } else {
1225                        let value = read_f64(&mut bytes, float_endian)?;
1226                        callback(start_time, handle, FstSignalValue::Real(value));
1227                    }
1228                }
1229            }
1230        } else {
1231            // skip
1232            bytes.seek(SeekFrom::Current(signal_length as i64))?;
1233        }
1234    }
1235    Ok(())
1236}
1237
1238#[inline]
1239pub(crate) fn skip_frame(input: &mut (impl Read + Seek), section_start: u64) -> ReadResult<()> {
1240    // we skip the section header (section_length, start_time, end_time, ???)
1241    input.seek(SeekFrom::Start(section_start + 4 * 8))?;
1242    let (_uncompressed_length, _) = read_variant_u64(input)?;
1243    let (compressed_length, _) = read_variant_u64(input)?;
1244    let (_max_handle, _) = read_variant_u64(input)?;
1245    input.seek(SeekFrom::Current(compressed_length as i64))?;
1246    Ok(())
1247}
1248
1249/// Table of signal offsets inside a data block.
1250#[derive(Debug)]
1251pub(crate) struct OffsetTable(Vec<SignalDataLoc>);
1252
1253impl From<Vec<SignalDataLoc>> for OffsetTable {
1254    fn from(value: Vec<SignalDataLoc>) -> Self {
1255        Self(value)
1256    }
1257}
1258
1259impl OffsetTable {
1260    pub(crate) fn iter(&self) -> OffsetTableIter {
1261        OffsetTableIter {
1262            table: self,
1263            signal_idx: 0,
1264        }
1265    }
1266
1267    #[allow(dead_code)]
1268    pub(crate) fn len(&self) -> usize {
1269        self.0.len()
1270    }
1271
1272    fn get_entry(&self, signal_idx: usize) -> Option<OffsetEntry> {
1273        match &self.0[signal_idx] {
1274            SignalDataLoc::None => None,
1275            // aliases should always directly point to an offset,
1276            // so we should not have to recurse!
1277            SignalDataLoc::Alias(alias_idx) => match &self.0[*alias_idx as usize] {
1278                SignalDataLoc::Offset(offset, len) => Some(OffsetEntry {
1279                    signal_idx,
1280                    offset: offset.get() as u64,
1281                    len: len.get(),
1282                }),
1283                _ => unreachable!("aliases should always directly point to an offset"),
1284            },
1285            SignalDataLoc::Offset(offset, len) => Some(OffsetEntry {
1286                signal_idx,
1287                offset: offset.get() as u64,
1288                len: len.get(),
1289            }),
1290        }
1291    }
1292}
1293
1294pub(crate) struct OffsetTableIter<'a> {
1295    table: &'a OffsetTable,
1296    signal_idx: usize,
1297}
1298
1299#[derive(Debug)]
1300pub(crate) struct OffsetEntry {
1301    pub(crate) signal_idx: usize,
1302    pub(crate) offset: u64,
1303    pub(crate) len: u32,
1304}
1305impl Iterator for OffsetTableIter<'_> {
1306    type Item = OffsetEntry;
1307
1308    fn next(&mut self) -> Option<Self::Item> {
1309        // get the first entry which is not None
1310        while self.signal_idx < self.table.0.len()
1311            && matches!(self.table.0[self.signal_idx], SignalDataLoc::None)
1312        {
1313            self.signal_idx += 1
1314        }
1315
1316        // did we reach the end?
1317        if self.signal_idx >= self.table.0.len() {
1318            return None;
1319        }
1320
1321        // increment id for next call
1322        self.signal_idx += 1;
1323
1324        // return result
1325        let res = self.table.get_entry(self.signal_idx - 1);
1326        debug_assert!(res.is_some());
1327        res
1328    }
1329}
1330
1331fn read_value_change_alias2(
1332    mut chain_bytes: &[u8],
1333    max_handle: u64,
1334    last_table_entry: u32,
1335) -> ReadResult<OffsetTable> {
1336    let mut table = Vec::with_capacity(max_handle as usize);
1337    let mut offset: Option<NonZeroU32> = None;
1338    let mut prev_alias = 0u32;
1339    let mut prev_offset_idx = 0usize;
1340    while !chain_bytes.is_empty() {
1341        let idx = table.len();
1342        let kind = chain_bytes[0];
1343        if (kind & 1) == 1 {
1344            let shval = read_variant_i64(&mut chain_bytes)? >> 1;
1345            match shval.cmp(&0) {
1346                Ordering::Greater => {
1347                    // a new incremental offset
1348                    let new_offset = NonZeroU32::new(
1349                        (offset.map(|o| o.get()).unwrap_or_default() as i64 + shval) as u32,
1350                    )
1351                    .unwrap();
1352                    // if there was a previous entry, we need to update the length
1353                    if let Some(prev_offset) = offset {
1354                        let len = NonZeroU32::new(new_offset.get() - prev_offset.get()).unwrap();
1355                        table[prev_offset_idx] = SignalDataLoc::Offset(prev_offset, len);
1356                    }
1357                    offset = Some(new_offset);
1358                    prev_offset_idx = idx;
1359                    // push a placeholder which will be replaced as soon as we know the length
1360                    table.push(SignalDataLoc::None);
1361                }
1362                Ordering::Less => {
1363                    // new signal alias
1364                    prev_alias = (-shval - 1) as u32;
1365                    table.push(SignalDataLoc::Alias(prev_alias));
1366                }
1367                Ordering::Equal => {
1368                    // same signal alias as previous signal
1369                    table.push(SignalDataLoc::Alias(prev_alias));
1370                }
1371            }
1372        } else {
1373            // a block of signals that do not have any data
1374            let (value, _) = read_variant_u32(&mut chain_bytes)?;
1375            let zeros = value >> 1;
1376            for _ in 0..zeros {
1377                table.push(SignalDataLoc::None);
1378            }
1379        }
1380    }
1381
1382    // if there was a previous entry, we need to update the length
1383    if let Some(prev_offset) = offset {
1384        let len = NonZeroU32::new(last_table_entry - prev_offset.get()).unwrap();
1385        table[prev_offset_idx] = SignalDataLoc::Offset(prev_offset, len);
1386    }
1387
1388    Ok(table.into())
1389}
1390
1391fn read_value_change_alias(
1392    mut chain_bytes: &[u8],
1393    max_handle: u64,
1394    last_table_entry: u32,
1395) -> ReadResult<OffsetTable> {
1396    let mut table = Vec::with_capacity(max_handle as usize);
1397    let mut prev_offset_idx = 0usize;
1398    let mut offset: Option<NonZeroU32> = None;
1399    while !chain_bytes.is_empty() {
1400        let (raw_val, _) = read_variant_u32(&mut chain_bytes)?;
1401        let idx = table.len();
1402        if raw_val == 0 {
1403            let (raw_alias, _) = read_variant_u32(&mut chain_bytes)?;
1404            let alias = ((raw_alias as i64) - 1) as u32;
1405            table.push(SignalDataLoc::Alias(alias));
1406        } else if (raw_val & 1) == 1 {
1407            // a new incremental offset
1408            let new_offset =
1409                NonZeroU32::new(offset.map(|o| o.get()).unwrap_or_default() + (raw_val >> 1))
1410                    .unwrap();
1411            // if there was a previous entry, we need to update the length
1412            if let Some(prev_offset) = offset {
1413                let len = NonZeroU32::new(new_offset.get() - prev_offset.get()).unwrap();
1414                table[prev_offset_idx] = SignalDataLoc::Offset(prev_offset, len);
1415            }
1416            offset = Some(new_offset);
1417            prev_offset_idx = idx;
1418            // push a placeholder which will be replaced as soon as we know the length
1419            table.push(SignalDataLoc::None);
1420        } else {
1421            // a block of signals that do not have any data
1422            let zeros = raw_val >> 1;
1423            for _ in 0..zeros {
1424                table.push(SignalDataLoc::None);
1425            }
1426        }
1427    }
1428
1429    // if there was a previous entry, we need to update the length
1430    if let Some(prev_offset) = offset {
1431        let len = NonZeroU32::new(last_table_entry - prev_offset.get()).unwrap();
1432        table[prev_offset_idx] = SignalDataLoc::Offset(prev_offset, len);
1433    }
1434
1435    Ok(table.into())
1436}
1437
1438/// Indicates the location of the signal data for the current block.
1439#[derive(Debug, Copy, Clone)]
1440enum SignalDataLoc {
1441    /// The signal has no value changes in the current block.
1442    None,
1443    /// The signal has the same offset as another signal.
1444    Alias(u32),
1445    /// The signal has a new offset.
1446    Offset(NonZeroU32, NonZeroU32),
1447}
1448
1449pub(crate) fn read_signal_locs(
1450    input: &mut (impl Read + Seek),
1451    chain_len_offset: u64,
1452    section_kind: DataSectionKind,
1453    max_handle: u64,
1454    start: u64,
1455) -> ReadResult<OffsetTable> {
1456    input.seek(SeekFrom::Start(chain_len_offset))?;
1457    let chain_compressed_length = read_u64(input)?;
1458
    // the chain data starts chain_compressed_length bytes before the chain length field
1460    let chain_start = chain_len_offset - chain_compressed_length;
1461    input.seek(SeekFrom::Start(chain_start))?;
1462    let chain_bytes = read_bytes(input, chain_compressed_length as usize)?;
1463
1464    let last_table_entry = (chain_start - start) as u32; // indx_pos - vc_start
1465    if section_kind == DataSectionKind::DynamicAlias2 {
1466        read_value_change_alias2(&chain_bytes, max_handle, last_table_entry)
1467    } else {
1468        read_value_change_alias(&chain_bytes, max_handle, last_table_entry)
1469    }
1470}
1471
1472#[cfg(test)]
1473mod tests {
1474    use super::*;
1475    use proptest::prelude::*;
1476
1477    #[test]
1478    fn data_struct_sizes() {
1479        assert_eq!(
1480            std::mem::size_of::<SignalDataLoc>(),
1481            std::mem::size_of::<u64>() + std::mem::size_of::<u32>()
1482        );
1483    }
1484
1485    #[test]
1486    fn test_read_variant_i64() {
1487        // a positive value from a real fst file (solution from gtkwave)
1488        let in1 = [0x13];
1489        assert_eq!(read_variant_i64(&mut in1.as_slice()).unwrap(), 19);
1490        // a negative value from a real fst file (solution from gtkwave)
1491        let in0 = [0x7b];
1492        assert_eq!(read_variant_i64(&mut in0.as_slice()).unwrap(), -5);
1493    }
1494
1495    #[test]
1496    fn regression_test_read_write_variant_i64() {
1497        do_test_read_write_variant_i64(-36028797018963969);
1498        do_test_read_write_variant_i64(-4611686018427387905);
1499    }
1500
1501    fn do_test_read_write_variant_i64(value: i64) {
1502        let mut buf = std::io::Cursor::new(vec![0u8; 24]);
1503        write_variant_i64(&mut buf, value).unwrap();
1504        buf.seek(SeekFrom::Start(0)).unwrap();
1505        let read_value = read_variant_i64(&mut buf).unwrap();
1506        assert_eq!(read_value, value);
1507    }
1508
1509    proptest! {
1510         #[test]
1511        fn test_read_write_variant_u64(value: u64) {
1512            let mut buf = std::io::Cursor::new(vec![0u8; 24]);
1513            write_variant_u64(&mut buf, value).unwrap();
1514            buf.seek(SeekFrom::Start(0)).unwrap();
1515            let (read_value, _) = read_variant_u64(&mut buf).unwrap();
1516            assert_eq!(read_value, value);
1517        }
1518
1519         #[test]
1520        fn test_read_write_variant_i64(value: i64) {
1521            do_test_read_write_variant_i64(value);
1522        }
1523    }
1524
1525    #[test]
1526    fn test_read_c_str_fixed_length() {
1527        let input = [b'h', b'i', 0u8, b'x'];
1528        assert_eq!(
1529            read_c_str_fixed_length(&mut input.as_slice(), 4).unwrap(),
1530            "hi"
1531        );
1532        let input2 = [b'h', b'i', b'i', 0u8, b'x'];
1533        assert_eq!(
1534            read_c_str_fixed_length(&mut input2.as_slice(), 5).unwrap(),
1535            "hii"
1536        );
1537    }
1538
1539    /// makes sure that there are no zero bytes inside the string and that the max length is obeyed
    fn is_valid_c_str(value: &str, max_len: usize) -> bool {
        let string_bytes: &[u8] = value.as_bytes();
        let len_constraint = string_bytes.len() < max_len;
        let non_zero_constraint = !string_bytes.contains(&0u8);
        len_constraint && non_zero_constraint
    }
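
    // Small sketch (not from the original suite): spot-check the helper above.
    #[test]
    fn test_is_valid_c_str_examples() {
        assert!(is_valid_c_str("hi", 3)); // 2 bytes plus the null terminator fit into 3
        assert!(!is_valid_c_str("hi", 2)); // no room left for the null terminator
        assert!(!is_valid_c_str("h\0i", 10)); // embedded zero byte
    }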

    fn is_valid_alphanumeric_c_str(value: &str, max_len: usize) -> bool {
        let alphanumeric_constraint = value.chars().all(|c| c.is_alphanumeric());
        is_valid_c_str(value, max_len) && alphanumeric_constraint
    }

    proptest! {
        #[test]
        fn test_write_c_str_fixed_length(string: String, max_len in 1 .. 400usize) {
            prop_assume!(is_valid_c_str(&string, max_len));
            let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
            write_c_str_fixed_length(&mut buf, &string, max_len).unwrap();
            buf.seek(SeekFrom::Start(0)).unwrap();
            assert_eq!(
                read_c_str_fixed_length(&mut buf, max_len).unwrap(),
                string
            );
        }
    }

    proptest! {
        #[test]
        fn test_write_c_str(string: String, max_len in 1 .. 400usize) {
            prop_assume!(is_valid_c_str(&string, max_len));
            let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
            write_c_str(&mut buf, &string).unwrap();
            buf.seek(SeekFrom::Start(0)).unwrap();
            assert_eq!(
                read_c_str(&mut buf, max_len).unwrap(),
                string
            );
        }
    }

    proptest! {
        #[test]
        fn test_read_write_header(header: Header) {
            // discard generated headers whose strings are too long
            prop_assume!(header.version.len() <= HEADER_VERSION_MAX_LEN);
            prop_assume!(header.date.len() <= HEADER_DATE_MAX_LEN);

            let mut buf = [0u8; 512];
            write_header(&mut buf.as_mut(), &header).unwrap();
            let (actual_header, endian) = read_header(&mut buf.as_slice()).unwrap();
            assert_eq!(endian, FloatingPointEndian::Little);
            assert_eq!(actual_header, header);
        }
    }

    proptest! {
        #[test]
        fn test_compress_bytes(bytes: Vec<u8>, allow_uncompressed: bool) {
            let mut buf = std::io::Cursor::new(vec![0u8; bytes.len() * 2]);
            let compressed_len = write_compressed_bytes(&mut buf, &bytes, 3, allow_uncompressed).unwrap();
            if allow_uncompressed {
                assert!(compressed_len <= bytes.len());
            }
            buf.seek(SeekFrom::Start(0)).unwrap();
            let uncompressed = read_zlib_compressed_bytes(&mut buf, bytes.len() as u64, compressed_len as u64, allow_uncompressed).unwrap();
            assert_eq!(uncompressed, bytes);
        }
    }

    proptest! {
        #[test]
        fn test_read_write_blackout(mut blackouts: Vec<BlackoutData>) {
            // blackout times must be in increasing order => sort
            blackouts.sort_by(|a, b| a.time.cmp(&b.time));

            // actual test
            let max_len = blackouts.len() * 5 + 3 * 8;
            let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
            write_blackout(&mut buf, &blackouts).unwrap();
            buf.seek(SeekFrom::Start(0)).unwrap();
            let actual = read_blackout(&mut buf).unwrap();
            assert_eq!(actual.len(), blackouts.len());
            assert_eq!(actual, blackouts);
        }
    }

    proptest! {
        #[test]
        fn test_read_write_geometry(signals: Vec<SignalInfo>) {
            let max_len = signals.len() * 4 + 3 * 8;
            let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
            write_geometry(&mut buf, &signals, 3).unwrap();
            buf.seek(SeekFrom::Start(0)).unwrap();
            let actual = read_geometry(&mut buf).unwrap();
            assert_eq!(actual.len(), signals.len());
            assert_eq!(actual, signals);
        }
    }

    /// ensures that no string contains zero bytes or is longer than max_len
    fn hierarchy_entry_with_valid_c_strings(entry: &FstHierarchyEntry) -> bool {
        match entry {
            FstHierarchyEntry::Scope {
                name, component, ..
            } => {
                is_valid_c_str(name, HIERARCHY_NAME_MAX_SIZE)
                    && is_valid_c_str(component, HIERARCHY_NAME_MAX_SIZE)
            }
            FstHierarchyEntry::UpScope => true,
            FstHierarchyEntry::Var { name, .. } => is_valid_c_str(name, HIERARCHY_NAME_MAX_SIZE),
            FstHierarchyEntry::PathName { name, .. } => {
                is_valid_c_str(name, HIERARCHY_ATTRIBUTE_MAX_SIZE)
            }
            FstHierarchyEntry::SourceStem { .. } => true,
            FstHierarchyEntry::Comment { string } => {
                is_valid_c_str(string, HIERARCHY_ATTRIBUTE_MAX_SIZE)
            }
            FstHierarchyEntry::EnumTable { name, mapping, .. } => {
                is_valid_alphanumeric_c_str(name, HIERARCHY_ATTRIBUTE_MAX_SIZE)
                    && mapping.iter().all(|(k, v)| {
                        is_valid_alphanumeric_c_str(k, HIERARCHY_ATTRIBUTE_MAX_SIZE)
                            && is_valid_alphanumeric_c_str(v, HIERARCHY_ATTRIBUTE_MAX_SIZE)
                    })
            }
            FstHierarchyEntry::EnumTableRef { .. } => true,
            FstHierarchyEntry::VhdlVarInfo { type_name, .. } => {
                is_valid_c_str(type_name, HIERARCHY_NAME_MAX_SIZE)
            }
            FstHierarchyEntry::AttributeEnd => true,
        }
    }

    /// ensures that the mapping strings are non-empty and do not contain spaces
    fn hierarchy_entry_with_valid_mapping(entry: &FstHierarchyEntry) -> bool {
        match entry {
            FstHierarchyEntry::EnumTable { mapping, .. } => mapping
                .iter()
                .all(|(k, v)| is_valid_mapping_str(k) && is_valid_mapping_str(v)),
            _ => true,
        }
    }

    fn is_valid_mapping_str(value: &str) -> bool {
        !value.is_empty() && !value.contains(' ')
    }

    /// ensures that ports are not too wide (the bound below presumably keeps the encoded length, roughly `3 * length + 2`, from overflowing a u32)
    fn hierarchy_entry_with_valid_port_width(entry: &FstHierarchyEntry) -> bool {
        if let FstHierarchyEntry::Var {
            tpe: FstVarType::Port,
            length,
            ..
        } = entry
        {
            *length < (u32::MAX / 3) - 2
        } else {
            true
        }
    }

    fn read_write_hierarchy_entry(entry: FstHierarchyEntry) {
        // the handle count is only important if we are writing a non-aliased variable
        let base_handle_count: u32 = match &entry {
            FstHierarchyEntry::Var {
                handle, is_alias, ..
            } => {
                if *is_alias {
                    0
                } else {
                    handle.get_index() as u32
                }
            }
            _ => 0,
        };

        let max_len = 1024 * 64;
        let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
        let mut handle_count = base_handle_count;
        write_hierarchy_entry(&mut buf, &mut handle_count, &entry).unwrap();
        if base_handle_count > 0 {
            assert_eq!(handle_count, base_handle_count + 1);
        }
        buf.seek(SeekFrom::Start(0)).unwrap();
        handle_count = base_handle_count;
        let actual = read_hierarchy_entry(&mut buf, &mut handle_count)
            .unwrap()
            .unwrap();
        assert_eq!(actual, entry);
    }

    #[test]
    fn test_read_write_hierarchy_path_name_entry() {
        let entry = FstHierarchyEntry::PathName {
            id: 1,
            name: "".to_string(),
        };
        read_write_hierarchy_entry(entry);
    }

    proptest! {
        #[test]
        fn test_prop_read_write_hierarchy_entry(entry: FstHierarchyEntry) {
            prop_assume!(hierarchy_entry_with_valid_c_strings(&entry));
            prop_assume!(hierarchy_entry_with_valid_mapping(&entry));
            prop_assume!(hierarchy_entry_with_valid_port_width(&entry));
            read_write_hierarchy_entry(entry);
        }
    }

    // test with some manually chosen entries
    #[test]
    fn test_read_write_hierarchy_entry() {
        // make sure that we can write and read long attributes
        let entry = FstHierarchyEntry::Comment {
            string: "TEST ".repeat((8000 + 4) / 5),
        };
        read_write_hierarchy_entry(entry);
    }

    fn do_test_read_write_hierarchy_bytes(tpe: HierarchyCompression, bytes: Vec<u8>) {
        let max_len = std::cmp::max(64, bytes.len() + 3 * 8);
        let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
        write_hierarchy_bytes(&mut buf, tpe, &bytes).unwrap();
        buf.seek(SeekFrom::Start(0)).unwrap();
        let actual = read_hierarchy_bytes(&mut buf, tpe).unwrap();
        assert_eq!(actual, bytes);
    }

    #[test]
    fn test_read_write_hierarchy_bytes_regression() {
        do_test_read_write_hierarchy_bytes(HierarchyCompression::Lz4, vec![]);
        do_test_read_write_hierarchy_bytes(HierarchyCompression::ZLib, vec![]);
    }
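
    // Additional sketch (not from the original suite): the same round trip with a
    // small non-empty payload for both compression schemes.
    #[test]
    fn test_read_write_hierarchy_bytes_small_payload() {
        do_test_read_write_hierarchy_bytes(HierarchyCompression::Lz4, b"hello fst".to_vec());
        do_test_read_write_hierarchy_bytes(HierarchyCompression::ZLib, b"hello fst".to_vec());
    }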

    proptest! {
        #[test]
        fn test_prop_read_write_hierarchy_bytes(tpe: HierarchyCompression, bytes: Vec<u8>) {
            do_test_read_write_hierarchy_bytes(tpe, bytes);
        }
    }

    fn read_write_time_table(mut table: Vec<u64>, compressed: bool) {
        // the table has to be sorted since we are computing and saving time deltas
        table.sort();
        let max_len = std::cmp::max(64, table.len() * 8 + 3 * 8);
        let mut buf = std::io::Cursor::new(vec![0u8; max_len]);
        let comp = if compressed { Some(3) } else { None };
        write_time_table(&mut buf, comp, &table).unwrap();
        let section_start = 0u64;
        let section_length = buf.stream_position().unwrap();
        buf.seek(SeekFrom::Start(0)).unwrap();
        let (actual_len, actual_table) =
            read_time_table(&mut buf, section_start, section_length).unwrap();
        assert_eq!(actual_len, section_length);
        assert_eq!(actual_table, table);
    }
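
    // Sketch (not from the original suite): illustrates why the table has to be
    // sorted. Assuming the writer stores consecutive deltas, a sorted table keeps
    // all deltas non-negative and a running sum restores the original values.
    #[test]
    fn test_time_table_delta_intuition() {
        let table = [2u64, 5, 5, 9];
        let mut prev = 0u64;
        let deltas: Vec<u64> = table
            .iter()
            .map(|&t| {
                let d = t - prev;
                prev = t;
                d
            })
            .collect();
        assert_eq!(deltas, vec![2, 3, 0, 4]);
        // reconstruct the original times via a prefix sum over the deltas
        let mut acc = 0u64;
        let restored: Vec<u64> = deltas.iter().map(|&d| { acc += d; acc }).collect();
        assert_eq!(restored, table);
    }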

    #[test]
    fn test_read_write_time_table_uncompressed() {
        let table = vec![1, 0];
        read_write_time_table(table, false);
    }

    #[test]
    fn test_read_write_time_table_compressed() {
        let table = (0..10000).collect();
        read_write_time_table(table, true);
    }

    proptest! {
        #[test]
        fn test_prop_read_write_time_table(table: Vec<u64>, compressed: bool) {
            read_write_time_table(table, compressed);
        }
    }
}