use super::{lz77, TextEncoding};
use crate::headers::palmdoch::Compression;
use byteorder::{BigEndian, ReadBytesExt};
use encoding::{all::WINDOWS_1252, DecoderTrap, Encoding};
use std::{
    fmt,
    io::{self, Cursor, ErrorKind},
};

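/// The record info entries start right after the 78-byte Palm Database (PDB) header.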
const RECORDS_START_INDEX: u64 = 78;

/// A "cell" in the whole book's content
#[derive(Debug, Clone)]
pub struct Record {
    record_data_offset: u32,
    id: u32,
    pub record_data: String,
    pub length: usize,
}
impl fmt::Display for Record {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.record_data)
    }
}
impl Record {
    #[allow(dead_code)]
    fn new() -> Record {
        Record {
            record_data_offset: 0,
            id: 0,
            record_data: String::new(),
            length: 0,
        }
    }
    /// Reads the content of a record at the specified offset
    fn record_data(
        record_data_offset: u32,
        next_record_data_offset: u32,
        extra_bytes: u32,
        compression_type: &Compression,
        content: &[u8],
        encoding: &TextEncoding,
    ) -> io::Result<String> {
        // #TODO: reconsider using string here due to possible different encodings?
        match compression_type {
            Compression::No => match encoding {
                TextEncoding::UTF8 => Ok(String::from_utf8_lossy(
                    &content[record_data_offset as usize..next_record_data_offset as usize],
                )
                .into_owned()),
                TextEncoding::CP1252 => Ok(WINDOWS_1252
                    .decode(
                        &content[record_data_offset as usize..next_record_data_offset as usize],
                        DecoderTrap::Ignore,
                    )
                    .unwrap()), // unwrapping is ok here because DecoderTrap::Ignore skips undecodable bytes instead of returning an error
            },
            Compression::PalmDoc => {
                // The compressed text ends `extra_bytes` before the next record's offset.
                let end = (next_record_data_offset - extra_bytes) as usize;
                if (record_data_offset as usize) < end && end <= content.len() {
                    lz77::decompress_lz77(
                        &content[record_data_offset as usize..end],
                        encoding,
                    )
                } else {
                    Err(io::Error::new(
                        ErrorKind::NotFound,
                        "record points to location out of bounds",
                    ))
                }
            }
            Compression::Huff => panic!("Huff compression is currently not supported"),
        }
    }
    /// Parses a record from the reader at its current position
    fn parse_record(reader: &mut Cursor<&[u8]>) -> io::Result<Record> {
        Ok(Record {
            record_data_offset: reader.read_u32::<BigEndian>()?,
            id: reader.read_u32::<BigEndian>()?,
            record_data: String::new(),
            length: 0,
        })
    }
    /// Gets all records in the specified content
    pub(crate) fn parse_records(
        content: &[u8],
        num_of_records: u16,
        extra_bytes: u32,
        compression_type: Compression,
        encoding: TextEncoding,
    ) -> io::Result<Vec<Record>> {
        let mut records_content = vec![];
        let mut reader = Cursor::new(content);
        reader.set_position(RECORDS_START_INDEX);
        for _ in 0..num_of_records {
            records_content.push(Record::parse_record(&mut reader)?);
        }
        // Each entry only stores where its data starts, so a record's text runs up to
        // the next entry's offset; the last entry is used only as an end marker and is
        // left empty here.
        for i in 0..records_content.len().saturating_sub(1) {
            let next_offset = records_content[i + 1].record_data_offset;
            if extra_bytes < next_offset {
                let current_rec = &mut records_content[i];
                current_rec.record_data = match Record::record_data(
                    current_rec.record_data_offset,
                    next_offset,
                    extra_bytes,
                    &compression_type,
                    content,
                    &encoding,
                ) {
                    Ok(record_data) => record_data,
                    Err(e) => {
                        eprintln!(
                            "failed parsing record at offset {} - {}",
                            current_rec.record_data_offset, e
                        );
                        String::new()
                    }
                };
                current_rec.length = current_rec.record_data.len();
            }
        }
        Ok(records_content)
    }
}
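
// A minimal sketch of how the parsers above fit together, built on a hypothetical
// hand-made buffer (78 zeroed header bytes, two 8-byte record info entries, then the
// raw text). It only exercises the `Compression::No` / `TextEncoding::UTF8` path.
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;

    #[test]
    fn parse_record_reads_offset_and_id() {
        // A record info entry is a big-endian u32 offset followed by a u32 id.
        let mut entry = Vec::new();
        entry.extend_from_slice(&94u32.to_be_bytes());
        entry.extend_from_slice(&7u32.to_be_bytes());
        let mut reader = Cursor::new(entry.as_slice());
        let record = Record::parse_record(&mut reader).unwrap();
        assert_eq!(record.record_data_offset, 94);
        assert_eq!(record.id, 7);
    }

    #[test]
    fn parse_records_reads_uncompressed_utf8() {
        let text = b"Hello, MOBI!";
        // 78 placeholder header bytes, then two record info entries.
        let mut content = vec![0u8; RECORDS_START_INDEX as usize];
        let data_start = RECORDS_START_INDEX as u32 + 2 * 8;
        content.extend_from_slice(&data_start.to_be_bytes());
        content.extend_from_slice(&0u32.to_be_bytes()); // id of record 0
        content.extend_from_slice(&(data_start + text.len() as u32).to_be_bytes());
        content.extend_from_slice(&1u32.to_be_bytes()); // id of record 1
        content.extend_from_slice(text);

        let records =
            Record::parse_records(&content, 2, 0, Compression::No, TextEncoding::UTF8).unwrap();
        assert_eq!(records.len(), 2);
        assert_eq!(records[0].record_data, "Hello, MOBI!");
        // The last entry only marks where the previous record ends.
        assert_eq!(records[1].record_data, "");
    }
}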