use log::debug;
use nom::error::ErrorKind;
use nom::{Err, IResult};
use nom::bytes::complete::take;
use nom::multi::count;
use nom::number::complete::{le_u16, le_u8};
use std::{
collections::HashMap,
fmt::{Display, Formatter, Result},
string::FromUtf8Error,
};
use crate::serialize::{little_endian_word_to_bytes, Serializer};
/// File type codes as stored (without the lock bit) in a catalog entry's
/// type byte.
///
/// NOTE(review): `RelocatableObjectModule`/`AType`/`BType` use decimal
/// 10/20/40 here, but DOS 3.3 references list these codes in hex
/// ($10/$20/$40 = 16/32/64). `parse_file_entry` and `Serializer` agree
/// with each other either way, so round-trips within this crate work —
/// confirm against a real disk image before changing.
#[derive(Clone, Copy, Debug)]
pub enum FileType {
    Text = 0,
    IntegerBasic = 1,
    AppleSoftBasic = 2,
    Binary = 4,
    SType = 8,
    RelocatableObjectModule = 10,
    AType = 20,
    BType = 40,
    // Fallback for unrecognized type codes.
    Unknown,
}
impl Display for FileType {
    /// Formats the file type as its short letter tag.
    fn fmt(&self, f: &mut Formatter) -> Result {
        let tag = match self {
            FileType::Text => "T",
            FileType::IntegerBasic => "I",
            FileType::AppleSoftBasic => "A",
            FileType::Binary => "B",
            FileType::SType => "S",
            FileType::RelocatableObjectModule => "R",
            FileType::AType => "AT",
            FileType::BType => "BT",
            FileType::Unknown => "U",
        };
        write!(f, "{}", tag)
    }
}
/// One 35-byte catalog entry describing a single file.
#[derive(Clone, Copy, Debug)]
pub struct FileEntry<'a> {
    // Location of the first track/sector-list sector for this file.
    pub track_of_first_track_sector_list_sector: u8,
    pub sector_of_first_track_sector_list_sector: u8,
    pub file_type: FileType,
    // Set from bit 7 of the on-disk type byte.
    pub locked: bool,
    // Raw name bytes as stored on disk (usually high-bit set and
    // space-padded to 30 bytes); use filename() for the decoded form.
    pub file_name: &'a [u8],
    pub file_length_in_sectors: u16,
}
/// A file's track/sector-list chain plus its extracted data bytes.
pub struct File<'a> {
    track_sector_lists: TrackSectorLists<'a>,
    pub data: Vec<u8>,
}
impl Display for File<'_> {
    /// Dumps each track/sector list followed by the total data length.
    fn fmt(&self, f: &mut Formatter) -> Result {
        self.track_sector_lists
            .iter()
            .try_for_each(|tsl| writeln!(f, "track_sector_list: {}", tsl))?;
        writeln!(f, "length of data: {}", self.data.len())
    }
}
/// Files extracted from an image, keyed by decoded filename.
pub type Files<'a> = HashMap<String, File<'a>>;
/// One track/sector-list sector: the link to the next list in the chain
/// plus the data-sector addresses it holds.
#[derive(Clone)]
pub struct TrackSectorList<'a> {
    pub reserved: u8,
    // Location of the next TSL sector; None when the stored byte is 0.
    pub track_number_of_next_sector: Option<u8>,
    pub sector_number_of_next_sector: Option<u8>,
    pub reserved_2: &'a [u8],
    // Two raw bytes; presumably the file-relative offset of this list's
    // first sector — not interpreted by this crate, confirm before use.
    pub sector_offset_in_file: &'a [u8],
    pub reserved_3: &'a [u8],
    pub track_sector_pairs: TrackSectorPairs,
}
impl<'a> Display for TrackSectorList<'a> {
    /// Human-readable dump of every field for debugging.
    ///
    /// Fixes from review: `reserved` and `reserved_2` previously used
    /// `write!` (no newline) so they ran into the following line,
    /// `sector_offset_in_file` and `reserved_3` were never printed, and
    /// one message carried a stray trailing comma.
    fn fmt(&self, f: &mut Formatter) -> Result {
        writeln!(f, "reserved: {}", self.reserved)?;
        match self.track_number_of_next_sector {
            Some(x) => writeln!(f, "track_number_of_next_sector: 0x{:02X}", x)?,
            None => writeln!(f, "track_number_of_next_sector: None")?,
        }
        match self.sector_number_of_next_sector {
            Some(x) => writeln!(f, "sector_number_of_next_sector: 0x{:02X}", x)?,
            None => writeln!(f, "sector_number_of_next_sector: None")?,
        }
        writeln!(f, "reserved_2: {:?}", self.reserved_2)?;
        writeln!(f, "sector_offset_in_file: {:?}", self.sector_offset_in_file)?;
        writeln!(f, "reserved_3: {:?}", self.reserved_3)?;
        writeln!(f, "Track Sector Pairs:")?;
        for tsp in &self.track_sector_pairs {
            writeln!(f, "track_sector_pair: {}", tsp)?;
        }
        writeln!(f)
    }
}
impl<'a> Serializer<'a> for TrackSectorList<'a> {
    /// Serializes the list back to its on-disk byte layout.
    ///
    /// `None` next-track/next-sector values are written as 0, mirroring
    /// how `parse_track_sector_list` maps 0 to `None`.
    ///
    /// # Errors
    /// Propagates failures from serializing the track/sector pairs
    /// (the previous version unwrapped and would have panicked).
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        let mut bytes = Vec::new();
        bytes.push(self.reserved);
        // 0 is the on-disk sentinel for "no next sector".
        bytes.push(self.track_number_of_next_sector.unwrap_or(0));
        bytes.push(self.sector_number_of_next_sector.unwrap_or(0));
        bytes.extend_from_slice(self.reserved_2);
        bytes.extend_from_slice(self.sector_offset_in_file);
        bytes.extend_from_slice(self.reserved_3);
        bytes.append(&mut self.track_sector_pairs.as_vec()?);
        Ok(bytes)
    }
}
/// The ordered chain of track/sector-list sectors for one file.
pub type TrackSectorLists<'a> = Vec<TrackSectorList<'a>>;
/// Parses one track/sector-list sector.
///
/// Layout: 1 reserved byte, next-TSL track, next-TSL sector, 2 reserved
/// bytes, a 2-byte sector offset, 5 reserved bytes, then up to 122
/// track/sector pairs. A pair with track number 0 terminates the list,
/// and a next-track/next-sector byte of 0 is mapped to `None`.
///
/// Fix from review: a 256-byte sector holds (256 - 12) / 2 = 122 pairs,
/// but the old loop capped at 121 and silently dropped the final pair of
/// a completely full list.
pub fn parse_track_sector_list(i: &[u8]) -> IResult<&[u8], TrackSectorList> {
    let (i, reserved) = le_u8(i)?;
    let (i, track_number_of_next_sector) = le_u8(i)?;
    let track_number_of_next_sector = if track_number_of_next_sector != 0 {
        Some(track_number_of_next_sector)
    } else {
        None
    };
    let (i, sector_number_of_next_sector) = le_u8(i)?;
    let sector_number_of_next_sector = if sector_number_of_next_sector != 0 {
        Some(sector_number_of_next_sector)
    } else {
        None
    };
    let (i, reserved_2) = take(2_usize)(i)?;
    let (i, sector_offset_in_file) = take(2_usize)(i)?;
    let (i, reserved_3) = take(5_usize)(i)?;
    // (256 bytes per sector - 12 header bytes) / 2 bytes per pair.
    let max_tsps = 122;
    let mut track_sector_pairs: Vec<TrackSectorPair> = Vec::new();
    let mut i = i;
    for _ in 0..max_tsps {
        let (rest, pair) = parse_track_sector_pair(i)?;
        i = rest;
        // Track 0 never holds file data, so it doubles as the terminator.
        if pair.track_number == 0 {
            break;
        }
        track_sector_pairs.push(pair);
    }
    Ok((
        i,
        TrackSectorList {
            reserved,
            track_number_of_next_sector,
            sector_number_of_next_sector,
            reserved_2,
            sector_offset_in_file,
            reserved_3,
            track_sector_pairs,
        },
    ))
}
/// The (track, sector) address of one data sector.
/// A pair with track_number == 0 terminates a track/sector list.
#[derive(Clone, Copy, Debug)]
pub struct TrackSectorPair {
    pub track_number: u8,
    pub sector_number: u8,
}
impl Display for TrackSectorPair {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(
f,
"track_number: 0x{:02X}, sector_number: 0x{:02X}",
self.track_number, self.sector_number
)
}
}
impl<'a> Serializer<'a> for TrackSectorPair {
    /// Emits the two-byte on-disk form: track then sector.
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        let mut bytes = Vec::with_capacity(2);
        bytes.push(self.track_number);
        bytes.push(self.sector_number);
        Ok(bytes)
    }
}
/// The (track, sector) pairs held by one track/sector list.
pub type TrackSectorPairs = Vec<TrackSectorPair>;
impl<'a> Serializer<'a> for TrackSectorPairs {
    /// Concatenates the two-byte serialization of every pair.
    ///
    /// # Errors
    /// Propagates the first pair-serialization failure instead of
    /// unwrapping (the previous version panicked on error).
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        // Each pair contributes exactly two bytes.
        let mut bytes: Vec<u8> = Vec::with_capacity(self.len() * 2);
        for tsp in self {
            bytes.append(&mut tsp.as_vec()?);
        }
        Ok(bytes)
    }
}
/// Reads one track/sector pair: two consecutive bytes, track first.
pub fn parse_track_sector_pair(i: &[u8]) -> IResult<&[u8], TrackSectorPair> {
    let (rest, track_number) = le_u8(i)?;
    let (rest, sector_number) = le_u8(rest)?;
    let pair = TrackSectorPair {
        track_number,
        sector_number,
    };
    Ok((rest, pair))
}
impl<'a> FileEntry<'a> {
    /// Builds a catalog entry from an ASCII filename and metadata.
    pub fn new(
        track_of_first_track_sector_list_sector: u8,
        sector_of_first_track_sector_list_sector: u8,
        file_type: FileType,
        locked: bool,
        filename: &str,
        file_length_in_sectors: u16,
    ) -> FileEntry {
        FileEntry {
            track_of_first_track_sector_list_sector,
            sector_of_first_track_sector_list_sector,
            file_type,
            locked,
            file_name: filename.as_bytes(),
            file_length_in_sectors,
        }
    }
    /// Decodes the stored filename: clears the high bit carried by each
    /// on-disk byte, then strips the trailing space padding.
    ///
    /// # Errors
    /// Returns `FromUtf8Error` if the masked bytes are not valid UTF-8.
    pub fn filename(&self) -> std::result::Result<String, FromUtf8Error> {
        let filename_vector: Vec<u8> = self
            .file_name
            .iter()
            // Mask instead of the old `if *c > 0x80 { *c - 0x80 }`: that
            // comparison left a lone 0x80 byte untouched, which is not
            // valid UTF-8 and made the conversion below fail.
            .map(|c| c & 0x7F)
            .collect();
        let file_name = String::from_utf8(filename_vector)?;
        Ok(String::from(file_name.trim_end_matches(' ')))
    }
    /// Concatenates this file's data sectors (in track/sector-list order)
    /// and, for Binary files, strips the 4-byte address/length header,
    /// truncating to the embedded length when enough data is present.
    ///
    /// # Errors
    /// Only Binary files are currently exportable; any other type yields
    /// an `Invalid` error. Parse failures on the header are propagated.
    pub fn get_data(
        &self,
        tracks: &[Vec<&[u8]>],
        track_sector_lists: &TrackSectorLists,
    ) -> std::result::Result<Vec<u8>, crate::error::Error> {
        let data: Vec<u8> = track_sector_lists
            .iter()
            .flat_map(|tsl| tsl.track_sector_pairs.clone())
            .flat_map(|tsp| tracks[tsp.track_number as usize][tsp.sector_number as usize])
            .copied()
            .collect();
        match self.file_type {
            FileType::Binary => {
                if data.len() >= 4 {
                    // Binary files start with a little-endian load address
                    // followed by a little-endian payload length.
                    let (i, address) = le_u16(data.as_slice())?;
                    debug!("Binary file address: {}", address);
                    let (_i, len) = le_u16(i)?;
                    debug!("Binary file length: {}", len);
                    if (data.len() - 4) >= len.into() {
                        // Widen before adding so `len + 4` cannot overflow
                        // u16 for lengths near u16::MAX.
                        Ok(data[4..4 + len as usize].to_vec())
                    } else {
                        // Embedded length exceeds the available data: fall
                        // back to returning everything, header included.
                        Ok(data)
                    }
                } else {
                    Ok(data)
                }
            }
            _ => {
                let error = crate::error::Error::new(crate::error::ErrorKind::Invalid(
                    crate::error::InvalidErrorKind::Invalid(format!(
                        "Unsupported file type for export: {}",
                        self.file_type
                    )),
                ));
                debug!("{}", error);
                Err(error)
            }
        }
    }
    /// Follows the chain of track/sector-list sectors for this entry,
    /// starting at the entry's first TSL address, returning them in order.
    ///
    /// # Errors
    /// Propagates parse failures (previously unwrapped, which panicked).
    pub fn build_file(
        &self,
        tracks: &[Vec<&'a [u8]>],
    ) -> std::result::Result<TrackSectorLists<'a>, crate::error::Error> {
        let mut track_sector_lists: TrackSectorLists = Vec::new();
        let track = self.track_of_first_track_sector_list_sector;
        let sector = self.sector_of_first_track_sector_list_sector;
        let (_i, track_sector_list) =
            parse_track_sector_list(tracks[track as usize][sector as usize])?;
        let mut track = track_sector_list.track_number_of_next_sector;
        let mut sector = track_sector_list.sector_number_of_next_sector;
        debug!("track sector list: {}", track_sector_list);
        track_sector_lists.push(track_sector_list);
        // NOTE(review): a link with a track but no sector panicked before;
        // it is now treated as end-of-chain — confirm against real images.
        while let (Some(t), Some(s)) = (track, sector) {
            debug!("TSList track {}, sector {}", t, s);
            let (_i, tsl) = parse_track_sector_list(tracks[t as usize][s as usize])?;
            track = tsl.track_number_of_next_sector;
            sector = tsl.sector_number_of_next_sector;
            track_sector_lists.push(tsl);
        }
        Ok(track_sector_lists)
    }
}
impl<'a> Serializer<'a> for FileEntry<'a> {
    /// Serializes the entry into its 35-byte catalog form: TSL track, TSL
    /// sector, type byte (bit 7 = locked), 30 filename bytes with the high
    /// bit set and 0xA0 (high-bit space) padding, then the 16-bit sector
    /// count.
    ///
    /// # Errors
    /// Returns an error if the filename is empty or longer than 30 bytes.
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        // Validate up front so nothing is built for an invalid name.
        let num_bytes = self.file_name.len();
        if (num_bytes == 0) || (num_bytes > 30) {
            return Err(crate::error::Error::new(crate::error::ErrorKind::Invalid(
                crate::error::InvalidErrorKind::Invalid(format!(
                    "Filename size is invalid: {}",
                    num_bytes
                )),
            )));
        }
        let mut bytes: Vec<u8> = Vec::with_capacity(35);
        bytes.push(self.track_of_first_track_sector_list_sector);
        bytes.push(self.sector_of_first_track_sector_list_sector);
        // Bit 7 of the type byte marks the file as locked.
        let file_type = if self.locked {
            self.file_type as u8 + 0x80
        } else {
            self.file_type as u8
        };
        bytes.push(file_type);
        // Set the high bit with OR rather than `+ 0x80` so a byte that
        // already has it set cannot overflow.
        bytes.extend(self.file_name.iter().map(|c| c | 0x80));
        // Pad the name field to 30 bytes (3 header bytes + 30 = 33) with
        // 0xA0, the high-bit-set space.
        bytes.resize(33, 0xA0);
        bytes.append(&mut little_endian_word_to_bytes(
            self.file_length_in_sectors,
        ));
        Ok(bytes)
    }
}
/// A file together with its load metadata, ready for export.
pub struct FullFile<'a> {
    pub file_entry: FileEntry<'a>,
    pub data: Vec<u8>,
    // Load address and payload length words (used for Binary files).
    pub address: u16,
    pub length: u16,
}
impl<'a> Serializer<'a> for FullFile<'a> {
    /// Serializes a Binary file as address word, length word, then data.
    ///
    /// # Errors
    /// Non-Binary file types are not yet serializable and yield an
    /// `Unimplemented` error.
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        match self.file_entry.file_type {
            FileType::Binary => {
                let mut bytes: Vec<u8> = Vec::with_capacity(self.data.len() + 4);
                bytes.append(&mut little_endian_word_to_bytes(self.address));
                bytes.append(&mut little_endian_word_to_bytes(self.length));
                // Copy instead of clone-then-append: one allocation total.
                bytes.extend_from_slice(&self.data);
                Ok(bytes)
            }
            _ => Err(crate::error::Error::new(
                crate::error::ErrorKind::Unimplemented(format!(
                    // Fixed typo in the error message: "tyep" -> "type".
                    "Unsupported file type for serialization: {}",
                    self.file_entry.file_type
                )),
            )),
        }
    }
}
impl Display for FileEntry<'_> {
    /// One catalog line: TSL track, TSL sector, type tag, sector count,
    /// filename (blank when the name cannot be decoded).
    fn fmt(&self, f: &mut Formatter) -> Result {
        let name = self.filename().unwrap_or_else(|_| String::new());
        writeln!(
            f,
            "{:>3} {:>3} {} {:>3} {:<30}",
            self.track_of_first_track_sector_list_sector,
            self.sector_of_first_track_sector_list_sector,
            self.file_type,
            self.file_length_in_sectors,
            name,
        )
    }
}
/// Parses one 35-byte catalog file entry: TSL track, TSL sector, type
/// byte, 30 raw filename bytes, and the 16-bit length in sectors.
pub fn parse_file_entry(i: &[u8]) -> IResult<&[u8], FileEntry> {
    let (i, track_of_first_track_sector_list_sector) = le_u8(i)?;
    let (i, sector_of_first_track_sector_list_sector) = le_u8(i)?;
    let (i, file_type) = le_u8(i)?;
    // Bit 7 of the type byte is the "locked" flag.
    let locked = (file_type & 0x80) != 0;
    // NOTE(review): DOS 3.3 references give the R/new-A/new-B codes in hex
    // ($10/$20/$40); the decimal 10/20/40 below match this crate's
    // FileType discriminants (so in-crate round-trips work) but may not
    // match real disk images — confirm before relying on these types.
    let file_type = match file_type & 0x7F {
        0 => FileType::Text,
        1 => FileType::IntegerBasic,
        2 => FileType::AppleSoftBasic,
        4 => FileType::Binary,
        8 => FileType::SType,
        10 => FileType::RelocatableObjectModule,
        20 => FileType::AType,
        40 => FileType::BType,
        _ => FileType::Unknown,
    };
    // Raw, still high-bit-encoded name; FileEntry::filename() decodes it.
    let (i, filename) = take(30_usize)(i)?;
    let (i, file_length_in_sectors) = le_u16(i)?;
    Ok((
        i,
        FileEntry {
            track_of_first_track_sector_list_sector,
            sector_of_first_track_sector_list_sector,
            file_type,
            locked,
            file_name: filename,
            file_length_in_sectors,
        },
    ))
}
pub fn valid_file_entry(i: &[u8]) -> IResult<&[u8], bool> {
let (i, res1) = le_u8(i)?;
let (i, res2) = le_u8(i)?;
if (res1 != 0) && (res2 != 0) {
Ok((i, true))
} else {
Err(Err::Error(nom::error_position!(i, ErrorKind::Fail)))
}
}
/// One parsed catalog sector.
#[derive(Clone)]
pub struct Catalog<'a> {
    pub reserved: u8,
    // Location of the next catalog sector; 0 marks the end of the chain.
    pub track_number_of_next_sector: u8,
    pub sector_number_of_next_sector: u8,
    pub reserved_2: &'a [u8],
    // Entries in this sector whose slot is in use (see valid_file).
    pub file_entries: Vec<FileEntry<'a>>,
    // The same entries indexed by decoded filename.
    pub catalog_by_filename: HashMap<String, FileEntry<'a>>,
}
impl Display for Catalog<'_> {
    /// Prints the chain link fields followed by one line per file entry.
    fn fmt(&self, f: &mut Formatter) -> Result {
        writeln!(
            f,
            "track number of next sector: {}",
            self.track_number_of_next_sector
        )?;
        writeln!(
            f,
            "sector number of next sector: {}",
            self.sector_number_of_next_sector
        )?;
        self.file_entries
            .iter()
            .try_for_each(|file_entry| write!(f, "{}", file_entry))?;
        writeln!(f)
    }
}
impl<'a> Serializer<'a> for Catalog<'a> {
    /// Serializes the catalog sector: header bytes, each file entry, then
    /// zero padding for the unused entry slots (7 slots of 35 bytes).
    ///
    /// # Errors
    /// Propagates the first file-entry serialization failure — the
    /// previous `flat_map(..).flatten()` silently dropped failing entries,
    /// producing a short, misaligned sector.
    fn as_vec(&'a self) -> std::result::Result<Vec<u8>, crate::error::Error> {
        let mut v: Vec<u8> = Vec::new();
        v.push(self.reserved);
        v.push(self.track_number_of_next_sector);
        v.push(self.sector_number_of_next_sector);
        v.extend_from_slice(self.reserved_2);
        for fe in &self.file_entries {
            v.append(&mut fe.as_vec()?);
        }
        // saturating_sub guards against a panic if more than 7 entries
        // are ever present.
        let padding_len = 7_usize.saturating_sub(self.file_entries.len()) * 35;
        v.resize(v.len() + padding_len, 0);
        Ok(v)
    }
}
/// A catalog slot is in use when its first-TSL track byte is neither
/// 0x00 nor 0xFF (conventionally unused / deleted slots).
pub fn valid_file(track_of_first_track_sector_list_sector: u8) -> bool {
    !matches!(track_of_first_track_sector_list_sector, 0x00 | 0xFF)
}
/// Parses one catalog sector: 11 header bytes followed by seven 35-byte
/// file entries. Unused/deleted slots (see `valid_file`) are filtered
/// out, and the remaining entries are additionally indexed by filename.
pub fn parse_catalog(i: &[u8]) -> IResult<&[u8], Catalog> {
    let (i, reserved) = le_u8(i)?;
    let (i, track_number_of_next_sector) = le_u8(i)?;
    let (i, sector_number_of_next_sector) = le_u8(i)?;
    let (i, reserved_2) = take(8_usize)(i)?;
    let (i, file_entries) = count(parse_file_entry, 7)(i)?;
    let file_entries: Vec<FileEntry> = file_entries
        .iter()
        .filter(|fe| valid_file(fe.track_of_first_track_sector_list_sector))
        .copied()
        .collect();
    let mut catalog_by_filename: HashMap<String, FileEntry> = HashMap::new();
    for fe in &file_entries {
        // Skip (rather than panic on, as the old unwrap did) entries whose
        // name is not valid UTF-8 after masking; they remain reachable
        // through file_entries.
        if let Ok(name) = fe.filename() {
            catalog_by_filename.insert(name, *fe);
        }
    }
    Ok((
        i,
        Catalog {
            reserved,
            track_number_of_next_sector,
            sector_number_of_next_sector,
            reserved_2,
            file_entries,
            catalog_by_filename,
        },
    ))
}
/// All file entries merged from every catalog sector in the chain.
#[derive(Clone, Debug)]
pub struct FullCatalog<'a> {
    pub file_entries: Vec<FileEntry<'a>>,
    // The same entries indexed by decoded filename.
    pub catalog_by_filename: HashMap<String, FileEntry<'a>>,
}
impl Display for FullCatalog<'_> {
    /// Prints one line per entry (FileEntry's Display ends each line).
    fn fmt(&self, f: &mut Formatter) -> Result {
        self.file_entries
            .iter()
            .try_for_each(|entry| write!(f, "{}", entry))?;
        writeln!(f)
    }
}
/// Walks the chain of catalog sectors starting at
/// (`catalog_track`, `catalog_sector`) and merges every file entry into a
/// single `FullCatalog`. The chain ends when a sector's next-track or
/// next-sector field is 0.
///
/// # Errors
/// Propagates catalog-sector parse failures. Entries whose name cannot be
/// decoded are kept in `file_entries` but skipped from the by-name index
/// (the previous version panicked on them via `unwrap`).
pub fn parse_catalogs<'a>(
    tracks: &[Vec<&'a [u8]>],
    catalog_track: u8,
    catalog_sector: u8,
) -> std::result::Result<FullCatalog<'a>, crate::error::Error> {
    let mut file_entries: Vec<FileEntry> = Vec::new();
    let mut catalog_by_filename: HashMap<String, FileEntry> = HashMap::new();
    let (_i, mut catalog) = parse_catalog(tracks[catalog_track as usize][catalog_sector as usize])?;
    debug!("tracks length: {}", tracks.len());
    debug!("track one length: {}", tracks[0].len());
    loop {
        for file in &catalog.file_entries {
            file_entries.push(*file);
            if let Ok(name) = file.filename() {
                catalog_by_filename.insert(name, *file);
            }
        }
        // A zero next-track or next-sector terminates the chain.
        if catalog.track_number_of_next_sector == 0 || catalog.sector_number_of_next_sector == 0 {
            break;
        }
        let (_i, c) = parse_catalog(
            tracks[catalog.track_number_of_next_sector as usize]
                [catalog.sector_number_of_next_sector as usize],
        )?;
        debug!("parsed another catalog: {}", c);
        catalog = c;
    }
    Ok(FullCatalog {
        file_entries,
        catalog_by_filename,
    })
}
impl<'a> Catalog<'a> {
    /// Looks up a file entry by name and returns its data.
    ///
    /// NOTE(review): this is an unfinished stub — it panics if `filename`
    /// is not present in the catalog and always returns an empty buffer.
    /// The working extraction path is `build_files` /
    /// `FileEntry::get_data`.
    pub fn get_file(&self, filename: &str) -> Vec<u8> {
        let _file_entry = self.catalog_by_filename.get(filename).unwrap();
        let data: Vec<u8> = Vec::new();
        data
    }
}
/// Builds the filename -> `File` map for every entry in the catalog.
///
/// Entries whose data cannot be extracted (e.g. unsupported file types)
/// are still included, with empty data. Entries whose name cannot be
/// decoded are skipped — they could not be keyed in the map anyway, and
/// the previous version panicked on them.
///
/// # Errors
/// Propagates track/sector-list parse failures from `build_file`.
pub fn build_files<'a>(
    catalog: FullCatalog<'a>,
    tracks: &[Vec<&'a [u8]>],
) -> std::result::Result<Files<'a>, crate::error::Error> {
    let mut files: Files = HashMap::new();
    for file_entry in &catalog.file_entries {
        // Decode the name once instead of unwrapping it repeatedly.
        let filename = match file_entry.filename() {
            Ok(filename) => filename,
            Err(_) => continue,
        };
        let track_sector_lists = file_entry.build_file(tracks)?;
        debug!("Building file: {}", filename);
        // Fall back to an empty body when extraction is unsupported.
        let data = file_entry
            .get_data(tracks, &track_sector_lists)
            .unwrap_or_default();
        files.insert(
            filename,
            File {
                track_sector_lists,
                data,
            },
        );
    }
    Ok(files)
}
#[cfg(test)]
mod tests {
    use super::{
        build_files, parse_catalog, parse_catalogs, parse_file_entry, Catalog, FileEntry, FileType,
        TrackSectorList, TrackSectorPair, TrackSectorPairs,
    };
    use crate::serialize::{little_endian_word_to_bytes, Serializer};
    use nom::AsBytes;
    use pretty_assertions::assert_eq;
    use std::collections::HashMap;

    // Serializes a FileEntry into the fixed 35-byte on-disk form.
    fn file_entry_as_bytes(
        file_entry: &FileEntry,
    ) -> std::result::Result<[u8; 35], crate::error::Error> {
        Ok(file_entry.as_vec()?.as_bytes().try_into().unwrap())
    }

    #[test]
    fn parse_file_entry_works() {
        // 0xC8 0xC5 0xCC 0xCC 0xCF = "HELLO" with the high bit set,
        // padded with 0xA0 (high-bit spaces).
        let data: [u8; 35] = [
            0x12, 0x0F, 0x02, 0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let result = parse_file_entry(&data);
        match result {
            Ok(file_entry) => {
                assert_eq!(file_entry.1.track_of_first_track_sector_list_sector, 18);
                assert_eq!(file_entry.1.sector_of_first_track_sector_list_sector, 15);
                match file_entry.1.file_type {
                    FileType::AppleSoftBasic => {
                        assert_eq!(true, true);
                    }
                    _ => {
                        panic!("Invalid file type parsed");
                    }
                }
                assert!(!file_entry.1.locked);
                assert_eq!(
                    file_entry.1.file_name,
                    [
                        0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                        0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                        0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                    ]
                );
            }
            Err(e) => {
                panic!("Error parsing: {}", e);
            }
        }
    }

    #[test]
    fn parse_file_entry_locked_works() {
        // Same entry as above but with bit 7 of the type byte (0x82) set.
        let data: [u8; 35] = [
            0x12, 0x0F, 0x82, 0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let result = parse_file_entry(&data);
        match result {
            Ok(file_entry) => {
                assert_eq!(file_entry.1.track_of_first_track_sector_list_sector, 18);
                assert_eq!(file_entry.1.sector_of_first_track_sector_list_sector, 15);
                match file_entry.1.file_type {
                    FileType::AppleSoftBasic => {
                        assert_eq!(true, true);
                    }
                    _ => {
                        panic!("Invalid file type parsed");
                    }
                }
                assert!(file_entry.1.locked);
                assert_eq!(
                    file_entry.1.file_name,
                    [
                        0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                        0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                        0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
                    ]
                );
            }
            Err(e) => {
                panic!("Error parsing: {}", e);
            }
        }
    }

    #[test]
    fn serialize_file_entry_works() {
        let expected_data: [u8; 35] = [
            0x12, 0x0F, 0x02, 0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let data = file_entry_as_bytes(&FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            false,
            "HELLO",
            0x0002,
        ));
        assert_eq!(data.unwrap(), expected_data);
    }

    #[test]
    fn serialize_locked_file_entry_works() {
        let expected_data: [u8; 35] = [
            0x12, 0x0F, 0x82, 0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let data = file_entry_as_bytes(&FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            true,
            "HELLO",
            0x0002,
        ));
        assert_eq!(data.unwrap(), expected_data);
    }

    // Filename length boundaries: valid range is 1..=30 bytes.
    #[test]
    fn serialize_file_name_len_0_file_entry_fails() {
        let file_entry = FileEntry::new(0x12, 0x0F, FileType::AppleSoftBasic, false, "", 0x0002);
        let file_entry_as_vec = file_entry.as_vec();
        match file_entry_as_vec {
            Ok(_) => panic!("Shouldn't be a valid FileEntry"),
            Err(e) => assert_eq!(
                e.to_string(),
                "Image is invalid: Filename size is invalid: 0"
            ),
        }
    }

    #[test]
    fn serialize_file_name_len_1_file_entry_works() {
        let expected_data: [u8; 35] = [
            0x12, 0x0F, 0x02, 0xC8, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let data = file_entry_as_bytes(&FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            false,
            "H",
            0x0002,
        ));
        assert_eq!(data.unwrap(), expected_data);
    }

    #[test]
    fn serialize_file_name_len_30_file_entry_works() {
        let expected_data: [u8; 35] = [
            0x12, 0x0F, 0x02, 0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xB0,
            0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xB0, 0xB1, 0xB2, 0xB3, 0xB4,
            0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0x02, 0x00,
        ];
        let data = file_entry_as_bytes(&FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            false,
            "012345678901234567890123456789",
            0x0002,
        ));
        assert_eq!(data.unwrap(), expected_data);
    }

    #[test]
    fn serialize_file_name_len_31_file_entry_fails() {
        let file_entry = FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            false,
            "0123456789012345678901234567890",
            0x0002,
        );
        let file_entry_as_vec = file_entry.as_vec();
        match file_entry_as_vec {
            Ok(_) => panic!("Shouldn't be a valid FileEntry"),
            Err(e) => assert_eq!(
                e.to_string(),
                "Image is invalid: Filename size is invalid: 31"
            ),
        }
    }

    #[test]
    fn file_entry_filename_works() {
        let data: [u8; 35] = [
            0x12, 0x0F, 0x02, 0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let result = parse_file_entry(&data);
        match result {
            Ok(file_entry) => match file_entry.1.filename() {
                Ok(filename) => {
                    assert_eq!(filename, "HELLO");
                }
                Err(e) => {
                    panic!("Invalid filename: {}", e);
                }
            },
            Err(e) => {
                panic!("Error parsing: {}", e);
            }
        }
    }

    #[test]
    fn serialize_track_sector_pair_works() {
        let tsp = TrackSectorPair {
            track_number: 0x12,
            sector_number: 0x34,
        };
        let data = tsp.as_vec().unwrap();
        assert_eq!(data.len(), 2);
        assert_eq!(data[0], 0x12);
        assert_eq!(data[1], 0x34);
    }

    // TSL serialization: 12 header bytes plus 2 bytes per pair.
    #[test]
    fn serialize_track_sector_list_with_zero_track_sector_pair_works() {
        let tsl = TrackSectorList {
            reserved: 0x01,
            track_number_of_next_sector: None,
            sector_number_of_next_sector: None,
            reserved_2: &[0x02, 0x03],
            sector_offset_in_file: &[0x04, 0x05],
            reserved_3: &[0x06, 0x07, 0x08, 0x09, 0x10],
            track_sector_pairs: Vec::new(),
        };
        let data = tsl.as_vec().unwrap();
        assert_eq!(data.len(), 12);
        assert_eq!(data[0], 0x01);
        assert_eq!(data[1], 0x00);
        assert_eq!(data[2], 0x00);
        assert_eq!(data[3], 0x02);
        assert_eq!(data[4], 0x03);
        assert_eq!(data[5], 0x04);
        assert_eq!(data[6], 0x05);
        assert_eq!(data[7], 0x06);
        assert_eq!(data[8], 0x07);
        assert_eq!(data[9], 0x08);
        assert_eq!(data[10], 0x09);
        assert_eq!(data[11], 0x10);
    }

    #[test]
    fn serialize_track_sector_list_with_one_track_sector_pair_works() {
        let tsp = TrackSectorPair {
            track_number: 0x12,
            sector_number: 0x34,
        };
        let tsps = Vec::from([tsp]);
        let tsl = TrackSectorList {
            reserved: 0x01,
            track_number_of_next_sector: None,
            sector_number_of_next_sector: None,
            reserved_2: &[0x02, 0x03],
            sector_offset_in_file: &[0x04, 0x05],
            reserved_3: &[0x06, 0x07, 0x08, 0x09, 0x10],
            track_sector_pairs: tsps,
        };
        let data = tsl.as_vec().unwrap();
        assert_eq!(data.len(), 14);
        assert_eq!(data[0], 0x01);
        assert_eq!(data[1], 0x00);
        assert_eq!(data[2], 0x00);
        assert_eq!(data[3], 0x02);
        assert_eq!(data[4], 0x03);
        assert_eq!(data[5], 0x04);
        assert_eq!(data[6], 0x05);
        assert_eq!(data[7], 0x06);
        assert_eq!(data[8], 0x07);
        assert_eq!(data[9], 0x08);
        assert_eq!(data[10], 0x09);
        assert_eq!(data[11], 0x10);
        assert_eq!(data[12], 0x12);
        assert_eq!(data[13], 0x34);
    }

    #[test]
    fn serialize_track_sector_list_with_two_track_sector_pair_works() {
        let tsp1 = TrackSectorPair {
            track_number: 0x12,
            sector_number: 0x34,
        };
        let tsp2 = TrackSectorPair {
            track_number: 0x56,
            sector_number: 0x78,
        };
        let tsps = Vec::from([tsp1, tsp2]);
        let tsl = TrackSectorList {
            reserved: 0x01,
            track_number_of_next_sector: None,
            sector_number_of_next_sector: None,
            reserved_2: &[0x02, 0x03],
            sector_offset_in_file: &[0x04, 0x05],
            reserved_3: &[0x06, 0x07, 0x08, 0x09, 0x10],
            track_sector_pairs: tsps,
        };
        let data = tsl.as_vec().unwrap();
        assert_eq!(data.len(), 16);
        assert_eq!(data[0], 0x01);
        assert_eq!(data[1], 0x00);
        assert_eq!(data[2], 0x00);
        assert_eq!(data[3], 0x02);
        assert_eq!(data[4], 0x03);
        assert_eq!(data[5], 0x04);
        assert_eq!(data[6], 0x05);
        assert_eq!(data[7], 0x06);
        assert_eq!(data[8], 0x07);
        assert_eq!(data[9], 0x08);
        assert_eq!(data[10], 0x09);
        assert_eq!(data[11], 0x10);
        assert_eq!(data[12], 0x12);
        assert_eq!(data[13], 0x34);
        assert_eq!(data[14], 0x56);
        assert_eq!(data[15], 0x78);
    }

    // Catalog sector fixtures: 11 header bytes + 7 * 35 entry bytes = 256.
    #[test]
    fn parse_catalog_one_file_works() {
        let data_header: [u8; 46] = [
            0x00, 0x11, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x0F, 0x02,
            0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0x02, 0x00,
        ];
        let data_footer: [u8; 210] = [0; 210];
        let mut data: Vec<u8> = Vec::new();
        data.extend(data_header);
        data.extend(data_footer);
        let result = parse_catalog(&data);
        match result {
            Ok(catalog) => {
                assert_eq!(catalog.1.file_entries.len(), 1);
                let file_entry = catalog.1.file_entries.first().unwrap_or_else(|| {
                    panic!("Error getting file entry");
                });
                let filename = file_entry.filename().unwrap_or_else(|e| {
                    panic!("Error getting file name: {}", e);
                });
                assert_eq!(filename, "HELLO");
            }
            Err(e) => {
                panic!("Error parsing: {}", e);
            }
        }
    }

    #[test]
    fn parse_catalog_two_files_works() {
        let data_header: [u8; 81] = [
            0x00, 0x11, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x0F, 0x02,
            0xC8, 0xC5, 0xCC, 0xCC, 0xCF, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0x02, 0x00, 0x12, 0x0F, 0x02, 0xC8, 0xC5, 0xCC, 0xD0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0,
            0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0xA0, 0x02, 0x00,
        ];
        let data_footer: [u8; 210] = [0; 210];
        let mut data: Vec<u8> = Vec::new();
        data.extend(data_header);
        data.extend(data_footer);
        let result = parse_catalog(&data);
        match result {
            Ok(catalog) => {
                assert_eq!(catalog.1.file_entries.len(), 2);
                let file_entry = catalog.1.file_entries.first().unwrap_or_else(|| {
                    panic!("Error getting file entry");
                });
                let filename = file_entry.filename().unwrap_or_else(|e| {
                    panic!("Error getting file name: {}", e);
                });
                assert_eq!(filename, "HELLO");
                let file_entry = catalog
                    .1
                    .file_entries
                    .get(1)
                    .ok_or_else(|| {
                        panic!("Error getting file entry");
                    })
                    .unwrap_or_else(|_e| {
                        panic!("Error getting file entry");
                    });
                let filename = file_entry.filename().unwrap_or_else(|e| {
                    panic!("Error getting file name: {}", e);
                });
                assert_eq!(filename, "HELP");
            }
            Err(e) => {
                panic!("Error parsing: {}", e);
            }
        }
    }

    // Round-trip: serialize a catalog into a synthetic disk image
    // (35 tracks x 16 sectors x 256 bytes), then parse it back.
    #[test]
    fn parse_single_sector_catalog_works() {
        let file_entries_1 = [FileEntry::new(
            0x12,
            0x0F,
            FileType::AppleSoftBasic,
            false,
            "A",
            0x0002,
        )];
        let mut catalog_by_filename_1: HashMap<String, FileEntry> = HashMap::new();
        file_entries_1.iter().for_each(|fe| {
            catalog_by_filename_1.insert(fe.filename().unwrap(), *fe);
        });
        let catalog_1 = Catalog {
            reserved: 0x00,
            track_number_of_next_sector: 0x00,
            sector_number_of_next_sector: 0x00,
            reserved_2: &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
            file_entries: file_entries_1.to_vec(),
            catalog_by_filename: catalog_by_filename_1,
        };
        let catalog_1_bytes = catalog_1.as_vec().unwrap();
        let mut tracks: Vec<Vec<&[u8]>> = Vec::new();
        let mut disk_data: [[[u8; 256]; 16]; 35] = [[[0; 256]; 16]; 35];
        for (i, byte) in catalog_1_bytes.iter().enumerate() {
            disk_data[17][2][i] = *byte;
        }
        for track in &disk_data {
            let mut track_vec: Vec<&[u8]> = Vec::new();
            for sector in track {
                track_vec.push(sector);
            }
            tracks.push(track_vec);
        }
        let catalog = parse_catalogs(&tracks, 17, 2).expect("Should be a valid FullCatalog");
        assert_eq!(catalog.file_entries.len(), 1);
        assert_eq!(
            catalog
                .file_entries
                .first()
                .expect("Should have at least one file")
                .filename()
                .expect("Should be a valid filename"),
            "A"
        );
    }

    #[test]
    fn parse_multi_sector_catalog_works() {
        // First catalog sector is full (7 entries) and chains to a second
        // sector at track 0x11, sector 0x01 holding three more.
        let file_entries_1 = [
            FileEntry::new(0x12, 0x0F, FileType::AppleSoftBasic, false, "A", 0x0002),
            FileEntry::new(0x13, 0x0F, FileType::AppleSoftBasic, false, "B", 0x0002),
            FileEntry::new(0x14, 0x0F, FileType::AppleSoftBasic, false, "C", 0x0002),
            FileEntry::new(0x15, 0x0F, FileType::AppleSoftBasic, false, "D", 0x0002),
            FileEntry::new(0x16, 0x0F, FileType::AppleSoftBasic, false, "E", 0x0002),
            FileEntry::new(0x17, 0x0F, FileType::AppleSoftBasic, false, "F", 0x0002),
            FileEntry::new(0x18, 0x0F, FileType::AppleSoftBasic, false, "G", 0x0002),
        ];
        let file_entries_2 = [
            FileEntry::new(0x19, 0x0F, FileType::AppleSoftBasic, false, "H", 0x0002),
            FileEntry::new(0x1A, 0x0F, FileType::AppleSoftBasic, false, "I", 0x0002),
            FileEntry::new(0x1B, 0x0F, FileType::AppleSoftBasic, false, "J", 0x0002),
        ];
        let mut catalog_by_filename_1: HashMap<String, FileEntry> = HashMap::new();
        file_entries_1.iter().for_each(|fe| {
            catalog_by_filename_1.insert(fe.filename().unwrap(), *fe);
        });
        let mut catalog_by_filename_2: HashMap<String, FileEntry> = HashMap::new();
        file_entries_2.iter().for_each(|fe| {
            catalog_by_filename_2.insert(fe.filename().unwrap(), *fe);
        });
        let catalog_1 = Catalog {
            reserved: 0x00,
            track_number_of_next_sector: 0x11,
            sector_number_of_next_sector: 0x01,
            reserved_2: &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
            file_entries: file_entries_1.to_vec(),
            catalog_by_filename: catalog_by_filename_1,
        };
        let catalog_2 = Catalog {
            reserved: 0x00,
            track_number_of_next_sector: 0x00,
            sector_number_of_next_sector: 0x00,
            reserved_2: &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
            file_entries: file_entries_2.to_vec(),
            catalog_by_filename: catalog_by_filename_2,
        };
        let catalog_1_bytes = catalog_1.as_vec().unwrap();
        let catalog_2_bytes = catalog_2.as_vec().unwrap();
        let mut tracks: Vec<Vec<&[u8]>> = Vec::new();
        let mut disk_data: [[[u8; 256]; 16]; 35] = [[[0; 256]; 16]; 35];
        for (i, byte) in catalog_1_bytes.iter().enumerate() {
            disk_data[17][2][i] = *byte;
        }
        for (i, byte) in catalog_2_bytes.iter().enumerate() {
            disk_data[17][1][i] = *byte;
        }
        for track in &disk_data {
            let mut track_vec: Vec<&[u8]> = Vec::new();
            for sector in track {
                track_vec.push(sector);
            }
            tracks.push(track_vec);
        }
        let catalog = parse_catalogs(&tracks, 17, 2).expect("Should be a valid FullCatalog");
        assert_eq!(catalog.file_entries.len(), 10);
        assert_eq!(
            catalog
                .file_entries
                .first()
                .expect("Should have at least one file")
                .filename()
                .expect("Should be a valid filename"),
            "A"
        );
    }

    // Builds a Binary-file payload: 4-byte address/length header, "START",
    // a counting filler, then "END" (shorter sizes use abbreviated text).
    fn build_binary_test_file(size: u16) -> Vec<u8> {
        let mut data: Vec<u8> = Vec::new();
        data.extend([0x00, 0x10]);
        data.extend(little_endian_word_to_bytes(size));
        match size {
            0 => {}
            1 => {
                data.push(0x53);
            }
            2 => {
                data.extend([0x53, 0x44]);
            }
            3 => {
                data.extend([0x53, 0x54, 0x44]);
            }
            4 => {
                data.extend([0x53, 0x54, 0x4e, 0x44]);
            }
            5 => {
                data.extend([0x53, 0x54, 0x41, 0x4e, 0x44]);
            }
            6 => {
                data.extend([0x53, 0x54, 0x41, 0x45, 0x4e, 0x44]);
            }
            7 => {
                data.extend([0x53, 0x54, 0x41, 0x52, 0x45, 0x4e, 0x44]);
            }
            8 => {
                data.extend([0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x4e, 0x44]);
            }
            _ => {
                data.extend([0x53, 0x54, 0x41, 0x52, 0x54]);
                for i in 0..size - 8 {
                    data.push((i % 0x100).try_into().unwrap());
                }
                data.extend([0x45, 0x4e, 0x44]);
            }
        }
        data
    }

    #[test]
    fn build_single_sector_binary_file_works() {
        let file_entry = FileEntry::new(0x0A, 0x0D, FileType::Binary, false, "BLAH", 0x0001);
        let file_entries_1 = [file_entry];
        let mut catalog_by_filename_1: HashMap<String, FileEntry> = HashMap::new();
        file_entries_1.iter().for_each(|fe| {
            catalog_by_filename_1.insert(fe.filename().unwrap(), *fe);
        });
        let catalog_1 = Catalog {
            reserved: 0x00,
            track_number_of_next_sector: 0x00,
            sector_number_of_next_sector: 0x00,
            reserved_2: &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
            file_entries: file_entries_1.to_vec(),
            catalog_by_filename: catalog_by_filename_1,
        };
        let catalog_1_bytes = catalog_1.as_vec().unwrap();
        let mut tracks: Vec<Vec<&[u8]>> = Vec::new();
        let mut disk_data: [[[u8; 256]; 16]; 35] = [[[0; 256]; 16]; 35];
        for (i, byte) in catalog_1_bytes.iter().enumerate() {
            disk_data[17][2][i] = *byte;
        }
        // File data lives at track 0x11 sector 0x0B; its TSL at 0x0A/0x0D.
        let data = build_binary_test_file(200);
        for (i, byte) in data.iter().enumerate() {
            disk_data[0x11][0x0B][i] = *byte;
        }
        let tsp = TrackSectorPair {
            track_number: 0x11,
            sector_number: 0x0B,
        };
        let mut tsps: TrackSectorPairs = Vec::new();
        tsps.push(tsp);
        let tsl = TrackSectorList {
            reserved: 0,
            track_number_of_next_sector: None,
            sector_number_of_next_sector: None,
            reserved_2: &[0, 0],
            sector_offset_in_file: &[0, 0],
            reserved_3: &[0, 0, 0, 0, 0],
            track_sector_pairs: tsps,
        };
        for (i, byte) in tsl.as_vec().unwrap().iter().enumerate() {
            disk_data[0x0A][0x0D][i] = *byte;
        }
        for track in &disk_data {
            let mut track_vec: Vec<&[u8]> = Vec::new();
            for sector in track {
                track_vec.push(sector);
            }
            tracks.push(track_vec);
        }
        let catalog = parse_catalogs(&tracks, 17, 2).expect("Should be a valid FullCatalog");
        assert_eq!(catalog.file_entries.len(), 1);
        assert_eq!(
            catalog
                .file_entries
                .first()
                .expect("Should have at least one file")
                .filename()
                .expect("Should be a valid filename"),
            "BLAH"
        );
        let files = build_files(catalog.clone(), &tracks).unwrap();
        assert!(files.contains_key("BLAH"));
        assert!(!files.contains_key("BLARGH"));
        let file = files.get("BLAH").unwrap();
        assert_eq!(file.data.len(), 200);
        assert_eq!(&file.data[0..5], "START".as_bytes());
        for i in 0..192 {
            assert_eq!(file.data[(i as usize) + 5_usize], i);
        }
        assert_eq!(&file.data[197..200], "END".as_bytes());
    }

    #[test]
    fn build_two_sector_binary_file_works() {
        let file_entry = FileEntry::new(0x0A, 0x0D, FileType::Binary, false, "BLAH", 0x0002);
        let file_entries_1 = [file_entry];
        let mut catalog_by_filename_1: HashMap<String, FileEntry> = HashMap::new();
        file_entries_1.iter().for_each(|fe| {
            catalog_by_filename_1.insert(fe.filename().unwrap(), *fe);
        });
        let catalog_1 = Catalog {
            reserved: 0x00,
            track_number_of_next_sector: 0x00,
            sector_number_of_next_sector: 0x00,
            reserved_2: &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
            file_entries: file_entries_1.to_vec(),
            catalog_by_filename: catalog_by_filename_1,
        };
        let catalog_1_bytes = catalog_1.as_vec().unwrap();
        let mut tracks: Vec<Vec<&[u8]>> = Vec::new();
        let mut disk_data: [[[u8; 256]; 16]; 35] = [[[0; 256]; 16]; 35];
        for (i, byte) in catalog_1_bytes.iter().enumerate() {
            disk_data[17][2][i] = *byte;
        }
        // 404-byte payload split across two consecutive sectors.
        let data = build_binary_test_file(400);
        for (i, byte) in data[0..=255].iter().enumerate() {
            disk_data[0x11][0x0B][i] = *byte;
        }
        for (i, byte) in data[256..].iter().enumerate() {
            disk_data[0x11][0x0C][i] = *byte;
        }
        let tsp1 = TrackSectorPair {
            track_number: 0x11,
            sector_number: 0x0B,
        };
        let tsp2 = TrackSectorPair {
            track_number: 0x11,
            sector_number: 0x0C,
        };
        let mut tsps: TrackSectorPairs = Vec::new();
        tsps.push(tsp1);
        tsps.push(tsp2);
        let tsl = TrackSectorList {
            reserved: 0,
            track_number_of_next_sector: None,
            sector_number_of_next_sector: None,
            reserved_2: &[0, 0],
            sector_offset_in_file: &[0, 0],
            reserved_3: &[0, 0, 0, 0, 0],
            track_sector_pairs: tsps,
        };
        for (i, byte) in tsl.as_vec().unwrap().iter().enumerate() {
            disk_data[0x0A][0x0D][i] = *byte;
        }
        for track in &disk_data {
            let mut track_vec: Vec<&[u8]> = Vec::new();
            for sector in track {
                track_vec.push(sector);
            }
            tracks.push(track_vec);
        }
        let catalog = parse_catalogs(&tracks, 17, 2).expect("Should be a valid FullCatalog");
        assert_eq!(catalog.file_entries.len(), 1);
        assert_eq!(
            catalog
                .file_entries
                .first()
                .expect("Should have at least one file")
                .filename()
                .expect("Should be a valid filename"),
            "BLAH"
        );
        let files = build_files(catalog.clone(), &tracks).unwrap();
        assert!(files.contains_key("BLAH"));
        assert!(!files.contains_key("BLARGH"));
        let file = files.get("BLAH").unwrap();
        assert_eq!(file.data.len(), 400);
        assert_eq!(&file.data[0..5], "START".as_bytes());
        let expected_data: [u8; 392] = (0_u16..392_u16)
            .map(|i| (i % 0x100) as u8)
            .collect::<Vec<u8>>()
            .try_into()
            .unwrap();
        assert_eq!(file.data[5..397], expected_data);
        assert_eq!(&file.data[397..400], "END".as_bytes());
    }
}