sarc/writer.rs

use binwrite::{BinWrite, writer_option_new};
use super::*;
use std::io::prelude::*;
use std::io::BufWriter;
use std::path::Path;
use std::collections::HashMap;

/// An error raised in the process of writing the SARC file
#[derive(Debug)]
pub enum Error {
    IoError(std::io::Error),

    #[cfg(feature = "yaz0_sarc")]
    Yaz0Error(yaz0::Error),
}

impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Self::IoError(e)
    }
}

impl SarcFile {
    /// Write the SARC to a file at the given path, without compression.
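    ///
    /// A minimal usage sketch (the surrounding function and path are illustrative):
    ///
    /// ```ignore
    /// fn save(sarc: &SarcFile, path: &std::path::Path) -> std::io::Result<()> {
    ///     sarc.write_to_file(path)
    /// }
    /// ```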
    pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> std::io::Result<()> {
        self.write(&mut BufWriter::new(std::fs::File::create(path.as_ref())?))
    }

    /// Write to a compressed file. This writes the SARC with yaz0 compression. Requires either the
    /// `yaz0_sarc` or `zstd_sarc` feature to be enabled.
    ///
    /// **Note:** If yaz0 compression is disabled and zstd compression is enabled, this will write
    /// with zstd compression instead.
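    ///
    /// A minimal usage sketch (the output path is illustrative):
    ///
    /// ```ignore
    /// fn save_compressed(sarc: &SarcFile) -> Result<(), Error> {
    ///     sarc.write_to_compressed_file("archive.pack")
    /// }
    /// ```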
    #[cfg(feature = "yaz0_sarc")]
    pub fn write_to_compressed_file<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
        self.write_yaz0(
            &mut BufWriter::new(std::fs::File::create(path.as_ref())?)
        )
    }

    /// Write to a compressed file. This writes the SARC with zstd compression. Requires either the
    /// `yaz0_sarc` or `zstd_sarc` feature to be enabled.
    ///
    /// **Note:** This variant is compiled only when yaz0 compression is disabled and zstd
    /// compression is enabled, so it writes with zstd compression.
    #[cfg(feature = "zstd_sarc")]
    #[cfg(not(feature = "yaz0_sarc"))]
    pub fn write_to_compressed_file<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
        self.write_zstd(
            &mut BufWriter::new(std::fs::File::create(path.as_ref())?)
        )
    }

    /// Write to a compressed file. This writes the SARC with yaz0 compression. Requires the
    /// `yaz0_sarc` feature.
    #[cfg(feature = "yaz0_sarc")]
    pub fn write_to_yaz0_file<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
        self.write_yaz0(
            &mut std::fs::File::create(path.as_ref())
                .map(BufWriter::new)
                .map_err(Error::IoError)?
        )
    }

    /// Write to a writer that implements [`std::io::Write`](std::io::Write). This writes the SARC with yaz0
    /// compression. Requires the `yaz0_sarc` feature.
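    ///
    /// A minimal sketch compressing into an in-memory buffer (the helper is illustrative):
    ///
    /// ```ignore
    /// fn to_yaz0_bytes(sarc: &SarcFile) -> Result<Vec<u8>, Error> {
    ///     let mut buf = Vec::new();
    ///     sarc.write_yaz0(&mut buf)?;
    ///     Ok(buf)
    /// }
    /// ```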
    #[cfg(feature = "yaz0_sarc")]
    pub fn write_yaz0<W: Write>(&self, f: &mut W) -> Result<(), Error> {
        let writer = yaz0::Yaz0Writer::new(f);
        // Serialize the uncompressed SARC into a temporary buffer, then compress it.
        let mut temp = vec![];
        self.write(&mut temp)?;
        writer.compress_and_write(&temp, yaz0::CompressionLevel::Lookahead { quality: 10 })
            .map_err(Error::Yaz0Error)
    }

    /// Write to a writer that implements [`std::io::Write`](std::io::Write). This writes the SARC with zstd
    /// compression. Requires the `zstd_sarc` feature.
    #[cfg(feature = "zstd_sarc")]
    pub fn write_zstd<W: Write>(&self, f: &mut W) -> Result<(), Error> {
        let mut writer =
            zstd::stream::Encoder::new(f, zstd::DEFAULT_COMPRESSION_LEVEL)?;
        self.write(&mut writer)?;
        // Finish the zstd frame, propagating any I/O error instead of panicking.
        writer.finish()?;
        Ok(())
    }

    /// Write to a writer that implements [`std::io::Write`](std::io::Write). This writes the SARC with no
    /// compression.
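    ///
    /// A minimal sketch serializing into a byte buffer (the helper is illustrative):
    ///
    /// ```ignore
    /// fn to_bytes(sarc: &SarcFile) -> std::io::Result<Vec<u8>> {
    ///     let mut buf = Vec::new();
    ///     sarc.write(&mut buf)?;
    ///     Ok(buf)
    /// }
    /// ```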
    pub fn write<W: Write>(&self, f: &mut W) -> std::io::Result<()> {
        let (string_offsets, string_section) = self.generate_string_section();
        let (data_offsets, data_section) = self.generate_data_section();

        let num_files = self.files.len();
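        // The data section starts after the SARC header, the SFAT (header plus one entry
        // per file), the SFNT header, and the name table, rounded up to a 0x2000-byte
        // boundary; the gap is written out below as zero padding.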
        let data_padding_offset = SarcHeader::SIZE + Sfat::HEADER_SIZE
            + (num_files * SfatEntry::SIZE) + SFNT_HEADER_SIZE + string_section.len();
        let data_offset = (data_padding_offset + 0x1FFF) & !0x1FFF;
        let data_padding = data_offset - data_padding_offset;

        let file_size = (data_offset + data_section.len()) as u32;
        let data_offset = data_offset as u32;

        let options = &match self.byte_order {
            Endian::Big => writer_option_new!(endian: binwrite::Endian::Big),
            Endian::Little => writer_option_new!(endian: binwrite::Endian::Little)
        };

        SarcHeader {
            file_size,
            data_offset
        }.write_options(f, options)?;

        Sfat {
            entries: self.get_sfat_entries(string_offsets, data_offsets)
        }.write_options(f, options)?;

        // SFNT Header
        (
            b"SFNT",
            SFNT_HEADER_SIZE as u16,
            u16::default()
        ).write_options(f, options)?;

        string_section.write_options(f, options)?;

        // Zero padding up to the aligned data offset, followed by the file data itself.
        vec![0u8; data_padding].write_options(f, options)?;

        data_section.write_options(f, options)?;

        f.flush()
    }

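    /// Builds one SFAT entry per file, pairing each name-table offset with the file's
    /// data range; entries are sorted by name hash to match the string and data sections.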
    fn get_sfat_entries(&self, string_offsets: HashMap<u32, u32>, data_offsets: HashMap<u32, (u32, u32)>)
        -> Vec<SfatEntry<'_>>
    {
        let mut sfat_entries: Vec<SfatEntry<'_>> = self.files
            .iter()
            .map(|file| {
                let name: Option<&str> = file.name.as_deref();
                SfatEntry {
                    name,
                    name_table_offset:
                        name.map(sfat_hash)
                            .and_then(|hash| string_offsets.get(&hash).copied()),
                    file_range: data_offsets[&name.map(sfat_hash).unwrap_or_default()]
                }
            })
            .collect();
        sfat_entries.sort_by_key(|e| e.name.map(sfat_hash).unwrap_or(0));
        sfat_entries
    }

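    /// Builds the SFNT name table: file names sorted by hash, each written as a
    /// NUL-terminated string aligned to 4 bytes, plus a map from each name hash to its offset.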
    fn generate_string_section(&self) -> (HashMap<u32, u32>, Vec<u8>) {
        let mut names: Vec<&str> =
            self.files.iter().filter_map(|file| file.name.as_deref()).collect();

        let mut string_section = vec![];
        names.sort_by_key(|name| sfat_hash(name));
        let offsets =
            names
                .into_iter()
                .filter_map(|string| {
                    let off = string_section.len() as u32;
                    SarcString::from(string)
                        .write(&mut string_section)
                        .ok()?;
                    Some((sfat_hash(string), off))
                })
                .collect();

        (offsets, string_section)
    }

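    /// Concatenates the file data sorted by name hash, aligning each file's start to a
    /// 0x2000-byte boundary, and returns a map from name hash to the file's (start, end) range.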
    fn generate_data_section(&self) -> (HashMap<u32, (u32, u32)>, Vec<u8>) {
        let mut data = vec![];
        let mut files: Vec<_> = self.files.iter()
            .map(|file| (file.name.as_deref().map(sfat_hash).unwrap_or_default(), &file.data[..]))
            .collect();
        files.sort_by_key(|(hash, _)| *hash);
        (
            files.into_iter()
                .map(|(hash, file)| {
                    // Pad each file's data to the next 0x2000-byte boundary before writing it.
                    let start_padding = data.len();
                    let start = (start_padding + 0x1fff) & !0x1fff;
                    let padding = start - start_padding;
                    let start = start as u32;
                    vec![0u8; padding].write(&mut data).unwrap();
                    file.write(&mut data).unwrap();
                    let end = data.len() as u32;
                    (hash, (start, end))
                })
                .collect(),
            data
        )
    }
}

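// binwrite preprocessor/postprocessor helpers: `magic` pairs a constant before a value and
// `after` pairs one after it, so the constant is written alongside the annotated field.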
fn magic<B1: BinWrite + Copy, B2: BinWrite>(magic: B1) -> impl Fn(B2) -> (B1, B2) {
    move |val| (magic, val)
}

fn after<B1: BinWrite + Copy, B2: BinWrite>(after: B1) -> impl Fn(B2) -> (B2, B1) {
    move |val| (val, after)
}

#[derive(BinWrite)]
struct SarcHeader {
    // The preprocessor emits the "SARC" magic, header size, and BOM ahead of `file_size`;
    // the postprocessor appends the 0x0100 version field after `data_offset`, followed by
    // 2 bytes of padding.
    #[binwrite(preprocessor(
        magic((b"SARC", Self::SIZE as u16, Self::BOM))
    ))]
    file_size: u32,
    #[binwrite(postprocessor(after(0x0100u16)), pad_after(2))]
    data_offset: u32,
}

impl SarcHeader {
    const SIZE: usize = 0x14;
    const BOM: u16 = 0xFEFF;
}

#[derive(BinWrite, Clone)]
struct SfatEntry<'a> {
    #[binwrite(preprocessor(|name: &Option<&str>| name.map(sfat_hash).unwrap_or(0)))]
    name: Option<&'a str>,

    // Stored as the name-table offset divided by 4, flagged with 0x01000000 when a name
    // is present; 0 when the file is unnamed.
    #[binwrite(preprocessor(|a| {
        if let &Some(a) = a {
            (a / 4) | 0x01000000
        } else {
            0
        }
    }))]
    name_table_offset: Option<u32>,

    file_range: (u32, u32)
}

impl<'a> SfatEntry<'a> {
    const SIZE: usize = 0x10;
}

fn sfat_header<'a>(entries: &'a Vec<SfatEntry>) -> impl BinWrite + 'a {
    (
        b"SFAT",
        Sfat::HEADER_SIZE as u16,
        entries.len() as u16,
        Sfat::HASH_KEY,
        entries
    )
}

#[derive(BinWrite)]
struct Sfat<'a> {
    #[binwrite(preprocessor(sfat_header))]
    entries: Vec<SfatEntry<'a>>
}

impl<'a> Sfat<'a> {
    const HEADER_SIZE: usize = 0xC;
    const HASH_KEY: u32 = 0x00000065;
}

#[derive(BinWrite)]
struct SarcString<'a> {
    #[binwrite(cstr, align_after(4))]
    inner: &'a str
}

impl<'b> SarcString<'b> {
    fn from(inner: &'b str) -> Self {
        Self {
            inner
        }
    }
}

const SFNT_HEADER_SIZE: usize = 8;