pokemon_sprite_compression/gen1.rs

#[rustfmt::skip]
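// Nibble lookup for undoing the delta (XOR) encoding of a bit plane:
// INV_XOR_TABLE[prev_bit][encoded_nibble] is the decoded nibble, i.e. a
// running XOR of the nibble's bits seeded with the previously decoded bit.
// Only rows 0 and 1 are reachable from `decode`; rows 2 and 3 are the same
// tables with their output bits mirrored.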
const INV_XOR_TABLE: [[u8; 16]; 4] = [
    [0x0, 0x1, 0x3, 0x2, 0x7, 0x6, 0x4, 0x5, 0xf, 0xe, 0xc, 0xd, 0x8, 0x9, 0xb, 0xa],
    [0xf, 0xe, 0xc, 0xd, 0x8, 0x9, 0xb, 0xa, 0x0, 0x1, 0x3, 0x2, 0x7, 0x6, 0x4, 0x5],
    [0x0, 0x8, 0xc, 0x4, 0xe, 0x6, 0x2, 0xa, 0xf, 0x7, 0x3, 0xb, 0x1, 0x9, 0xd, 0x5],
    [0xf, 0x7, 0x3, 0xb, 0x1, 0x9, 0xd, 0x5, 0x0, 0x8, 0xc, 0x4, 0xe, 0x6, 0x2, 0xa],
];

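// Tiles are 8x8 pixels.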
const TILESIZE: usize = 8;

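// MSB-first bit reader over the compressed input.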
struct BitStream<'a> {
    data: &'a [u8],
    bit_offset: usize,
    byte_offset: usize,
}

impl<'a> BitStream<'a> {
    fn new(data: &'a [u8]) -> Self {
        Self {
            data,
            bit_offset: 0,
            byte_offset: 0,
        }
    }

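    // Returns the next bit (most significant first) and advances the cursor.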
    fn next(&mut self) -> bool {
        let bit = ((self.data[self.byte_offset]) >> (7 - self.bit_offset)) & 1;

        self.bit_offset += 1;
        if self.bit_offset == 8 {
            self.bit_offset = 0;
            self.byte_offset += 1;
        }

        bit != 0
    }

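    // Reads `count` bits as an unsigned integer, most significant bit first.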
    fn read_int(&mut self, mut count: usize) -> usize {
        let mut n = 0;

        while count > 0 {
            n <<= 1;
            n |= self.next() as usize;
            count -= 1;
        }

        n
    }

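    // Reads a variable-width run length: `n` is one plus the number of
    // consecutive set bits, then `n` literal bits follow; the result is
    // ((1 << n) | bits) - 1, which covers every value >= 1.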
    fn read_compress_int(&mut self) -> usize {
        let mut n = 1;

        while self.next() {
            n += 1;
        }

        ((1 << n) | self.read_int(n)) - 1
    }
}

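// Streaming decompressor for the Gen 1 sprite format; `width` and `height`
// are measured in 8x8 tiles.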
struct Decompressor<'a> {
    data: BitStream<'a>,
    width: usize,
    height: usize,
}

impl<'a> Decompressor<'a> {
    fn new(data: &'a [u8]) -> Self {
        let mut data = BitStream::new(data);

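        // The stream opens with two 4-bit fields: the sprite's width and then
        // its height, both in tiles.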
        let width = data.read_int(4);
        let height = data.read_int(4);

        Self {
            data,
            width,
            height,
        }
    }

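    // Reads a swap flag, plane 0, a 1- or 2-bit mode, and plane 1; the planes
    // are delta-decoded and/or XORed together depending on the mode, then
    // interleaved byte-by-byte into 2bpp data. The swap flag decides which
    // plane supplies the first byte of each pair.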
    fn decompress(&mut self) -> Vec<u8> {
        let order_reversed = self.data.next();

        let mut ram0 = self.fillram();

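        // The mode is encoded in one or two bits: 0 -> mode 0, 10 -> mode 1,
        // 11 -> mode 2.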
        let mode = if self.data.next() {
            if self.data.next() {
                2
            } else {
                1
            }
        } else {
            0
        };

        let mut ram1 = self.fillram();

        match mode {
            0 => {
                self.decode(&mut ram0);
                self.decode(&mut ram1);
            }
            1 => {
                self.decode(&mut ram0);
                self.xor(&ram0, &mut ram1);
            }
            2 => {
                self.decode(&mut ram1);
                self.decode(&mut ram0);
                self.xor(&ram0, &mut ram1);
            }
            _ => unreachable!(),
        }

        let mut result = Vec::with_capacity(ram0.len() + ram1.len());

        for (a, b) in ram0.iter().zip(ram1.iter()) {
            if order_reversed {
                result.push(*b);
                result.push(*a);
            } else {
                result.push(*a);
                result.push(*b);
            }
        }

        result
    }

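    // Decodes one bit plane. The stream alternates RLE packets (runs of zero
    // 2-bit groups whose length comes from `read_compress_int`) and data
    // packets (non-zero 2-bit groups terminated by a 00 group), after which
    // the groups are reordered into the plane's byte layout.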
    fn fillram(&mut self) -> Vec<u8> {
        let plane_width = self.width * TILESIZE;
        let size = plane_width * self.height;

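        // `z` counts the zero 2-bit groups still owed by the current RLE run;
        // the leading bit says whether the stream opens with a data packet (1)
        // or an RLE run (0).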
        let mut z = if self.data.next() {
            0
        } else {
            self.data.read_compress_int()
        };

        let mut interlaced = Vec::with_capacity(size);

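        // Assemble the plane four 2-bit groups at a time, high bits first.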
        while interlaced.len() < size {
            let mut byte: u8 = 0;

            for shift in [6, 4, 2, 0] {
                if z > 0 {
                    z -= 1;
                    continue;
                }

                let bitgroup = self.data.read_int(2) as u8;

                if bitgroup == 0 {
                    z = self.data.read_compress_int() - 1;
                    continue;
                }

                byte |= bitgroup << shift;
            }

            interlaced.push(byte);
        }

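        // Deinterlace: within each `plane_width`-byte block, output byte `x`
        // gathers the 2-bit group at position `x % 4` from the block's four
        // `width * 2`-byte sub-blocks.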
        let mut deinterlaced = Vec::with_capacity(size);

        for y in 0..self.height {
            for x in 0..plane_width {
                let bit_shift = 6 - ((x % 4) * 2);
                let byte_index = (y * plane_width) + (x / 4);

                deinterlaced.push(
                    ((interlaced[byte_index] >> bit_shift) & 0b11) << 6
                        | ((interlaced[byte_index + self.width * 2] >> bit_shift) & 0b11) << 4
                        | ((interlaced[byte_index + self.width * 4] >> bit_shift) & 0b11) << 2
                        | ((interlaced[byte_index + self.width * 6] >> bit_shift) & 0b11),
                );
            }
        }

        deinterlaced
    }

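    // Undoes the delta (XOR) encoding of a bit plane: each nibble is mapped
    // through INV_XOR_TABLE, carrying the last decoded bit into the next
    // nibble and down each byte column of the plane.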
    fn decode(&self, ram: &mut [u8]) {
        let plane_width = self.width * TILESIZE;

        for x in 0..plane_width {
            let mut bit = 0;
            for y in 0..self.height {
                let i = y * plane_width + x;

                let mut a = (ram[i] >> 4) & 0xf;
                let mut b = ram[i] & 0xf;

                a = INV_XOR_TABLE[bit as usize][a as usize];
                bit = a & 1;

                b = INV_XOR_TABLE[bit as usize][b as usize];
                bit = b & 1;

                ram[i] = (a << 4) | b;
            }
        }
    }

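    // Modes 1 and 2 store plane 1 XORed with plane 0; undo that here.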
    fn xor(&self, ram0: &[u8], ram1: &mut [u8]) {
        for i in 0..ram1.len() {
            ram1[i] ^= ram0[i];
        }
    }
}

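/// Decompresses a Gen 1 sprite into 2bpp tile data (the two bit planes
/// interleaved byte-by-byte). Tiles come out in the column-major order used
/// by the game's sprite buffer; use `transpose` to obtain the row-major
/// layout of a .2bpp file (see the tests).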
pub fn decompress(input: &[u8]) -> Vec<u8> {
    Decompressor::new(input).decompress()
}

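/// Reorders a square sprite's 16-byte tiles from column-major to row-major
/// order. Panics unless the input length is w * w * 0x10 for some w in 1..=15.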
pub fn transpose(input: &[u8]) -> Vec<u8> {
    let mut transposed = vec![0; input.len()];

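    // A 2bpp tile is 0x10 bytes, so a square w-by-w sprite is w * w * 0x10
    // bytes long; recover w from the length.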
    let width = match input.len() {
        0x010 => 1,
        0x040 => 2,
        0x090 => 3,
        0x100 => 4,
        0x190 => 5,
        0x240 => 6,
        0x310 => 7,
        0x400 => 8,
        0x510 => 9,
        0x640 => 10,
        0x790 => 11,
        0x900 => 12,
        0xa90 => 13,
        0xc40 => 14,
        0xe10 => 15,
        _ => panic!("input is not a square, or is larger than 15x15 tiles"),
    };

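    // Move each 0x10-byte tile: source tile a * width + b lands in slot
    // b * width + a, transposing the tile grid.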
    for i in 0..input.len() {
        let j = (i / 0x10) * width * 0x10;
        let j = (j % input.len()) + 0x10 * (j / input.len()) + (i % 0x10);
        transposed[j] = input[i];
    }

    transposed
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn fossilaerodactyl() {
        let input = include_bytes!("../fixtures/gen1/fossilaerodactyl.pic");
        let expected = include_bytes!("../fixtures/gen1/fossilaerodactyl.2bpp");

        let actual = transpose(&decompress(input));

        assert_eq!(actual, expected);
    }

    #[test]
    fn fossilkabutops() {
        let input = include_bytes!("../fixtures/gen1/fossilkabutops.pic");
        let expected = include_bytes!("../fixtures/gen1/fossilkabutops.2bpp");

        let actual = transpose(&decompress(input));

        assert_eq!(actual, expected);
    }

    #[test]
    fn ghost() {
        let input = include_bytes!("../fixtures/gen1/ghost.pic");
        let expected = include_bytes!("../fixtures/gen1/ghost.2bpp");

        let actual = transpose(&decompress(input));

        assert_eq!(actual, expected);
    }
}