use std::collections::HashMap;
use std::io::Write;

use byteorder::{BigEndian, ByteOrder};
use colored::Colorize;

use crate::cli::{wprintln, wprint, create_progress_bar};
use crate::innodb::checksum;
use crate::innodb::page::{FilHeader, FspHeader};
use crate::innodb::page_types::PageType;
use crate::innodb::tablespace::Tablespace;
use crate::util::hex::format_offset;
use crate::IdbError;
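
/// Options for the `parse` subcommand, as gathered from the command line.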
pub struct ParseOptions {
    pub file: String,
    pub page: Option<u64>,
    pub verbose: bool,
    pub no_empty: bool,
    pub page_size: Option<u32>,
    pub json: bool,
}
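
/// JSON-serializable view of a single page, emitted when JSON output is requested.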
#[derive(serde::Serialize)]
struct PageJson {
    page_number: u64,
    header: FilHeader,
    page_type_name: String,
    page_type_description: String,
    byte_start: u64,
    byte_end: u64,
    #[serde(skip_serializing_if = "Option::is_none")]
    fsp_header: Option<FspHeader>,
}
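
/// Runs the `parse` command: prints details for a single page when one is
/// requested, otherwise walks every page in the tablespace and finishes with a
/// page-type summary. With JSON output enabled it delegates to `execute_json`.
///
/// A minimal usage sketch (the field values below are illustrative, not defaults):
///
/// ```ignore
/// let opts = ParseOptions {
///     file: "example.ibd".to_string(),
///     page: None,
///     verbose: false,
///     no_empty: true,
///     page_size: None,
///     json: false,
/// };
/// execute(&opts, &mut std::io::stdout())?;
/// ```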
pub fn execute(opts: &ParseOptions, writer: &mut dyn Write) -> Result<(), IdbError> {
    let mut ts = match opts.page_size {
        Some(ps) => Tablespace::open_with_page_size(&opts.file, ps)?,
        None => Tablespace::open(&opts.file)?,
    };

    let page_size = ts.page_size();

    if opts.json {
        return execute_json(opts, &mut ts, page_size, writer);
    }

    if let Some(page_num) = opts.page {
        let page_data = ts.read_page(page_num)?;
        print_page_info(writer, &page_data, page_num, page_size, opts.verbose)?;
    } else {
        let page0 = ts.read_page(0)?;
        if let Some(fsp) = FspHeader::parse(&page0) {
            print_fsp_header(writer, &fsp)?;
            wprintln!(writer)?;
        }

        wprintln!(
            writer,
            "Pages in {} ({} pages, page size {}):",
            opts.file,
            ts.page_count(),
            page_size
        )?;
        wprintln!(writer, "{}", "-".repeat(50))?;

        let mut type_counts: HashMap<PageType, u64> = HashMap::new();

        let pb = create_progress_bar(ts.page_count(), "pages");

        for page_num in 0..ts.page_count() {
            pb.inc(1);
            let page_data = ts.read_page(page_num)?;
            let header = match FilHeader::parse(&page_data) {
                Some(h) => h,
                None => continue,
            };

            *type_counts.entry(header.page_type).or_insert(0) += 1;

            // With `no_empty` set, hide freshly allocated pages that were never
            // written (zero checksum, type ALLOCATED).
            if opts.no_empty && header.checksum == 0 && header.page_type == PageType::Allocated {
                continue;
            }

            // Unless verbose output was requested, skip zero-checksum pages other than page 0.
            if header.checksum == 0 && page_num != 0 && !opts.verbose {
                continue;
            }

            print_page_info(writer, &page_data, page_num, page_size, opts.verbose)?;
        }

        pb.finish_and_clear();

        wprintln!(writer)?;
        wprintln!(writer, "{}", "Page Type Summary".bold())?;
        let mut sorted_types: Vec<_> = type_counts.iter().collect();
        sorted_types.sort_by(|a, b| b.1.cmp(a.1));
        for (pt, count) in sorted_types {
            let label = if *count == 1 { "page" } else { "pages" };
            wprintln!(writer, " {:20} {:>6} {}", pt.name(), count, label)?;
        }
    }

    Ok(())
}
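
/// JSON variant of `execute`: collects `PageJson` records for the requested
/// page (or all pages) and pretty-prints them as a single JSON array.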
fn execute_json(
    opts: &ParseOptions,
    ts: &mut Tablespace,
    page_size: u32,
    writer: &mut dyn Write,
) -> Result<(), IdbError> {
    let mut pages = Vec::new();

    // Iterate either the single requested page or every page in the tablespace.
    let range: Box<dyn Iterator<Item = u64>> = if let Some(p) = opts.page {
        Box::new(std::iter::once(p))
    } else {
        Box::new(0..ts.page_count())
    };

    for page_num in range {
        let page_data = ts.read_page(page_num)?;
        let header = match FilHeader::parse(&page_data) {
            Some(h) => h,
            None => continue,
        };

        if opts.no_empty && header.checksum == 0 && header.page_type == PageType::Allocated {
            continue;
        }

        let pt = header.page_type;
        let byte_start = page_num * page_size as u64;

        // The FSP header only exists on page 0 (the FSP_HDR page).
        let fsp_header = if page_num == 0 {
            FspHeader::parse(&page_data)
        } else {
            None
        };

        pages.push(PageJson {
            page_number: page_num,
            page_type_name: pt.name().to_string(),
            page_type_description: pt.description().to_string(),
            byte_start,
            byte_end: byte_start + page_size as u64,
            header,
            fsp_header,
        });
    }

    let json = serde_json::to_string_pretty(&pages)
        .map_err(|e| IdbError::Parse(format!("JSON serialization error: {}", e)))?;
    wprintln!(writer, "{}", json)?;
    Ok(())
}
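
/// Prints a human-readable report for one page: FIL header fields, page type,
/// optional checksum validation, and the FIL trailer when it can be parsed.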
fn print_page_info(
    writer: &mut dyn Write,
    page_data: &[u8],
    page_num: u64,
    page_size: u32,
    verbose: bool,
) -> Result<(), IdbError> {
    let header = match FilHeader::parse(page_data) {
        Some(h) => h,
        None => {
            eprintln!("Could not parse FIL header for page {}", page_num);
            return Ok(());
        }
    };

    let byte_start = page_num * page_size as u64;
    let byte_end = byte_start + page_size as u64;

    let pt = header.page_type;

    wprintln!(writer, "Page: {}", header.page_number)?;
    wprintln!(writer, "{}", "-".repeat(20))?;
    wprintln!(writer, "{}", "HEADER".bold())?;
    wprintln!(writer, "Byte Start: {}", format_offset(byte_start))?;
    wprintln!(
        writer,
        "Page Type: {}\n-- {}: {} - {}",
        pt.as_u16(),
        pt.name(),
        pt.description(),
        pt.usage()
    )?;

    if verbose {
        wprintln!(
            writer,
            "PAGE_N_HEAP (Number of records in page): {}",
            read_page_n_heap(page_data)
        )?;
    }

    wprint!(writer, "Prev Page: ")?;
    if !header.has_prev() {
        wprintln!(writer, "Not used.")?;
    } else {
        wprintln!(writer, "{}", header.prev_page)?;
    }

    wprint!(writer, "Next Page: ")?;
    if !header.has_next() {
        wprintln!(writer, "Not used.")?;
    } else {
        wprintln!(writer, "{}", header.next_page)?;
    }

    wprintln!(writer, "LSN: {}", header.lsn)?;
    wprintln!(writer, "Space ID: {}", header.space_id)?;
    wprintln!(writer, "Checksum: {}", header.checksum)?;

    let csum_result = checksum::validate_checksum(page_data, page_size);
    if verbose {
        let status = if csum_result.valid {
            "OK".green().to_string()
        } else {
            "MISMATCH".red().to_string()
        };
        wprintln!(
            writer,
            "Checksum Status: {} ({:?}, stored={}, calculated={})",
            status,
            csum_result.algorithm,
            csum_result.stored_checksum,
            csum_result.calculated_checksum
        )?;
    }

    wprintln!(writer)?;

    // The FIL trailer occupies the last 8 bytes of the page.
    let ps = page_size as usize;
    if page_data.len() >= ps {
        let trailer_offset = ps - 8;
        if let Some(trailer) = crate::innodb::page::FilTrailer::parse(&page_data[trailer_offset..]) {
            wprintln!(writer, "{}", "TRAILER".bold())?;
            wprintln!(writer, "Old-style Checksum: {}", trailer.checksum)?;
            wprintln!(writer, "Low 32 bits of LSN: {}", trailer.lsn_low32)?;
            wprintln!(writer, "Byte End: {}", format_offset(byte_end))?;

            if verbose {
                let lsn_valid = checksum::validate_lsn(page_data, page_size);
                let lsn_status = if lsn_valid {
                    "OK".green().to_string()
                } else {
                    "MISMATCH".red().to_string()
                };
                wprintln!(writer, "LSN Consistency: {}", lsn_status)?;
            }
        }
    }
    wprintln!(writer, "{}", "-".repeat(20))?;
    Ok(())
}
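
/// Prints the FSP header (space ID, size, free limit, flags) found on page 0.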
fn print_fsp_header(writer: &mut dyn Write, fsp: &FspHeader) -> Result<(), IdbError> {
    wprintln!(writer, "{}", "-".repeat(20))?;
    wprintln!(writer, "{}", "FSP_HDR - Filespace Header".bold())?;
    wprintln!(writer, "{}", "-".repeat(20))?;
    wprintln!(writer, "Space ID: {}", fsp.space_id)?;
    wprintln!(writer, "Size (pages): {}", fsp.size)?;
    wprintln!(writer, "Page Free Limit: {}", fsp.free_limit)?;
    wprintln!(writer, "Flags: {}", fsp.flags)?;
    Ok(())
}
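
/// Reads the PAGE_N_HEAP field from the index page header (the count of heap
/// records, including the infimum and supremum pseudo-records). Returns 0 if
/// the page is too short to contain the field.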
fn read_page_n_heap(page_data: &[u8]) -> u16 {
    // PAGE_N_HEAP sits 4 bytes into the index page header, which begins right
    // after the 38-byte FIL header (FIL_PAGE_DATA).
    let offset = crate::innodb::constants::FIL_PAGE_DATA + 4;
    if page_data.len() < offset + 2 {
        return 0;
    }
    // The most significant bit flags the compact record format; only the low
    // 15 bits carry the heap record count.
    BigEndian::read_u16(&page_data[offset..]) & 0x7fff
}