use byteorder::{BigEndian, ByteOrder};
use serde::Serialize;
use crate::innodb::constants::*;
use crate::innodb::index::IndexHeader;
use crate::innodb::page::FilHeader;
use crate::innodb::page_types::PageType;
#[cfg(not(target_arch = "wasm32"))]
use crate::IdbError;
/// Identifies which structural verification produced a finding or summary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
pub enum VerifyCheckKind {
    /// Header `page_number` must equal the page's physical position in the file.
    PageNumberSequence,
    /// Every page's FIL-header space id must match the tablespace's space id.
    SpaceIdConsistency,
    /// Page LSNs should not drop drastically from one page to the next (heuristic).
    LsnMonotonicity,
    /// Index pages must carry a plausible B+Tree level (<= 64).
    BTreeLevelConsistency,
    /// prev/next page-chain pointers must be in-bounds or FIL_NULL.
    PageChainBounds,
    /// Low 32 bits of the header LSN must match the FIL trailer copy.
    TrailerLsnMatch,
}
impl std::fmt::Display for VerifyCheckKind {
    /// Renders the check kind as its stable snake_case identifier, as used in
    /// reports and CLI output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            VerifyCheckKind::PageNumberSequence => "page_number_sequence",
            VerifyCheckKind::SpaceIdConsistency => "space_id_consistency",
            VerifyCheckKind::LsnMonotonicity => "lsn_monotonicity",
            VerifyCheckKind::BTreeLevelConsistency => "btree_level_consistency",
            VerifyCheckKind::PageChainBounds => "page_chain_bounds",
            VerifyCheckKind::TrailerLsnMatch => "trailer_lsn_match",
        };
        f.write_str(name)
    }
}
/// A single issue discovered by one of the verification checks.
#[derive(Debug, Clone, Serialize)]
pub struct VerifyFinding {
    /// Which check produced this finding.
    pub kind: VerifyCheckKind,
    /// Physical page index (position in the file) where the issue was found.
    pub page_number: u64,
    /// Human-readable description of the issue.
    pub message: String,
    /// Expected value, when the check compares against a concrete one.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub expected: Option<String>,
    /// Actual value observed on the page.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub actual: Option<String>,
}
/// Per-check roll-up of how many pages were examined and how many failed.
#[derive(Debug, Clone, Serialize)]
pub struct CheckSummary {
    /// The check being summarized.
    pub kind: VerifyCheckKind,
    /// Number of pages this check actually examined.
    pub pages_checked: u64,
    /// Number of findings this check produced.
    pub issues_found: u64,
    /// True when `issues_found == 0`.
    pub passed: bool,
}
/// Selects which checks `verify_tablespace` runs.
///
/// All checks are enabled via `Default`. Derives `Debug` and `Clone` for
/// consistency with the other public types in this module.
#[derive(Debug, Clone)]
pub struct VerifyConfig {
    /// Verify header page numbers match physical positions.
    pub check_page_numbers: bool,
    /// Verify every page carries the expected space id.
    pub check_space_ids: bool,
    /// Flag pages whose LSN drops far below the previous page's LSN.
    pub check_lsn_monotonicity: bool,
    /// Flag index pages with implausible B+Tree levels.
    pub check_btree_levels: bool,
    /// Verify prev/next page-chain pointers are in-bounds or FIL_NULL.
    pub check_chain_bounds: bool,
    /// Verify the FIL trailer's LSN low word matches the header LSN.
    pub check_trailer_lsn: bool,
}
impl Default for VerifyConfig {
fn default() -> Self {
Self {
check_page_numbers: true,
check_space_ids: true,
check_lsn_monotonicity: true,
check_btree_levels: true,
check_chain_bounds: true,
check_trailer_lsn: true,
}
}
}
/// Complete result of verifying one tablespace file.
#[derive(Debug, Clone, Serialize)]
pub struct VerifyReport {
    /// Path or label of the verified file.
    pub file: String,
    /// Number of whole pages in the file (len / page_size).
    pub total_pages: u64,
    /// Page size in bytes used to split the file.
    pub page_size: u32,
    /// True when every enabled check passed.
    pub passed: bool,
    /// All individual issues found (omitted from JSON when empty).
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub findings: Vec<VerifyFinding>,
    /// One summary entry per enabled check.
    pub summary: Vec<CheckSummary>,
}
pub fn verify_tablespace(
all_pages: &[u8],
page_size: u32,
space_id: u32,
file: &str,
config: &VerifyConfig,
) -> VerifyReport {
let ps = page_size as usize;
let total_pages = (all_pages.len() / ps) as u64;
let mut findings = Vec::new();
let mut page_num_checked = 0u64;
let mut page_num_issues = 0u64;
let mut space_id_checked = 0u64;
let mut space_id_issues = 0u64;
let mut lsn_checked = 0u64;
let mut lsn_issues = 0u64;
let mut btree_checked = 0u64;
let mut btree_issues = 0u64;
let mut chain_checked = 0u64;
let mut chain_issues = 0u64;
let mut trailer_checked = 0u64;
let mut trailer_issues = 0u64;
let mut prev_lsn: u64 = 0;
for page_idx in 0..total_pages {
let offset = page_idx as usize * ps;
let page_data = &all_pages[offset..offset + ps];
if page_data.iter().all(|&b| b == 0) {
continue;
}
let header = match FilHeader::parse(page_data) {
Some(h) => h,
None => continue,
};
if config.check_page_numbers {
page_num_checked += 1;
if header.page_number as u64 != page_idx {
page_num_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::PageNumberSequence,
page_number: page_idx,
message: format!(
"Page {} has page_number {} in header",
page_idx, header.page_number
),
expected: Some(page_idx.to_string()),
actual: Some(header.page_number.to_string()),
});
}
}
if config.check_space_ids {
space_id_checked += 1;
if header.space_id != space_id {
space_id_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::SpaceIdConsistency,
page_number: page_idx,
message: format!(
"Page {} has space_id {} (expected {})",
page_idx, header.space_id, space_id
),
expected: Some(space_id.to_string()),
actual: Some(header.space_id.to_string()),
});
}
}
if config.check_lsn_monotonicity && page_idx > 0 {
lsn_checked += 1;
if header.lsn > 0 && prev_lsn > 0 && header.lsn < prev_lsn / 2 {
lsn_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::LsnMonotonicity,
page_number: page_idx,
message: format!(
"Page {} LSN {} is significantly lower than previous {}",
page_idx, header.lsn, prev_lsn
),
expected: Some(format!(">= {}", prev_lsn / 2)),
actual: Some(header.lsn.to_string()),
});
}
}
if header.lsn > 0 {
prev_lsn = header.lsn;
}
if config.check_btree_levels && header.page_type == PageType::Index {
if let Some(idx_header) = IndexHeader::parse(page_data) {
btree_checked += 1;
if idx_header.level > 64 {
btree_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::BTreeLevelConsistency,
page_number: page_idx,
message: format!(
"Page {} has unreasonable B+Tree level {}",
page_idx, idx_header.level
),
expected: Some("<= 64".to_string()),
actual: Some(idx_header.level.to_string()),
});
}
}
}
if config.check_chain_bounds {
chain_checked += 1;
if header.prev_page != FIL_NULL && header.prev_page as u64 >= total_pages {
chain_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::PageChainBounds,
page_number: page_idx,
message: format!(
"Page {} prev pointer {} is out of bounds (total: {})",
page_idx, header.prev_page, total_pages
),
expected: Some(format!("< {} or FIL_NULL", total_pages)),
actual: Some(header.prev_page.to_string()),
});
}
if header.next_page != FIL_NULL && header.next_page as u64 >= total_pages {
chain_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::PageChainBounds,
page_number: page_idx,
message: format!(
"Page {} next pointer {} is out of bounds (total: {})",
page_idx, header.next_page, total_pages
),
expected: Some(format!("< {} or FIL_NULL", total_pages)),
actual: Some(header.next_page.to_string()),
});
}
}
if config.check_trailer_lsn {
trailer_checked += 1;
let trailer_offset = ps - SIZE_FIL_TRAILER;
if page_data.len() >= trailer_offset + 8 {
let trailer_lsn_low =
BigEndian::read_u32(&page_data[trailer_offset + 4..trailer_offset + 8]);
let header_lsn_low = (header.lsn & 0xFFFFFFFF) as u32;
if trailer_lsn_low != header_lsn_low {
trailer_issues += 1;
findings.push(VerifyFinding {
kind: VerifyCheckKind::TrailerLsnMatch,
page_number: page_idx,
message: format!(
"Page {} header LSN low32 0x{:08X} != trailer 0x{:08X}",
page_idx, header_lsn_low, trailer_lsn_low
),
expected: Some(format!("0x{:08X}", header_lsn_low)),
actual: Some(format!("0x{:08X}", trailer_lsn_low)),
});
}
}
}
}
let mut summary = Vec::new();
if config.check_page_numbers {
summary.push(CheckSummary {
kind: VerifyCheckKind::PageNumberSequence,
pages_checked: page_num_checked,
issues_found: page_num_issues,
passed: page_num_issues == 0,
});
}
if config.check_space_ids {
summary.push(CheckSummary {
kind: VerifyCheckKind::SpaceIdConsistency,
pages_checked: space_id_checked,
issues_found: space_id_issues,
passed: space_id_issues == 0,
});
}
if config.check_lsn_monotonicity {
summary.push(CheckSummary {
kind: VerifyCheckKind::LsnMonotonicity,
pages_checked: lsn_checked,
issues_found: lsn_issues,
passed: lsn_issues == 0,
});
}
if config.check_btree_levels {
summary.push(CheckSummary {
kind: VerifyCheckKind::BTreeLevelConsistency,
pages_checked: btree_checked,
issues_found: btree_issues,
passed: btree_issues == 0,
});
}
if config.check_chain_bounds {
summary.push(CheckSummary {
kind: VerifyCheckKind::PageChainBounds,
pages_checked: chain_checked,
issues_found: chain_issues,
passed: chain_issues == 0,
});
}
if config.check_trailer_lsn {
summary.push(CheckSummary {
kind: VerifyCheckKind::TrailerLsnMatch,
pages_checked: trailer_checked,
issues_found: trailer_issues,
passed: trailer_issues == 0,
});
}
let passed = summary.iter().all(|s| s.passed);
VerifyReport {
file: file.to_string(),
total_pages,
page_size,
passed,
findings,
summary,
}
}
/// Result of checking whether a redo log's checkpoint covers a tablespace.
#[derive(Debug, Clone, Serialize)]
pub struct RedoVerifyResult {
    /// Path of the redo log file examined.
    pub redo_file: String,
    /// Newer of the redo log's two checkpoint LSNs.
    pub checkpoint_lsn: u64,
    /// Highest FIL-header LSN found across the tablespace pages.
    pub tablespace_max_lsn: u64,
    /// True when `checkpoint_lsn >= tablespace_max_lsn`.
    pub covers_tablespace: bool,
    /// LSN distance the redo log falls short by (0 when covered).
    pub lsn_gap: u64,
}
#[cfg(not(target_arch = "wasm32"))]
/// Checks whether the redo log at `redo_path` covers all changes present in
/// the given tablespace image: the newer checkpoint LSN must be at least the
/// highest page LSN found in the tablespace.
///
/// # Errors
/// Propagates failures from opening the log file or reading its checkpoints.
pub fn verify_redo_continuity(
    redo_path: &str,
    all_pages: &[u8],
    page_size: u32,
) -> Result<RedoVerifyResult, IdbError> {
    use crate::innodb::log::LogFile;
    let mut log = LogFile::open(redo_path)?;
    // InnoDB keeps two checkpoint slots; the one with the higher LSN is newer.
    let checkpoint_lsn = {
        let cp0 = log.read_checkpoint(0)?;
        let cp1 = log.read_checkpoint(1)?;
        cp0.lsn.max(cp1.lsn)
    };
    let ps = page_size as usize;
    // Highest FIL-header LSN over all non-empty, parseable pages.
    let tablespace_max_lsn = all_pages
        .chunks_exact(ps)
        .filter(|page| page.iter().any(|&b| b != 0))
        .filter_map(|page| FilHeader::parse(page))
        .map(|header| header.lsn)
        .max()
        .unwrap_or(0);
    let covers_tablespace = checkpoint_lsn >= tablespace_max_lsn;
    Ok(RedoVerifyResult {
        redo_file: redo_path.to_string(),
        checkpoint_lsn,
        tablespace_max_lsn,
        covers_tablespace,
        lsn_gap: if covers_tablespace {
            0
        } else {
            tablespace_max_lsn - checkpoint_lsn
        },
    })
}
/// LSN range and identity extracted from one backup file, used for chain checks.
#[derive(Debug, Clone, Serialize)]
pub struct ChainFileInfo {
    /// Path or label of the backup file.
    pub file: String,
    /// Space id read from the file's pages.
    pub space_id: u32,
    /// Highest page LSN observed in the file.
    pub max_lsn: u64,
    /// Lowest non-zero page LSN observed (0 if none found).
    pub min_lsn: u64,
    /// Number of whole pages in the file.
    pub total_pages: u64,
}
/// An LSN gap between two adjacent files in a backup chain.
#[derive(Debug, Clone, Serialize)]
pub struct ChainGap {
    /// Earlier file in the chain.
    pub from_file: String,
    /// Highest LSN covered by the earlier file.
    pub from_max_lsn: u64,
    /// Later file in the chain.
    pub to_file: String,
    /// Lowest LSN covered by the later file.
    pub to_min_lsn: u64,
    /// Size of the uncovered LSN range (`to_min_lsn - from_max_lsn`).
    pub gap_size: u64,
}
/// Overall verdict on a set of backup files forming an incremental chain.
#[derive(Debug, Clone, Serialize)]
pub struct ChainReport {
    /// The examined files, sorted by ascending `max_lsn`.
    pub files: Vec<ChainFileInfo>,
    /// Every LSN gap found between adjacent files.
    pub gaps: Vec<ChainGap>,
    /// True when no gaps were found.
    pub contiguous: bool,
    /// True when every file reports the same space id.
    pub consistent_space_id: bool,
}
/// Scans a backup file image and extracts its space id and page-LSN range
/// for backup-chain verification.
///
/// All-zero pages and pages whose FIL header does not parse are skipped.
/// The space id is taken from the first parseable page (not strictly page 0),
/// so a zeroed or corrupt first page no longer silently yields space_id 0.
/// `min_lsn` considers only non-zero LSNs and is 0 when none exist.
/// A `page_size` of zero yields an all-zero result instead of panicking.
pub fn extract_chain_file_info(all_pages: &[u8], page_size: u32, file: &str) -> ChainFileInfo {
    let ps = page_size as usize;
    // Guard: avoid division by zero below.
    if ps == 0 {
        return ChainFileInfo {
            file: file.to_string(),
            space_id: 0,
            max_lsn: 0,
            min_lsn: 0,
            total_pages: 0,
        };
    }
    let total_pages = (all_pages.len() / ps) as u64;
    let mut max_lsn: u64 = 0;
    let mut min_lsn: u64 = u64::MAX;
    let mut space_id: Option<u32> = None;
    for i in 0..total_pages as usize {
        let page_data = &all_pages[i * ps..(i + 1) * ps];
        if page_data.iter().all(|&b| b == 0) {
            continue;
        }
        if let Some(header) = FilHeader::parse(page_data) {
            // Every FIL header carries the space id; record the first one we
            // can actually read rather than requiring it to be on page 0.
            if space_id.is_none() {
                space_id = Some(header.space_id);
            }
            if header.lsn > max_lsn {
                max_lsn = header.lsn;
            }
            // Zero LSNs are treated as "unset" and excluded from the minimum.
            if header.lsn > 0 && header.lsn < min_lsn {
                min_lsn = header.lsn;
            }
        }
    }
    // No non-zero LSN seen at all: report 0 instead of the u64::MAX sentinel.
    if min_lsn == u64::MAX {
        min_lsn = 0;
    }
    ChainFileInfo {
        file: file.to_string(),
        space_id: space_id.unwrap_or(0),
        max_lsn,
        min_lsn,
        total_pages,
    }
}
pub fn verify_backup_chain(mut files_info: Vec<ChainFileInfo>) -> ChainReport {
if files_info.is_empty() {
return ChainReport {
files: vec![],
gaps: vec![],
contiguous: true,
consistent_space_id: true,
};
}
files_info.sort_by_key(|f| f.max_lsn);
let first_space_id = files_info[0].space_id;
let consistent_space_id = files_info.iter().all(|f| f.space_id == first_space_id);
let mut gaps = Vec::new();
for pair in files_info.windows(2) {
let prev = &pair[0];
let next = &pair[1];
if next.min_lsn > prev.max_lsn {
gaps.push(ChainGap {
from_file: prev.file.clone(),
from_max_lsn: prev.max_lsn,
to_file: next.file.clone(),
to_min_lsn: next.min_lsn,
gap_size: next.min_lsn - prev.max_lsn,
});
}
}
let contiguous = gaps.is_empty();
ChainReport {
files: files_info,
gaps,
contiguous,
consistent_space_id,
}
}