use std::collections::{BTreeMap, BTreeSet, HashMap};
use crate::coords::get_plane_vector;
use crate::decode::{compute_cstar_from_c_and_u, decode_uncoupled_layer, get_companion_layer, DecodeParams};
use crate::error::ClayError;
use crate::transforms::{compute_u_from_c_and_ustar, prt_compute_both_oriented};
/// Repair shares the decode parameter set (k, m, q, t, nu, sub_chunk_no, …),
/// so alias it rather than duplicating the struct.
pub type RepairParams = DecodeParams;
/// Computes `base^exp` as a `usize`, returning `None` on overflow.
///
/// Delegates to the standard library's `usize::checked_pow` instead of a
/// hand-rolled exponentiation-by-squaring loop. `usize::checked_pow` takes a
/// `u32` exponent, so the (practically unreachable) case `exp > u32::MAX` is
/// handled explicitly: only bases 0 and 1 avoid overflow there, matching the
/// previous loop's semantics exactly (including `0^0 == 1`).
fn checked_pow(base: usize, exp: usize) -> Option<usize> {
    match u32::try_from(exp) {
        Ok(e) => base.checked_pow(e),
        Err(_) => match base {
            // exp > u32::MAX here, so exp > 0 and 0^exp == 0.
            0 => Some(0),
            1 => Some(1),
            // Any base >= 2 raised to such an exponent overflows usize.
            _ => None,
        },
    }
}
/// Returns the plane (sub-chunk) indices that must be read from each helper
/// to repair `lost_node`, where `lost_node` is an *internal* node index on
/// the q x t grid.
///
/// The selected planes are `num_seq = q^y` runs of `seq_sc_count = q^(t-1-y)`
/// consecutive indices each (q^(t-1) planes total), where the run for
/// sequence `seq` starts at `x * seq_sc_count + seq * q * seq_sc_count`.
///
/// # Errors
/// Returns `ClayError::Overflow` if a required power of `q` does not fit in
/// a `usize`.
pub fn get_repair_subchunk_indices(
    params: &RepairParams,
    lost_node: usize,
) -> Result<Vec<usize>, ClayError> {
    // Decompose the internal index into grid coordinates.
    let y_lost = lost_node / params.q;
    let x_lost = lost_node % params.q;

    // Length of one contiguous run of selected plane indices.
    let seq_sc_count = checked_pow(params.q, params.t - 1 - y_lost).ok_or_else(|| {
        ClayError::Overflow(format!(
            "q^(t-1-y) = {}^{} overflows",
            params.q,
            params.t - 1 - y_lost
        ))
    })?;

    // Number of runs.
    let num_seq = checked_pow(params.q, y_lost).ok_or_else(|| {
        ClayError::Overflow(format!("q^y = {}^{} overflows", params.q, y_lost))
    })?;

    // Consecutive runs are spaced a full x-stride apart.
    let stride = params.q * seq_sc_count;
    let indices = (0..num_seq)
        .flat_map(|seq| {
            let start = x_lost * seq_sc_count + seq * stride;
            start..start + seq_sc_count
        })
        .collect();
    Ok(indices)
}
/// Selects the `d = k + q - 1` helpers needed to repair `lost_node` (an
/// *external* node index), pairing each helper with the plane indices it
/// must read.
///
/// Surviving nodes in the lost node's y-section are chosen first; the list
/// is then topped up from `available` in the given order.
///
/// # Errors
/// - `ClayError::InvalidParameters` if `lost_node >= n`.
/// - `ClayError::InsufficientHelpers` if fewer than `d` distinct helpers can
///   be assembled from `available`.
pub fn minimum_to_repair(
    params: &RepairParams,
    lost_node: usize,
    available: &[usize],
) -> Result<Vec<(usize, Vec<usize>)>, ClayError> {
    if lost_node >= params.n {
        return Err(ClayError::InvalidParameters(format!(
            "Invalid lost node index: {} >= {}",
            lost_node, params.n
        )));
    }

    // Map the external index onto the internal grid, skipping the nu
    // virtual (shortening) slots at [k, k + nu).
    let lost_internal = if lost_node < params.k {
        lost_node
    } else {
        lost_node + params.nu
    };

    let sub_chunks = get_repair_subchunk_indices(params, lost_internal)?;
    let d = params.k + params.q - 1;
    let y_section = lost_internal / params.q;
    let mut helpers: Vec<(usize, Vec<usize>)> = Vec::new();

    // Pass 1: prefer the surviving real nodes in the lost node's y-section.
    for x in 0..params.q {
        let node = y_section * params.q + x;
        if node == lost_internal {
            continue;
        }
        // Translate back to an external index; virtual nodes have none.
        let external_idx = if node < params.k {
            node
        } else if node >= params.k + params.nu {
            node - params.nu
        } else {
            continue;
        };
        if available.contains(&external_idx) {
            helpers.push((external_idx, sub_chunks.clone()));
        }
    }

    // Pass 2: top up with remaining available nodes until d helpers.
    for &node in available {
        if helpers.len() >= d {
            break;
        }
        let already_chosen = helpers.iter().any(|(h, _)| *h == node);
        if !already_chosen && node != lost_node {
            helpers.push((node, sub_chunks.clone()));
        }
    }

    if helpers.len() < d {
        return Err(ClayError::InsufficientHelpers {
            needed: d,
            provided: helpers.len(),
        });
    }
    helpers.truncate(d);
    Ok(helpers)
}
/// Reconstructs the full chunk of `lost_node` (an *external* index) from the
/// repair sub-chunks supplied by helper nodes, processing planes of the
/// coupled-layer code one by one.
///
/// `helper_data` maps external helper indices to the bytes each helper read:
/// exactly `repair_sub_chunk_count * sub_chunk_size` bytes, concatenated in
/// the order produced by `get_repair_subchunk_indices`.
///
/// Returns the reconstructed chunk (`chunk_size` bytes).
///
/// # Errors
/// - `InvalidParameters` if `lost_node` or a helper index is out of range.
/// - `InsufficientHelpers` if fewer than `d = k + q - 1` helpers are given.
/// - `InvalidChunkSize` if `chunk_size` is zero or not a multiple of
///   `sub_chunk_no`.
/// - `MissingYSectionHelper` if a surviving real node in the lost node's
///   y-section supplied no data (those helpers are mandatory).
/// - `InsufficientHelperData` if a helper supplied the wrong byte count.
pub fn repair(
    params: &RepairParams,
    lost_node: usize,
    helper_data: &HashMap<usize, Vec<u8>>,
    chunk_size: usize,
) -> Result<Vec<u8>, ClayError> {
    // Number of helpers required for repair.
    let d = params.k + params.q - 1;
    if lost_node >= params.n {
        return Err(ClayError::InvalidParameters(format!(
            "Invalid lost node index: {} >= {}",
            lost_node, params.n
        )));
    }
    if helper_data.len() < d {
        return Err(ClayError::InsufficientHelpers {
            needed: d,
            provided: helper_data.len(),
        });
    }
    // A chunk must split evenly into sub_chunk_no sub-chunks.
    if chunk_size == 0 || chunk_size % params.sub_chunk_no != 0 {
        return Err(ClayError::InvalidChunkSize {
            expected: params.sub_chunk_no,
            actual: chunk_size,
        });
    }
    // Map the external index onto the internal grid, which also contains the
    // nu virtual (shortening) slots at [k, k + nu).
    let lost_internal = if lost_node < params.k {
        lost_node
    } else {
        lost_node + params.nu
    };
    // Planes that participate in this node's repair.
    let repair_sub_chunk_indices = get_repair_subchunk_indices(params, lost_internal)?;
    let sub_chunk_size = chunk_size / params.sub_chunk_no;
    let expected_helper_bytes = repair_sub_chunk_indices.len() * sub_chunk_size;
    let total_nodes = params.q * params.t;
    let lost_y = lost_internal / params.q;
    // Every surviving *real* node in the lost node's y-section must be among
    // the helpers; fail early if one is missing.
    for x in 0..params.q {
        let node = lost_y * params.q + x;
        if node == lost_internal {
            continue; }
        // Virtual shortening nodes hold implicit zeros, not helper data.
        if node >= params.k && node < params.k + params.nu {
            continue;
        }
        let external_idx = if node < params.k {
            node
        } else {
            node - params.nu
        };
        if !helper_data.contains_key(&external_idx) {
            return Err(ClayError::MissingYSectionHelper {
                lost_node,
                missing_helper: external_idx,
            });
        }
    }
    // u_buf[node] holds that node's uncoupled symbols (one sub-chunk per
    // plane z); u_computed[node][z] records which entries are valid so far.
    let mut u_buf: Vec<Vec<u8>> = vec![vec![0u8; chunk_size]; total_nodes];
    let mut u_computed: Vec<Vec<bool>> = vec![vec![false; params.sub_chunk_no]; total_nodes];
    let mut recovered = vec![0u8; chunk_size];
    // Re-key the helper data by internal node index, validating each size.
    let mut helper_internal: HashMap<usize, Vec<u8>> = HashMap::new();
    for (&ext_idx, data) in helper_data.iter() {
        if ext_idx >= params.n {
            return Err(ClayError::InvalidParameters(format!(
                "Helper index {} out of range [0, {})",
                ext_idx, params.n
            )));
        }
        let internal = if ext_idx < params.k {
            ext_idx
        } else {
            ext_idx + params.nu
        };
        if data.len() != expected_helper_bytes {
            return Err(ClayError::InsufficientHelperData {
                helper: ext_idx,
                expected: expected_helper_bytes,
                actual: data.len(),
            });
        }
        helper_internal.insert(internal, data.clone());
    }
    // "Aloof" nodes: real nodes that are neither lost nor helping. Their
    // symbols must be treated as erasures and solved plane by plane.
    let mut aloof_nodes: BTreeSet<usize> = BTreeSet::new();
    for i in 0..total_nodes {
        if i != lost_internal && !helper_internal.contains_key(&i) {
            if i < params.k || i >= params.k + params.nu {
                aloof_nodes.insert(i);
            }
        }
    }
    // Virtual shortening nodes contribute all-zero data.
    let zero_data = vec![0u8; expected_helper_bytes];
    for i in params.k..(params.k + params.nu) {
        helper_internal.insert(i, zero_data.clone());
    }
    // Maps a plane index z to its position within each helper's byte stream.
    let mut repair_plane_to_ind: HashMap<usize, usize> = HashMap::new();
    for (idx, &z) in repair_sub_chunk_indices.iter().enumerate() {
        repair_plane_to_ind.insert(z, idx);
    }
    // Group planes by "intersection order": the number of erased nodes
    // (lost + aloof) whose x coordinate equals the plane's coordinate in
    // their own y-section. Processing planes in increasing order ensures
    // companion u's solved in earlier planes are available to later ones.
    let mut ordered_planes: BTreeMap<usize, Vec<usize>> = BTreeMap::new();
    for &z in &repair_sub_chunk_indices {
        let z_vec = get_plane_vector(z, params.t, params.q);
        let mut order = 0;
        if lost_internal % params.q == z_vec[lost_internal / params.q] {
            order += 1;
        }
        for &node in &aloof_nodes {
            if node % params.q == z_vec[node / params.q] {
                order += 1;
            }
        }
        ordered_planes.entry(order).or_default().push(z);
    }
    // Erasures shared by every plane: the lost node's whole y-section plus
    // all aloof nodes.
    let mut base_erasures: BTreeSet<usize> = BTreeSet::new();
    for x in 0..params.q {
        base_erasures.insert(lost_y * params.q + x);
    }
    for &node in &aloof_nodes {
        base_erasures.insert(node);
    }
    for (&_order, planes) in &ordered_planes {
        for &z in planes {
            let z_vec = get_plane_vector(z, params.t, params.q);
            let mut layer_erasures = base_erasures.clone();
            // Phase 1: for each non-erased helper node, derive its uncoupled
            // symbol u for plane z from the coupled helper data, when possible.
            for y in 0..params.t {
                for x in 0..params.q {
                    let node_xy = y * params.q + x;
                    if !base_erasures.contains(&node_xy) {
                        if let Some(helper_chunk) = helper_internal.get(&node_xy) {
                            let z_y = z_vec[y];
                            // Companion plane/node for the pairwise coupling.
                            let z_sw = get_companion_layer(params, z, x, y, z_y);
                            let node_sw = y * params.q + z_y;
                            if z_y == x {
                                // Node sits on the plane's own coordinate in
                                // its y-section: u equals the coupled symbol,
                                // so copy it through unchanged.
                                let c_offset = repair_plane_to_ind[&z] * sub_chunk_size;
                                u_buf[node_xy][z * sub_chunk_size..(z + 1) * sub_chunk_size]
                                    .copy_from_slice(
                                        &helper_chunk[c_offset..c_offset + sub_chunk_size],
                                    );
                                u_computed[node_xy][z] = true;
                            } else if aloof_nodes.contains(&node_sw) {
                                // Companion is aloof: uncoupling needs the
                                // companion's u, available only if an earlier
                                // plane already solved it.
                                if u_computed[node_sw][z_sw] {
                                    let c_offset = repair_plane_to_ind[&z] * sub_chunk_size;
                                    let c_xy =
                                        &helper_chunk[c_offset..c_offset + sub_chunk_size];
                                    let u_sw = &u_buf[node_sw]
                                        [z_sw * sub_chunk_size..(z_sw + 1) * sub_chunk_size];
                                    let u_xy = compute_u_from_c_and_ustar(c_xy, u_sw);
                                    u_buf[node_xy][z * sub_chunk_size..(z + 1) * sub_chunk_size]
                                        .copy_from_slice(&u_xy);
                                    u_computed[node_xy][z] = true;
                                } else {
                                    // Not solvable yet: mark as an erasure for
                                    // this plane's decode.
                                    layer_erasures.insert(node_xy);
                                }
                            } else if let Some(helper_sw) = helper_internal.get(&node_sw) {
                                // Both pair members are helpers: invert the
                                // pairwise coupling directly from the two
                                // coupled symbols (orientation from x < z_y).
                                if let Some(&sw_idx) = repair_plane_to_ind.get(&z_sw) {
                                    let c_xy_offset = repair_plane_to_ind[&z] * sub_chunk_size;
                                    let c_sw_offset = sw_idx * sub_chunk_size;
                                    let c_xy =
                                        &helper_chunk[c_xy_offset..c_xy_offset + sub_chunk_size];
                                    let c_sw =
                                        &helper_sw[c_sw_offset..c_sw_offset + sub_chunk_size];
                                    let (u_xy, u_sw_val) =
                                        prt_compute_both_oriented(c_xy, c_sw, x < z_y);
                                    u_buf[node_xy][z * sub_chunk_size..(z + 1) * sub_chunk_size]
                                        .copy_from_slice(&u_xy);
                                    u_buf[node_sw]
                                        [z_sw * sub_chunk_size..(z_sw + 1) * sub_chunk_size]
                                        .copy_from_slice(&u_sw_val);
                                    u_computed[node_xy][z] = true;
                                    u_computed[node_sw][z_sw] = true;
                                }
                            } else {
                                layer_erasures.insert(node_xy);
                            }
                        } else {
                            layer_erasures.insert(node_xy);
                        }
                    }
                }
            }
            // Phase 2: erasure-decode the uncoupled layer z to fill in the
            // u's of all erased nodes.
            decode_uncoupled_layer(params, &layer_erasures, z, sub_chunk_size, &mut u_buf)?;
            for &node in &layer_erasures {
                u_computed[node][z] = true;
            }
            // Phase 3: turn decoded u's back into the lost node's *coupled*
            // sub-chunks. Aloof erasures are skipped: only the lost node's
            // data is being reconstructed.
            for &node in &base_erasures {
                if aloof_nodes.contains(&node) {
                    continue;
                }
                let x = node % params.q;
                let y = node / params.q;
                let z_y = z_vec[y];
                let node_sw = y * params.q + z_y;
                let z_sw = get_companion_layer(params, z, x, y, z_y);
                if x == z_y {
                    // Uncoupled position: the coupled sub-chunk is u itself.
                    if node == lost_internal {
                        recovered[z * sub_chunk_size..(z + 1) * sub_chunk_size].copy_from_slice(
                            &u_buf[node][z * sub_chunk_size..(z + 1) * sub_chunk_size],
                        );
                    }
                } else if node_sw == lost_internal {
                    // The lost node is this node's companion: combine this
                    // helper's coupled symbol with its decoded u to obtain
                    // the lost coupled sub-chunk in the companion plane z_sw.
                    if let Some(helper_chunk) = helper_internal.get(&node) {
                        let c_offset = repair_plane_to_ind[&z] * sub_chunk_size;
                        let c_node = &helper_chunk[c_offset..c_offset + sub_chunk_size];
                        let u_node = &u_buf[node][z * sub_chunk_size..(z + 1) * sub_chunk_size];
                        let c_lost = compute_cstar_from_c_and_u(c_node, u_node);
                        recovered[z_sw * sub_chunk_size..(z_sw + 1) * sub_chunk_size]
                            .copy_from_slice(&c_lost);
                    }
                }
            }
        }
    }
    Ok(recovered)
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Small parameter set: k=4, m=2 (so q=2, t=3), 8 sub-chunks per chunk,
    /// no shortening (nu=0).
    // NOTE: the original file contained the mojibake `¶ms` ("&para" was
    // decoded as the paragraph-sign entity) in place of `&params` at every
    // call site below, which made this module fail to compile.
    fn test_params() -> RepairParams {
        RepairParams {
            k: 4,
            m: 2,
            n: 6,
            q: 2,
            t: 3,
            nu: 0,
            sub_chunk_no: 8,
            original_count: 4,
            recovery_count: 2,
        }
    }

    #[test]
    fn test_repair_subchunk_indices_count() {
        let params = test_params();
        // beta = sub_chunk_no / q planes are read per helper.
        let beta = params.sub_chunk_no / params.q;
        for lost_node in 0..params.n {
            let internal = if lost_node < params.k {
                lost_node
            } else {
                lost_node + params.nu
            };
            let indices = get_repair_subchunk_indices(&params, internal).unwrap();
            assert_eq!(
                indices.len(),
                beta,
                "Expected {} sub-chunks for node {}",
                beta,
                lost_node
            );
        }
    }

    #[test]
    fn test_minimum_to_repair_helpers_count() {
        let params = test_params();
        let d = params.k + params.q - 1;
        let available: Vec<usize> = (1..params.n).collect();
        let helper_info = minimum_to_repair(&params, 0, &available).unwrap();
        assert_eq!(helper_info.len(), d);
    }

    #[test]
    fn test_minimum_to_repair_includes_y_section() {
        let params = test_params();
        let available: Vec<usize> = (1..params.n).collect();
        let helper_info = minimum_to_repair(&params, 0, &available).unwrap();
        let helpers: Vec<usize> = helper_info.iter().map(|(h, _)| *h).collect();
        assert!(
            helpers.contains(&1),
            "Y-section partner (node 1) should be included for repairing node 0"
        );
    }

    #[test]
    fn test_minimum_to_repair_insufficient_helpers() {
        let params = test_params();
        let d = params.k + params.q - 1;
        // Only d-1 nodes available: repair must be rejected.
        let available: Vec<usize> = (1..d).collect();
        let result = minimum_to_repair(&params, 0, &available);
        assert!(matches!(
            result,
            Err(ClayError::InsufficientHelpers { .. })
        ));
    }
}