use crate::field_decoder::build_decoders;
use crate::header::decode_header;
use crate::types::{CompressionOption, EncodingInfo, POINTS_PER_CHUNK};
use crate::{Error, Result};
/// Decoder for compressed pointcloud byte streams.
///
/// With the `parallel` crate feature enabled, version-3 streams can be
/// decoded chunk-parallel; see [`PointcloudDecoder::with_threads`].
pub struct PointcloudDecoder {
    /// When true, chunked (v3) payloads are decoded on multiple threads.
    #[cfg(feature = "parallel")]
    threads: bool,
}
impl PointcloudDecoder {
    /// Creates a decoder; parallel decoding starts disabled.
    pub fn new() -> Self {
        Self {
            #[cfg(feature = "parallel")]
            threads: false,
        }
    }

    /// Builder-style toggle for multi-threaded chunk decoding.
    ///
    /// Only effective when the crate is built with the `parallel` feature;
    /// without it the flag is accepted and ignored, so callers compile the
    /// same way under either configuration.
    pub fn with_threads(self, threads: bool) -> Self {
        // Silence the unused-variable warning when `parallel` is disabled.
        let _ = threads;
        #[cfg(feature = "parallel")]
        return Self { threads, ..self };
        #[cfg(not(feature = "parallel"))]
        return self;
    }

    /// Decodes a complete stream: header followed by compressed point data.
    ///
    /// Returns the parsed [`EncodingInfo`] together with the decoded point
    /// bytes (`point_step` bytes per point).
    pub fn decode(&self, data: &[u8]) -> Result<(EncodingInfo, Vec<u8>)> {
        let (info, header_len) = decode_header(data)?;
        // Everything past the header is the compressed payload.
        // NOTE(review): assumes decode_header guarantees header_len <= data.len();
        // otherwise this slice panics — confirm.
        let compressed_data = &data[header_len..];
        let point_data = self.decode_with_info(&info, compressed_data)?;
        Ok((info, point_data))
    }

    /// Decodes the payload when the header has already been parsed.
    ///
    /// Version >= 3 streams consist of size-prefixed chunks (decoded
    /// sequentially, or in parallel when enabled); older streams are a
    /// single unprefixed chunk.
    pub fn decode_with_info(&self, info: &EncodingInfo, compressed_data: &[u8]) -> Result<Vec<u8>> {
        let total_points = info.width as usize * info.height as usize;
        if info.version >= 3 {
            let chunk_infos = collect_chunks(compressed_data, total_points)?;
            #[cfg(feature = "parallel")]
            if self.threads {
                return decode_parallel(info, &chunk_infos, total_points);
            }
            decode_sequential(info, &chunk_infos, total_points)
        } else {
            // Legacy (< v3) layout: the whole payload is one chunk.
            let mut output = vec![0u8; total_points * info.point_step as usize];
            let mut decoders = build_decoders(&info.fields, info.encoding_opt);
            let written = decode_one_chunk(
                info,
                compressed_data,
                &mut output,
                &mut decoders,
                total_points,
            )?;
            // decode_one_chunk reports bytes actually written; drop the slack.
            output.truncate(written);
            Ok(output)
        }
    }
}
impl Default for PointcloudDecoder {
fn default() -> Self {
Self::new()
}
}
/// One size-prefixed chunk located inside the compressed payload.
struct ChunkInfo<'a> {
    // Compressed bytes of the chunk (the u32 size prefix already stripped).
    bytes: &'a [u8],
    // Points this chunk encodes: POINTS_PER_CHUNK, except possibly the last.
    points: usize,
}
fn collect_chunks<'a>(
compressed_data: &'a [u8],
total_points: usize,
) -> Result<Vec<ChunkInfo<'a>>> {
let mut chunks = Vec::new();
let mut pos = 0;
let mut points_remaining = total_points;
while pos < compressed_data.len() {
if pos + 4 > compressed_data.len() {
return Err(Error::Truncated("chunk size prefix".into()));
}
let chunk_size = u32::from_le_bytes([
compressed_data[pos],
compressed_data[pos + 1],
compressed_data[pos + 2],
compressed_data[pos + 3],
]) as usize;
pos += 4;
if pos + chunk_size > compressed_data.len() {
return Err(Error::Truncated("chunk size exceeds buffer".into()));
}
let chunk_points = points_remaining.min(POINTS_PER_CHUNK);
chunks.push(ChunkInfo {
bytes: &compressed_data[pos..pos + chunk_size],
points: chunk_points,
});
points_remaining -= chunk_points;
pos += chunk_size;
}
Ok(chunks)
}
/// Decodes every chunk in order on the calling thread.
///
/// A single decoder set is built up front and reused; `decode_one_chunk`
/// resets it at each chunk boundary. Returns the concatenated point bytes,
/// trimmed to the amount actually written.
fn decode_sequential(
    info: &EncodingInfo,
    chunks: &[ChunkInfo<'_>],
    total_points: usize,
) -> Result<Vec<u8>> {
    let stride = info.point_step as usize;
    let mut buffer = vec![0u8; total_points * stride];
    let mut decoders = build_decoders(&info.fields, info.encoding_opt);
    let mut filled = 0;
    for chunk in chunks.iter() {
        let end = filled + chunk.points * stride;
        let dst = &mut buffer[filled..end];
        filled += decode_one_chunk(info, chunk.bytes, dst, &mut decoders, chunk.points)?;
    }
    buffer.truncate(filled);
    Ok(buffer)
}
/// Decodes chunks in parallel with rayon, then stitches the outputs back
/// together in chunk order (rayon's indexed `par_iter().collect()` preserves
/// ordering).
///
/// Each worker builds its own decoder set, so no mutable state is shared
/// between chunks; a freshly built set is equivalent to the per-chunk
/// `reset()` performed on the sequential path.
#[cfg(feature = "parallel")]
fn decode_parallel(
    info: &EncodingInfo,
    chunks: &[ChunkInfo<'_>],
    total_points: usize,
) -> Result<Vec<u8>> {
    use rayon::prelude::*;
    let point_step = info.point_step as usize;
    let chunk_outputs: Vec<Result<Vec<u8>>> = chunks
        .par_iter()
        .map(|chunk| {
            let mut decoders = build_decoders(&info.fields, info.encoding_opt);
            let mut out = vec![0u8; chunk.points * point_step];
            // Delegate to the single-chunk path so decompression and field
            // decoding live in exactly one place instead of being duplicated
            // here (the previous copy also lacked its bounds handling).
            let written =
                decode_one_chunk(info, chunk.bytes, &mut out, &mut decoders, chunk.points)?;
            out.truncate(written);
            Ok(out)
        })
        .collect();
    let mut output = Vec::with_capacity(total_points * point_step);
    for result in chunk_outputs {
        output.extend_from_slice(&result?);
    }
    Ok(output)
}
fn decode_one_chunk(
info: &EncodingInfo,
chunk: &[u8],
output: &mut [u8],
decoders: &mut Vec<crate::field_decoder::FieldDecoder>,
chunk_points: usize,
) -> Result<usize> {
let max_output = info.width as usize * info.height as usize * info.point_step as usize;
let decompressed = decompress_chunk(chunk, info.compression_opt, max_output)?;
let encoded_data = decompressed.as_deref().unwrap_or(chunk);
for dec in decoders.iter_mut() {
dec.reset();
}
let point_step = info.point_step as usize;
let mut enc_pos = 0;
let mut out_pos = 0;
for _ in 0..chunk_points {
let point_out = &mut output[out_pos..out_pos + point_step];
for dec in decoders.iter_mut() {
let consumed = dec.decode(&encoded_data[enc_pos..], point_out)?;
enc_pos += consumed;
}
out_pos += point_step;
}
Ok(out_pos)
}
fn decompress_chunk(
data: &[u8],
opt: CompressionOption,
max_output: usize,
) -> Result<Option<Vec<u8>>> {
match opt {
CompressionOption::None => Ok(None),
CompressionOption::Lz4 => {
let mut out = vec![0u8; max_output];
let n = lz4_flex::block::decompress_into(data, &mut out)
.map_err(|e| Error::Lz4(e.to_string()))?;
out.truncate(n);
Ok(Some(out))
}
CompressionOption::Zstd => {
let out =
zstd::bulk::decompress(data, max_output).map_err(|e| Error::Zstd(e.to_string()))?;
Ok(Some(out))
}
}
}