1use std::cmp::Reverse;
2use std::collections::BinaryHeap;
3
4use lerc_core::{
5 bits_required, fletcher32, BandSetView, DataType, Error, MaskView, RasterView, Result, Sample,
6};
7
8const MAGIC_LERC2: &[u8; 6] = b"Lerc2 ";
9const VERSION_4: i32 = 4;
10const VERSION_5: i32 = 5;
11const FIXED_HEADER_LEN_V4_V5: usize = 66;
12const FIXED_HEADER_LEN_V6: usize = 90;
13const MASK_COUNT_LEN: usize = 4;
14
/// User-facing options controlling a Lerc2 encode.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct EncodeOptions {
    /// Maximum allowed per-sample quantization error; `0.0` means lossless.
    pub max_z_error: f64,
    /// Edge length (in pixels) of the square micro-blocks used by the tiled
    /// encoder.
    pub micro_block_size: u32,
}
20
21impl Default for EncodeOptions {
22 fn default() -> Self {
23 Self {
24 max_z_error: 0.0,
25 micro_block_size: 8,
26 }
27 }
28}
29
/// Everything gathered by the analysis pass over one raster band:
/// dimensions, value statistics, and the encoding plan chosen from them.
#[derive(Debug, Clone)]
struct RasterAnalysis {
    data_type: DataType,
    width: u32,
    height: u32,
    depth: u32,
    // Number of pixels not masked out; checked to fit in u32 during analysis.
    valid_pixel_count: u32,
    max_z_error: f64,
    micro_block_size: u32,
    // Global min/max over all valid samples (normalized to 0.0/0.0 when
    // there are no valid pixels).
    z_min: f64,
    z_max: f64,
    // Per-depth min/max; `Some` only when the raster is not constant.
    min_values: Option<Vec<f64>>,
    max_values: Option<Vec<f64>>,
    plan: EncodePlan,
}
45
/// The header version and body layout selected for a raster.
#[derive(Debug, Clone)]
struct EncodePlan {
    version: i32,
    body: BodyPlan,
}
51
/// Which body layout the encoder will emit after the header/mask/ranges.
#[derive(Debug, Clone)]
enum BodyPlan {
    // All valid samples share one value (or there are none): header only.
    Constant,
    // Every depth slice is constant; the header ranges carry the values.
    PerDepthConstant,
    // All valid samples written raw in a single pass.
    OneSweep,
    // Per-micro-block bit-stuffed tiles.
    Tiled,
    // Huffman-coded body (8-bit style data).
    Huffman(HuffmanPlan),
}
60
/// Precomputed Huffman encoding for a raster.
#[derive(Debug, Clone)]
struct HuffmanPlan {
    mode: HuffmanMode,
    // Serialized code table, written verbatim before the bit stream.
    table_bytes: Vec<u8>,
    // Code per symbol; `None` for symbols that never occur.
    codes: Vec<Option<HuffmanCode>>,
    // Encoded body length in bytes (used for plan-size comparison in
    // `plan_raster`; presumably table + packed stream — set by
    // `build_huffman_plan`, not visible here).
    data_len: usize,
}
68
/// Huffman sub-mode byte written right after the body flag.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HuffmanMode {
    // Symbols are deltas against a neighboring valid pixel.
    Delta = 1,
    // Symbols are the raw sample values.
    Plain = 2,
}
74
/// One Huffman code word: the low `bit_len` bits of `bits`, emitted MSB
/// first by `MsbBitWriter::push_bits`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct HuffmanCode {
    bit_len: u8,
    bits: u32,
}
80
/// Node payload in the Huffman tree: a leaf symbol or two child indices
/// into the node arena.
#[derive(Debug, Clone)]
enum HuffmanNodeKind {
    Leaf(u16),
    Branch { left: usize, right: usize },
}
86
/// Arena-stored Huffman tree node (children referenced by index, not Box).
#[derive(Debug, Clone)]
struct HuffmanNode {
    kind: HuffmanNodeKind,
}
91
/// Heap entry used while building the Huffman tree.
///
/// Field order matters: the derived `Ord` compares `freq` first and breaks
/// ties with `min_symbol`, keeping tree construction deterministic.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct HuffmanHeapEntry {
    freq: u64,
    min_symbol: u16,
    node_index: usize,
}
98
/// Encoding chosen for a single micro-block (one depth slice).
#[derive(Debug, Clone)]
enum BlockBody {
    // Block needs no payload (empty, or all zero after quantization):
    // header byte only.
    ZeroOrEmpty,
    // Samples stored verbatim in their native byte width.
    Raw {
        byte_len: usize,
    },
    // All samples equal `offset`, stored once in the narrowest fitting type.
    Constant {
        offset: f64,
        offset_type: DataType,
        type_code: u8,
    },
    // Offset plus bit-stuffed quantized values.
    Bitstuff {
        offset: f64,
        offset_type: DataType,
        type_code: u8,
        payload_len: usize,
    },
}
117
/// A `BlockBody` plus whether it encodes differences against the previous
/// depth slice (Lerc2 v5 "diff" tiles).
#[derive(Debug, Clone)]
struct BlockPlan {
    is_diff: bool,
    body: BlockBody,
}
123
124impl BlockPlan {
125 fn encoded_len(&self) -> usize {
126 match self.body {
127 BlockBody::ZeroOrEmpty => 1,
128 BlockBody::Raw { byte_len } => 1 + byte_len,
129 BlockBody::Constant { offset_type, .. } => 1 + offset_type.byte_len(),
130 BlockBody::Bitstuff {
131 offset_type,
132 payload_len,
133 ..
134 } => 1 + offset_type.byte_len() + payload_len,
135 }
136 }
137
138 fn header_byte(&self, check_code: u8, version: i32) -> u8 {
139 let mut header = tile_header(
140 check_code,
141 match self.body {
142 BlockBody::ZeroOrEmpty => 2,
143 BlockBody::Raw { .. } => 0,
144 BlockBody::Constant { .. } => 3,
145 BlockBody::Bitstuff { .. } => 1,
146 },
147 );
148 if self.is_diff && version >= VERSION_5 {
149 header |= 4;
150 }
151 match self.body {
152 BlockBody::Constant { type_code, .. } | BlockBody::Bitstuff { type_code, .. } => {
153 header |= type_code << 6;
154 }
155 BlockBody::ZeroOrEmpty | BlockBody::Raw { .. } => {}
156 }
157 header
158 }
159}
160
/// Minimal byte-output abstraction so encoding can target a fixed output
/// slice with bounds checking (and potentially other sinks later).
trait ByteSink {
    fn push(&mut self, byte: u8) -> Result<()>;
    fn extend_from_slice(&mut self, bytes: &[u8]) -> Result<()>;
    // Bytes written so far.
    fn len(&self) -> usize;
}
166
/// `ByteSink` over a caller-provided slice; `len` is the write cursor.
struct SliceSink<'a> {
    out: &'a mut [u8],
    len: usize,
}
171
172impl<'a> SliceSink<'a> {
173 fn new(out: &'a mut [u8]) -> Self {
174 Self { out, len: 0 }
175 }
176}
177
178impl ByteSink for SliceSink<'_> {
179 fn push(&mut self, byte: u8) -> Result<()> {
180 if self.len >= self.out.len() {
181 return Err(Error::OutputTooSmall {
182 needed: self.len + 1,
183 available: self.out.len(),
184 });
185 }
186 self.out[self.len] = byte;
187 self.len += 1;
188 Ok(())
189 }
190
191 fn extend_from_slice(&mut self, bytes: &[u8]) -> Result<()> {
192 let end = self
193 .len
194 .checked_add(bytes.len())
195 .ok_or_else(|| Error::InvalidArgument("encoded blob size overflows usize".into()))?;
196 if end > self.out.len() {
197 return Err(Error::OutputTooSmall {
198 needed: end,
199 available: self.out.len(),
200 });
201 }
202 self.out[self.len..end].copy_from_slice(bytes);
203 self.len = end;
204 Ok(())
205 }
206
207 fn len(&self) -> usize {
208 self.len
209 }
210}
211
/// Reusable buffers for per-micro-block work, so the encoder does not
/// reallocate for every tile.
#[derive(Debug, Default)]
struct TileScratch {
    raw_bytes: Vec<u8>,
    values_f64: Vec<f64>,
    prev_values_f64: Vec<f64>,
    diff_values_f64: Vec<f64>,
    quantized: Vec<u32>,
    bitstuff_payload: Vec<u8>,
}
221
222impl TileScratch {
223 fn clear(&mut self) {
224 self.raw_bytes.clear();
225 self.values_f64.clear();
226 self.prev_values_f64.clear();
227 self.diff_values_f64.clear();
228 self.quantized.clear();
229 self.bitstuff_payload.clear();
230 }
231}
232
/// MSB-first bit packer accumulating into u32 words that are serialized
/// little-endian, matching the Lerc2 Huffman bit-stream layout.
#[derive(Debug, Default)]
struct MsbBitWriter {
    words: Vec<u32>,
    // Partially filled word; its top `bits_used` bits are occupied.
    current: u32,
    bits_used: u8,
}
239
240impl MsbBitWriter {
241 fn push_bits(&mut self, bits: u32, bit_len: u8) -> Result<()> {
242 let mut remaining = bit_len as usize;
243 while remaining > 0 {
244 let space = 32usize - self.bits_used as usize;
245 let take = remaining.min(space);
246 let shift_out = remaining - take;
247 let chunk_mask = if take == 32 {
248 u32::MAX
249 } else {
250 (1u32 << take) - 1
251 };
252 let chunk = (bits >> shift_out) & chunk_mask;
253 self.current |= chunk << (space - take);
254 self.bits_used += take as u8;
255 remaining -= take;
256 if self.bits_used == 32 {
257 self.words.push(self.current);
258 self.current = 0;
259 self.bits_used = 0;
260 }
261 }
262 Ok(())
263 }
264
265 fn into_aligned_bytes(mut self) -> Vec<u8> {
266 if self.bits_used != 0 {
267 self.words.push(self.current);
268 }
269 words_to_le_bytes(&self.words)
270 }
271
272 fn into_bytes_with_trailing_word(mut self) -> Vec<u8> {
273 if self.bits_used != 0 {
274 self.words.push(self.current);
275 }
276 self.words.push(0);
277 words_to_le_bytes(&self.words)
278 }
279}
280
/// Read-only raster access used by the encoder, so whole rasters and
/// single bands of a band set share one code path. `Copy` keeps the views
/// cheap to pass by value.
trait RasterSource<T: Sample>: Copy {
    fn width(self) -> u32;
    fn height(self) -> u32;
    fn depth(self) -> u32;
    fn data_type(self) -> DataType;
    fn pixel_count(self) -> Result<usize>;
    // Sample at row-major pixel index `pixel`, depth slice `dim`.
    fn sample(self, pixel: usize, dim: usize) -> T;
}
289
// Plain delegation: `RasterView` natively exposes the whole interface.
impl<T: Sample> RasterSource<T> for RasterView<'_, T> {
    fn width(self) -> u32 {
        self.width()
    }

    fn height(self) -> u32 {
        self.height()
    }

    fn depth(self) -> u32 {
        self.depth()
    }

    fn data_type(self) -> DataType {
        self.data_type()
    }

    fn pixel_count(self) -> Result<usize> {
        self.pixel_count()
    }

    fn sample(self, pixel: usize, dim: usize) -> T {
        self.sample(pixel, dim)
    }
}
315
/// One band of a `BandSetView`, adapted to the `RasterSource` interface.
#[derive(Debug, Clone, Copy)]
struct BandRasterView<'a, T: Sample> {
    band_set: BandSetView<'a, T>,
    band_index: usize,
}
321
// Delegates to the band set, pinning `band_index` for sample access.
impl<T: Sample> RasterSource<T> for BandRasterView<'_, T> {
    fn width(self) -> u32 {
        self.band_set.width()
    }

    fn height(self) -> u32 {
        self.band_set.height()
    }

    fn depth(self) -> u32 {
        self.band_set.depth()
    }

    fn data_type(self) -> DataType {
        self.band_set.data_type()
    }

    fn pixel_count(self) -> Result<usize> {
        self.band_set.pixel_count()
    }

    fn sample(self, pixel: usize, dim: usize) -> T {
        self.band_set.sample(self.band_index, pixel, dim)
    }
}
347
/// How a validity mask applies to the band being encoded.
#[derive(Debug, Clone, Copy)]
enum MaskKind<'a> {
    // No mask: every pixel is valid.
    None,
    // Mask owned by this blob: its payload is serialized into the output.
    Explicit(&'a [u8]),
    // Mask stored with an earlier band of the same blob: used for validity
    // tests but not re-serialized here.
    External(&'a [u8]),
}
354
355impl<'a> MaskKind<'a> {
356 fn data(self) -> Option<&'a [u8]> {
357 match self {
358 Self::None => None,
359 Self::Explicit(mask) | Self::External(mask) => Some(mask),
360 }
361 }
362
363 fn stored_payload_len(self, pixel_count: usize, valid_pixel_count: usize) -> Result<usize> {
364 match self {
365 Self::Explicit(mask) => explicit_mask_payload_len(mask, pixel_count, valid_pixel_count),
366 Self::None | Self::External(_) => Ok(0),
367 }
368 }
369}
370
371fn shared_mask_for_band(mask: Option<&[u8]>, band_index: usize) -> MaskKind<'_> {
372 match mask {
373 Some(mask) if band_index == 0 => MaskKind::Explicit(mask),
374 Some(mask) => MaskKind::External(mask),
375 None => MaskKind::None,
376 }
377}
378
379fn validate_encode_options(options: EncodeOptions) -> Result<()> {
380 if !options.max_z_error.is_finite() || options.max_z_error < 0.0 {
381 return Err(Error::InvalidArgument(
382 "max_z_error must be finite and non-negative".into(),
383 ));
384 }
385 if options.micro_block_size == 0 {
386 return Err(Error::InvalidArgument(
387 "micro_block_size must be greater than zero".into(),
388 ));
389 }
390 if options.micro_block_size > i32::MAX as u32 {
391 return Err(Error::InvalidArgument(
392 "micro_block_size exceeds the Lerc2 header limit".into(),
393 ));
394 }
395 Ok(())
396}
397
398fn validate_mask_dimensions(width: u32, height: u32, mask: Option<MaskView<'_>>) -> Result<()> {
399 if let Some(mask) = mask {
400 if mask.width() != width || mask.height() != height {
401 return Err(Error::InvalidArgument(
402 "mask dimensions must match the raster dimensions".into(),
403 ));
404 }
405 }
406 Ok(())
407}
408
409fn validate_mask_slice(mask: Option<&[u8]>, pixel_count: usize) -> Result<()> {
410 if let Some(mask) = mask {
411 if mask.len() != pixel_count {
412 return Err(Error::InvalidArgument(
413 "mask length does not match the raster dimensions".into(),
414 ));
415 }
416 }
417 Ok(())
418}
419
/// Upper bound on the encoded size of `raster` with `mask`, computed
/// without reading the sample values; use it to size the output buffer for
/// [`encode_into`]. The bound can be noticeably larger than the actual
/// encoded size.
pub fn encoded_len_upper_bound<T: Sample>(
    raster: RasterView<'_, T>,
    mask: Option<MaskView<'_>>,
    options: EncodeOptions,
) -> Result<usize> {
    validate_encode_options(options)?;
    validate_mask_dimensions(raster.width(), raster.height(), mask)?;
    encoded_len_upper_bound_for_raster(
        raster,
        mask.map_or(MaskKind::None, |mask| MaskKind::Explicit(mask.data())),
        options,
    )
}
433
434pub fn encoded_band_set_len_upper_bound<T: Sample>(
435 band_set: BandSetView<'_, T>,
436 mask: Option<MaskView<'_>>,
437 options: EncodeOptions,
438) -> Result<usize> {
439 validate_encode_options(options)?;
440 validate_mask_dimensions(band_set.width(), band_set.height(), mask)?;
441
442 let shared_mask = mask.map(MaskView::data);
443 let mut total = 0usize;
444 for band_index in 0..band_set.band_count() {
445 let band = BandRasterView {
446 band_set,
447 band_index,
448 };
449 total = total
450 .checked_add(encoded_len_upper_bound_for_raster(
451 band,
452 shared_mask_for_band(shared_mask, band_index),
453 options,
454 )?)
455 .ok_or_else(|| {
456 Error::InvalidArgument("encoded band set size overflows usize".into())
457 })?;
458 }
459 Ok(total)
460}
461
/// Encodes a single raster (and optional validity mask) into a newly
/// allocated Lerc2 blob. The buffer is sized from the analysis-based upper
/// bound and truncated to the bytes actually written.
pub fn encode<T: Sample>(
    raster: RasterView<'_, T>,
    mask: Option<MaskView<'_>>,
    options: EncodeOptions,
) -> Result<Vec<u8>> {
    validate_encode_options(options)?;
    validate_mask_dimensions(raster.width(), raster.height(), mask)?;

    let mask = mask.map_or(MaskKind::None, |mask| MaskKind::Explicit(mask.data()));
    let analysis = analyze_raster(raster, mask, options)?;
    let upper_bound = encoded_len_upper_bound_from_analysis(raster, mask, options, &analysis)?;
    let mut out = vec![0u8; upper_bound];
    let written = encode_into_with_analysis(raster, mask, options, &analysis, &mut out)?;
    out.truncate(written);
    Ok(out)
}
478
/// Encodes every band of `band_set` into one newly allocated buffer of
/// consecutive Lerc2 blobs (one per band), truncated to the bytes written.
///
/// A shared mask is serialized with the first band only; later bands refer
/// to it without re-storing it (see `shared_mask_for_band`).
pub fn encode_band_set<T: Sample>(
    band_set: BandSetView<'_, T>,
    mask: Option<MaskView<'_>>,
    options: EncodeOptions,
) -> Result<Vec<u8>> {
    validate_encode_options(options)?;
    validate_mask_dimensions(band_set.width(), band_set.height(), mask)?;

    let shared_mask = mask.map(MaskView::data);
    let mut analyses = Vec::with_capacity(band_set.band_count());
    let mut upper_bound = 0usize;

    // Analyze each band once and accumulate the total buffer size needed.
    for band_index in 0..band_set.band_count() {
        let band = BandRasterView {
            band_set,
            band_index,
        };
        let mask_kind = shared_mask_for_band(shared_mask, band_index);
        let analysis = analyze_raster(band, mask_kind, options)?;
        upper_bound = upper_bound
            .checked_add(encoded_len_upper_bound_from_analysis(
                band, mask_kind, options, &analysis,
            )?)
            .ok_or_else(|| {
                Error::InvalidArgument("encoded band set size overflows usize".into())
            })?;
        analyses.push(analysis);
    }

    let mut out = vec![0u8; upper_bound];
    let written =
        encode_band_set_into_with_analysis(band_set, shared_mask, options, &analyses, &mut out)?;
    out.truncate(written);
    Ok(out)
}
514
/// Encodes a single raster into the caller-provided `out` buffer, returning
/// the number of bytes written.
///
/// # Errors
/// Returns `Error::OutputTooSmall` when `out` cannot hold the blob; size it
/// with [`encoded_len_upper_bound`].
pub fn encode_into<T: Sample>(
    raster: RasterView<'_, T>,
    mask: Option<MaskView<'_>>,
    options: EncodeOptions,
    out: &mut [u8],
) -> Result<usize> {
    validate_encode_options(options)?;
    validate_mask_dimensions(raster.width(), raster.height(), mask)?;

    let mask = mask.map_or(MaskKind::None, |mask| MaskKind::Explicit(mask.data()));
    let analysis = analyze_raster(raster, mask, options)?;
    encode_into_with_analysis(raster, mask, options, &analysis, out)
}
528
529pub fn encode_band_set_into<T: Sample>(
530 band_set: BandSetView<'_, T>,
531 mask: Option<MaskView<'_>>,
532 options: EncodeOptions,
533 out: &mut [u8],
534) -> Result<usize> {
535 validate_encode_options(options)?;
536 validate_mask_dimensions(band_set.width(), band_set.height(), mask)?;
537
538 let shared_mask = mask.map(MaskView::data);
539 let mut analyses = Vec::with_capacity(band_set.band_count());
540 for band_index in 0..band_set.band_count() {
541 let band = BandRasterView {
542 band_set,
543 band_index,
544 };
545 analyses.push(analyze_raster(
546 band,
547 shared_mask_for_band(shared_mask, band_index),
548 options,
549 )?);
550 }
551
552 encode_band_set_into_with_analysis(band_set, shared_mask, options, &analyses, out)
553}
554
/// Analysis-free upper bound for one raster: fixed header + mask-payload
/// length field + stored mask + per-depth ranges + worst-case tiled body
/// (every tile stored raw).
fn encoded_len_upper_bound_for_raster<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: MaskKind<'_>,
    options: EncodeOptions,
) -> Result<usize> {
    let pixel_count = raster.pixel_count()?;
    validate_mask_slice(mask.data(), pixel_count)?;

    // Without a mask every pixel counts as valid.
    let valid_pixel_count = mask
        .data()
        .map(|mask| mask.iter().filter(|&&value| value != 0).count())
        .unwrap_or(pixel_count);
    let depth = raster.depth() as usize;
    let num_tiles = tile_count(raster.width() as usize, raster.height() as usize, options)?;
    let byte_len = raster.data_type().byte_len();
    let mask_len = mask.stored_payload_len(pixel_count, valid_pixel_count)?;
    // Per-depth min + max in the native type; absent when nothing is valid.
    let range_len = if valid_pixel_count == 0 {
        0
    } else {
        depth
            .checked_mul(2)
            .and_then(|len| len.checked_mul(byte_len))
            .ok_or_else(|| Error::InvalidArgument("range byte count overflows usize".into()))?
    };
    let prefix_len = body_prefix_len(raster.data_type(), options.max_z_error);
    // Worst case: one header byte per (tile, depth)...
    let tile_header_len = num_tiles
        .checked_mul(depth)
        .ok_or_else(|| Error::InvalidArgument("tile header length overflows usize".into()))?;
    // ...plus every valid sample stored raw.
    let raw_data_len = valid_pixel_count
        .checked_mul(depth)
        .and_then(|len| len.checked_mul(byte_len))
        .ok_or_else(|| Error::InvalidArgument("raw tile payload length overflows usize".into()))?;

    header_len(VERSION_4)
        .checked_add(MASK_COUNT_LEN)
        .and_then(|len| len.checked_add(mask_len))
        .and_then(|len| len.checked_add(range_len))
        .and_then(|len| len.checked_add(prefix_len))
        .and_then(|len| len.checked_add(tile_header_len))
        .and_then(|len| len.checked_add(raw_data_len))
        .ok_or_else(|| Error::InvalidArgument("encoded upper bound overflows usize".into()))
}
597
/// Single pass over the raster collecting the valid-pixel count and min/max
/// statistics, then selects the cheapest encoding plan via `plan_raster`.
///
/// # Errors
/// Rejects non-finite samples on valid pixels, mask-size mismatches, and
/// rasters with more than `u32::MAX` valid pixels (a Lerc2 header limit).
fn analyze_raster<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: MaskKind<'_>,
    options: EncodeOptions,
) -> Result<RasterAnalysis> {
    let pixel_count = raster.pixel_count()?;
    validate_mask_slice(mask.data(), pixel_count)?;
    let depth = raster.depth() as usize;
    let data_type = raster.data_type();

    let mut valid_pixel_count = 0usize;
    let mut z_min = f64::INFINITY;
    let mut z_max = f64::NEG_INFINITY;
    let mut min_values = vec![f64::INFINITY; depth];
    let mut max_values = vec![f64::NEG_INFINITY; depth];

    for pixel in 0..pixel_count {
        if !pixel_is_valid(mask.data(), pixel) {
            continue;
        }
        valid_pixel_count += 1;
        for dim in 0..depth {
            let value = raster.sample(pixel, dim).to_f64();
            if !value.is_finite() {
                return Err(Error::InvalidArgument(
                    "valid raster samples must be finite".into(),
                ));
            }
            z_min = z_min.min(value);
            z_max = z_max.max(value);
            min_values[dim] = min_values[dim].min(value);
            max_values[dim] = max_values[dim].max(value);
        }
    }

    let valid_pixel_count = u32::try_from(valid_pixel_count)
        .map_err(|_| Error::InvalidArgument("valid pixel count exceeds u32".into()))?;
    // No valid pixels: normalize the still-infinite range so the header
    // carries well-defined values.
    if valid_pixel_count == 0 {
        z_min = 0.0;
        z_max = 0.0;
    }

    // Per-depth ranges are only kept (and later stored) for non-constant
    // rasters.
    let (min_values, max_values) = if valid_pixel_count != 0 && z_min != z_max {
        (Some(min_values), Some(max_values))
    } else {
        (None, None)
    };

    // Seed with a placeholder plan; `plan_raster` reads the statistics
    // above and replaces it with the real choice.
    let mut analysis = RasterAnalysis {
        data_type,
        width: raster.width(),
        height: raster.height(),
        depth: raster.depth(),
        valid_pixel_count,
        max_z_error: options.max_z_error,
        micro_block_size: options.micro_block_size,
        z_min,
        z_max,
        min_values,
        max_values,
        plan: EncodePlan {
            version: VERSION_4,
            body: BodyPlan::Constant,
        },
    };
    analysis.plan = plan_raster(raster, mask.data(), &analysis)?;
    Ok(analysis)
}
666
/// Upper bound on the encoded size, tightened with results from a completed
/// analysis: the actual valid-pixel count, the chosen header version, and
/// whether range / body-prefix bytes will be present at all.
fn encoded_len_upper_bound_from_analysis<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: MaskKind<'_>,
    options: EncodeOptions,
    analysis: &RasterAnalysis,
) -> Result<usize> {
    let pixel_count = raster.pixel_count()?;
    validate_mask_slice(mask.data(), pixel_count)?;
    let valid_pixel_count = analysis.valid_pixel_count as usize;
    let depth = raster.depth() as usize;
    let num_tiles = tile_count(raster.width() as usize, raster.height() as usize, options)?;
    let byte_len = raster.data_type().byte_len();
    let mask_len = mask.stored_payload_len(pixel_count, valid_pixel_count)?;
    let range_len = depth_range_len(analysis)?;
    // Constant and per-depth-constant bodies are header-only: no prefix.
    let prefix_len = if analysis.valid_pixel_count == 0
        || analysis.z_min == analysis.z_max
        || has_per_depth_constant(analysis)
    {
        0
    } else {
        body_prefix_len(raster.data_type(), options.max_z_error)
    };
    let tile_header_len = num_tiles
        .checked_mul(depth)
        .ok_or_else(|| Error::InvalidArgument("tile header length overflows usize".into()))?;
    // Worst case remains "every valid sample stored raw".
    let raw_data_len = valid_pixel_count
        .checked_mul(depth)
        .and_then(|len| len.checked_mul(byte_len))
        .ok_or_else(|| Error::InvalidArgument("raw tile payload length overflows usize".into()))?;

    header_len(analysis.plan.version)
        .checked_add(MASK_COUNT_LEN)
        .and_then(|len| len.checked_add(mask_len))
        .and_then(|len| len.checked_add(range_len))
        .and_then(|len| len.checked_add(prefix_len))
        .and_then(|len| len.checked_add(tile_header_len))
        .and_then(|len| len.checked_add(raw_data_len))
        .ok_or_else(|| Error::InvalidArgument("encoded upper bound overflows usize".into()))
}
706
/// Serializes one raster into `out`: header, mask-payload length, RLE mask,
/// per-depth ranges, then the body — and finally patches the two header
/// fields that depend on the finished blob (blob size and checksum).
fn encode_into_with_analysis<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: MaskKind<'_>,
    _options: EncodeOptions,
    analysis: &RasterAnalysis,
    out: &mut [u8],
) -> Result<usize> {
    let pixel_count = raster.pixel_count()?;
    validate_mask_slice(mask.data(), pixel_count)?;

    let mut sink = SliceSink::new(out);
    let mut scratch = TileScratch::default();
    write_header_prefix(&mut sink, analysis)?;
    write_u32(
        &mut sink,
        mask.stored_payload_len(pixel_count, analysis.valid_pixel_count as usize)? as u32,
    )?;
    write_mask_rle(
        &mut sink,
        mask,
        pixel_count,
        analysis.valid_pixel_count as usize,
    )?;
    write_depth_ranges(&mut sink, analysis)?;
    write_body(&mut sink, &mut scratch, raster, mask.data(), analysis)?;

    let written = sink.len();
    // The header stores the blob size as an i32.
    if written > i32::MAX as usize {
        return Err(Error::InvalidArgument(
            "encoded blob size exceeds the Lerc2 header limit".into(),
        ));
    }

    // Blob-size field sits at header offset 34 (see `write_header_prefix`).
    out[34..38].copy_from_slice(&(written as i32).to_le_bytes());
    // Checksum covers everything after the checksum field itself (offset 14
    // onward) and is stored at offset 10.
    let checksum = fletcher32(&out[14..written]);
    out[10..14].copy_from_slice(&checksum.to_le_bytes());
    Ok(written)
}
745
/// Serializes every band as consecutive blobs into `out`, using the
/// per-band analyses computed earlier; returns the total bytes written.
fn encode_band_set_into_with_analysis<T: Sample>(
    band_set: BandSetView<'_, T>,
    shared_mask: Option<&[u8]>,
    options: EncodeOptions,
    analyses: &[RasterAnalysis],
    out: &mut [u8],
) -> Result<usize> {
    if analyses.len() != band_set.band_count() {
        return Err(Error::InvalidArgument(
            "band analysis count does not match band_count".into(),
        ));
    }

    let mut offset = 0usize;
    for (band_index, analysis) in analyses.iter().enumerate() {
        let band = BandRasterView {
            band_set,
            band_index,
        };
        // Each band's blob starts right where the previous one ended.
        let written = encode_into_with_analysis(
            band,
            shared_mask_for_band(shared_mask, band_index),
            options,
            analysis,
            &mut out[offset..],
        )?;
        offset = offset.checked_add(written).ok_or_else(|| {
            Error::InvalidArgument("encoded band set size overflows usize".into())
        })?;
    }
    Ok(offset)
}
778
/// Writes the fixed 66-byte Lerc2 v4/v5 header. Byte offsets are noted per
/// field; the zeros at offsets 10 and 34 are placeholders for the checksum
/// and blob size, patched afterwards in `encode_into_with_analysis`.
fn write_header_prefix(sink: &mut impl ByteSink, analysis: &RasterAnalysis) -> Result<()> {
    sink.extend_from_slice(MAGIC_LERC2)?; // 0: magic "Lerc2 "
    write_i32(sink, analysis.plan.version)?; // 6: format version
    write_u32(sink, 0)?; // 10: checksum placeholder
    write_u32(sink, analysis.height)?; // 14
    write_u32(sink, analysis.width)?; // 18
    write_u32(sink, analysis.depth)?; // 22
    write_u32(sink, analysis.valid_pixel_count)?; // 26
    write_i32(sink, analysis.micro_block_size as i32)?; // 30
    write_i32(sink, 0)?; // 34: blob-size placeholder
    write_i32(sink, analysis.data_type.code() as i32)?; // 38
    write_f64(sink, analysis.max_z_error)?; // 42
    write_f64(sink, analysis.z_min)?; // 50
    write_f64(sink, analysis.z_max)?; // 58 (header ends at 66)
    Ok(())
}
795
796fn write_depth_ranges(sink: &mut impl ByteSink, analysis: &RasterAnalysis) -> Result<()> {
797 if let (Some(min_values), Some(max_values)) = (&analysis.min_values, &analysis.max_values) {
798 for &value in min_values {
799 write_value_as(sink, value, analysis.data_type)?;
800 }
801 for &value in max_values {
802 write_value_as(sink, value, analysis.data_type)?;
803 }
804 }
805 Ok(())
806}
807
/// Dispatches body serialization according to the plan chosen during
/// analysis. Constant and per-depth-constant bodies need no bytes beyond
/// the header and ranges.
fn write_body<T: Sample, R: RasterSource<T>>(
    sink: &mut impl ByteSink,
    scratch: &mut TileScratch,
    raster: R,
    mask: Option<&[u8]>,
    analysis: &RasterAnalysis,
) -> Result<()> {
    match &analysis.plan.body {
        BodyPlan::Constant | BodyPlan::PerDepthConstant => Ok(()),
        BodyPlan::OneSweep => write_one_sweep_body(sink, raster, mask),
        BodyPlan::Tiled => write_tiled_body(sink, scratch, raster, mask, analysis),
        BodyPlan::Huffman(plan) => write_huffman_body(sink, raster, mask, analysis, plan),
    }
}
822
/// Outcome of dry-running the tiled encoder: which header version it needs
/// and how many body bytes it would produce.
#[derive(Debug, Clone, Copy)]
struct TilingPlan {
    version: i32,
    data_len: usize,
}
828
/// Chooses the cheapest body layout for the raster.
///
/// Order of consideration: constant / per-depth-constant shortcuts first,
/// then a tiled dry run, then a Huffman plan if one can be built, and
/// finally plain one-sweep, which wins ties against the alternatives.
fn plan_raster<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: Option<&[u8]>,
    analysis: &RasterAnalysis,
) -> Result<EncodePlan> {
    if analysis.valid_pixel_count == 0 || analysis.z_min == analysis.z_max {
        return Ok(EncodePlan {
            version: VERSION_4,
            body: BodyPlan::Constant,
        });
    }
    if has_per_depth_constant(analysis) {
        return Ok(EncodePlan {
            version: VERSION_4,
            body: BodyPlan::PerDepthConstant,
        });
    }

    let tiling = plan_tiled_body(raster, mask, analysis)?;
    let mut best_body = BodyPlan::Tiled;
    let mut best_version = tiling.version;
    // The tiled body is one byte longer for data types that also write a
    // Huffman flag byte (see `write_tiled_body`); include it so the size
    // comparison is apples-to-apples.
    let mut best_non_one_len =
        tiling.data_len + usize::from(needs_huffman_flag(analysis.data_type, analysis.max_z_error));

    if let Some(huffman) = build_huffman_plan(raster, mask, analysis)? {
        // +1 presumably for the mode byte written after the body flag in
        // `write_huffman_body` — confirm against `build_huffman_plan`'s
        // definition of `data_len`.
        let huffman_total_len = huffman.data_len.checked_add(1).ok_or_else(|| {
            Error::InvalidArgument("Huffman payload length overflows usize".into())
        })?;
        if huffman_total_len < best_non_one_len {
            best_non_one_len = huffman_total_len;
            best_version = VERSION_4;
            best_body = BodyPlan::Huffman(huffman);
        }
    }

    // One-sweep stores every valid sample raw; `<=` prefers it on ties as
    // the simplest layout.
    let one_sweep_len = (analysis.valid_pixel_count as usize)
        .checked_mul(analysis.depth as usize)
        .and_then(|len| len.checked_mul(analysis.data_type.byte_len()))
        .ok_or_else(|| Error::InvalidArgument("one-sweep byte count overflows usize".into()))?;
    if one_sweep_len <= best_non_one_len {
        return Ok(EncodePlan {
            version: VERSION_4,
            body: BodyPlan::OneSweep,
        });
    }

    Ok(EncodePlan {
        version: best_version,
        body: best_body,
    })
}
880
/// Dry-runs the tiled encoder over every (micro-block, depth) pair and
/// tallies the body size twice: as a v4 stream (absolute tiles only) and as
/// a v5 stream (diff tiles allowed where they are strictly smaller).
/// Returns the v5 plan only if some diff tile actually saved bytes.
fn plan_tiled_body<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: Option<&[u8]>,
    analysis: &RasterAnalysis,
) -> Result<TilingPlan> {
    let width = raster.width() as usize;
    let height = raster.height() as usize;
    let depth = raster.depth() as usize;
    let micro = analysis.micro_block_size as usize;
    let num_blocks_x = width.div_ceil(micro);
    let num_blocks_y = height.div_ceil(micro);
    // Edge blocks may be narrower/shorter than `micro`.
    let last_block_width = if width % micro == 0 {
        micro
    } else {
        width % micro
    };
    let last_block_height = if height % micro == 0 {
        micro
    } else {
        height % micro
    };
    let diff_supported =
        supports_diff_tiles(analysis.data_type, analysis.max_z_error, analysis.depth);

    let mut scratch = TileScratch::default();
    let mut version4_len = 0usize;
    let mut version5_len = 0usize;
    let mut used_diff = false;

    for block_y in 0..num_blocks_y {
        let block_height = if block_y + 1 == num_blocks_y {
            last_block_height
        } else {
            micro
        };
        for block_x in 0..num_blocks_x {
            let block_width = if block_x + 1 == num_blocks_x {
                last_block_width
            } else {
                micro
            };

            for dim in 0..depth {
                collect_block_values(
                    &mut scratch,
                    raster,
                    mask,
                    width,
                    micro,
                    block_x,
                    block_y,
                    block_width,
                    block_height,
                    dim,
                    diff_supported && dim > 0,
                );

                // Both tallies start from the absolute plan's size.
                let absolute_plan = choose_absolute_block_plan(
                    &scratch.values_f64,
                    scratch.raw_bytes.len(),
                    analysis.data_type,
                    analysis.max_z_error,
                    &mut scratch.quantized,
                    &mut scratch.bitstuff_payload,
                )?;
                version4_len = version4_len
                    .checked_add(absolute_plan.encoded_len())
                    .ok_or_else(|| {
                        Error::InvalidArgument("tile payload length overflows usize".into())
                    })?;
                version5_len = version5_len
                    .checked_add(absolute_plan.encoded_len())
                    .ok_or_else(|| {
                        Error::InvalidArgument("tile payload length overflows usize".into())
                    })?;

                // For depth > 0, check whether a diff tile against the
                // previous depth slice is strictly smaller; if so, credit
                // the saving to the v5 tally only.
                if diff_supported
                    && dim > 0
                    && build_diff_values(
                        &scratch.values_f64,
                        &scratch.prev_values_f64,
                        &mut scratch.diff_values_f64,
                    )?
                {
                    if let Some(diff_plan) = choose_diff_block_plan(
                        &scratch.diff_values_f64,
                        analysis.max_z_error,
                        &mut scratch.quantized,
                        &mut scratch.bitstuff_payload,
                    )? {
                        if diff_plan.encoded_len() < absolute_plan.encoded_len() {
                            version5_len = version5_len
                                .checked_sub(absolute_plan.encoded_len())
                                .and_then(|len| len.checked_add(diff_plan.encoded_len()))
                                .ok_or_else(|| {
                                    Error::InvalidArgument(
                                        "tile payload length overflows usize".into(),
                                    )
                                })?;
                            used_diff = true;
                        }
                    }
                }
            }
        }
    }

    Ok(if used_diff {
        TilingPlan {
            version: VERSION_5,
            data_len: version5_len,
        }
    } else {
        TilingPlan {
            version: VERSION_4,
            data_len: version4_len,
        }
    })
}
1000
1001fn write_one_sweep_body<T: Sample, R: RasterSource<T>>(
1002 sink: &mut impl ByteSink,
1003 raster: R,
1004 mask: Option<&[u8]>,
1005) -> Result<()> {
1006 sink.push(1)?;
1007 let pixel_count = raster.pixel_count()?;
1008 let depth = raster.depth() as usize;
1009 for pixel in 0..pixel_count {
1010 if !pixel_is_valid(mask, pixel) {
1011 continue;
1012 }
1013 for dim in 0..depth {
1014 write_value_as(sink, raster.sample(pixel, dim).to_f64(), raster.data_type())?;
1015 }
1016 }
1017 Ok(())
1018}
1019
1020fn write_tiled_body<T: Sample, R: RasterSource<T>>(
1021 sink: &mut impl ByteSink,
1022 scratch: &mut TileScratch,
1023 raster: R,
1024 mask: Option<&[u8]>,
1025 analysis: &RasterAnalysis,
1026) -> Result<()> {
1027 let width = raster.width() as usize;
1028 let height = raster.height() as usize;
1029 let depth = raster.depth() as usize;
1030 let micro = analysis.micro_block_size as usize;
1031 let num_blocks_x = width.div_ceil(micro);
1032 let num_blocks_y = height.div_ceil(micro);
1033 let last_block_width = if width % micro == 0 {
1034 micro
1035 } else {
1036 width % micro
1037 };
1038 let last_block_height = if height % micro == 0 {
1039 micro
1040 } else {
1041 height % micro
1042 };
1043 let diff_supported = analysis.plan.version >= VERSION_5
1044 && supports_diff_tiles(analysis.data_type, analysis.max_z_error, analysis.depth);
1045
1046 sink.push(0)?;
1047 if needs_huffman_flag(analysis.data_type, analysis.max_z_error) {
1048 sink.push(0)?;
1049 }
1050
1051 for block_y in 0..num_blocks_y {
1052 let block_height = if block_y + 1 == num_blocks_y {
1053 last_block_height
1054 } else {
1055 micro
1056 };
1057 for block_x in 0..num_blocks_x {
1058 let block_width = if block_x + 1 == num_blocks_x {
1059 last_block_width
1060 } else {
1061 micro
1062 };
1063
1064 for dim in 0..depth {
1065 collect_block_values(
1066 scratch,
1067 raster,
1068 mask,
1069 width,
1070 micro,
1071 block_x,
1072 block_y,
1073 block_width,
1074 block_height,
1075 dim,
1076 diff_supported && dim > 0,
1077 );
1078 let check_code = (((block_x * micro) >> 3) as u8) & 15;
1079 let absolute_plan = choose_absolute_block_plan(
1080 &scratch.values_f64,
1081 scratch.raw_bytes.len(),
1082 analysis.data_type,
1083 analysis.max_z_error,
1084 &mut scratch.quantized,
1085 &mut scratch.bitstuff_payload,
1086 )?;
1087
1088 let mut chosen_plan = absolute_plan.clone();
1089 let mut chose_diff = false;
1090 if diff_supported
1091 && dim > 0
1092 && build_diff_values(
1093 &scratch.values_f64,
1094 &scratch.prev_values_f64,
1095 &mut scratch.diff_values_f64,
1096 )?
1097 {
1098 if let Some(diff_plan) = choose_diff_block_plan(
1099 &scratch.diff_values_f64,
1100 analysis.max_z_error,
1101 &mut scratch.quantized,
1102 &mut scratch.bitstuff_payload,
1103 )? {
1104 if diff_plan.encoded_len() < absolute_plan.encoded_len() {
1105 chosen_plan = diff_plan;
1106 chose_diff = true;
1107 }
1108 }
1109 }
1110
1111 if !chose_diff {
1112 chosen_plan = choose_absolute_block_plan(
1113 &scratch.values_f64,
1114 scratch.raw_bytes.len(),
1115 analysis.data_type,
1116 analysis.max_z_error,
1117 &mut scratch.quantized,
1118 &mut scratch.bitstuff_payload,
1119 )?;
1120 }
1121
1122 write_block_plan(
1123 sink,
1124 &chosen_plan,
1125 check_code,
1126 analysis.plan.version,
1127 &scratch.raw_bytes,
1128 &scratch.bitstuff_payload,
1129 )?;
1130 }
1131 }
1132 }
1133
1134 Ok(())
1135}
1136
/// Writes the Huffman body: a `0` body flag, the mode byte, the serialized
/// code table, then the packed code stream (with one trailing zero word —
/// see `MsbBitWriter::into_bytes_with_trailing_word`).
fn write_huffman_body<T: Sample, R: RasterSource<T>>(
    sink: &mut impl ByteSink,
    raster: R,
    mask: Option<&[u8]>,
    analysis: &RasterAnalysis,
    plan: &HuffmanPlan,
) -> Result<()> {
    // Kept for signature symmetry with the other body writers.
    let _ = analysis;
    sink.push(0)?;
    sink.push(plan.mode as u8)?;
    sink.extend_from_slice(&plan.table_bytes)?;

    let width = raster.width() as usize;
    let height = raster.height() as usize;
    let depth = raster.depth() as usize;
    let data_type = raster.data_type();
    let mut writer = MsbBitWriter::default();

    match plan.mode {
        HuffmanMode::Delta => {
            // Symbols are deltas against a predictor: the valid left
            // neighbor's value if present, else the valid pixel above, else
            // the last valid value seen in scan order (0 at slice start).
            for dim in 0..depth {
                let mut prev_value = 0i32;
                for row in 0..height {
                    for col in 0..width {
                        let pixel = row * width + col;
                        if !pixel_is_valid(mask, pixel) {
                            continue;
                        }
                        let value =
                            huffman_sample_value(raster.sample(pixel, dim).to_f64(), data_type);
                        let predictor = if col > 0 && pixel_is_valid(mask, pixel - 1) {
                            prev_value
                        } else if row > 0 && pixel_is_valid(mask, pixel - width) {
                            huffman_sample_value(
                                raster.sample(pixel - width, dim).to_f64(),
                                data_type,
                            )
                        } else {
                            prev_value
                        };
                        let symbol = huffman_delta_symbol(value - predictor, data_type);
                        let code = plan.codes[symbol].ok_or_else(|| {
                            Error::InvalidArgument("missing Huffman delta symbol".into())
                        })?;
                        writer.push_bits(code.bits, code.bit_len)?;
                        prev_value = value;
                    }
                }
            }
        }
        HuffmanMode::Plain => {
            // Symbols are the raw sample values, depth-interleaved per pixel.
            let pixel_count = raster.pixel_count()?;
            for pixel in 0..pixel_count {
                if !pixel_is_valid(mask, pixel) {
                    continue;
                }
                for dim in 0..depth {
                    let value = huffman_sample_value(raster.sample(pixel, dim).to_f64(), data_type);
                    let symbol = huffman_symbol(value, data_type);
                    let code = plan.codes[symbol]
                        .ok_or_else(|| Error::InvalidArgument("missing Huffman symbol".into()))?;
                    writer.push_bits(code.bits, code.bit_len)?;
                }
            }
        }
    }

    sink.extend_from_slice(&writer.into_bytes_with_trailing_word())?;
    Ok(())
}
1207
1208#[allow(clippy::too_many_arguments)]
1209fn collect_block_values<T: Sample, R: RasterSource<T>>(
1210 scratch: &mut TileScratch,
1211 raster: R,
1212 mask: Option<&[u8]>,
1213 width: usize,
1214 micro: usize,
1215 block_x: usize,
1216 block_y: usize,
1217 block_width: usize,
1218 block_height: usize,
1219 dim: usize,
1220 include_prev_values: bool,
1221) {
1222 scratch.clear();
1223 for row in 0..block_height {
1224 let pixel_row = block_y * micro + row;
1225 for col in 0..block_width {
1226 let pixel = pixel_row * width + block_x * micro + col;
1227 if !pixel_is_valid(mask, pixel) {
1228 continue;
1229 }
1230 let value = raster.sample(pixel, dim);
1231 value.append_le_bytes(&mut scratch.raw_bytes);
1232 scratch.values_f64.push(value.to_f64());
1233 if include_prev_values {
1234 scratch
1235 .prev_values_f64
1236 .push(raster.sample(pixel, dim - 1).to_f64());
1237 }
1238 }
1239 }
1240}
1241
1242fn choose_absolute_block_plan(
1243 values: &[f64],
1244 raw_byte_len: usize,
1245 base_type: DataType,
1246 max_z_error: f64,
1247 quantized: &mut Vec<u32>,
1248 payload: &mut Vec<u8>,
1249) -> Result<BlockPlan> {
1250 let raw_len = raw_byte_len
1251 .checked_add(1)
1252 .ok_or_else(|| Error::InvalidArgument("raw block length overflows usize".into()))?;
1253 choose_block_plan(
1254 values,
1255 Some(raw_len),
1256 base_type,
1257 max_z_error,
1258 false,
1259 quantized,
1260 payload,
1261 )?
1262 .ok_or_else(|| Error::InvalidArgument("absolute tile plan unexpectedly missing".into()))
1263}
1264
1265fn choose_diff_block_plan(
1266 diff_values: &[f64],
1267 max_z_error: f64,
1268 quantized: &mut Vec<u32>,
1269 payload: &mut Vec<u8>,
1270) -> Result<Option<BlockPlan>> {
1271 choose_block_plan(
1272 diff_values,
1273 None,
1274 DataType::I32,
1275 max_z_error,
1276 true,
1277 quantized,
1278 payload,
1279 )
1280}
1281
1282fn choose_block_plan(
1283 values: &[f64],
1284 raw_len: Option<usize>,
1285 base_type: DataType,
1286 max_z_error: f64,
1287 is_diff: bool,
1288 quantized: &mut Vec<u32>,
1289 payload: &mut Vec<u8>,
1290) -> Result<Option<BlockPlan>> {
1291 if values.is_empty() {
1292 return Ok(Some(BlockPlan {
1293 is_diff,
1294 body: BlockBody::ZeroOrEmpty,
1295 }));
1296 }
1297
1298 let (min, max) = min_max(values);
1299 if min == 0.0 && max == 0.0 {
1300 return Ok(Some(BlockPlan {
1301 is_diff,
1302 body: BlockBody::ZeroOrEmpty,
1303 }));
1304 }
1305 if min == max {
1306 let (type_code, offset_type) = reduce_data_type(min, base_type)?;
1307 return Ok(Some(BlockPlan {
1308 is_diff,
1309 body: BlockBody::Constant {
1310 offset: min,
1311 offset_type,
1312 type_code,
1313 },
1314 }));
1315 }
1316
1317 if let Some(bitstuff) =
1318 try_bitstuff_tile(values, min, max, max_z_error, base_type, quantized, payload)?
1319 {
1320 let plan = BlockPlan {
1321 is_diff,
1322 body: BlockBody::Bitstuff {
1323 offset: bitstuff.offset,
1324 offset_type: bitstuff.offset_type,
1325 type_code: bitstuff.type_code,
1326 payload_len: bitstuff.payload_len,
1327 },
1328 };
1329 if raw_len.is_none() || plan.encoded_len() < raw_len.unwrap() {
1330 return Ok(Some(plan));
1331 }
1332 }
1333
1334 Ok(raw_len.map(|raw_len| BlockPlan {
1335 is_diff: false,
1336 body: BlockBody::Raw {
1337 byte_len: raw_len - 1,
1338 },
1339 }))
1340}
1341
1342fn write_block_plan(
1343 sink: &mut impl ByteSink,
1344 plan: &BlockPlan,
1345 check_code: u8,
1346 version: i32,
1347 raw_bytes: &[u8],
1348 bitstuff_payload: &[u8],
1349) -> Result<()> {
1350 sink.push(plan.header_byte(check_code, version))?;
1351 match plan.body {
1352 BlockBody::ZeroOrEmpty => Ok(()),
1353 BlockBody::Raw { .. } => sink.extend_from_slice(raw_bytes),
1354 BlockBody::Constant {
1355 offset,
1356 offset_type,
1357 ..
1358 } => write_value_as(sink, offset, offset_type),
1359 BlockBody::Bitstuff {
1360 offset,
1361 offset_type,
1362 payload_len,
1363 ..
1364 } => {
1365 write_value_as(sink, offset, offset_type)?;
1366 sink.extend_from_slice(&bitstuff_payload[..payload_len])
1367 }
1368 }
1369}
1370
1371fn build_diff_values(current: &[f64], previous: &[f64], out: &mut Vec<f64>) -> Result<bool> {
1372 if current.len() != previous.len() {
1373 return Err(Error::InvalidArgument(
1374 "diff input lengths do not match".into(),
1375 ));
1376 }
1377
1378 out.clear();
1379 out.reserve(current.len());
1380 for (&value, &prev) in current.iter().zip(previous) {
1381 let diff = value - prev;
1382 if diff < i32::MIN as f64 || diff > i32::MAX as f64 {
1383 out.clear();
1384 return Ok(false);
1385 }
1386 out.push(diff);
1387 }
1388 Ok(true)
1389}
1390
/// Returns `(min, max)` over `values`; an empty slice yields
/// `(INFINITY, NEG_INFINITY)`.
fn min_max(values: &[f64]) -> (f64, f64) {
    values
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        })
}
1400
1401fn supports_diff_tiles(data_type: DataType, max_z_error: f64, depth: u32) -> bool {
1402 depth > 1 && is_integer_lossless(data_type, max_z_error)
1403}
1404
1405fn is_integer_lossless(data_type: DataType, max_z_error: f64) -> bool {
1406 matches!(
1407 data_type,
1408 DataType::I8 | DataType::U8 | DataType::I16 | DataType::U16 | DataType::I32 | DataType::U32
1409 ) && (max_z_error - 0.5).abs() < 1e-5
1410}
1411
/// Scans the raster once to build plain and delta symbol histograms, derives
/// a Huffman candidate from each, and returns the cheaper one by projected
/// body length — or `None` when Huffman does not apply.
fn build_huffman_plan<T: Sample, R: RasterSource<T>>(
    raster: R,
    mask: Option<&[u8]>,
    analysis: &RasterAnalysis,
) -> Result<Option<HuffmanPlan>> {
    // Huffman is only attempted for 8-bit types at the lossless error bound.
    if !needs_huffman_flag(analysis.data_type, analysis.max_z_error) {
        return Ok(None);
    }

    let width = raster.width() as usize;
    let height = raster.height() as usize;
    let depth = raster.depth() as usize;
    let mut hist = [0u64; 256];
    let mut delta_hist = [0u64; 256];

    for dim in 0..depth {
        let mut prev_value = 0i32;
        for row in 0..height {
            for col in 0..width {
                let pixel = row * width + col;
                if !pixel_is_valid(mask, pixel) {
                    continue;
                }
                let value =
                    huffman_sample_value(raster.sample(pixel, dim).to_f64(), analysis.data_type);
                hist[huffman_symbol(value, analysis.data_type)] += 1;

                // Same predictor rule `write_huffman_body` uses in Delta mode:
                // left neighbor (== last encoded value) when valid, else the
                // pixel above, else the last encoded value in scan order.
                let predictor = if col > 0 && pixel_is_valid(mask, pixel - 1) {
                    prev_value
                } else if row > 0 && pixel_is_valid(mask, pixel - width) {
                    huffman_sample_value(
                        raster.sample(pixel - width, dim).to_f64(),
                        analysis.data_type,
                    )
                } else {
                    prev_value
                };
                delta_hist[huffman_delta_symbol(value - predictor, analysis.data_type)] += 1;
                prev_value = value;
            }
        }
    }

    let plain = build_huffman_candidate_from_hist(&hist)?;
    let delta = build_huffman_candidate_from_hist(&delta_hist)?;
    // Prefer the smaller projected length; ties go to Plain mode.
    let selected = match (plain, delta) {
        (Some(plain), Some(delta)) => {
            if plain.data_len <= delta.data_len {
                Some((HuffmanMode::Plain, plain))
            } else {
                Some((HuffmanMode::Delta, delta))
            }
        }
        (Some(plain), None) => Some((HuffmanMode::Plain, plain)),
        (None, Some(delta)) => Some((HuffmanMode::Delta, delta)),
        (None, None) => None,
    };

    Ok(selected.map(|(mode, candidate)| HuffmanPlan {
        mode,
        table_bytes: candidate.table_bytes,
        codes: candidate.codes,
        data_len: candidate.data_len,
    }))
}
1477
/// One evaluated Huffman coding option (plain or delta mode).
struct HuffmanCandidate {
    // Serialized code table exactly as it will be written to the stream.
    table_bytes: Vec<u8>,
    // Per-symbol codes; `None` for symbols absent from the histogram.
    codes: Vec<Option<HuffmanCode>>,
    // Projected total body length (table + padded payload), used to pick
    // between the plain and delta candidates.
    data_len: usize,
}
1483
1484fn build_huffman_candidate_from_hist(hist: &[u64; 256]) -> Result<Option<HuffmanCandidate>> {
1485 let Some(codes) = build_huffman_codes(hist)? else {
1486 return Ok(None);
1487 };
1488 let table_bytes = build_huffman_table_bytes(&codes)?;
1489 let payload_bits = hist
1490 .iter()
1491 .zip(&codes)
1492 .try_fold(0usize, |acc, (&count, code)| {
1493 let Some(code) = code else {
1494 return Ok(acc);
1495 };
1496 acc.checked_add((count as usize) * code.bit_len as usize)
1497 .ok_or_else(|| {
1498 Error::InvalidArgument("Huffman payload bit count overflows usize".into())
1499 })
1500 })?;
1501 let payload_bytes = (((payload_bits.div_ceil(32)) + 1) * 4)
1502 .checked_add(table_bytes.len())
1503 .ok_or_else(|| Error::InvalidArgument("Huffman payload length overflows usize".into()))?;
1504
1505 Ok(Some(HuffmanCandidate {
1506 table_bytes,
1507 codes,
1508 data_len: payload_bytes,
1509 }))
1510}
1511
/// Builds Huffman codes from a 256-entry histogram. Returns `Ok(None)` when
/// the histogram is empty or when the resulting tree would need codes longer
/// than Lerc2 allows — the caller then skips Huffman entirely.
fn build_huffman_codes(hist: &[u64; 256]) -> Result<Option<Vec<Option<HuffmanCode>>>> {
    let mut nodes = Vec::<HuffmanNode>::new();
    let mut heap = BinaryHeap::new();

    // Seed the heap with one leaf per symbol that actually occurs.
    for (symbol, &freq) in hist.iter().enumerate() {
        if freq == 0 {
            continue;
        }
        let node_index = nodes.len();
        nodes.push(HuffmanNode {
            kind: HuffmanNodeKind::Leaf(symbol as u16),
        });
        heap.push(Reverse(HuffmanHeapEntry {
            freq,
            min_symbol: symbol as u16,
            node_index,
        }));
    }

    if heap.is_empty() {
        return Ok(None);
    }

    // Classic Huffman construction: repeatedly merge the two lightest nodes.
    // `Reverse` turns the max-heap into a min-heap, and ties break on the
    // derived ordering (freq, then min_symbol, then node_index), so the tree
    // shape — and therefore the encoded bitstream — is deterministic.
    while heap.len() > 1 {
        let Reverse(left) = heap.pop().unwrap();
        let Reverse(right) = heap.pop().unwrap();
        let node_index = nodes.len();
        nodes.push(HuffmanNode {
            kind: HuffmanNodeKind::Branch {
                left: left.node_index,
                right: right.node_index,
            },
        });
        heap.push(Reverse(HuffmanHeapEntry {
            freq: left.freq.checked_add(right.freq).ok_or_else(|| {
                Error::InvalidArgument("Huffman frequency count overflows u64".into())
            })?,
            min_symbol: left.min_symbol.min(right.min_symbol),
            node_index,
        }));
    }

    let root = heap.pop().unwrap().0.node_index;
    let mut codes = vec![None; 256];
    // Over-long codes are not an error at this level: report "no Huffman"
    // so the caller can fall back to another encoding.
    if assign_huffman_codes(&nodes, root, 0, 0, &mut codes).is_err() {
        return Ok(None);
    }
    Ok(Some(codes))
}
1561
1562fn assign_huffman_codes(
1563 nodes: &[HuffmanNode],
1564 node_index: usize,
1565 bits: u32,
1566 bit_len: u8,
1567 codes: &mut [Option<HuffmanCode>],
1568) -> Result<()> {
1569 match nodes[node_index].kind {
1570 HuffmanNodeKind::Leaf(symbol) => {
1571 let bit_len = bit_len.max(1);
1572 if bit_len > 31 {
1573 return Err(Error::InvalidArgument(
1574 "Huffman code length exceeds Lerc2 limits".into(),
1575 ));
1576 }
1577 codes[symbol as usize] = Some(HuffmanCode { bit_len, bits });
1578 }
1579 HuffmanNodeKind::Branch { left, right } => {
1580 if bit_len >= 31 {
1581 return Err(Error::InvalidArgument(
1582 "Huffman code length exceeds Lerc2 limits".into(),
1583 ));
1584 }
1585 assign_huffman_codes(nodes, left, bits << 1, bit_len + 1, codes)?;
1586 assign_huffman_codes(nodes, right, (bits << 1) | 1, bit_len + 1, codes)?;
1587 }
1588 }
1589 Ok(())
1590}
1591
1592fn build_huffman_table_bytes(codes: &[Option<HuffmanCode>]) -> Result<Vec<u8>> {
1593 let Some(first_symbol) = codes.iter().position(Option::is_some) else {
1594 return Err(Error::InvalidArgument(
1595 "Huffman code table cannot be empty".into(),
1596 ));
1597 };
1598 let last_symbol = codes.iter().rposition(Option::is_some).unwrap() + 1;
1599 let code_lengths: Vec<u32> = codes[first_symbol..last_symbol]
1600 .iter()
1601 .map(|code| code.map_or(0, |code| code.bit_len as u32))
1602 .collect();
1603 let mut out = Vec::new();
1604 out.extend_from_slice(&2i32.to_le_bytes());
1605 out.extend_from_slice(&(codes.len() as i32).to_le_bytes());
1606 out.extend_from_slice(&(first_symbol as i32).to_le_bytes());
1607 out.extend_from_slice(&(last_symbol as i32).to_le_bytes());
1608 write_raw_bitstuff_block(&mut out, &code_lengths)?;
1609
1610 let mut writer = MsbBitWriter::default();
1611 for code in codes[first_symbol..last_symbol].iter().flatten() {
1612 writer.push_bits(code.bits, code.bit_len)?;
1613 }
1614 out.extend_from_slice(&writer.into_aligned_bytes());
1615 Ok(out)
1616}
1617
1618fn write_raw_bitstuff_block(out: &mut Vec<u8>, values: &[u32]) -> Result<()> {
1619 let max_value = values.iter().copied().max().unwrap_or(0);
1620 let bits = bits_required(max_value as usize);
1621 if bits > 31 {
1622 return Err(Error::InvalidArgument(
1623 "bit-stuffed payload exceeds the Lerc2 bit width limit".into(),
1624 ));
1625 }
1626 let (count_code, count_bytes) = count_field(values.len())?;
1627 out.push((count_code << 6) | bits);
1628 append_count(out, values.len(), count_bytes)?;
1629 if bits != 0 {
1630 pack_lsb_bits_into(values, bits, out);
1631 }
1632 Ok(())
1633}
1634
/// Result of a successful bit-stuff attempt for one tile.
#[derive(Debug, Clone)]
struct BitstuffTile {
    // Offset subtracted before quantization (the tile minimum).
    offset: f64,
    // Narrowest type that represents `offset` exactly.
    offset_type: DataType,
    // Lerc2 type-reduction code matching `offset_type`.
    type_code: u8,
    // Number of leading bytes of the shared payload buffer used by this tile.
    payload_len: usize,
}
1642
/// Tries to quantize a tile against its minimum and bit-pack the result.
///
/// Returns `Ok(None)` when quantization does not apply (non-positive error
/// bound, bucket count out of u32 range, 0 or > 31 bits needed) or when any
/// reconstructed value would exceed the error bound. On success the packed
/// bytes are left at the front of `payload` and described by the returned
/// `BitstuffTile`.
fn try_bitstuff_tile(
    values: &[f64],
    offset: f64,
    max_value: f64,
    max_z_error: f64,
    base_type: DataType,
    quantized: &mut Vec<u32>,
    payload: &mut Vec<u8>,
) -> Result<Option<BitstuffTile>> {
    // Only the lossy path quantizes; other encodings cover max_z_error <= 0.
    if max_z_error <= 0.0 {
        return Ok(None);
    }
    // Bucket width of 2 * maxZError keeps rounding error within the bound.
    let scale = 2.0 * max_z_error;
    let nmax_f = ((max_value - offset) / scale).ceil();
    if !nmax_f.is_finite() || !(0.0..=(u32::MAX as f64)).contains(&nmax_f) {
        return Ok(None);
    }
    let nmax = nmax_f as u32;
    if nmax == 0 {
        return Ok(None);
    }

    // Tolerance for the round-trip check below, scaled to the error bound.
    let epsilon = max_z_error.abs() * 1e-12 + 1e-12;
    quantized.clear();
    quantized.reserve(values.len());
    let mut max_quantized = 0u32;
    for &value in values {
        let quantized_value = ((value - offset) / scale).round().clamp(0.0, nmax as f64) as u32;
        // The top bucket reconstructs to the exact tile maximum.
        let reconstructed = if (quantized_value as f64) < nmax as f64 {
            offset + quantized_value as f64 * scale
        } else {
            max_value
        };
        // Verify quantization really honors the error bound; bail out so the
        // planner can fall back to another encoding if it does not.
        if (reconstructed - value).abs() > max_z_error + epsilon {
            return Ok(None);
        }
        max_quantized = max_quantized.max(quantized_value);
        quantized.push(quantized_value);
    }

    let bits = bits_required(max_quantized as usize);
    if bits == 0 || bits > 31 {
        return Ok(None);
    }

    // Header byte: 2-bit count-width code on top, bit width in the low bits.
    let (count_code, count_bytes) = count_field(values.len())?;
    let (type_code, offset_type) = reduce_data_type(offset, base_type)?;
    payload.clear();
    payload.reserve(1 + count_bytes + (values.len() * bits as usize).div_ceil(8));
    payload.push((count_code << 6) | bits);
    append_count(payload, values.len(), count_bytes)?;
    pack_lsb_bits_into(quantized, bits, payload);
    Ok(Some(BitstuffTile {
        offset,
        offset_type,
        type_code,
        payload_len: payload.len(),
    }))
}
1702
1703fn count_field(count: usize) -> Result<(u8, usize)> {
1704 if count <= u8::MAX as usize {
1705 Ok((2, 1))
1706 } else if count <= u16::MAX as usize {
1707 Ok((1, 2))
1708 } else if count <= u32::MAX as usize {
1709 Ok((0, 4))
1710 } else {
1711 Err(Error::InvalidArgument(
1712 "tile valid-value count exceeds u32".into(),
1713 ))
1714 }
1715}
1716
1717fn append_count(out: &mut Vec<u8>, count: usize, count_bytes: usize) -> Result<()> {
1718 match count_bytes {
1719 1 => out.push(
1720 u8::try_from(count)
1721 .map_err(|_| Error::InvalidArgument("count does not fit in u8".into()))?,
1722 ),
1723 2 => out.extend_from_slice(
1724 &u16::try_from(count)
1725 .map_err(|_| Error::InvalidArgument("count does not fit in u16".into()))?
1726 .to_le_bytes(),
1727 ),
1728 4 => out.extend_from_slice(
1729 &u32::try_from(count)
1730 .map_err(|_| Error::InvalidArgument("count does not fit in u32".into()))?
1731 .to_le_bytes(),
1732 ),
1733 _ => {
1734 return Err(Error::InvalidArgument(
1735 "unsupported count field width".into(),
1736 ))
1737 }
1738 }
1739 Ok(())
1740}
1741
/// Packs each value's low `bits_per_value` bits LSB-first into `out`,
/// appending after any bytes already present.
fn pack_lsb_bits_into(values: &[u32], bits_per_value: u8, out: &mut Vec<u8>) {
    let bits = bits_per_value as usize;
    let base = out.len();
    out.resize(base + (values.len() * bits).div_ceil(8), 0);
    for (index, &value) in values.iter().enumerate() {
        let first_bit = index * bits;
        for bit in 0..bits {
            if (value >> bit) & 1 == 1 {
                // LSB-first within each byte.
                let pos = first_bit + bit;
                out[base + pos / 8] |= 1 << (pos % 8);
            }
        }
    }
}
1759
1760fn reduce_data_type(value: f64, data_type: DataType) -> Result<(u8, DataType)> {
1761 let reduced = match data_type {
1762 DataType::I8 | DataType::U8 => (0, data_type),
1763 DataType::I16 => {
1764 if fits_i8(value) {
1765 (2, DataType::I8)
1766 } else if fits_u8(value) {
1767 (1, DataType::U8)
1768 } else {
1769 (0, DataType::I16)
1770 }
1771 }
1772 DataType::U16 => {
1773 if fits_u8(value) {
1774 (1, DataType::U8)
1775 } else {
1776 (0, DataType::U16)
1777 }
1778 }
1779 DataType::I32 => {
1780 if fits_i8(value) {
1781 (3, DataType::I8)
1782 } else if fits_i16(value) {
1783 (2, DataType::I16)
1784 } else if fits_u16(value) {
1785 (1, DataType::U16)
1786 } else {
1787 (0, DataType::I32)
1788 }
1789 }
1790 DataType::U32 => {
1791 if fits_u8(value) {
1792 (2, DataType::U8)
1793 } else if fits_u16(value) {
1794 (1, DataType::U16)
1795 } else {
1796 (0, DataType::U32)
1797 }
1798 }
1799 DataType::F32 => {
1800 if fits_u8(value) {
1801 (2, DataType::U8)
1802 } else if fits_i16(value) {
1803 (1, DataType::I16)
1804 } else {
1805 (0, DataType::F32)
1806 }
1807 }
1808 DataType::F64 => {
1809 if fits_i16(value) {
1810 (3, DataType::I16)
1811 } else if fits_i32(value) {
1812 (2, DataType::I32)
1813 } else if fits_f32(value) {
1814 (1, DataType::F32)
1815 } else {
1816 (0, DataType::F64)
1817 }
1818 }
1819 };
1820 Ok(reduced)
1821}
1822
/// True when `value` is exactly representable as an i8.
fn fits_i8(value: f64) -> bool {
    ((i8::MIN as f64)..=(i8::MAX as f64)).contains(&value) && f64::from(value as i8) == value
}

/// True when `value` is exactly representable as a u8.
fn fits_u8(value: f64) -> bool {
    ((u8::MIN as f64)..=(u8::MAX as f64)).contains(&value) && f64::from(value as u8) == value
}

/// True when `value` is exactly representable as an i16.
fn fits_i16(value: f64) -> bool {
    ((i16::MIN as f64)..=(i16::MAX as f64)).contains(&value) && f64::from(value as i16) == value
}

/// True when `value` is exactly representable as a u16.
fn fits_u16(value: f64) -> bool {
    ((u16::MIN as f64)..=(u16::MAX as f64)).contains(&value) && f64::from(value as u16) == value
}

/// True when `value` is exactly representable as an i32.
fn fits_i32(value: f64) -> bool {
    ((i32::MIN as f64)..=(i32::MAX as f64)).contains(&value) && f64::from(value as i32) == value
}

/// True when `value` survives a round-trip through f32 (NaN never does).
fn fits_f32(value: f64) -> bool {
    f64::from(value as f32) == value
}
1846
1847fn huffman_sample_value(value: f64, data_type: DataType) -> i32 {
1848 match data_type {
1849 DataType::I8 => value as i8 as i32,
1850 DataType::U8 => value as u8 as i32,
1851 _ => unreachable!("Huffman only supports 8-bit sample types"),
1852 }
1853}
1854
1855fn huffman_symbol(value: i32, data_type: DataType) -> usize {
1856 match data_type {
1857 DataType::I8 => (value as i8 as i16 + 128) as usize,
1858 DataType::U8 => value as u8 as usize,
1859 _ => unreachable!("Huffman only supports 8-bit sample types"),
1860 }
1861}
1862
1863fn huffman_delta_symbol(delta: i32, data_type: DataType) -> usize {
1864 match data_type {
1865 DataType::I8 => (((delta & 0xFF) as u8) as i8 as i16 + 128) as usize,
1866 DataType::U8 => ((delta & 0xFF) as u8) as usize,
1867 _ => unreachable!("Huffman only supports 8-bit sample types"),
1868 }
1869}
1870
/// Flattens u32 words into their little-endian byte representation.
fn words_to_le_bytes(words: &[u32]) -> Vec<u8> {
    words.iter().flat_map(|word| word.to_le_bytes()).collect()
}
1878
1879fn pack_mask_bitset(mask: &[u8], pixel_count: usize) -> Result<Vec<u8>> {
1880 if mask.len() != pixel_count {
1881 return Err(Error::InvalidArgument(
1882 "mask length does not match the raster dimensions".into(),
1883 ));
1884 }
1885
1886 let bitset_len = pixel_count.div_ceil(8);
1887 let mut bitset = vec![0u8; bitset_len];
1888 for (index, &value) in mask.iter().enumerate() {
1889 if value != 0 {
1890 bitset[index >> 3] |= 1 << (7 - (index & 7));
1891 }
1892 }
1893 Ok(bitset)
1894}
1895
1896fn emit_mask_literal_chunks<F>(bytes: &[u8], emit_literal: &mut F) -> Result<()>
1897where
1898 F: FnMut(&[u8]) -> Result<()>,
1899{
1900 let mut offset = 0usize;
1901 while offset < bytes.len() {
1902 let chunk = (bytes.len() - offset).min(i16::MAX as usize);
1903 emit_literal(&bytes[offset..offset + chunk])?;
1904 offset += chunk;
1905 }
1906 Ok(())
1907}
1908
/// One segment of the RLE-compressed mask bitset.
enum MaskRleSegment<'a> {
    /// Verbatim bytes (at most i16::MAX per segment).
    Literal(&'a [u8]),
    /// `count` copies of `value`.
    Repeat { value: u8, count: usize },
}
1913
/// Splits the mask bitset into literal and repeat segments.
///
/// A run of identical bytes only becomes a `Repeat` when it reaches a
/// threshold that depends on whether the run touches the start and/or end of
/// the pending literal (2 / 4 / 6 bytes) — presumably balancing the 3-byte
/// cost of a repeat segment against the extra 2-byte literal headers a split
/// introduces; confirm against the Lerc mask RLE format.
fn emit_mask_rle_segments<F>(bitset: &[u8], mut emit: F) -> Result<()>
where
    F: FnMut(MaskRleSegment<'_>) -> Result<()>,
{
    // Start of the pending (not yet emitted) literal range.
    let mut literal_start = 0usize;
    let mut offset = 0usize;

    while offset < bitset.len() {
        // Measure the run of identical bytes starting at `offset`.
        let value = bitset[offset];
        let mut run_end = offset + 1;
        while run_end < bitset.len() && bitset[run_end] == value {
            run_end += 1;
        }

        let run_len = run_end - offset;
        let repeat_threshold = if literal_start == offset && run_end == bitset.len() {
            2
        } else if literal_start == offset || run_end == bitset.len() {
            4
        } else {
            6
        };

        if run_len >= repeat_threshold {
            // Flush the pending literal, then the run — both chunked to
            // i16::MAX because segment counts are stored as i16.
            emit_mask_literal_chunks(&bitset[literal_start..offset], &mut |bytes| {
                emit(MaskRleSegment::Literal(bytes))
            })?;

            let mut emitted = 0usize;
            while emitted < run_len {
                let chunk = (run_len - emitted).min(i16::MAX as usize);
                emit(MaskRleSegment::Repeat {
                    value,
                    count: chunk,
                })?;
                emitted += chunk;
            }

            literal_start = run_end;
        }

        offset = run_end;
    }

    // Whatever remains unflushed is a trailing literal.
    emit_mask_literal_chunks(&bitset[literal_start..], &mut |bytes| {
        emit(MaskRleSegment::Literal(bytes))
    })
}
1962
1963fn write_mask_rle(
1964 sink: &mut impl ByteSink,
1965 mask: MaskKind<'_>,
1966 pixel_count: usize,
1967 valid_pixel_count: usize,
1968) -> Result<()> {
1969 if valid_pixel_count == 0 || valid_pixel_count == pixel_count {
1970 return Ok(());
1971 }
1972
1973 let MaskKind::Explicit(mask) = mask else {
1974 return Ok(());
1975 };
1976 let bitset = pack_mask_bitset(mask, pixel_count)?;
1977 emit_mask_rle_segments(&bitset, |segment| match segment {
1978 MaskRleSegment::Literal(bytes) => {
1979 write_i16(sink, bytes.len() as i16)?;
1980 sink.extend_from_slice(bytes)
1981 }
1982 MaskRleSegment::Repeat { value, count } => {
1983 write_i16(sink, -(count as i16))?;
1984 sink.push(value)
1985 }
1986 })?;
1987 write_i16(sink, i16::MIN)
1988}
1989
1990fn explicit_mask_payload_len(
1991 mask: &[u8],
1992 pixel_count: usize,
1993 valid_pixel_count: usize,
1994) -> Result<usize> {
1995 if valid_pixel_count == 0 || valid_pixel_count == pixel_count {
1996 return Ok(0);
1997 }
1998
1999 let bitset = pack_mask_bitset(mask, pixel_count)?;
2000 let mut len = 2usize;
2001 emit_mask_rle_segments(&bitset, |segment| {
2002 len = match segment {
2003 MaskRleSegment::Literal(bytes) => len
2004 .checked_add(2)
2005 .and_then(|len| len.checked_add(bytes.len())),
2006 MaskRleSegment::Repeat { .. } => len.checked_add(3),
2007 }
2008 .ok_or_else(|| Error::InvalidArgument("mask payload length overflows usize".into()))?;
2009 Ok(())
2010 })?;
2011 Ok(len)
2012}
2013
2014fn depth_range_len(analysis: &RasterAnalysis) -> Result<usize> {
2015 if analysis.min_values.is_none() {
2016 return Ok(0);
2017 }
2018 (analysis.depth as usize)
2019 .checked_mul(2)
2020 .and_then(|len| len.checked_mul(analysis.data_type.byte_len()))
2021 .ok_or_else(|| Error::InvalidArgument("range byte count overflows usize".into()))
2022}
2023
2024fn tile_count(width: usize, height: usize, options: EncodeOptions) -> Result<usize> {
2025 let micro = options.micro_block_size as usize;
2026 let num_blocks_x = width.div_ceil(micro);
2027 let num_blocks_y = height.div_ceil(micro);
2028 num_blocks_x
2029 .checked_mul(num_blocks_y)
2030 .ok_or_else(|| Error::InvalidArgument("tile count overflows usize".into()))
2031}
2032
2033fn header_len(version: i32) -> usize {
2034 if version >= 6 {
2035 FIXED_HEADER_LEN_V6
2036 } else {
2037 FIXED_HEADER_LEN_V4_V5
2038 }
2039}
2040
2041fn body_prefix_len(data_type: DataType, max_z_error: f64) -> usize {
2042 1 + usize::from(needs_huffman_flag(data_type, max_z_error))
2043}
2044
2045fn needs_huffman_flag(data_type: DataType, max_z_error: f64) -> bool {
2046 matches!(data_type, DataType::I8 | DataType::U8) && (max_z_error - 0.5).abs() < 1e-5
2047}
2048
2049fn has_per_depth_constant(analysis: &RasterAnalysis) -> bool {
2050 analysis
2051 .min_values
2052 .as_ref()
2053 .zip(analysis.max_values.as_ref())
2054 .map(|(mins, maxs)| mins == maxs)
2055 .unwrap_or(false)
2056}
2057
2058fn write_value_as(sink: &mut impl ByteSink, value: f64, data_type: DataType) -> Result<()> {
2059 match data_type {
2060 DataType::I8 => sink.push((value as i8) as u8),
2061 DataType::U8 => sink.push(value as u8),
2062 DataType::I16 => sink.extend_from_slice(&(value as i16).to_le_bytes()),
2063 DataType::U16 => sink.extend_from_slice(&(value as u16).to_le_bytes()),
2064 DataType::I32 => sink.extend_from_slice(&(value as i32).to_le_bytes()),
2065 DataType::U32 => sink.extend_from_slice(&(value as u32).to_le_bytes()),
2066 DataType::F32 => sink.extend_from_slice(&(value as f32).to_le_bytes()),
2067 DataType::F64 => sink.extend_from_slice(&value.to_le_bytes()),
2068 }
2069}
2070
2071fn write_u32(sink: &mut impl ByteSink, value: u32) -> Result<()> {
2072 sink.extend_from_slice(&value.to_le_bytes())
2073}
2074
2075fn write_i32(sink: &mut impl ByteSink, value: i32) -> Result<()> {
2076 sink.extend_from_slice(&value.to_le_bytes())
2077}
2078
2079fn write_i16(sink: &mut impl ByteSink, value: i16) -> Result<()> {
2080 sink.extend_from_slice(&value.to_le_bytes())
2081}
2082
2083fn write_f64(sink: &mut impl ByteSink, value: f64) -> Result<()> {
2084 sink.extend_from_slice(&value.to_le_bytes())
2085}
2086
/// Builds a tile header byte: the 4-bit check code in bits 2..=5 and the
/// 2-bit encoding selector in bits 0..=1.
fn tile_header(check_code: u8, encoding: u8) -> u8 {
    let code_bits = (check_code & 0x0F) << 2;
    let encoding_bits = encoding & 0x03;
    code_bits | encoding_bits
}
2090
/// A pixel is valid when there is no mask at all, or its mask byte is
/// non-zero.
fn pixel_is_valid(mask: Option<&[u8]>, pixel: usize) -> bool {
    match mask {
        None => true,
        Some(mask) => mask[pixel] != 0,
    }
}