1use std::fs::File;
2use std::io::{BufReader, Read, Seek, SeekFrom};
3use std::path::Path;
4
5use copc_core::{Bounds, CancelCheck, CopcInfo, Entry, Error, Result};
6use las::point::Format as LasPointFormat;
7use las::{Point, Transform, Vector};
8use laz::record::{LayeredPointRecordDecompressor, RecordDecompressor};
9use laz::LazVlr;
10
11use crate::{CopcFile, LasHeader};
12
/// How often, in decoded points, the optional cancellation callback is
/// polled during iteration (see `PointIter::next_inner`).
const CANCEL_POLL_STRIDE: usize = 4_096;
14
/// Reader for Cloud-Optimized Point Cloud (COPC) data from any seekable
/// byte source.
///
/// Holds the source alongside the parsed [`CopcFile`] metadata (LAS header,
/// COPC info VLR, hierarchy) so point queries can seek to chunks on demand.
pub struct CopcReader<R> {
    // Underlying byte source; seeked to chunk offsets while iterating points.
    source: R,
    // Parsed file-level metadata, hierarchy entries, and LASzip VLR.
    file: CopcFile,
}
20
/// Selects which octree levels of detail to read.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LodSelection {
    /// Every level in the file.
    All,
    /// Enough levels to reach the given point spacing; the value must be
    /// finite and positive (validated in `level_range`).
    Resolution(f64),
    /// Exactly one non-negative level.
    Level(i32),
    /// A half-open level range: min inclusive, max exclusive
    /// (an equal min/max selects nothing).
    LevelMinMax(i32, i32),
}
33
/// Spatial filter applied to a point query.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BoundsSelection {
    /// No spatial filtering.
    All,
    /// Only points whose transformed (world) coordinates fall inside the
    /// bounds; chunks whose voxels do not intersect are skipped entirely.
    Within(Bounds),
}
42
/// A complete point selection: a level-of-detail choice plus a spatial filter.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct PointQuery {
    /// Which octree levels to read.
    pub lod: LodSelection,
    /// Which region of space to keep.
    pub bounds: BoundsSelection,
}
49
50impl PointQuery {
51 pub const fn all() -> Self {
52 Self {
53 lod: LodSelection::All,
54 bounds: BoundsSelection::All,
55 }
56 }
57
58 pub const fn new(lod: LodSelection, bounds: BoundsSelection) -> Self {
59 Self { lod, bounds }
60 }
61}
62
63impl CopcReader<BufReader<File>> {
64 pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {
65 let file = File::open(path.as_ref()).map_err(|e| Error::io("open COPC file", e))?;
66 Self::open(BufReader::new(file))
67 }
68}
69
70impl<R: Read + Seek + Send> CopcReader<R> {
71 pub fn open(mut source: R) -> Result<Self> {
73 let file = CopcFile::from_reader(&mut source)?;
74 Ok(Self { source, file })
75 }
76
77 pub fn file(&self) -> &CopcFile {
78 &self.file
79 }
80
81 pub fn header(&self) -> &LasHeader {
82 self.file.header()
83 }
84
85 pub fn copc_info(&self) -> &CopcInfo {
86 self.file.copc_info()
87 }
88
89 pub fn into_inner(self) -> R {
90 self.source
91 }
92
93 pub fn points(
98 &mut self,
99 lod: LodSelection,
100 bounds: BoundsSelection,
101 ) -> Result<PointIter<'_, R>> {
102 self.points_for_query(PointQuery::new(lod, bounds))
103 }
104
105 pub fn points_for_query(&mut self, query: PointQuery) -> Result<PointIter<'_, R>> {
106 PointIter::new(&mut self.source, &self.file, query, None)
107 }
108
109 pub fn points_with_cancel<'a>(
110 &'a mut self,
111 lod: LodSelection,
112 bounds: BoundsSelection,
113 cancel: &'a dyn CancelCheck,
114 ) -> Result<PointIter<'a, R>> {
115 PointIter::new(
116 &mut self.source,
117 &self.file,
118 PointQuery::new(lod, bounds),
119 Some(cancel),
120 )
121 }
122}
123
/// Iterator over decompressed LAS points selected by a [`PointQuery`].
///
/// Yields `Result<Point>` and is fused: after the end of data or the first
/// error it keeps returning `None`.
pub struct PointIter<'a, R: Read + Seek + Send> {
    // Selected hierarchy entries, sorted by (offset, key).
    chunks: Vec<Entry>,
    // Index of the next entry in `chunks` to load.
    next_chunk: usize,
    // Points still to decode from the chunk currently loaded in the decoder.
    current_chunk_points_left: usize,
    // Undecoded points remaining across all selected chunks, before any
    // bounds filtering; drives `size_hint`.
    remaining_candidate_points: usize,
    // True when no bounds filter is active, making `size_hint` exact.
    exact_size: bool,
    // LAS point format used to parse raw decompressed records.
    point_format: LasPointFormat,
    // Per-axis scale/offset transforms from the LAS header.
    transforms: Vector<Transform>,
    // World-coordinate filter; `None` means keep every point.
    bounds: Option<Bounds>,
    // LASzip decompressor positioned on the current chunk.
    decoder: ChunkLazDecoder<'a, R>,
    // Reusable scratch buffer holding one raw point record.
    point_buf: Vec<u8>,
    // Total points decoded so far; used to pace cancellation polling.
    decoded_points: usize,
    // Optional cancellation callback, polled every CANCEL_POLL_STRIDE points.
    cancel: Option<&'a dyn CancelCheck>,
    // Set once iteration has ended (exhaustion or error).
    finished: bool,
}
140
impl<'a, R: Read + Seek + Send> PointIter<'a, R> {
    /// Builds the iterator: selects chunks for `query`, validates the LASzip
    /// record layout against the LAS header, and sizes the scratch buffer.
    ///
    /// Errors on invalid hierarchy data, a record-size mismatch, or a
    /// selected point count that overflows `usize`.
    fn new(
        source: &'a mut R,
        file: &CopcFile,
        query: PointQuery,
        cancel: Option<&'a dyn CancelCheck>,
    ) -> Result<Self> {
        let chunks = select_point_chunks(file, query)?;
        let point_format = file.point_format()?;
        let transforms = file.transforms();
        let bounds = match query.bounds {
            BoundsSelection::All => None,
            BoundsSelection::Within(bounds) => Some(bounds),
        };
        let decoder = ChunkLazDecoder::new(source, file.laszip_vlr().clone())?;
        // Cross-check the LASzip item sizes against the header's point
        // record length before decoding anything.
        let expected_record_size = usize::from(point_format.len());
        if decoder.record_size() != expected_record_size {
            return Err(Error::InvalidData(format!(
                "LASzip item size is {} bytes, but LAS point record length is {} bytes",
                decoder.record_size(),
                expected_record_size
            )));
        }
        let remaining_candidate_points = total_candidate_points(&chunks)?;
        // One reusable buffer for a single decompressed point record.
        let point_buf = vec![0u8; expected_record_size];
        Ok(Self {
            chunks,
            next_chunk: 0,
            current_chunk_points_left: 0,
            remaining_candidate_points,
            // Without a bounds filter every candidate point is yielded,
            // so the size hint is exact.
            exact_size: bounds.is_none(),
            point_format,
            transforms,
            bounds,
            decoder,
            point_buf,
            decoded_points: 0,
            cancel,
            finished: false,
        })
    }

    /// Advances to the next non-empty chunk, seeking the decoder to its
    /// offset. Returns `Ok(false)` when no chunks remain.
    fn load_next_chunk(&mut self) -> Result<bool> {
        while self.next_chunk < self.chunks.len() {
            let entry = self.chunks[self.next_chunk];
            self.next_chunk += 1;
            // Skip empty chunks; negative counts would already have failed
            // `total_candidate_points` during construction.
            if entry.point_count <= 0 {
                continue;
            }
            self.decoder.seek_to_chunk(entry.offset)?;
            self.current_chunk_points_left = usize::try_from(entry.point_count).map_err(|_| {
                Error::InvalidData(format!(
                    "negative point count {} for {:?}",
                    entry.point_count, entry.key
                ))
            })?;
            return Ok(true);
        }
        Ok(false)
    }

    /// Decodes points until one passes the bounds filter (if any) or the
    /// selected chunks are exhausted.
    fn next_inner(&mut self) -> Result<Option<Point>> {
        loop {
            while self.current_chunk_points_left == 0 {
                if !self.load_next_chunk()? {
                    return Ok(None);
                }
            }

            // Poll cancellation every CANCEL_POLL_STRIDE decoded points
            // (including before the very first point).
            if self.decoded_points % CANCEL_POLL_STRIDE == 0 {
                if let Some(cancel) = self.cancel {
                    cancel.check()?;
                }
            }

            // Decode one record, then update all counters together so they
            // stay consistent even when the point is filtered out below.
            self.decoder.decompress_one(&mut self.point_buf)?;
            self.current_chunk_points_left -= 1;
            self.remaining_candidate_points -= 1;
            self.decoded_points += 1;

            let raw_point =
                las::raw::Point::read_from(self.point_buf.as_slice(), &self.point_format)
                    .map_err(|e| Error::Las(e.to_string()))?;
            // Bounds are expressed in world coordinates, so apply the LAS
            // scale/offset transforms before testing containment.
            if let Some(bounds) = self.bounds {
                let x = self.transforms.x.direct(raw_point.x);
                let y = self.transforms.y.direct(raw_point.y);
                let z = self.transforms.z.direct(raw_point.z);
                if !bounds.contains_xyz(x, y, z) {
                    continue;
                }
            }
            return Ok(Some(Point::new(raw_point, &self.transforms)));
        }
    }
}
236
237impl<R: Read + Seek + Send> Iterator for PointIter<'_, R> {
238 type Item = Result<Point>;
239
240 fn next(&mut self) -> Option<Self::Item> {
241 if self.finished {
242 return None;
243 }
244 match self.next_inner() {
245 Ok(Some(point)) => Some(Ok(point)),
246 Ok(None) => {
247 self.finished = true;
248 None
249 }
250 Err(error) => {
251 self.finished = true;
252 Some(Err(error))
253 }
254 }
255 }
256
257 fn size_hint(&self) -> (usize, Option<usize>) {
258 if self.exact_size {
259 (
260 self.remaining_candidate_points,
261 Some(self.remaining_candidate_points),
262 )
263 } else {
264 (0, Some(self.remaining_candidate_points))
265 }
266 }
267}
268
/// Layered LASzip decompressor positioned on one COPC point chunk at a time.
struct ChunkLazDecoder<'a, R: Read + Seek + Send> {
    // LASzip VLR, retained so the decompressor can be reconfigured after
    // each seek to a new chunk.
    laz_vlr: LazVlr,
    // laz-rs layered record decompressor reading from the borrowed source.
    decompressor: LayeredPointRecordDecompressor<'a, &'a mut R>,
    // Size in bytes of one decompressed point record.
    record_size: usize,
}
274
impl<'a, R: Read + Seek + Send> ChunkLazDecoder<'a, R> {
    /// Creates a decoder for `source`, configuring the field decompressors
    /// from the LASzip VLR so the record size can be validated up front.
    fn new(source: &'a mut R, laz_vlr: LazVlr) -> Result<Self> {
        let mut decompressor = LayeredPointRecordDecompressor::new(source);
        let record_size = configure_layered_decompressor(&mut decompressor, &laz_vlr)?;
        Ok(Self {
            laz_vlr,
            decompressor,
            record_size,
        })
    }

    /// Size in bytes of one decompressed point record.
    fn record_size(&self) -> usize {
        self.record_size
    }

    /// Positions the decoder at the start of a chunk.
    ///
    /// Order matters: seek the underlying source, reset the decompressor's
    /// per-chunk state, then reconfigure its fields from the VLR before any
    /// point from the new chunk is decoded.
    fn seek_to_chunk(&mut self, offset: u64) -> Result<()> {
        self.decompressor
            .get_mut()
            .seek(SeekFrom::Start(offset))
            .map_err(|e| Error::io("seek COPC point chunk", e))?;
        self.decompressor.reset();
        self.record_size = configure_layered_decompressor(&mut self.decompressor, &self.laz_vlr)?;
        Ok(())
    }

    /// Decompresses the next point record of the current chunk into `out`
    /// (which must be `record_size` bytes long).
    fn decompress_one(&mut self, out: &mut [u8]) -> Result<()> {
        self.decompressor
            .decompress_next(out)
            .map_err(|e| Error::io("decompress COPC point", e))
    }
}
306
307fn configure_layered_decompressor<R: Read + Seek>(
308 decompressor: &mut LayeredPointRecordDecompressor<'_, R>,
309 laz_vlr: &LazVlr,
310) -> Result<usize> {
311 decompressor
312 .set_fields_from(laz_vlr.items())
313 .map_err(|e| Error::Las(e.to_string()))?;
314 let record_size = decompressor.record_size();
315 if record_size == 0 {
316 return Err(Error::Unsupported(
317 "COPC point iteration requires layered LAZ point records".into(),
318 ));
319 }
320 Ok(record_size)
321}
322
323fn select_point_chunks(file: &CopcFile, query: PointQuery) -> Result<Vec<Entry>> {
324 let (level_min, level_max) = level_range(query.lod, file.copc_info())?;
325 let query_bounds = match query.bounds {
326 BoundsSelection::All => None,
327 BoundsSelection::Within(bounds) => Some(bounds),
328 };
329
330 let mut chunks = Vec::new();
331 for entry in file.hierarchy_entries() {
332 if !entry.has_point_data() {
333 continue;
334 }
335 if entry.byte_size <= 0 {
336 return Err(Error::InvalidData(format!(
337 "point chunk {:?} has invalid byte size {}",
338 entry.key, entry.byte_size
339 )));
340 }
341 if !(level_min..level_max).contains(&entry.key.level) {
342 continue;
343 }
344 if let Some(bounds) = query_bounds {
345 let node_bounds = voxel_bounds(entry.key, file.copc_info())?;
346 if !node_bounds.intersects(bounds) {
347 continue;
348 }
349 }
350 chunks.push(*entry);
351 }
352 chunks.sort_by_key(|entry| (entry.offset, entry.key));
353 Ok(chunks)
354}
355
356fn level_range(selection: LodSelection, info: &CopcInfo) -> Result<(i32, i32)> {
357 match selection {
358 LodSelection::All => Ok((0, i32::MAX)),
359 LodSelection::Resolution(resolution) => {
360 if !resolution.is_finite() || resolution <= 0.0 {
361 return Err(Error::InvalidInput(format!(
362 "resolution must be finite and positive, got {resolution}"
363 )));
364 }
365 if !info.spacing.is_finite() || info.spacing <= 0.0 {
366 return Err(Error::InvalidData(format!(
367 "COPC spacing must be finite and positive, got {}",
368 info.spacing
369 )));
370 }
371 let level_max = ((info.spacing / resolution).log2().ceil() as i64 + 1)
372 .max(1)
373 .min(i64::from(i32::MAX)) as i32;
374 Ok((0, level_max))
375 }
376 LodSelection::Level(level) => {
377 validate_level(level)?;
378 let max = level
379 .checked_add(1)
380 .ok_or_else(|| Error::InvalidInput(format!("LOD level {level} is too large")))?;
381 Ok((level, max))
382 }
383 LodSelection::LevelMinMax(min, max) => {
384 validate_level(min)?;
385 validate_level(max)?;
386 if max < min {
387 return Err(Error::InvalidInput(format!(
388 "LOD max {max} is smaller than min {min}"
389 )));
390 }
391 Ok((min, max))
392 }
393 }
394}
395
396fn validate_level(level: i32) -> Result<()> {
397 if level < 0 {
398 return Err(Error::InvalidInput(format!(
399 "LOD level must be non-negative, got {level}"
400 )));
401 }
402 Ok(())
403}
404
405fn total_candidate_points(entries: &[Entry]) -> Result<usize> {
406 entries.iter().try_fold(0usize, |total, entry| {
407 let count = usize::try_from(entry.point_count).map_err(|_| {
408 Error::InvalidData(format!(
409 "negative point count {} for {:?}",
410 entry.point_count, entry.key
411 ))
412 })?;
413 total
414 .checked_add(count)
415 .ok_or_else(|| Error::InvalidData("selected point count overflows usize".into()))
416 })
417}
418
419fn voxel_bounds(key: copc_core::VoxelKey, info: &CopcInfo) -> Result<Bounds> {
420 if key.level < 0 || key.x < 0 || key.y < 0 || key.z < 0 {
421 return Err(Error::InvalidData(format!(
422 "invalid negative voxel key {:?}",
423 key
424 )));
425 }
426 let side = (info.halfsize * 2.0) / 2.0_f64.powi(key.level);
427 let root_min = (
428 info.center.0 - info.halfsize,
429 info.center.1 - info.halfsize,
430 info.center.2 - info.halfsize,
431 );
432 let min = (
433 root_min.0 + f64::from(key.x) * side,
434 root_min.1 + f64::from(key.y) * side,
435 root_min.2 + f64::from(key.z) * side,
436 );
437 Ok(Bounds::new(min, (min.0 + side, min.1 + side, min.2 + side)))
438}