//! Pure-Rust COPC reader.
//!
//! Parses LAS/COPC metadata and exposes chunked-LAZ point iteration over COPC
//! hierarchy entries.

#![forbid(unsafe_code)]

mod points;

use std::collections::{BTreeMap, HashSet};
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::Path;

use byteorder::{LittleEndian, ReadBytesExt};
use copc_core::{CopcInfo, Entry, Error, HierarchyPage, Result, VoxelKey};
use las::{Transform, Vector};
use laz::LazVlr;

pub use points::{BoundsSelection, CopcReader, LodSelection, PointIter, PointQuery};
/// Byte size of a LAS 1.4 public header block; COPC requires LAS 1.4.
const LAS_HEADER_SIZE_14: u16 = 375;
/// A parsed COPC file.
///
/// Holds the LAS header, COPC info, LASzip compression parameters, and a
/// fully expanded hierarchy index built by recursively loading every
/// hierarchy page referenced from the root page.
#[derive(Debug, Clone)]
pub struct CopcFile {
    /// Minimal LAS 1.4 header fields.
    header: LasHeader,
    /// Contents of the COPC info VLR (`copc`/1).
    copc_info: CopcInfo,
    /// LASzip VLR describing the chunked-LAZ compression.
    laszip_vlr: LazVlr,
    /// The root hierarchy page as stored in the file (child pages unexpanded).
    root_hierarchy: HierarchyPage,
    /// All hierarchy entries keyed by voxel key; child-page placeholder
    /// entries are replaced by the entries of the pages they reference.
    hierarchy: BTreeMap<VoxelKey, Entry>,
}
32
/// Minimal LAS header fields needed by COPC callers.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct LasHeader {
    /// Raw point record format byte (high bit set when point data is
    /// compressed; see `CopcFile::point_format`, which masks it off).
    pub point_data_record_format: u8,
    /// Size in bytes of each point record, including any extra bytes.
    pub point_data_record_length: u16,
    /// Absolute file offset where point data begins.
    pub offset_to_point_data: u32,
    /// Number of (non-extended) VLRs following the header.
    pub number_of_vlrs: u32,
    pub x_scale_factor: f64,
    pub y_scale_factor: f64,
    pub z_scale_factor: f64,
    pub x_offset: f64,
    pub y_offset: f64,
    pub z_offset: f64,
    pub min_x: f64,
    pub max_x: f64,
    pub min_y: f64,
    pub max_y: f64,
    pub min_z: f64,
    pub max_z: f64,
    /// Absolute offset of the first extended VLR, or 0 when there are none.
    pub offset_to_first_evlr: u64,
    pub number_of_evlrs: u32,
    /// 64-bit LAS 1.4 point record count.
    pub number_of_points: u64,
}
56
/// An in-memory variable length record: the identifying header fields we
/// keep plus the full payload bytes.
#[derive(Debug, Clone)]
struct Vlr {
    /// User id with trailing NUL padding stripped (see `trim_nul`).
    user_id: String,
    record_id: u16,
    /// Raw record payload.
    data: Vec<u8>,
}
63
/// Reference to an extended VLR: identity plus the absolute offset of its
/// payload. The payload itself is not loaded.
#[derive(Debug, Clone, Copy)]
struct EvlrRef {
    /// Raw 16-byte user id, possibly NUL-padded.
    user_id: [u8; 16],
    record_id: u16,
    /// Absolute file offset of the record's data, immediately after its
    /// 60-byte EVLR header.
    data_offset: u64,
}
70
impl CopcFile {
    /// Open and fully parse the COPC file at `path`.
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let mut file = File::open(path.as_ref()).map_err(|e| Error::io("open COPC file", e))?;
        Self::from_reader(&mut file)
    }

    /// Parse COPC metadata from any seekable byte source.
    ///
    /// Reads the LAS 1.4 header, the COPC info VLR (`copc`/1) and the LASzip
    /// VLR (`laszip encoded`/22204), locates the COPC hierarchy EVLR
    /// (`copc`/1000), and recursively loads every hierarchy page into a flat
    /// index. Fails with `Error::InvalidData` when any required record is
    /// missing or the info VLR and hierarchy EVLR disagree.
    pub fn from_reader<R: Read + Seek>(reader: &mut R) -> Result<Self> {
        let header = read_las_header(reader)?;
        let vlrs = read_vlrs(reader, header.number_of_vlrs)?;
        let copc_info_vlr = vlrs
            .iter()
            .find(|vlr| vlr.user_id == "copc" && vlr.record_id == 1)
            .ok_or_else(|| Error::InvalidData("missing COPC info VLR".into()))?;
        let copc_info = CopcInfo::from_le_bytes(&copc_info_vlr.data)?;
        let laszip_vlr = vlrs
            .iter()
            .find(|vlr| vlr.user_id == "laszip encoded" && vlr.record_id == 22204)
            .map(|vlr| {
                LazVlr::read_from(vlr.data.as_slice()).map_err(|e| Error::Las(e.to_string()))
            })
            .transpose()?
            .ok_or_else(|| Error::InvalidData("missing LASzip VLR".into()))?;
        let evlrs = read_evlr_refs(reader, &header)?;
        let root_evlr = evlrs
            .iter()
            .find(|evlr| trim_nul(&evlr.user_id) == "copc" && evlr.record_id == 1000)
            .copied()
            .ok_or_else(|| Error::InvalidData("missing COPC hierarchy EVLR".into()))?;
        // Cross-check: the info VLR's root hierarchy offset must point at the
        // hierarchy EVLR's payload, otherwise the file is self-inconsistent.
        if copc_info.root_hier_offset != root_evlr.data_offset {
            return Err(Error::InvalidData(format!(
                "COPC root hierarchy offset {} does not match EVLR data offset {}",
                copc_info.root_hier_offset, root_evlr.data_offset
            )));
        }
        let root_hierarchy =
            read_hierarchy_page_at(reader, copc_info.root_hier_offset, copc_info.root_hier_size)?;
        let mut hierarchy = BTreeMap::new();
        // Track (offset, size) pairs already loaded so malformed files with
        // cyclic page references cannot recurse forever.
        let mut visited_pages = HashSet::new();
        visited_pages.insert((copc_info.root_hier_offset, copc_info.root_hier_size));
        insert_hierarchy_page(reader, &root_hierarchy, &mut hierarchy, &mut visited_pages)?;
        Ok(Self {
            header,
            copc_info,
            laszip_vlr,
            root_hierarchy,
            hierarchy,
        })
    }

    /// The parsed LAS header fields.
    pub fn header(&self) -> &LasHeader {
        &self.header
    }

    /// Contents of the COPC info VLR.
    pub fn copc_info(&self) -> &CopcInfo {
        &self.copc_info
    }

    /// The root hierarchy page only; child pages are not expanded here.
    pub fn root_hierarchy(&self) -> &HierarchyPage {
        &self.root_hierarchy
    }

    /// Return all parsed hierarchy entries, including recursively loaded child pages.
    pub fn hierarchy_walk(&self) -> Vec<Entry> {
        self.hierarchy.values().copied().collect()
    }

    /// Return the full hierarchy index keyed by COPC voxel key.
    pub fn hierarchy(&self) -> &BTreeMap<VoxelKey, Entry> {
        &self.hierarchy
    }

    /// Iterate over all hierarchy entries without allocating.
    pub fn hierarchy_entries(&self) -> impl Iterator<Item = &Entry> {
        self.hierarchy.values()
    }

    /// The LASzip VLR describing the chunk compression.
    pub(crate) fn laszip_vlr(&self) -> &LazVlr {
        &self.laszip_vlr
    }

    /// Build the `las` point format for this file.
    ///
    /// Masks off the high bit of the record format byte (set for compressed
    /// point data — the test fixture writes `6 | 0x80`) and exposes any
    /// record length beyond the format's base length as extra bytes.
    ///
    /// # Errors
    /// Fails if the format id is unknown to the `las` crate, or the declared
    /// record length is smaller than the format's base length.
    pub(crate) fn point_format(&self) -> Result<las::point::Format> {
        let format_id = self.header.point_data_record_format & 0x7F;
        let mut format =
            las::point::Format::new(format_id).map_err(|e| Error::Las(e.to_string()))?;
        let base_len = format.len();
        if self.header.point_data_record_length < base_len {
            return Err(Error::InvalidData(format!(
                "point record length {} is smaller than point format {} base length {}",
                self.header.point_data_record_length, format_id, base_len
            )));
        }
        format.extra_bytes = self.header.point_data_record_length - base_len;
        Ok(format)
    }

    /// Per-axis scale/offset transforms taken from the LAS header, used to
    /// map integer point coordinates to file coordinates.
    pub(crate) fn transforms(&self) -> Vector<Transform> {
        Vector {
            x: Transform {
                scale: self.header.x_scale_factor,
                offset: self.header.x_offset,
            },
            y: Transform {
                scale: self.header.y_scale_factor,
                offset: self.header.y_offset,
            },
            z: Transform {
                scale: self.header.z_scale_factor,
                offset: self.header.z_offset,
            },
        }
    }
}
182
impl LasHeader {
    /// Total number of point records (the 64-bit LAS 1.4 count).
    pub fn number_of_points(&self) -> u64 {
        self.number_of_points
    }
}
188
189fn read_hierarchy_page_at<R: Read + Seek>(
190    reader: &mut R,
191    offset: u64,
192    byte_size: u64,
193) -> Result<HierarchyPage> {
194    let hierarchy_len = usize::try_from(byte_size)
195        .map_err(|_| Error::InvalidData("hierarchy page is too large".into()))?;
196    let mut hierarchy_bytes = vec![0u8; hierarchy_len];
197    reader
198        .seek(SeekFrom::Start(offset))
199        .map_err(|e| Error::io("seek hierarchy page", e))?;
200    reader
201        .read_exact(&mut hierarchy_bytes)
202        .map_err(|e| Error::io("read hierarchy page", e))?;
203    HierarchyPage::from_le_bytes(&hierarchy_bytes)
204}
205
206fn insert_hierarchy_page<R: Read + Seek>(
207    reader: &mut R,
208    page: &HierarchyPage,
209    hierarchy: &mut BTreeMap<VoxelKey, Entry>,
210    visited_pages: &mut HashSet<(u64, u64)>,
211) -> Result<()> {
212    for entry in page.entries().iter().copied() {
213        hierarchy.insert(entry.key, entry);
214    }
215    for entry in page.entries().iter().copied().filter(|e| e.is_child_page()) {
216        if entry.byte_size <= 0 {
217            return Err(Error::InvalidData(format!(
218                "child hierarchy page {:?} has invalid byte size {}",
219                entry.key, entry.byte_size
220            )));
221        }
222        let byte_size = u64::try_from(entry.byte_size).map_err(|_| {
223            Error::InvalidData(format!(
224                "child hierarchy page {:?} has negative byte size {}",
225                entry.key, entry.byte_size
226            ))
227        })?;
228        if visited_pages.insert((entry.offset, byte_size)) {
229            let child_page = read_hierarchy_page_at(reader, entry.offset, byte_size)?;
230            insert_hierarchy_page(reader, &child_page, hierarchy, visited_pages)?;
231        }
232    }
233    Ok(())
234}
235
/// Parse the minimal set of LAS header fields COPC needs.
///
/// Reads fields at the fixed byte offsets of the LAS 1.4 public header
/// block. Rejects files without the `LASF` signature and files whose header
/// is shorter than 375 bytes (COPC requires LAS 1.4). On success the reader
/// is left positioned immediately after the header.
fn read_las_header<R: Read + Seek>(reader: &mut R) -> Result<LasHeader> {
    reader
        .seek(SeekFrom::Start(0))
        .map_err(|e| Error::io("seek LAS header", e))?;
    // Bytes 0..4: file signature.
    let mut signature = [0u8; 4];
    reader
        .read_exact(&mut signature)
        .map_err(|e| Error::io("read LAS signature", e))?;
    if &signature != b"LASF" {
        return Err(Error::InvalidData("missing LASF signature".into()));
    }
    // Bytes 94..96: header size; used here as a LAS-1.4 version check.
    reader
        .seek(SeekFrom::Start(94))
        .map_err(|e| Error::io("seek LAS header size", e))?;
    let header_size = reader
        .read_u16::<LittleEndian>()
        .map_err(|e| Error::io("read LAS header size", e))?;
    if header_size < LAS_HEADER_SIZE_14 {
        return Err(Error::Unsupported(format!(
            "LAS header is {header_size} bytes; COPC requires LAS 1.4"
        )));
    }
    // Bytes 96..107: point-data offset, VLR count, record format, record length.
    let offset_to_point_data = reader
        .read_u32::<LittleEndian>()
        .map_err(|e| Error::io("read point data offset", e))?;
    let number_of_vlrs = reader
        .read_u32::<LittleEndian>()
        .map_err(|e| Error::io("read VLR count", e))?;
    let point_data_record_format = reader
        .read_u8()
        .map_err(|e| Error::io("read point record format", e))?;
    let point_data_record_length = reader
        .read_u16::<LittleEndian>()
        .map_err(|e| Error::io("read point record length", e))?;
    // Skip the legacy point counts (bytes 107..131); bytes 131..179 hold the
    // per-axis scale factors and offsets.
    reader
        .seek(SeekFrom::Start(131))
        .map_err(|e| Error::io("seek LAS transforms", e))?;
    let x_scale_factor = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read x scale factor", e))?;
    let y_scale_factor = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read y scale factor", e))?;
    let z_scale_factor = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read z scale factor", e))?;
    let x_offset = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read x offset", e))?;
    let y_offset = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read y offset", e))?;
    let z_offset = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read z offset", e))?;
    // Bytes 179..227: bounds, stored max-before-min for each axis.
    let max_x = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read max x", e))?;
    let min_x = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read min x", e))?;
    let max_y = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read max y", e))?;
    let min_y = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read min y", e))?;
    let max_z = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read max z", e))?;
    let min_z = reader
        .read_f64::<LittleEndian>()
        .map_err(|e| Error::io("read min z", e))?;
    // Bytes 235..255: LAS 1.4 additions — first EVLR offset, EVLR count, and
    // the 64-bit point record count.
    reader
        .seek(SeekFrom::Start(235))
        .map_err(|e| Error::io("seek LAS 1.4 fields", e))?;
    let offset_to_first_evlr = reader
        .read_u64::<LittleEndian>()
        .map_err(|e| Error::io("read first EVLR offset", e))?;
    let number_of_evlrs = reader
        .read_u32::<LittleEndian>()
        .map_err(|e| Error::io("read EVLR count", e))?;
    let number_of_points = reader
        .read_u64::<LittleEndian>()
        .map_err(|e| Error::io("read point count", e))?;
    // Leave the cursor just past the header, ready for VLR parsing.
    reader
        .seek(SeekFrom::Start(u64::from(header_size)))
        .map_err(|e| Error::io("seek after LAS header", e))?;
    Ok(LasHeader {
        point_data_record_format,
        point_data_record_length,
        offset_to_point_data,
        number_of_vlrs,
        x_scale_factor,
        y_scale_factor,
        z_scale_factor,
        x_offset,
        y_offset,
        z_offset,
        min_x,
        max_x,
        min_y,
        max_y,
        min_z,
        max_z,
        offset_to_first_evlr,
        number_of_evlrs,
        number_of_points,
    })
}
346
347fn read_vlrs<R: Read>(reader: &mut R, count: u32) -> Result<Vec<Vlr>> {
348    let mut vlrs = Vec::with_capacity(count as usize);
349    for _ in 0..count {
350        let _reserved = reader
351            .read_u16::<LittleEndian>()
352            .map_err(|e| Error::io("read VLR reserved", e))?;
353        let mut user_id = [0u8; 16];
354        reader
355            .read_exact(&mut user_id)
356            .map_err(|e| Error::io("read VLR user id", e))?;
357        let record_id = reader
358            .read_u16::<LittleEndian>()
359            .map_err(|e| Error::io("read VLR record id", e))?;
360        let record_length = reader
361            .read_u16::<LittleEndian>()
362            .map_err(|e| Error::io("read VLR length", e))?;
363        let mut description = [0u8; 32];
364        reader
365            .read_exact(&mut description)
366            .map_err(|e| Error::io("read VLR description", e))?;
367        let mut data = vec![0u8; usize::from(record_length)];
368        reader
369            .read_exact(&mut data)
370            .map_err(|e| Error::io("read VLR data", e))?;
371        vlrs.push(Vlr {
372            user_id: trim_nul(&user_id).to_string(),
373            record_id,
374            data,
375        });
376    }
377    Ok(vlrs)
378}
379
380fn read_evlr_refs<R: Read + Seek>(reader: &mut R, header: &LasHeader) -> Result<Vec<EvlrRef>> {
381    if header.offset_to_first_evlr == 0 || header.number_of_evlrs == 0 {
382        return Ok(Vec::new());
383    }
384    reader
385        .seek(SeekFrom::Start(header.offset_to_first_evlr))
386        .map_err(|e| Error::io("seek EVLRs", e))?;
387    let mut evlrs = Vec::with_capacity(header.number_of_evlrs as usize);
388    for _ in 0..header.number_of_evlrs {
389        let _header_start = reader
390            .stream_position()
391            .map_err(|e| Error::io("record EVLR offset", e))?;
392        let _reserved = reader
393            .read_u16::<LittleEndian>()
394            .map_err(|e| Error::io("read EVLR reserved", e))?;
395        let mut user_id = [0u8; 16];
396        reader
397            .read_exact(&mut user_id)
398            .map_err(|e| Error::io("read EVLR user id", e))?;
399        let record_id = reader
400            .read_u16::<LittleEndian>()
401            .map_err(|e| Error::io("read EVLR record id", e))?;
402        let data_len = reader
403            .read_u64::<LittleEndian>()
404            .map_err(|e| Error::io("read EVLR length", e))?;
405        let mut description = [0u8; 32];
406        reader
407            .read_exact(&mut description)
408            .map_err(|e| Error::io("read EVLR description", e))?;
409        let data_offset = reader
410            .stream_position()
411            .map_err(|e| Error::io("record EVLR data offset", e))?;
412        evlrs.push(EvlrRef {
413            user_id,
414            record_id,
415            data_offset,
416        });
417        reader
418            .seek(SeekFrom::Current(i64::try_from(data_len).map_err(
419                |_| Error::InvalidData("EVLR length exceeds seek range".into()),
420            )?))
421            .map_err(|e| Error::io("skip EVLR data", e))?;
422        let expected_next = data_offset + data_len;
423        let actual_next = reader
424            .stream_position()
425            .map_err(|e| Error::io("record next EVLR offset", e))?;
426        if actual_next != expected_next {
427            return Err(Error::InvalidData(format!(
428                "EVLR cursor at {actual_next}, expected {expected_next}"
429            )));
430        }
431    }
432    Ok(evlrs)
433}
434
/// Interpret `bytes` as a NUL-padded text field: return the text before the
/// first NUL, or the whole slice when there is none. Invalid UTF-8 yields an
/// empty string rather than an error.
fn trim_nul(bytes: &[u8]) -> &str {
    let raw = match bytes.iter().position(|&b| b == 0) {
        Some(nul) => &bytes[..nul],
        None => bytes,
    };
    std::str::from_utf8(raw).unwrap_or_default()
}
439
#[cfg(test)]
mod tests {
    use super::*;

    use byteorder::{LittleEndian, WriteBytesExt};
    use copc_core::{EntryAvailability, HIERARCHY_ENTRY_BYTES};
    use laz::LazVlrBuilder;
    use std::io::{Cursor, Write};

    #[test]
    fn hierarchy_walk_loads_recursive_child_pages() {
        let mut fixture = Cursor::new(copc_with_child_hierarchy_page());
        let file = CopcFile::from_reader(&mut fixture).unwrap();
        let child_key = VoxelKey::root().child(3);
        let grandchild_key = child_key.child(5);

        // The raw root page holds one point-data entry plus one child-page
        // pointer.
        assert_eq!(file.root_hierarchy().entries().len(), 2);
        assert!(file.root_hierarchy().entries()[1].is_child_page());

        // The flattened index resolves the child page: three point-data
        // entries, with the child-page placeholder replaced by the real
        // entry for `child_key`.
        let hierarchy = file.hierarchy();
        assert_eq!(hierarchy.len(), 3);
        assert_eq!(
            hierarchy
                .get(&VoxelKey::root())
                .unwrap()
                .availability()
                .unwrap(),
            EntryAvailability::PointData { point_count: 5 }
        );
        assert_eq!(
            hierarchy.get(&child_key).unwrap().availability().unwrap(),
            EntryAvailability::PointData { point_count: 4 }
        );
        assert_eq!(
            hierarchy
                .get(&grandchild_key)
                .unwrap()
                .availability()
                .unwrap(),
            EntryAvailability::PointData { point_count: 3 }
        );
        assert!(!hierarchy.values().any(|entry| entry.is_child_page()));

        let walk = file.hierarchy_walk();
        assert_eq!(walk.len(), hierarchy.len());
        // 5 + 4 + 3 points across the three resolved entries.
        assert_eq!(walk.iter().map(|entry| entry.point_count).sum::<i32>(), 12);
    }

    /// Build a minimal in-memory COPC file whose root hierarchy page points
    /// at a second (child) hierarchy page.
    ///
    /// Layout: LAS 1.4 header | COPC info VLR | LASzip VLR | EVLR header |
    /// root hierarchy page | child hierarchy page.
    fn copc_with_child_hierarchy_page() -> Vec<u8> {
        let mut laz_vlr_bytes = Vec::new();
        LazVlrBuilder::default()
            .with_point_format(6, 0)
            .unwrap()
            .with_variable_chunk_size()
            .build()
            .write_to(&mut laz_vlr_bytes)
            .unwrap();

        // Each VLR contributes a 54-byte header plus its payload.
        let offset_to_point_data = u32::from(LAS_HEADER_SIZE_14)
            + (54 + copc_core::info::COPC_INFO_BYTES as u32)
            + (54 + laz_vlr_bytes.len() as u32);
        let evlr_start = u64::from(offset_to_point_data);
        // The hierarchy payload begins after the 60-byte EVLR header.
        let root_hier_offset = evlr_start + 60;
        let root_hier_size = (2 * HIERARCHY_ENTRY_BYTES) as u64;
        let child_page_offset = root_hier_offset + root_hier_size;

        let child_key = VoxelKey::root().child(3);
        let grandchild_key = child_key.child(5);
        let child_page = HierarchyPage::new(vec![
            Entry {
                key: child_key,
                offset: 2_000,
                byte_size: 200,
                point_count: 4,
            },
            Entry {
                key: grandchild_key,
                offset: 2_200,
                byte_size: 220,
                point_count: 3,
            },
        ]);
        let child_page_bytes = child_page.write_le_bytes().unwrap();
        let root_page = HierarchyPage::new(vec![
            Entry {
                key: VoxelKey::root(),
                offset: 1_000,
                byte_size: 100,
                point_count: 5,
            },
            // point_count == -1 marks a child-page pointer; offset/byte_size
            // locate the serialized child page appended below.
            Entry {
                key: child_key,
                offset: child_page_offset,
                byte_size: child_page_bytes.len() as i32,
                point_count: -1,
            },
        ]);
        let root_page_bytes = root_page.write_le_bytes().unwrap();

        let info = CopcInfo {
            center: (0.0, 0.0, 0.0),
            halfsize: 10.0,
            spacing: 1.0,
            root_hier_offset,
            root_hier_size,
            gpstime_min: 0.0,
            gpstime_max: 0.0,
        };

        let mut out = Vec::new();
        write_las_header(&mut out, offset_to_point_data, evlr_start, 12);
        write_vlr(&mut out, "copc", 1, &info.write_le_bytes(), "COPC info");
        write_vlr(
            &mut out,
            "laszip encoded",
            22204,
            &laz_vlr_bytes,
            "http://laszip.org",
        );
        // Sanity-check the computed layout as the fixture is assembled.
        assert_eq!(out.len(), offset_to_point_data as usize);

        write_evlr_header(
            &mut out,
            "copc",
            1000,
            root_page_bytes.len() as u64,
            "COPC hierarchy",
        );
        assert_eq!(out.len() as u64, root_hier_offset);
        out.extend_from_slice(&root_page_bytes);
        assert_eq!(out.len() as u64, child_page_offset);
        out.extend_from_slice(&child_page_bytes);
        out
    }

    /// Write a 375-byte LAS 1.4 header with the fields `read_las_header`
    /// consumes, at their fixed byte offsets.
    fn write_las_header(
        out: &mut Vec<u8>,
        offset_to_point_data: u32,
        evlr_start: u64,
        point_count: u64,
    ) {
        out.resize(usize::from(LAS_HEADER_SIZE_14), 0);
        out[0..4].copy_from_slice(b"LASF");
        // Version 1.4 at bytes 24..26.
        out[24] = 1;
        out[25] = 4;
        put_u16(out, 94, LAS_HEADER_SIZE_14);
        put_u32(out, 96, offset_to_point_data);
        // Two VLRs follow the header.
        put_u32(out, 100, 2);
        // Point format 6 with the high (compressed) bit set.
        out[104] = 6 | 0x80;
        put_u16(out, 105, 30);
        // Scale factors, offsets, then bounds (max before min per axis).
        put_f64(out, 131, 0.001);
        put_f64(out, 139, 0.001);
        put_f64(out, 147, 0.001);
        put_f64(out, 155, 0.0);
        put_f64(out, 163, 0.0);
        put_f64(out, 171, 0.0);
        put_f64(out, 179, 10.0);
        put_f64(out, 187, -10.0);
        put_f64(out, 195, 10.0);
        put_f64(out, 203, -10.0);
        put_f64(out, 211, 10.0);
        put_f64(out, 219, -10.0);
        // LAS 1.4 fields: first EVLR offset, one EVLR, 64-bit point count.
        put_u64(out, 235, evlr_start);
        put_u32(out, 243, 1);
        put_u64(out, 247, point_count);
    }

    /// Append a VLR (54-byte header + payload) to `out`.
    fn write_vlr(out: &mut Vec<u8>, user_id: &str, record_id: u16, data: &[u8], desc: &str) {
        out.write_u16::<LittleEndian>(0).unwrap();
        out.write_all(&padded(user_id.as_bytes(), 16)).unwrap();
        out.write_u16::<LittleEndian>(record_id).unwrap();
        out.write_u16::<LittleEndian>(data.len() as u16).unwrap();
        out.write_all(&padded(desc.as_bytes(), 32)).unwrap();
        out.write_all(data).unwrap();
    }

    /// Append a 60-byte EVLR header to `out`; the payload is written by the
    /// caller.
    fn write_evlr_header(
        out: &mut Vec<u8>,
        user_id: &str,
        record_id: u16,
        data_len: u64,
        desc: &str,
    ) {
        out.write_u16::<LittleEndian>(0).unwrap();
        out.write_all(&padded(user_id.as_bytes(), 16)).unwrap();
        out.write_u16::<LittleEndian>(record_id).unwrap();
        out.write_u64::<LittleEndian>(data_len).unwrap();
        out.write_all(&padded(desc.as_bytes(), 32)).unwrap();
    }

    /// NUL-pad (or truncate) `bytes` to exactly `len` bytes.
    fn padded(bytes: &[u8], len: usize) -> Vec<u8> {
        let mut out = vec![0u8; len];
        let count = bytes.len().min(len);
        out[..count].copy_from_slice(&bytes[..count]);
        out
    }

    // Little-endian in-place writers at fixed header offsets.
    fn put_u16(out: &mut [u8], offset: usize, value: u16) {
        out[offset..offset + 2].copy_from_slice(&value.to_le_bytes());
    }

    fn put_u32(out: &mut [u8], offset: usize, value: u32) {
        out[offset..offset + 4].copy_from_slice(&value.to_le_bytes());
    }

    fn put_u64(out: &mut [u8], offset: usize, value: u64) {
        out[offset..offset + 8].copy_from_slice(&value.to_le_bytes());
    }

    fn put_f64(out: &mut [u8], offset: usize, value: f64) {
        out[offset..offset + 8].copy_from_slice(&value.to_le_bytes());
    }
}