1use std::collections::{HashMap, HashSet};
2use std::io;
3use std::path::Path;
4
/// Errors produced while opening and parsing a PDF file.
///
/// `Clone` and `Eq` are derived in addition to `PartialEq`: every payload
/// (`u32`, `String`) supports them, and they make the error type easy to
/// store, compare exactly, and assert on in tests.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PdfReadError {
    /// The data does not begin with a valid "%PDF-" header.
    NotAPdf,
    /// No "startxref" keyword was found near the end of the file.
    StartxrefNotFound,
    /// The cross-reference table is missing or unparseable.
    MalformedXref,
    /// The trailer dictionary is missing or unparseable.
    MalformedTrailer,
    /// The file uses cross-reference streams (PDF 1.5+), which this
    /// reader rejects rather than parse.
    XrefStreamNotSupported,
    /// The xref table has no usable entry for this object number.
    UnresolvableObject(u32),
    /// The catalog or page tree is missing required entries.
    MalformedPageTree,
    /// An underlying I/O failure, flattened to its display message.
    Io(String),
}
27
28impl std::fmt::Display for PdfReadError {
29 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
30 match self {
31 PdfReadError::NotAPdf => write!(f, "not a PDF file"),
32 PdfReadError::StartxrefNotFound => write!(f, "startxref not found"),
33 PdfReadError::MalformedXref => write!(f, "malformed or missing xref table"),
34 PdfReadError::MalformedTrailer => write!(f, "malformed or missing trailer"),
35 PdfReadError::XrefStreamNotSupported => {
36 write!(
37 f,
38 "cross-reference streams (PDF 1.5+) are not yet supported"
39 )
40 }
41 PdfReadError::UnresolvableObject(n) => write!(f, "cannot resolve object {}", n),
42 PdfReadError::MalformedPageTree => write!(f, "malformed page tree"),
43 PdfReadError::Io(msg) => write!(f, "I/O error: {}", msg),
44 }
45 }
46}
47
// Marker impl: `Debug` + `Display` satisfy the `std::error::Error` contract;
// no `source()` override is needed because `Io` stores only a message string.
impl std::error::Error for PdfReadError {}
49
50impl From<io::Error> for PdfReadError {
51 fn from(e: io::Error) -> Self {
52 PdfReadError::Io(e.to_string())
53 }
54}
55
/// Minimal reader for classic PDF files (table-based xref; cross-reference
/// streams are rejected during parsing).
pub struct PdfReader {
    // Entire file contents held in memory; all xref offsets index into this.
    data: Vec<u8>,
    // Object number -> byte offset of that object's "N G obj" header in `data`.
    xref: HashMap<u32, usize>,
    // Version string taken from the "%PDF-" header line (e.g. "1.4").
    version: String,
    // /Count of the root /Pages node, resolved eagerly in `from_bytes`.
    page_count: usize,
    // Object number of the document catalog (the trailer's /Root).
    catalog_num: u32,
}
78
impl PdfReader {
    /// Reads the file at `path` fully into memory and parses its structure.
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, PdfReadError> {
        let data = std::fs::read(path.as_ref())?;
        Self::from_bytes(data)
    }

    /// Parses the header, xref table, trailer, and page count up front, so a
    /// successfully constructed reader is known to be structurally sound.
    pub fn from_bytes(data: Vec<u8>) -> Result<Self, PdfReadError> {
        let version = parse_version(&data)?;
        let xref_offset = find_startxref(&data)?;
        let (xref, root_ref) = parse_xref_and_trailer(&data, xref_offset)?;
        let page_count = resolve_page_count(&data, &xref, root_ref)?;

        Ok(PdfReader {
            data,
            xref,
            version,
            page_count,
            catalog_num: root_ref,
        })
    }

    /// Number of pages, as reported by the root /Pages node's /Count entry.
    pub fn page_count(&self) -> usize {
        self.page_count
    }

    /// Version string from the "%PDF-" header (e.g. "1.4").
    pub fn pdf_version(&self) -> &str {
        &self.version
    }

    /// Walks the page tree from the catalog and returns the object number of
    /// every leaf /Page node, in tree (document) order.
    #[allow(dead_code)]
    pub(crate) fn page_object_numbers(&self) -> Result<Vec<u32>, PdfReadError> {
        let catalog_dict = resolve_dict(&self.data, &self.xref, self.catalog_num)?;
        // The catalog's /Pages entry is an indirect reference; the dict
        // parser stores just its object number as a string.
        let pages_ref = catalog_dict
            .get("Pages")
            .ok_or(PdfReadError::MalformedPageTree)?;
        let pages_num: u32 = pages_ref
            .parse()
            .map_err(|_| PdfReadError::MalformedPageTree)?;

        let mut result = Vec::new();
        walk_page_tree(&self.data, &self.xref, pages_num, &mut result)?;
        Ok(result)
    }

    /// Transitive closure over indirect references: starting from `roots`,
    /// follows every "N G R" reference found in each object's raw bytes and
    /// returns the set of reachable object numbers (roots included).
    ///
    /// Objects whose bytes cannot be fetched are skipped silently
    /// (best-effort traversal rather than a hard failure).
    #[allow(dead_code)]
    pub(crate) fn collect_closure(&self, roots: &[u32]) -> Result<HashSet<u32>, PdfReadError> {
        let mut visited: HashSet<u32> = HashSet::new();
        let mut queue: Vec<u32> = roots.to_vec();

        while let Some(obj_num) = queue.pop() {
            // `insert` returns false when already present — skip revisits.
            if !visited.insert(obj_num) {
                continue;
            }

            if let Ok(bytes) = self.raw_object_bytes(obj_num) {
                for r in extract_indirect_refs(bytes) {
                    // Only queue references the xref table can actually resolve.
                    if !visited.contains(&r) && self.xref.contains_key(&r) {
                        queue.push(r);
                    }
                }
            }
        }

        Ok(visited)
    }

    /// Returns the raw bytes of object `obj_num`, from its xref offset up to
    /// and including the terminating "endobj" keyword.
    #[allow(dead_code)]
    pub(crate) fn raw_object_bytes(&self, obj_num: u32) -> Result<&[u8], PdfReadError> {
        let offset = self
            .xref
            .get(&obj_num)
            .copied()
            .ok_or(PdfReadError::UnresolvableObject(obj_num))?;

        // Guard against a stale/bogus offset pointing past the buffer.
        if offset >= self.data.len() {
            return Err(PdfReadError::UnresolvableObject(obj_num));
        }

        let slice = &self.data[offset..];
        let endobj_pos = slice
            .windows(6)
            .position(|w| w == b"endobj")
            .ok_or(PdfReadError::UnresolvableObject(obj_num))?;

        // +6 keeps the "endobj" keyword itself in the returned slice.
        Ok(&slice[..endobj_pos + 6])
    }
}
190
191fn parse_version(data: &[u8]) -> Result<String, PdfReadError> {
195 if data.len() < 8 || !data.starts_with(b"%PDF-") {
196 return Err(PdfReadError::NotAPdf);
197 }
198 let rest = &data[5..];
200 let end = rest
201 .iter()
202 .position(|&b| b == b'\n' || b == b'\r' || b == b' ')
203 .unwrap_or(rest.len());
204 let version = std::str::from_utf8(&rest[..end])
205 .map(|s| s.to_string())
206 .map_err(|_| PdfReadError::NotAPdf)?;
207 Ok(version)
208}
209
210fn find_startxref(data: &[u8]) -> Result<usize, PdfReadError> {
215 let search_start = data.len().saturating_sub(1024);
216 let tail = &data[search_start..];
217
218 let keyword = b"startxref";
220 let pos = tail
221 .windows(keyword.len())
222 .rposition(|w| w == keyword)
223 .ok_or(PdfReadError::StartxrefNotFound)?;
224
225 let after = &tail[pos + keyword.len()..];
227 let offset_str = skip_whitespace_to_token(after).ok_or(PdfReadError::StartxrefNotFound)?;
228 let offset: usize = offset_str
229 .parse()
230 .map_err(|_| PdfReadError::StartxrefNotFound)?;
231
232 if offset >= data.len() {
233 return Err(PdfReadError::StartxrefNotFound);
234 }
235
236 Ok(offset)
237}
238
239fn parse_xref_and_trailer(
243 data: &[u8],
244 xref_offset: usize,
245) -> Result<(HashMap<u32, usize>, u32), PdfReadError> {
246 if xref_offset >= data.len() {
247 return Err(PdfReadError::MalformedXref);
248 }
249
250 let section = &data[xref_offset..];
251
252 let trimmed = skip_ascii_whitespace(section);
254 if !trimmed.starts_with(b"xref") {
255 return Err(PdfReadError::XrefStreamNotSupported);
256 }
257
258 let xref = parse_xref_table(section)?;
259 let root = parse_trailer_root(data, xref_offset)?;
260
261 Ok((xref, root))
262}
263
/// Parses a classic xref table: one or more subsections, each a header line
/// "first count" followed by `count` fixed-width 20-byte entries.
///
/// Within each entry, bytes 0..10 are the zero-padded byte offset and byte 17
/// is the status flag; only in-use ('n') entries are recorded, and object 0
/// (the free-list head) is never inserted.
fn parse_xref_table(section: &[u8]) -> Result<HashMap<u32, usize>, PdfReadError> {
    let mut map = HashMap::new();

    // Consume the leading "xref" keyword before the first subsection header.
    let rest = skip_ascii_whitespace(consume_token(section, b"xref")?);

    let mut cursor = rest;
    loop {
        let trimmed = skip_ascii_whitespace(cursor);
        // Subsections end at the "trailer" keyword or end of input.
        if trimmed.is_empty() || trimmed.starts_with(b"trailer") {
            break;
        }

        // Subsection header: first object number, then the entry count.
        let (first_obj_str, after_first) =
            next_token(trimmed).ok_or(PdfReadError::MalformedXref)?;
        let first_obj: u32 = first_obj_str
            .parse()
            .map_err(|_| PdfReadError::MalformedXref)?;

        let after_first = skip_ascii_whitespace(after_first);
        let (count_str, after_count) =
            next_token(after_first).ok_or(PdfReadError::MalformedXref)?;
        let count: usize = count_str.parse().map_err(|_| PdfReadError::MalformedXref)?;

        // Entries begin on the next line and are exactly 20 bytes apiece.
        let entries_start = skip_line(after_count);
        let entry_size = 20;
        let entries_bytes = entries_start.len();

        if entries_bytes < count * entry_size {
            return Err(PdfReadError::MalformedXref);
        }

        for i in 0..count {
            let entry = &entries_start[i * entry_size..(i + 1) * entry_size];
            // Bytes 0..10: zero-padded decimal offset; byte 17: 'n' or 'f'.
            let offset_bytes = &entry[..10];
            let status = entry[17];

            if status == b'n' {
                let offset_str =
                    std::str::from_utf8(offset_bytes).map_err(|_| PdfReadError::MalformedXref)?;
                let offset: usize = offset_str
                    .parse()
                    .map_err(|_| PdfReadError::MalformedXref)?;
                let obj_num = first_obj + i as u32;
                // Object 0 is the free-list head; never a real object.
                if obj_num > 0 {
                    map.insert(obj_num, offset);
                }
            }
        }

        // Continue scanning right after this subsection's entry block.
        cursor = &entries_start[count * entry_size..];
    }

    Ok(map)
}
328
329fn parse_trailer_root(data: &[u8], xref_offset: usize) -> Result<u32, PdfReadError> {
331 let section = &data[xref_offset..];
333 let pos = section
334 .windows(7)
335 .position(|w| w == b"trailer")
336 .ok_or(PdfReadError::MalformedTrailer)?;
337
338 let after_trailer = skip_ascii_whitespace(§ion[pos + 7..]);
339
340 let dict = parse_dict_bytes(after_trailer).ok_or(PdfReadError::MalformedTrailer)?;
342
343 let root_ref = dict.get("Root").ok_or(PdfReadError::MalformedTrailer)?;
344 let obj_num: u32 = root_ref
346 .parse()
347 .map_err(|_| PdfReadError::MalformedTrailer)?;
348 Ok(obj_num)
349}
350
351fn resolve_page_count(
353 data: &[u8],
354 xref: &HashMap<u32, usize>,
355 catalog_obj_num: u32,
356) -> Result<usize, PdfReadError> {
357 let catalog_dict = resolve_dict(data, xref, catalog_obj_num)?;
359
360 let pages_ref = catalog_dict
361 .get("Pages")
362 .ok_or(PdfReadError::MalformedPageTree)?;
363 let pages_obj_num: u32 = pages_ref
364 .parse()
365 .map_err(|_| PdfReadError::MalformedPageTree)?;
366
367 let pages_dict = resolve_dict(data, xref, pages_obj_num)?;
369
370 let count_str = pages_dict
371 .get("Count")
372 .ok_or(PdfReadError::MalformedPageTree)?;
373 let count: usize = count_str
374 .parse()
375 .map_err(|_| PdfReadError::MalformedPageTree)?;
376
377 Ok(count)
378}
379
380fn resolve_dict(
383 data: &[u8],
384 xref: &HashMap<u32, usize>,
385 obj_num: u32,
386) -> Result<HashMap<String, String>, PdfReadError> {
387 let offset = xref
388 .get(&obj_num)
389 .copied()
390 .ok_or(PdfReadError::UnresolvableObject(obj_num))?;
391
392 if offset >= data.len() {
393 return Err(PdfReadError::UnresolvableObject(obj_num));
394 }
395
396 let slice = &data[offset..];
397
398 let after_header = skip_obj_header(slice).ok_or(PdfReadError::UnresolvableObject(obj_num))?;
400 let after_ws = skip_ascii_whitespace(after_header);
401
402 parse_dict_bytes(after_ws).ok_or(PdfReadError::UnresolvableObject(obj_num))
403}
404
405#[allow(dead_code)]
408fn walk_page_tree(
410 data: &[u8],
411 xref: &HashMap<u32, usize>,
412 node: u32,
413 result: &mut Vec<u32>,
414) -> Result<(), PdfReadError> {
415 let dict = resolve_dict(data, xref, node)?;
416 match dict.get("Type").map(String::as_str) {
417 Some("/Page") => {
418 result.push(node);
419 }
420 Some("/Pages") | None => {
421 let kids = resolve_kids(data, xref, node)?;
422 for kid in kids {
423 walk_page_tree(data, xref, kid, result)?;
424 }
425 }
426 _ => {}
427 }
428 Ok(())
429}
430
/// Reads the /Kids array of a page-tree node and returns the object numbers
/// it references.
#[allow(dead_code)]
fn resolve_kids(
    data: &[u8],
    xref: &HashMap<u32, usize>,
    obj_num: u32,
) -> Result<Vec<u32>, PdfReadError> {
    let offset = xref
        .get(&obj_num)
        .copied()
        .ok_or(PdfReadError::UnresolvableObject(obj_num))?;

    if offset >= data.len() {
        return Err(PdfReadError::UnresolvableObject(obj_num));
    }

    let slice = &data[offset..];
    // Clamp the search to this object's body (up to "endobj") so a /Kids
    // belonging to a later object is never picked up by mistake.
    let obj_end = slice
        .windows(6)
        .position(|w| w == b"endobj")
        .map(|p| p + 6)
        .unwrap_or(slice.len());
    let slice = &slice[..obj_end];

    let after_header = skip_obj_header(slice).ok_or(PdfReadError::UnresolvableObject(obj_num))?;
    let after_ws = skip_ascii_whitespace(after_header);

    // Locate the /Kids key by raw byte search, then require the '[' that
    // must open its array value.
    let needle = b"/Kids";
    let kids_pos = after_ws
        .windows(needle.len())
        .position(|w| w == needle)
        .ok_or(PdfReadError::MalformedPageTree)?;

    let after_kids = skip_ascii_whitespace(&after_ws[kids_pos + needle.len()..]);
    if !after_kids.starts_with(b"[") {
        return Err(PdfReadError::MalformedPageTree);
    }

    parse_ref_array(after_kids).ok_or(PdfReadError::MalformedPageTree)
}
474
475#[allow(dead_code)]
476fn parse_ref_array(data: &[u8]) -> Option<Vec<u32>> {
478 debug_assert!(data.starts_with(b"["));
479 let end = data.iter().position(|&b| b == b']')?;
480 let inner = &data[1..end];
481
482 let mut result = Vec::new();
483 let mut cursor = inner;
484
485 loop {
486 cursor = skip_ascii_whitespace(cursor);
487 if cursor.is_empty() {
488 break;
489 }
490
491 let (n_str, rest) = next_token(cursor)?;
492 let Ok(n) = n_str.parse::<u32>() else { break };
493
494 let rest = skip_ascii_whitespace(rest);
495 let (_g_str, rest) = next_token(rest)?; let rest = skip_ascii_whitespace(rest);
498 let (r_str, rest) = next_token(rest)?;
499 if r_str != "R" {
500 break;
501 }
502
503 result.push(n);
504 cursor = rest;
505 }
506
507 Some(result)
508}
509
/// Scans raw object bytes for "N G R" indirect-reference triples and returns
/// the set of referenced object numbers N.
#[allow(dead_code)]
fn extract_indirect_refs(data: &[u8]) -> HashSet<u32> {
    // Tokenize on whitespace plus the delimiters that can abut a token
    // (dict and array brackets); empty fragments between delimiters drop out.
    let is_delim = |b: u8| b.is_ascii_whitespace() || matches!(b, b'<' | b'>' | b'[' | b']');
    let tokens: Vec<&[u8]> = data
        .split(|&b| is_delim(b))
        .filter(|t| !t.is_empty())
        .collect();

    let mut refs = HashSet::new();
    let mut i = 0;
    // Slide over token triples; on a match, consume all three at once.
    while i + 2 < tokens.len() {
        let (num, gen, kw) = (tokens[i], tokens[i + 1], tokens[i + 2]);

        let looks_like_ref = kw == b"R"
            && !num.is_empty()
            && num.iter().all(u8::is_ascii_digit)
            && gen.iter().all(u8::is_ascii_digit);

        if looks_like_ref {
            if let Some(n) = std::str::from_utf8(num).ok().and_then(|s| s.parse().ok()) {
                refs.insert(n);
                i += 3;
                continue;
            }
        }
        i += 1;
    }

    refs
}
565
/// Best-effort parser for a flat PDF dictionary "<< /Key value ... >>".
///
/// Values are stored as strings; an indirect reference "N G R" is collapsed
/// to just "N". Nested dictionaries, arrays, and literal strings are skipped
/// rather than captured. Returns `None` on structurally broken input
/// (e.g. missing "<<" or a truncated body).
fn parse_dict_bytes(data: &[u8]) -> Option<HashMap<String, String>> {
    let data = skip_ascii_whitespace(data);
    if !data.starts_with(b"<<") {
        return None;
    }

    let mut map = HashMap::new();
    let mut cursor = &data[2..];

    loop {
        cursor = skip_ascii_whitespace(cursor);

        if cursor.starts_with(b">>") {
            break;
        }

        // Anything that is not a name key (a stray token) is skipped whole.
        if !cursor.starts_with(b"/") {
            let (_, rest) = next_token(cursor)?;
            cursor = rest;
            continue;
        }

        // Key: the name token after the leading '/'.
        let (key, after_key) = next_token(&cursor[1..])?;
        cursor = skip_ascii_whitespace(after_key);

        if cursor.starts_with(b"<<") {
            // Nested dictionary value: skip it wholesale (not captured).
            cursor = skip_nested_dict(cursor)?;
        } else if cursor.starts_with(b"[") {
            // Array value: skip to its closing bracket.
            cursor = skip_array(cursor)?;
        } else if cursor.starts_with(b"(") {
            // Literal string value: skip, honoring escapes and nesting.
            cursor = skip_literal_string(cursor)?;
        } else {
            let (val, rest) = next_token(cursor)?;
            cursor = skip_ascii_whitespace(rest);

            // Look ahead for "<gen> R": if present, `val` was the object
            // number of an indirect reference — consume all three tokens.
            if let Some((gen_str, after_gen)) = next_token(cursor) {
                let after_gen_ws = skip_ascii_whitespace(after_gen);
                if let Some((r_str, after_r)) = next_token(after_gen_ws) {
                    if r_str == "R"
                        && val.chars().all(|c| c.is_ascii_digit())
                        && gen_str.chars().all(|c| c.is_ascii_digit())
                    {
                        map.insert(key.to_string(), val.to_string());
                        cursor = after_r;
                        continue;
                    }
                }
                // Not a reference: keep the scalar value; the lookahead
                // tokens remain in the stream for the next iteration.
                map.insert(key.to_string(), val.to_string());
            } else {
                map.insert(key.to_string(), val.to_string());
            }
        }
    }

    Some(map)
}
636
/// Skips a (possibly nested) dictionary value "<< ... >>" by tracking
/// `<<`/`>>` depth, returning the input just past the matching close.
/// Returns `None` when the dictionary is unterminated.
fn skip_nested_dict(data: &[u8]) -> Option<&[u8]> {
    debug_assert!(data.starts_with(b"<<"));
    let mut depth: i32 = 0;
    let mut pos = 0;
    while pos < data.len() {
        let rest = &data[pos..];
        if rest.starts_with(b"<<") {
            depth += 1;
            pos += 2;
        } else if rest.starts_with(b">>") {
            depth -= 1;
            pos += 2;
            if depth == 0 {
                return Some(&data[pos..]);
            }
        } else {
            pos += 1;
        }
    }
    None
}
661
/// Skips a PDF array value, returning the input just past the matching
/// closing bracket.
///
/// Bracket depth is tracked so that nested arrays such as
/// "[[5 0 R] [7 0 R]]" are skipped in full — a naive scan for the first ']'
/// would stop inside the outer array and desynchronize the dict parser.
/// Returns `None` if the array is unterminated.
fn skip_array(data: &[u8]) -> Option<&[u8]> {
    debug_assert!(data.starts_with(b"["));
    let mut depth = 0i32;
    for (i, &b) in data.iter().enumerate() {
        match b {
            b'[' => depth += 1,
            b']' => {
                depth -= 1;
                // Depth 0 means this ']' closes the array we started in.
                if depth == 0 {
                    return Some(&data[i + 1..]);
                }
            }
            _ => {}
        }
    }
    None
}
668
/// Skips a PDF literal string "(...)", honoring backslash escapes and
/// balanced nested parentheses; returns the remainder after the closing ')'.
/// Returns `None` when the string is unterminated.
fn skip_literal_string(data: &[u8]) -> Option<&[u8]> {
    debug_assert!(data.starts_with(b"("));
    let mut open: i32 = 1;
    let mut idx = 1;
    while let Some(&byte) = data.get(idx) {
        match byte {
            // An escaped character never opens or closes the string.
            b'\\' => idx += 2,
            b'(' => {
                open += 1;
                idx += 1;
            }
            b')' => {
                open -= 1;
                idx += 1;
                if open == 0 {
                    return Some(&data[idx..]);
                }
            }
            _ => idx += 1,
        }
    }
    None
}
693
694fn skip_obj_header(data: &[u8]) -> Option<&[u8]> {
696 let (_, rest) = next_token(data)?; let rest = skip_ascii_whitespace(rest);
698 let (_, rest) = next_token(rest)?; let rest = skip_ascii_whitespace(rest);
700 let (keyword, rest) = next_token(rest)?; if keyword != "obj" {
702 return None;
703 }
704 Some(rest)
705}
706
/// Returns `data` with all leading ASCII whitespace removed
/// (an all-whitespace input yields the empty slice).
fn skip_ascii_whitespace(data: &[u8]) -> &[u8] {
    let mut rest = data;
    while let [first, tail @ ..] = rest {
        if !first.is_ascii_whitespace() {
            break;
        }
        rest = tail;
    }
    rest
}
715
/// Returns the bytes after the first '\n'; if there is no newline (or the
/// newline is the final byte), returns the empty tail of `data`.
fn skip_line(data: &[u8]) -> &[u8] {
    match data.iter().position(|&b| b == b'\n') {
        Some(nl) if nl + 1 < data.len() => &data[nl + 1..],
        _ => &data[data.len()..],
    }
}
728
729fn consume_token<'a>(data: &'a [u8], token: &[u8]) -> Result<&'a [u8], PdfReadError> {
731 let trimmed = skip_ascii_whitespace(data);
732 if trimmed.starts_with(token) {
733 Ok(&trimmed[token.len()..])
734 } else {
735 Err(PdfReadError::MalformedXref)
736 }
737}
738
739fn next_token(data: &[u8]) -> Option<(&str, &[u8])> {
742 let data = skip_ascii_whitespace(data);
743 if data.is_empty() {
744 return None;
745 }
746 let end = data
747 .iter()
748 .position(|&b| b.is_ascii_whitespace() || b == b'<' || b == b'>')
749 .unwrap_or(data.len());
750 if end == 0 {
751 let token = std::str::from_utf8(&data[..1]).ok()?;
753 return Some((token, &data[1..]));
754 }
755 let token = std::str::from_utf8(&data[..end]).ok()?;
756 Some((token, &data[end..]))
757}
758
759fn skip_whitespace_to_token(data: &[u8]) -> Option<&str> {
761 let (tok, _) = next_token(data)?;
762 Some(tok)
763}
764
#[cfg(test)]
mod tests {
    use super::*;
    use crate::document::{DocumentOptions, PdfDocument};

    // Builds an in-memory PDF with `n` empty 612x792 pages using the crate's
    // own writer, so the reader tests round-trip real generated output.
    fn make_pdf(n: usize) -> Vec<u8> {
        let mut doc = PdfDocument::new(Vec::new(), DocumentOptions::default()).unwrap();
        for _ in 0..n {
            doc.begin_page(612.0, 792.0);
            doc.end_page().unwrap();
        }
        doc.end_document().unwrap()
    }

    #[test]
    fn page_object_numbers_count_matches_page_count() {
        // Includes the zero-page edge case.
        for n in [0, 1, 3, 10] {
            let bytes = make_pdf(n);
            let reader = PdfReader::from_bytes(bytes).unwrap();
            let nums = reader.page_object_numbers().unwrap();
            assert_eq!(nums.len(), n, "expected {n} page objects");
        }
    }

    #[test]
    fn page_object_numbers_are_unique_and_positive() {
        let bytes = make_pdf(5);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let nums = reader.page_object_numbers().unwrap();
        // Object 0 is reserved for the free list and must never appear.
        assert!(
            nums.iter().all(|&n| n > 0),
            "all object numbers must be > 0"
        );
        let unique: HashSet<_> = nums.iter().collect();
        assert_eq!(unique.len(), nums.len(), "object numbers must be unique");
    }

    #[test]
    fn page_object_numbers_order_is_stable() {
        // Two walks of the same reader must agree (deterministic traversal).
        let bytes = make_pdf(4);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let first = reader.page_object_numbers().unwrap();
        let second = reader.page_object_numbers().unwrap();
        assert_eq!(first, second);
    }

    #[test]
    fn collect_closure_contains_seed_objects() {
        let bytes = make_pdf(2);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let page_nums = reader.page_object_numbers().unwrap();
        let closure = reader.collect_closure(&page_nums).unwrap();
        for &n in &page_nums {
            assert!(closure.contains(&n), "closure must include seed object {n}");
        }
    }

    #[test]
    fn collect_closure_includes_dependencies() {
        // A page node references at least its parent /Pages node, so the
        // closure must be strictly larger than the seed set.
        let bytes = make_pdf(1);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let page_nums = reader.page_object_numbers().unwrap();
        let closure = reader.collect_closure(&page_nums).unwrap();
        assert!(
            closure.len() > page_nums.len(),
            "closure must include objects beyond the page nodes"
        );
    }

    #[test]
    fn collect_closure_empty_roots_returns_empty() {
        let bytes = make_pdf(1);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let closure = reader.collect_closure(&[]).unwrap();
        assert!(closure.is_empty());
    }

    #[test]
    fn raw_object_bytes_starts_with_obj_header_and_ends_with_endobj() {
        let bytes = make_pdf(1);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        let page_num = reader.page_object_numbers().unwrap()[0];
        let raw = reader.raw_object_bytes(page_num).unwrap();

        assert!(raw.windows(4).any(|w| w == b" obj"), "must contain ' obj'");
        assert!(raw.ends_with(b"endobj"), "must end with 'endobj'");
    }

    #[test]
    fn raw_object_bytes_error_on_missing_object() {
        let bytes = make_pdf(1);
        let reader = PdfReader::from_bytes(bytes).unwrap();
        assert!(
            reader.raw_object_bytes(99999).is_err(),
            "non-existent object must return Err"
        );
    }

    #[test]
    fn parse_dict_bytes_handles_nested_resources_dict() {
        // The nested /Resources and /Font dicts must be skipped, not merged
        // into the outer map.
        let dict = b"<< /Type /Page /Resources << /Font << >> >> >>";
        let map = parse_dict_bytes(dict).expect("should parse successfully");
        assert_eq!(
            map.get("Type").map(String::as_str),
            Some("/Page"),
            "Type must be /Page, not overwritten by the inner Pages object"
        );
    }

    #[test]
    fn skip_nested_dict_returns_after_matching_close() {
        let data = b"<< /K << >> >> tail";
        let rest = skip_nested_dict(data).expect("should find closing >>");
        assert_eq!(rest, b" tail");
    }
}