1use std::marker::PhantomData;
3use std::collections::HashMap;
4use std::sync::{Arc, Mutex};
5use std::path::Path;
6use std::io::Write;
7
8use crate as pdf;
9use crate::error::*;
10use crate::object::*;
11use crate::primitive::{Primitive, Dictionary, PdfString};
12use crate::backend::Backend;
13use crate::any::*;
14use crate::parser::{Lexer, parse_with_lexer};
15use crate::parser::{parse_indirect_object, parse, ParseFlags};
16use crate::xref::{XRef, XRefTable, XRefInfo};
17use crate::crypt::Decoder;
18use crate::crypt::CryptDict;
19use crate::enc::{StreamFilter, decode};
20use std::ops::Range;
21use datasize::DataSize;
22
23#[cfg(feature="cache")]
24pub use globalcache::{ValueSize, sync::SyncCache};
25
/// A reference to an object slot that has been reserved in the xref table
/// (via `Updater::promise`) but whose contents have not been written yet.
///
/// `#[must_use]`: dropping a promise leaves a permanently unfulfilled slot.
#[must_use]
pub struct PromisedRef<T> {
    inner: PlainRef,
    // Ties the promise to the object type T without storing a T.
    _marker: PhantomData<T>
}
impl<T> PromisedRef<T> {
    /// The raw (object nr, generation) pair reserved for this promise.
    pub fn get_inner(&self) -> PlainRef {
        self.inner
    }
    /// A typed reference to the promised slot, usable in other objects
    /// before the promised object itself is written.
    pub fn get_ref(&self) -> Ref<T> {
        Ref::new(self.inner)
    }
}
39
/// Cache abstraction used by `Storage` for decoded objects and stream data.
pub trait Cache<T: Clone> {
    /// Return the cached value for `key`, computing and storing it on a miss.
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T;
    /// Drop all cached entries.
    fn clear(&self);
}
/// `Cache` implementation that stores nothing: every lookup recomputes.
pub struct NoCache;
impl<T: Clone> Cache<T> for NoCache {
    fn get_or_compute(&self, _key: PlainRef, compute: impl FnOnce() -> T) -> T {
        compute()
    }
    fn clear(&self) {}
}
51
// With the "cache" feature, a shared `SyncCache` provides a real cache.
#[cfg(feature="cache")]
impl<T: Clone + ValueSize + Send + 'static> Cache<T> for Arc<SyncCache<PlainRef, T>> {
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T {
        self.get(key, compute)
    }
    fn clear(&self) {
        // Deref through the Arc so this calls SyncCache::clear,
        // not Cache::clear (which would recurse).
        (**self).clear()
    }
}
61
/// Hook for observing object accesses; the default no-op methods make
/// logging strictly opt-in.
pub trait Log {
    /// Called when a reference is about to be resolved from the backend.
    fn load_object(&self, _r: PlainRef) {}
    /// Called on every typed `get` lookup.
    fn log_get(&self, _r: PlainRef) {}
}
/// `Log` implementation that ignores all events.
pub struct NoLog;
impl Log for NoLog {}
68
/// Access to the objects of a PDF document, plus a set of pending
/// changes that can be appended to the document on save.
pub struct Storage<B, OC, SC, L> {
    // Cache for decoded typed objects.
    cache: OC,
    // Cache for decoded stream data.
    stream_cache: SC,

    // Objects created/updated since loading, keyed by object number;
    // the tuple is (new primitive, generation number).
    changes: HashMap<ObjNr, (Primitive, GenNr)>,

    // Cross-reference table mapping object numbers to their locations.
    refs: XRefTable,

    // Present iff the document is encrypted.
    decoder: Option<Decoder>,
    options: ParseOptions,

    // Underlying byte source (memory buffer, file, ...).
    backend: B,

    // Position of the "%PDF-" header within the backend.
    start_offset: usize,

    log: L
}
89
90impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
91where
92 OC: Cache<Result<AnySync, Arc<PdfError>>>,
93 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
94 L: Log,
95{
96 pub fn empty(object_cache: OC, stream_cache: SC, log: L) -> Self {
97 Storage {
98 cache: object_cache,
99 stream_cache,
100 changes: HashMap::new(),
101 refs: XRefTable::new(0),
102 decoder: None,
103 options: ParseOptions::strict(),
104 backend: Vec::from(&b"%PDF-1.7\n"[..]),
105 start_offset: 0,
106 log
107 }
108 }
109}
110
111impl<B, OC, SC, L> Storage<B, OC, SC, L>
112where
113 B: Backend,
114 OC: Cache<Result<AnySync, Arc<PdfError>>>,
115 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
116 L: Log,
117{
118 pub fn into_inner(self) -> B {
119 self.backend
120 }
121 pub fn version(&self) -> Result<String> {
122 Ok(String::from_utf8(self.backend.read(self.start_offset+1..self.start_offset+8)?.to_owned())?)
123 }
124 pub fn resolver(&self) -> impl Resolve + '_ {
125 StorageResolver::new(self)
126 }
127 pub fn with_cache(backend: B, options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
128 let start_offset = backend.locate_start_offset()?;
129
130 Ok(Storage {
131 start_offset,
132 backend,
133 refs: XRefTable::new(0),
134 cache: object_cache,
135 stream_cache,
136 changes: HashMap::new(),
137 decoder: None,
138 options,
139 log
140 })
141 }
142 fn decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
143 let data = self.backend.read(range)?;
144
145 let mut data = Vec::from(data);
146 if let Some(ref decoder) = self.decoder {
147 data = Vec::from(t!(decoder.decrypt(id, &mut data)));
148 }
149 for filter in filters {
150 data = t!(decode(&data, filter), filter);
151 }
152 Ok(data.into())
153 }
154
155 pub fn load_storage_and_trailer(&mut self) -> Result<Dictionary> {
156 self.load_storage_and_trailer_password(b"")
157 }
158
159 pub fn load_storage_and_trailer_password(&mut self, password: &[u8]) -> Result<Dictionary> {
160
161 let resolver = StorageResolver::new(self);
162 let (refs, trailer) = t!(self.backend.read_xref_table_and_trailer(self.start_offset, &resolver));
163 self.refs = refs;
164
165 if let Some(crypt) = trailer.get("Encrypt") {
166 let key = trailer
167 .get("ID")
168 .ok_or(PdfError::MissingEntry {
169 typ: "Trailer",
170 field: "ID".into(),
171 })?
172 .as_array()?
173 .get(0)
174 .ok_or(PdfError::MissingEntry {
175 typ: "Trailer",
176 field: "ID[0]".into()
177 })?
178 .as_string()?
179 .as_bytes();
180
181 let resolver = StorageResolver::new(self);
182 let dict = CryptDict::from_primitive(crypt.clone(), &resolver)?;
183
184 self.decoder = Some(t!(Decoder::from_password(&dict, key, password)));
185 if let Primitive::Reference(reference) = crypt {
186 self.decoder.as_mut().unwrap().encrypt_indirect_object = Some(*reference);
187 }
188 if let Some(Primitive::Reference(catalog_ref)) = trailer.get("Root") {
189 let resolver = StorageResolver::new(self);
190 let catalog = t!(t!(resolver.resolve(*catalog_ref)).resolve(&resolver)?.into_dictionary());
191 if let Some(Primitive::Reference(metadata_ref)) = catalog.get("Metadata") {
192 self.decoder.as_mut().unwrap().metadata_indirect_object = Some(*metadata_ref);
193 }
194 }
195 }
196 Ok(trailer)
197 }
198 pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
199 let xref_offset = self.backend.locate_xref_offset().unwrap();
200 let slice = self.backend.read(self.start_offset .. xref_offset).unwrap();
201 let mut lexer = Lexer::with_offset(slice, 0);
202
203 fn skip_xref(lexer: &mut Lexer) -> Result<()> {
204 while lexer.next()? != "trailer" {
205
206 }
207 Ok(())
208 }
209
210 let resolver = StorageResolver::new(self);
211 std::iter::from_fn(move || {
212 loop {
213 let pos = lexer.get_pos();
214 match parse_indirect_object(&mut lexer, &resolver, self.decoder.as_ref(), ParseFlags::all()) {
215 Ok((r, p)) => return Some(Ok(ScanItem::Object(r, p))),
216 Err(e) if e.is_eof() => return None,
217 Err(e) => {
218 lexer.set_pos(pos);
219 if let Ok(s) = lexer.next() {
220 debug!("next: {:?}", String::from_utf8_lossy(s.as_slice()));
221 match &*s {
222 b"xref" => {
223 if let Err(e) = skip_xref(&mut lexer) {
224 return Some(Err(e));
225 }
226 if let Ok(trailer) = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::DICT).and_then(|p| p.into_dictionary()) {
227 return Some(Ok(ScanItem::Trailer(trailer)));
228 }
229 }
230 b"startxref" if lexer.next().is_ok() => {
231 continue;
232 }
233 _ => {}
234 }
235 }
236 return Some(Err(e));
237 }
238 }
239 }
240 })
241 }
242 fn resolve_ref(&self, r: PlainRef, flags: ParseFlags, resolve: &impl Resolve) -> Result<Primitive> {
243 match self.changes.get(&r.id) {
244 Some((p, _)) => Ok((*p).clone()),
245 None => match t!(self.refs.get(r.id)) {
246 XRef::Raw {pos, ..} => {
247 let mut lexer = Lexer::with_offset(t!(self.backend.read(self.start_offset + pos ..)), self.start_offset + pos);
248 let p = t!(parse_indirect_object(&mut lexer, resolve, self.decoder.as_ref(), flags)).1;
249 Ok(p)
250 }
251 XRef::Stream {stream_id, index} => {
252 if !flags.contains(ParseFlags::STREAM) {
253 return Err(PdfError::PrimitiveNotAllowed { found: ParseFlags::STREAM, allowed: flags });
254 }
255 let obj_stream = resolve.get::<ObjectStream>(Ref::from_id(stream_id))?;
257
258 let (data, range) = t!(obj_stream.get_object_slice(index, resolve));
259 let slice = data.get(range.clone()).ok_or_else(|| other!("invalid range {:?}, but only have {} bytes", range, data.len()))?;
260 parse(slice, resolve, flags)
261 }
262 XRef::Free {..} => err!(PdfError::FreeObject {obj_nr: r.id}),
263 XRef::Promised => unimplemented!(),
264 XRef::Invalid => err!(PdfError::NullRef {obj_nr: r.id}),
265 }
266 }
267 }
268}
269
/// Item yielded by `Storage::scan` / `File::scan`.
pub enum ScanItem {
    /// An indirect object and the reference it was declared under.
    Object(PlainRef, Primitive),
    /// A trailer dictionary following a classic xref section.
    Trailer(Dictionary)
}
274
/// `Resolve` implementation borrowing a `Storage`; tracks the chain of
/// references currently being resolved so cycles can be detected.
struct StorageResolver<'a, B, OC, SC, L> {
    storage: &'a Storage<B, OC, SC, L>,
    // Stack of references currently being resolved (see `get` below).
    chain: Mutex<Vec<PlainRef>>,
}
impl<'a, B, OC, SC, L> StorageResolver<'a, B, OC, SC, L> {
    /// Wrap `storage` with an empty resolution chain.
    pub fn new(storage: &'a Storage<B, OC, SC, L>) -> Self {
        StorageResolver {
            storage,
            chain: Mutex::new(vec![])
        }
    }
}
287
/// Minimal scope guard: runs the closure when dropped (on every exit path,
/// including `?` and panic). Used to pop the recursion-detection chain.
struct Defer<F: FnMut()>(F);
impl<F: FnMut()> Drop for Defer<F> {
    fn drop(&mut self) {
        (self.0)();
    }
}
294
impl<'a, B, OC, SC, L> Resolve for StorageResolver<'a, B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log
{
    /// Resolve a raw reference to its primitive (uncached path).
    fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, _depth: usize) -> Result<Primitive> {
        let storage = self.storage;
        storage.log.load_object(r);

        storage.resolve_ref(r, flags, self)
    }

    /// Resolve and decode a reference into a typed shared object, going
    /// through the object cache and guarding against reference cycles.
    fn get<T: Object+DataSize>(&self, r: Ref<T>) -> Result<RcRef<T>> {
        let key = r.get_inner();
        self.storage.log.log_get(key);

        {
            debug!("get {key:?} as {}", std::any::type_name::<T>());
            // Cycle detection: fail if `key` is already being resolved
            // further up this call chain.
            let mut chain = self.chain.lock().unwrap();
            if chain.contains(&key) {
                bail!("Recursive reference");
            }
            chain.push(key);
        }
        // Pop `key` from the chain on every exit path (including `?`).
        let _defer = Defer(|| {
            let mut chain = self.chain.lock().unwrap();
            assert_eq!(chain.pop(), Some(key));
        });

        let res = self.storage.cache.get_or_compute(key, || {
            match self.resolve(key).and_then(|p| T::from_primitive(p, self)) {
                Ok(obj) => Ok(AnySync::new(Shared::new(obj))),
                Err(e) => {
                    // Re-resolve only to include the primitive in the log.
                    let p = self.resolve(key);
                    warn!("failed to decode {p:?} as {}", std::any::type_name::<T>());
                    Err(Arc::new(e))
                }
            }
        });
        match res {
            Ok(any) => {
                match any.downcast() {
                    Ok(val) => Ok(RcRef::new(key, val)),
                    // Cached under a different type: decode again as T
                    // instead of failing the lookup.
                    Err(_) => {
                        let p = self.resolve(key)?;
                        Ok(RcRef::new(key, T::from_primitive(p, self)?.into()))
                    }
                }
            }
            Err(e) => Err(PdfError::Shared { source: e.clone()}),
        }
    }
    fn options(&self) -> &ParseOptions {
        &self.storage.options
    }
    /// Read and decrypt raw stream data, with no filters applied.
    fn stream_data(&self, id: PlainRef, range: Range<usize>) -> Result<Arc<[u8]>> {
        self.storage.decode(id, range, &[])
    }

    /// Fully decoded stream data, memoized in the stream cache.
    fn get_data_or_decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
        self.storage.stream_cache.get_or_compute(id, || self.storage.decode(id, range, filters).map_err(Arc::new))
            .map_err(|e| e.into())
    }
}
361
impl<B, OC, SC, L> Updater for Storage<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Allocate a new object number and record `obj` as a pending change.
    fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
        let id = self.refs.len() as u64;
        // Reserve the slot first: `to_primitive` may itself create objects.
        self.refs.push(XRef::Promised);
        let primitive = obj.to_primitive(self)?;
        self.changes.insert(id, (primitive, 0));
        let rc = Shared::new(obj);
        let r = PlainRef { id, gen: 0 };

        Ok(RcRef::new(r, rc))
    }
    /// Replace the object behind `old`, keeping the generation number found
    /// in the xref table. If both the pending change and the new value are
    /// dictionaries, the new entries are merged into the pending one.
    fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
        use std::collections::hash_map::Entry;

        let r = match self.refs.get(old.id)? {
            // Updating a free or invalid slot is a caller bug.
            XRef::Free { .. } => panic!(),
            XRef::Raw { gen_nr, .. } => PlainRef { id: old.id, gen: gen_nr },
            XRef::Stream { .. } => PlainRef { id: old.id, gen: 0 },
            XRef::Promised => PlainRef { id: old.id, gen: 0 },
            XRef::Invalid => panic!()
        };
        let primitive = obj.to_primitive(self)?;
        match self.changes.entry(old.id) {
            Entry::Vacant(e) => {
                e.insert((primitive, r.gen));
            }
            // Dictionary-on-dictionary update: merge instead of replace.
            Entry::Occupied(mut e) => match (e.get_mut(), primitive) {
                ((Primitive::Dictionary(ref mut dict), _), Primitive::Dictionary(new)) => {
                    dict.append(new);
                }
                (old, new) => {
                    *old = (new, r.gen);
                }
            }
        }
        let rc = Shared::new(obj);

        Ok(RcRef::new(r, rc))
    }

    /// Reserve an object number without providing its contents yet.
    fn promise<T: Object>(&mut self) -> PromisedRef<T> {
        let id = self.refs.len() as u64;

        self.refs.push(XRef::Promised);

        PromisedRef {
            inner: PlainRef {
                id,
                gen: 0
            },
            _marker: PhantomData
        }
    }

    /// Provide the object for a previously reserved slot.
    fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
        self.update(promise.inner, obj)
    }
}
428
impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
where
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log
{
    /// Append all pending changes plus a new cross-reference stream and
    /// trailer to the in-memory buffer, returning the full document bytes.
    pub fn save(&mut self, trailer: &mut Trailer) -> Result<&[u8]> {
        // NOTE(review): the +2 appears to account for the xref stream object
        // promised below plus the "highest number + 1" /Size rule — confirm.
        trailer.size = (self.refs.len() + 2) as _;
        let trailer_dict = trailer.to_dict(self)?;

        // Reserve the xref stream's object number before writing changes,
        // so it gets an xref entry of its own.
        let xref_promise = self.promise::<Stream<XRefInfo>>();

        // Deterministic output: write changes in ascending object number.
        let mut changes: Vec<_> = self.changes.iter().collect();
        changes.sort_unstable_by_key(|&(id, _)| id);

        for &(&id, &(ref primitive, gen)) in changes.iter() {
            // Record each object's byte position before serializing it.
            let pos = self.backend.len();
            self.refs.set(id, XRef::Raw { pos: pos as _, gen_nr: gen });
            writeln!(self.backend, "{} {} obj", id, gen)?;
            primitive.serialize(&mut self.backend)?;
            writeln!(self.backend, "endobj")?;
        }

        let xref_pos = self.backend.len();
        self.refs.set(xref_promise.get_inner().id, XRef::Raw { pos: xref_pos, gen_nr: 0 });
        let stream = self.refs.write_stream(xref_promise.get_inner().id as usize + 1)?;

        writeln!(self.backend, "{} {} obj", xref_promise.get_inner().id, 0)?;
        // The xref stream's dictionary also carries the trailer entries
        // (cross-reference stream style), so merge them in.
        let mut xref_and_trailer = stream.to_pdf_stream(&mut NoUpdate)?;
        for (k, v) in trailer_dict.iter() {
            xref_and_trailer.info.insert(k.clone(), v.clone());
        }

        xref_and_trailer.serialize(&mut self.backend)?;
        writeln!(self.backend, "endobj")?;

        let _ = self.fulfill(xref_promise, stream)?;

        // Trailing pointer to the xref stream, as required by the format.
        write!(self.backend, "\nstartxref\n{}\n%%EOF", xref_pos).unwrap();

        // Cached objects may now be stale relative to the written changes.
        self.cache.clear();
        *trailer = Trailer::from_dict(trailer_dict, &self.resolver())?;

        Ok(&self.backend)
    }
}
478
/// Default object cache type when the "cache" feature is enabled.
#[cfg(feature="cache")]
pub type ObjectCache = Arc<SyncCache<PlainRef, Result<AnySync, Arc<PdfError>>>>;
/// Default decoded-stream-data cache type.
#[cfg(feature="cache")]
pub type StreamCache = Arc<SyncCache<PlainRef, Result<Arc<[u8]>, Arc<PdfError>>>>;
/// Convenience alias: a `File` using the default caches and no logging.
#[cfg(feature="cache")]
pub type CachedFile<B> = File<B, ObjectCache, StreamCache, NoLog>;
485
/// A parsed PDF document: the object storage plus its decoded trailer.
pub struct File<B, OC, SC, L> {
    storage: Storage<B, OC, SC, L>,
    /// The decoded trailer; `trailer.root` is the document catalog.
    pub trailer: Trailer,
}
490impl<B, OC, SC, L> Updater for File<B, OC, SC, L>
491where
492 B: Backend,
493 OC: Cache<Result<AnySync, Arc<PdfError>>>,
494 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
495 L: Log,
496{
497 fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
498 self.storage.create(obj)
499 }
500 fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
501 self.storage.update(old, obj)
502 }
503 fn promise<T: Object>(&mut self) -> PromisedRef<T> {
504 self.storage.promise()
505 }
506 fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
507 self.storage.fulfill(promise, obj)
508 }
509}
510
511impl<OC, SC, L> File<Vec<u8>, OC, SC, L>
512where
513 OC: Cache<Result<AnySync, Arc<PdfError>>>,
514 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
515 L: Log,
516{
517 pub fn save_to(&mut self, path: impl AsRef<Path>) -> Result<()> {
518 std::fs::write(path, self.storage.save(&mut self.trailer)?)?;
519 Ok(())
520 }
521
522 pub fn save(&mut self) -> Result<Vec<u8>> {
523 Ok(self.storage.save(&mut self.trailer)?.to_vec())
524 }
525}
526
527
/// Builder for loading a `File`: configures caches, logging, the
/// decryption password and parse options.
pub struct FileOptions<'a, OC, SC, L> {
    // Object cache.
    oc: OC,
    // Stream-data cache.
    sc: SC,
    log: L,
    // Decryption password; empty for unencrypted/default-password files.
    password: &'a [u8],
    parse_options: ParseOptions,
}
535impl FileOptions<'static, NoCache, NoCache, NoLog> {
536 pub fn uncached() -> Self {
537 FileOptions {
538 oc: NoCache,
539 sc: NoCache,
540 password: b"",
541 parse_options: ParseOptions::strict(),
542 log: NoLog,
543 }
544 }
545}
546
// Available with the "cache" feature: a preset with real shared caches.
#[cfg(feature="cache")]
impl FileOptions<'static, ObjectCache, StreamCache, NoLog> {
    /// Builder preset: shared sync caches, no logging, empty password and
    /// strict parsing.
    pub fn cached() -> Self {
        FileOptions {
            oc: SyncCache::new(),
            sc: SyncCache::new(),
            password: b"",
            parse_options: ParseOptions::strict(),
            log: NoLog
        }
    }
}
559impl<'a, OC, SC, L> FileOptions<'a, OC, SC, L>
560where
561 OC: Cache<Result<AnySync, Arc<PdfError>>>,
562 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
563 L: Log,
564{
565 pub fn password(self, password: &'a [u8]) -> FileOptions<'a, OC, SC, L> {
566 FileOptions {
567 password,
568 .. self
569 }
570 }
571 pub fn cache<O, S>(self, oc: O, sc: S) -> FileOptions<'a, O, S, L> {
572 let FileOptions { oc: _, sc: _, password, parse_options, log } = self;
573 FileOptions {
574 oc,
575 sc,
576 password,
577 parse_options,
578 log,
579 }
580 }
581 pub fn log<Log>(self, log: Log) -> FileOptions<'a, OC, SC, Log> {
582 let FileOptions { oc, sc, password, parse_options, .. } = self;
583 FileOptions {
584 oc,
585 sc,
586 password,
587 parse_options,
588 log,
589 }
590 }
591 pub fn parse_options(self, parse_options: ParseOptions) -> Self {
592 FileOptions { parse_options, .. self }
593 }
594
595 pub fn open(self, path: impl AsRef<Path>) -> Result<File<Vec<u8>, OC, SC, L>> {
597 let data = std::fs::read(path)?;
598 self.load(data)
599 }
600 pub fn storage(self) -> Storage<Vec<u8>, OC, SC, L> {
601 let FileOptions { oc, sc, log, .. } = self;
602 Storage::empty(oc, sc, log)
603 }
604
605 pub fn load<B: Backend>(self, backend: B) -> Result<File<B, OC, SC, L>> {
607 let FileOptions { oc, sc, password, parse_options, log } = self;
608 File::load_data(backend, password, parse_options, oc, sc, log)
609 }
610}
611
612
613impl<B, OC, SC, L> File<B, OC, SC, L>
614where
615 B: Backend,
616 OC: Cache<Result<AnySync, Arc<PdfError>>>,
617 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
618 L: Log,
619{
620 fn load_data(backend: B, password: &[u8], options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
621 let mut storage = Storage::with_cache(backend, options, object_cache, stream_cache, log)?;
622 let trailer = storage.load_storage_and_trailer_password(password)?;
623
624 let resolver = StorageResolver::new(&storage);
625 let trailer = t!(Trailer::from_primitive(
626 Primitive::Dictionary(trailer),
627 &resolver,
628 ));
629 Ok(File { storage, trailer })
630 }
631 pub fn new(storage: Storage<B, OC, SC, L>, trailer: Trailer) -> Self {
632 File { storage, trailer }
633 }
634 pub fn resolver(&self) -> impl Resolve + '_ {
635 StorageResolver::new(&self.storage)
636 }
637
638 pub fn get_root(&self) -> &Catalog {
639 &self.trailer.root
640 }
641
642 pub fn pages(&self) -> impl Iterator<Item=Result<PageRc>> + '_ {
643 (0 .. self.num_pages()).map(move |n| self.get_page(n))
644 }
645 pub fn num_pages(&self) -> u32 {
646 self.trailer.root.pages.count
647 }
648
649 pub fn get_page(&self, n: u32) -> Result<PageRc> {
650 let resolver = StorageResolver::new(&self.storage);
651 self.trailer.root.pages.page(&resolver, n)
652 }
653
654 pub fn update_catalog(&mut self, catalog: Catalog) -> Result<()> {
655 self.trailer.root = self.create(catalog)?;
656 Ok(())
657 }
658
659 pub fn set_options(&mut self, options: ParseOptions) {
660 self.storage.options = options;
661 }
662
663 pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
664 self.storage.scan()
665 }
666
667 pub fn log(&self) -> &L {
668 &self.storage.log
669 }
670
671 pub fn version(&self) -> Result<String> {
674 self.storage.version()
675 }
676}
677
/// The decoded PDF file trailer dictionary.
#[derive(Object, ObjectWrite, DataSize)]
pub struct Trailer {
    /// The /Size entry (number of entries in the xref table).
    #[pdf(key = "Size")]
    pub size: i32,

    /// The /Prev entry: byte offset of the previous xref section, if any.
    #[pdf(key = "Prev")]
    pub prev_trailer_pos: Option<i32>,

    /// The /Root entry: the document catalog.
    #[pdf(key = "Root")]
    pub root: RcRef<Catalog>,

    /// The /Encrypt entry, present for encrypted documents.
    #[pdf(key = "Encrypt")]
    pub encrypt_dict: Option<RcRef<CryptDict>>,

    /// The /Info entry (document information), written as indirect object.
    #[pdf(key = "Info", indirect)]
    pub info_dict: Option<InfoDict>,

    /// The /ID entry: file identifier byte strings.
    #[pdf(key = "ID")]
    pub id: Vec<PdfString>,
}
698
699