1use std::marker::PhantomData;
3use std::collections::HashMap;
4use std::sync::{Arc, Mutex};
5use std::path::Path;
6use std::io::Write;
7
8use crate as pdf;
9use crate::error::*;
10use crate::object::*;
11use crate::primitive::{Primitive, Dictionary, PdfString};
12use crate::backend::Backend;
13use crate::any::*;
14use crate::parser::{Lexer, parse_with_lexer};
15use crate::parser::{parse_indirect_object, parse, ParseFlags};
16use crate::xref::{XRef, XRefTable, XRefInfo};
17use crate::crypt::Decoder;
18use crate::crypt::CryptDict;
19use crate::enc::{StreamFilter, decode};
20use std::ops::Range;
21use datasize::DataSize;
22
23#[cfg(feature="cache")]
24pub use globalcache::{ValueSize, sync::SyncCache};
25
/// A forward reference to an object number that has been allocated
/// (via `Updater::promise`) but whose contents have not been written yet.
/// Marked `#[must_use]` because a promise that is never fulfilled leaves
/// a dangling xref entry.
#[must_use]
pub struct PromisedRef<T> {
    // The allocated object/generation number pair.
    inner: PlainRef,
    // Ties the promise to the object type `T` without storing a value.
    _marker: PhantomData<T>
}
31impl<T> PromisedRef<T> {
32 pub fn get_inner(&self) -> PlainRef {
33 self.inner
34 }
35 pub fn get_ref(&self) -> Ref<T> {
36 Ref::new(self.inner)
37 }
38}
39
/// Keyed memoization used by [`Storage`] for decoded objects and stream data.
pub trait Cache<T: Clone> {
    /// Return the cached value for `key`, running `compute` to produce it
    /// on a miss.
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T;
    /// Drop all cached entries.
    fn clear(&self);
}
44pub struct NoCache;
45impl<T: Clone> Cache<T> for NoCache {
46 fn get_or_compute(&self, _key: PlainRef, compute: impl FnOnce() -> T) -> T {
47 compute()
48 }
49 fn clear(&self) {}
50}
51
#[cfg(feature="cache")]
// Shared, thread-safe cache backed by `globalcache::SyncCache`.
impl<T: Clone + ValueSize + Send + 'static> Cache<T> for Arc<SyncCache<PlainRef, T>> {
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T {
        self.get(key, compute)
    }
    fn clear(&self) {
        // Explicit deref to call SyncCache::clear rather than recursing.
        (**self).clear()
    }
}
61
/// Hooks for observing object access; both are no-ops by default.
pub trait Log {
    /// Called when a reference is about to be resolved from storage.
    fn load_object(&self, _r: PlainRef) {}
    /// Called on every typed `get` through the resolver.
    fn log_get(&self, _r: PlainRef) {}
}
/// A logger that ignores everything.
pub struct NoLog;
impl Log for NoLog {}
68
/// Low-level object storage: the raw PDF bytes (`backend`), the
/// cross-reference table, pending in-memory changes, and caches.
pub struct Storage<B, OC, SC, L> {
    // Cache for decoded typed objects.
    cache: OC,
    // Cache for decoded stream data.
    stream_cache: SC,

    // Objects written via `Updater` but not yet saved,
    // keyed by object number, with their generation number.
    changes: HashMap<ObjNr, (Primitive, GenNr)>,

    // Cross-reference table mapping object numbers to file locations.
    refs: XRefTable,

    // Set once an /Encrypt dictionary has been processed.
    decoder: Option<Decoder>,
    options: ParseOptions,

    backend: B,

    // Position of the `%PDF` header within the backend.
    start_offset: usize,

    log: L
}
89
90impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
91where
92 OC: Cache<Result<AnySync, Arc<PdfError>>>,
93 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
94 L: Log,
95{
96 pub fn empty(object_cache: OC, stream_cache: SC, log: L) -> Self {
97 Storage {
98 cache: object_cache,
99 stream_cache,
100 changes: HashMap::new(),
101 refs: XRefTable::new(0),
102 decoder: None,
103 options: ParseOptions::strict(),
104 backend: Vec::from(&b"%PDF-1.7\n"[..]),
105 start_offset: 0,
106 log
107 }
108 }
109}
110
impl<B, OC, SC, L> Storage<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Consume the storage and return the underlying backend.
    pub fn into_inner(self) -> B {
        self.backend
    }
    /// Return the version part of the header: 7 bytes starting just after
    /// the `%` of `%PDF-x.y`.
    pub fn version(&self) -> Result<String> {
        Ok(String::from_utf8(self.backend.read(self.start_offset+1..self.start_offset+8)?.to_owned())?)
    }
    /// A [`Resolve`] implementation borrowing this storage.
    pub fn resolver(&self) -> impl Resolve + '_ {
        StorageResolver::new(self)
    }
    /// Wrap an existing backend. Only locates the `%PDF` header; the xref
    /// table is not read until `load_storage_and_trailer*` is called.
    pub fn with_cache(backend: B, options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
        let start_offset = backend.locate_start_offset()?;

        Ok(Storage {
            start_offset,
            backend,
            refs: XRefTable::new(0),
            cache: object_cache,
            stream_cache,
            changes: HashMap::new(),
            decoder: None,
            options,
            log
        })
    }
    /// Read `range` from the backend, decrypt it if a decoder is set, then
    /// apply `filters` in order.
    fn decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
        let data = self.backend.read(range)?;

        let mut data = Vec::from(data);
        if let Some(ref decoder) = self.decoder {
            // Decryption happens before any stream filters are applied.
            data = Vec::from(t!(decoder.decrypt(id, &mut data)));
        }
        for filter in filters {
            data = t!(decode(&data, filter), filter);
        }
        Ok(data.into())
    }

    /// Like [`Self::load_storage_and_trailer_password`] with an empty password.
    pub fn load_storage_and_trailer(&mut self) -> Result<Dictionary> {
        self.load_storage_and_trailer_password(b"")
    }

    /// Parse the cross-reference table and trailer. If the trailer carries
    /// an `Encrypt` entry, set up the decryption decoder from `password`
    /// and the first element of the trailer `ID`.
    pub fn load_storage_and_trailer_password(&mut self, password: &[u8]) -> Result<Dictionary> {

        let resolver = StorageResolver::new(self);
        let (refs, trailer) = t!(self.backend.read_xref_table_and_trailer(self.start_offset, &resolver));
        self.refs = refs;

        if let Some(crypt) = trailer.get("Encrypt") {
            // ID[0] seeds the encryption key derivation.
            let key = trailer
                .get("ID")
                .ok_or(PdfError::MissingEntry {
                    typ: "Trailer",
                    field: "ID".into(),
                })?
                .as_array()?
                .get(0)
                .ok_or(PdfError::MissingEntry {
                    typ: "Trailer",
                    field: "ID[0]".into()
                })?
                .as_string()?
                .as_bytes();

            let resolver = StorageResolver::new(self);
            let dict = CryptDict::from_primitive(crypt.clone(), &resolver)?;

            self.decoder = Some(t!(Decoder::from_password(&dict, key, password)));
            // Record which indirect objects hold the crypt dictionary and
            // the document metadata, so the decoder can treat them specially.
            if let Primitive::Reference(reference) = crypt {
                self.decoder.as_mut().unwrap().encrypt_indirect_object = Some(*reference);
            }
            if let Some(Primitive::Reference(catalog_ref)) = trailer.get("Root") {
                let resolver = StorageResolver::new(self);
                let catalog = t!(t!(resolver.resolve(*catalog_ref)).resolve(&resolver)?.into_dictionary());
                if let Some(Primitive::Reference(metadata_ref)) = catalog.get("Metadata") {
                    self.decoder.as_mut().unwrap().metadata_indirect_object = Some(*metadata_ref);
                }
            }
        }
        Ok(trailer)
    }
    /// Linearly scan the file body (header to xref offset), yielding every
    /// indirect object and trailer dictionary found. Useful for recovering
    /// damaged files whose xref table is unusable.
    ///
    /// NOTE(review): the two setup calls below `unwrap()` and will panic on
    /// a backend read failure instead of yielding an `Err` item.
    pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
        let xref_offset = self.backend.locate_xref_offset().unwrap();
        let slice = self.backend.read(self.start_offset .. xref_offset).unwrap();
        let mut lexer = Lexer::with_offset(slice, 0);

        // Skip a classic xref section up to and including the `trailer`
        // keyword, so the trailer dictionary can be parsed next.
        fn skip_xref(lexer: &mut Lexer) -> Result<()> {
            while lexer.next()? != "trailer" {

            }
            Ok(())
        }

        let resolver = StorageResolver::new(self);
        std::iter::from_fn(move || {
            loop {
                let pos = lexer.get_pos();
                match parse_indirect_object(&mut lexer, &resolver, self.decoder.as_ref(), ParseFlags::all()) {
                    Ok((r, p)) => return Some(Ok(ScanItem::Object(r, p))),
                    Err(e) if e.is_eof() => return None,
                    Err(e) => {
                        // Not an indirect object here: rewind and look for
                        // `xref` / `startxref` sections before giving up.
                        lexer.set_pos(pos);
                        if let Ok(s) = lexer.next() {
                            debug!("next: {:?}", String::from_utf8_lossy(s.as_slice()));
                            match &*s {
                                b"xref" => {
                                    if let Err(e) = skip_xref(&mut lexer) {
                                        return Some(Err(e));
                                    }
                                    if let Ok(trailer) = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::DICT).and_then(|p| p.into_dictionary()) {
                                        return Some(Ok(ScanItem::Trailer(trailer)));
                                    }
                                }
                                // `startxref` is followed by a single offset
                                // token; consume it and keep scanning.
                                b"startxref" if lexer.next().is_ok() => {
                                    continue;
                                }
                                _ => {}
                            }
                        }
                        return Some(Err(e));
                    }
                }
            }
        })
    }
    /// Resolve a reference to its primitive. Pending in-memory changes win;
    /// otherwise the object is parsed from the file position or the object
    /// stream recorded in the xref table.
    fn resolve_ref(&self, r: PlainRef, flags: ParseFlags, resolve: &impl Resolve) -> Result<Primitive> {
        match self.changes.get(&r.id) {
            Some((p, _)) => Ok((*p).clone()),
            None => match t!(self.refs.get(r.id)) {
                XRef::Raw {pos, ..} => {
                    let mut lexer = Lexer::with_offset(t!(self.backend.read(self.start_offset + pos ..)), self.start_offset + pos);
                    let p = t!(parse_indirect_object(&mut lexer, resolve, self.decoder.as_ref(), flags)).1;
                    Ok(p)
                }
                XRef::Stream {stream_id, index} => {
                    // Objects in object streams are only legal when the
                    // caller allows stream parsing.
                    if !flags.contains(ParseFlags::STREAM) {
                        return Err(PdfError::PrimitiveNotAllowed { found: ParseFlags::STREAM, allowed: flags });
                    }
                    let obj_stream = resolve.get::<ObjectStream>(Ref::from_id(stream_id))?;

                    let (data, range) = t!(obj_stream.get_object_slice(index, resolve));
                    let slice = data.get(range.clone()).ok_or_else(|| other!("invalid range {:?}, but only have {} bytes", range, data.len()))?;
                    parse(slice, resolve, flags)
                }
                XRef::Free {..} => err!(PdfError::FreeObject {obj_nr: r.id}),
                XRef::Promised => unimplemented!(),
                XRef::Invalid => err!(PdfError::NullRef {obj_nr: r.id}),
            }
        }
    }
}
269
/// One item produced by [`Storage::scan`] / [`File::scan`].
pub enum ScanItem {
    /// An indirect object together with its reference.
    Object(PlainRef, Primitive),
    /// A trailer dictionary found after an xref section.
    Trailer(Dictionary)
}
274
/// Borrowing resolver over a [`Storage`]. Tracks the chain of references
/// currently being resolved so reference cycles can be rejected.
struct StorageResolver<'a, B, OC, SC, L> {
    storage: &'a Storage<B, OC, SC, L>,
    // References currently being resolved on this call stack.
    chain: Mutex<Vec<PlainRef>>,
}
impl<'a, B, OC, SC, L> StorageResolver<'a, B, OC, SC, L> {
    /// Start with an empty resolution chain.
    pub fn new(storage: &'a Storage<B, OC, SC, L>) -> Self {
        StorageResolver {
            storage,
            chain: Mutex::new(vec![])
        }
    }
}
287
/// Runs the contained closure when dropped — a minimal `defer` guard,
/// used to unwind the resolution chain on every exit path.
struct Defer<F: FnMut()>(F);
impl<F: FnMut()> Drop for Defer<F> {
    fn drop(&mut self) {
        (self.0)();
    }
}
294
impl<'a, B, OC, SC, L> Resolve for StorageResolver<'a, B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log
{
    /// Resolve a reference to its raw primitive, logging the access.
    fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, _depth: usize) -> Result<Primitive> {
        let storage = self.storage;
        storage.log.load_object(r);

        storage.resolve_ref(r, flags, self)
    }

    /// Resolve and decode a typed object through the object cache.
    ///
    /// The `chain` guard detects cycles: a key already being resolved
    /// further up the stack is rejected instead of recursing forever.
    fn get<T: Object+DataSize>(&self, r: Ref<T>) -> Result<RcRef<T>> {
        let key = r.get_inner();
        self.storage.log.log_get(key);

        {
            debug!("get {key:?} as {}", std::any::type_name::<T>());
            let mut chain = self.chain.lock().unwrap();
            if chain.contains(&key) {
                bail!("Recursive reference");
            }
            chain.push(key);
        }
        // Pop the key from the chain on every exit path, including `?`.
        let _defer = Defer(|| {
            let mut chain = self.chain.lock().unwrap();
            assert_eq!(chain.pop(), Some(key));
        });

        let res = self.storage.cache.get_or_compute(key, || {
            match self.resolve(key).and_then(|p| T::from_primitive(p, self)) {
                Ok(obj) => Ok(AnySync::new(Shared::new(obj))),
                Err(e) => {
                    // Re-resolve purely so the warning can show the primitive.
                    let p = self.resolve(key);
                    warn!("failed to decode {p:?} as {}", std::any::type_name::<T>());
                    Err(Arc::new(e))
                }
            }
        });
        match res {
            Ok(any) => {
                match any.downcast() {
                    Ok(val) => Ok(RcRef::new(key, val)),
                    Err(_) => {
                        // Cached under a different type: decode again as `T`
                        // without disturbing the cached entry.
                        let p = self.resolve(key)?;
                        Ok(RcRef::new(key, T::from_primitive(p, self)?.into()))
                    }
                }
            }
            Err(e) => Err(PdfError::Shared { source: e.clone()}),
        }
    }
    fn options(&self) -> &ParseOptions {
        &self.storage.options
    }
    /// Read and decrypt raw stream data (no filters applied).
    fn stream_data(&self, id: PlainRef, range: Range<usize>) -> Result<Arc<[u8]>> {
        self.storage.decode(id, range, &[])
    }

    /// Decoded stream data, memoized in the stream cache.
    fn get_data_or_decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
        self.storage.stream_cache.get_or_compute(id, || self.storage.decode(id, range, filters).map_err(Arc::new))
            .map_err(|e| e.into())
    }
}
361
impl<B, OC, SC, L> Updater for Storage<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Allocate a fresh object number and record `obj` as a pending change
    /// (generation 0). Nothing is written until `save`.
    fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
        let id = self.refs.len() as u64;
        self.refs.push(XRef::Promised);
        let primitive = obj.to_primitive(self)?;
        self.changes.insert(id, (primitive, 0));
        let rc = Shared::new(obj);
        let r = PlainRef { id, gen: 0 };

        Ok(RcRef::new(r, rc))
    }
    /// Replace (or merge into) an existing object.
    ///
    /// A pending dictionary change for the same id is merged key-by-key;
    /// any other primitive replaces the pending value outright. Objects
    /// living in object streams cannot be updated in place and are
    /// re-created under a fresh id.
    ///
    /// NOTE(review): updating a free or invalid xref entry panics rather
    /// than returning an error — confirm callers never pass such refs.
    fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
        use std::collections::hash_map::Entry;

        let r = match self.refs.get(old.id)? {
            XRef::Free { .. } => panic!(),
            XRef::Raw { gen_nr, .. } => PlainRef { id: old.id, gen: gen_nr },
            XRef::Stream { .. } => return self.create(obj),
            XRef::Promised => PlainRef { id: old.id, gen: 0 },
            XRef::Invalid => panic!()
        };
        let primitive = obj.to_primitive(self)?;
        match self.changes.entry(old.id) {
            Entry::Vacant(e) => {
                e.insert((primitive, r.gen));
            }
            Entry::Occupied(mut e) => match (e.get_mut(), primitive) {
                ((Primitive::Dictionary(ref mut dict), _), Primitive::Dictionary(new)) => {
                    // Merge new keys into the pending dictionary.
                    dict.append(new);
                }
                (old, new) => {
                    *old = (new, r.gen);
                }
            }
        }
        let rc = Shared::new(obj);

        Ok(RcRef::new(r, rc))
    }

    /// Reserve an object number without providing its contents yet.
    fn promise<T: Object>(&mut self) -> PromisedRef<T> {
        let id = self.refs.len() as u64;

        self.refs.push(XRef::Promised);

        PromisedRef {
            inner: PlainRef {
                id,
                gen: 0
            },
            _marker: PhantomData
        }
    }

    /// Provide the contents for a previously promised object number.
    fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
        self.update(promise.inner, obj)
    }
}
426
427impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
428where
429 OC: Cache<Result<AnySync, Arc<PdfError>>>,
430 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
431 L: Log
432{
433 pub fn save(&mut self, trailer: &mut Trailer) -> Result<&[u8]> {
434 trailer.size = (self.refs.len() + 2) as _;
436 let trailer_dict = trailer.to_dict(self)?;
437
438 let xref_promise = self.promise::<Stream<XRefInfo>>();
439
440 let mut changes: Vec<_> = self.changes.iter().collect();
441 changes.sort_unstable_by_key(|&(id, _)| id);
442
443 for &(&id, &(ref primitive, gen)) in changes.iter() {
444 let pos = self.backend.len();
445 self.refs.set(id, XRef::Raw { pos: pos as _, gen_nr: gen });
446 writeln!(self.backend, "{} {} obj", id, gen)?;
447 primitive.serialize(&mut self.backend)?;
448 writeln!(self.backend, "endobj")?;
449 }
450
451 let xref_pos = self.backend.len();
452 self.refs.set(xref_promise.get_inner().id, XRef::Raw { pos: xref_pos, gen_nr: 0 });
453 let stream = self.refs.write_stream(xref_promise.get_inner().id as usize + 1)?;
455
456 writeln!(self.backend, "{} {} obj", xref_promise.get_inner().id, 0)?;
457 let mut xref_and_trailer = stream.to_pdf_stream(&mut NoUpdate)?;
458 for (k, v) in trailer_dict.iter() {
459 xref_and_trailer.info.insert(k.clone(), v.clone());
460 }
461
462 xref_and_trailer.serialize(&mut self.backend)?;
463 writeln!(self.backend, "endobj")?;
464
465 let _ = self.fulfill(xref_promise, stream)?;
466
467 write!(self.backend, "\nstartxref\n{}\n%%EOF", xref_pos).unwrap();
468
469 self.cache.clear();
471 *trailer = Trailer::from_dict(trailer_dict, &self.resolver())?;
472
473 Ok(&self.backend)
474 }
475}
476
#[cfg(feature="cache")]
/// Default object cache: shared, thread-safe map keyed by reference.
pub type ObjectCache = Arc<SyncCache<PlainRef, Result<AnySync, Arc<PdfError>>>>;
#[cfg(feature="cache")]
/// Default stream-data cache.
pub type StreamCache = Arc<SyncCache<PlainRef, Result<Arc<[u8]>, Arc<PdfError>>>>;
#[cfg(feature="cache")]
/// A `File` using the default caches and no logging.
pub type CachedFile<B> = File<B, ObjectCache, StreamCache, NoLog>;

/// A parsed PDF document: object storage plus the decoded trailer.
pub struct File<B, OC, SC, L> {
    storage: Storage<B, OC, SC, L>,
    pub trailer: Trailer,
}
// `File` forwards all mutation straight to its inner `Storage`.
impl<B, OC, SC, L> Updater for File<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
        self.storage.create(obj)
    }
    fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
        self.storage.update(old, obj)
    }
    fn promise<T: Object>(&mut self) -> PromisedRef<T> {
        self.storage.promise()
    }
    fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
        self.storage.fulfill(promise, obj)
    }
}
508
509impl<OC, SC, L> File<Vec<u8>, OC, SC, L>
510where
511 OC: Cache<Result<AnySync, Arc<PdfError>>>,
512 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
513 L: Log,
514{
515 pub fn save_to(&mut self, path: impl AsRef<Path>) -> Result<()> {
516 std::fs::write(path, self.storage.save(&mut self.trailer)?)?;
517 Ok(())
518 }
519}
520
521
/// Builder for loading a [`File`] with custom caches, logging, password
/// and parse options.
pub struct FileOptions<'a, OC, SC, L> {
    oc: OC,
    sc: SC,
    log: L,
    // Password tried when the document turns out to be encrypted.
    password: &'a [u8],
    parse_options: ParseOptions,
}
529impl FileOptions<'static, NoCache, NoCache, NoLog> {
530 pub fn uncached() -> Self {
531 FileOptions {
532 oc: NoCache,
533 sc: NoCache,
534 password: b"",
535 parse_options: ParseOptions::strict(),
536 log: NoLog,
537 }
538 }
539}
540
#[cfg(feature="cache")]
impl FileOptions<'static, ObjectCache, StreamCache, NoLog> {
    /// Configuration with shared sync caches for objects and stream data.
    pub fn cached() -> Self {
        FileOptions {
            oc: SyncCache::new(),
            sc: SyncCache::new(),
            password: b"",
            parse_options: ParseOptions::strict(),
            log: NoLog
        }
    }
}
553impl<'a, OC, SC, L> FileOptions<'a, OC, SC, L>
554where
555 OC: Cache<Result<AnySync, Arc<PdfError>>>,
556 SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
557 L: Log,
558{
559 pub fn password(self, password: &'a [u8]) -> FileOptions<'a, OC, SC, L> {
560 FileOptions {
561 password,
562 .. self
563 }
564 }
565 pub fn cache<O, S>(self, oc: O, sc: S) -> FileOptions<'a, O, S, L> {
566 let FileOptions { oc: _, sc: _, password, parse_options, log } = self;
567 FileOptions {
568 oc,
569 sc,
570 password,
571 parse_options,
572 log,
573 }
574 }
575 pub fn log<Log>(self, log: Log) -> FileOptions<'a, OC, SC, Log> {
576 let FileOptions { oc, sc, password, parse_options, .. } = self;
577 FileOptions {
578 oc,
579 sc,
580 password,
581 parse_options,
582 log,
583 }
584 }
585 pub fn parse_options(self, parse_options: ParseOptions) -> Self {
586 FileOptions { parse_options, .. self }
587 }
588
589 pub fn open(self, path: impl AsRef<Path>) -> Result<File<Vec<u8>, OC, SC, L>> {
591 let data = std::fs::read(path)?;
592 self.load(data)
593 }
594 pub fn storage(self) -> Storage<Vec<u8>, OC, SC, L> {
595 let FileOptions { oc, sc, log, .. } = self;
596 Storage::empty(oc, sc, log)
597 }
598
599 pub fn load<B: Backend>(self, backend: B) -> Result<File<B, OC, SC, L>> {
601 let FileOptions { oc, sc, password, parse_options, log } = self;
602 File::load_data(backend, password, parse_options, oc, sc, log)
603 }
604}
605
606
impl<B, OC, SC, L> File<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Parse a document: read the xref table and trailer (setting up
    /// decryption from `password` if needed), then decode the trailer.
    fn load_data(backend: B, password: &[u8], options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
        let mut storage = Storage::with_cache(backend, options, object_cache, stream_cache, log)?;
        let trailer = storage.load_storage_and_trailer_password(password)?;

        let resolver = StorageResolver::new(&storage);
        let trailer = t!(Trailer::from_primitive(
            Primitive::Dictionary(trailer),
            &resolver,
        ));
        Ok(File { storage, trailer })
    }
    /// Assemble a `File` from already-initialized parts.
    pub fn new(storage: Storage<B, OC, SC, L>, trailer: Trailer) -> Self {
        File { storage, trailer }
    }
    /// A resolver borrowing this file's storage.
    pub fn resolver(&self) -> impl Resolve + '_ {
        StorageResolver::new(&self.storage)
    }

    /// The document catalog referenced by the trailer.
    pub fn get_root(&self) -> &Catalog {
        &self.trailer.root
    }

    /// Iterate over all pages in document order.
    pub fn pages(&self) -> impl Iterator<Item=Result<PageRc>> + '_ {
        (0 .. self.num_pages()).map(move |n| self.get_page(n))
    }
    /// Page count as recorded in the page tree root.
    pub fn num_pages(&self) -> u32 {
        self.trailer.root.pages.count
    }

    /// Fetch page `n` (zero-based).
    pub fn get_page(&self, n: u32) -> Result<PageRc> {
        let resolver = StorageResolver::new(&self.storage);
        self.trailer.root.pages.page(&resolver, n)
    }

    /// Write `catalog` as a new object and point the trailer at it.
    pub fn update_catalog(&mut self, catalog: Catalog) -> Result<()> {
        self.trailer.root = self.create(catalog)?;
        Ok(())
    }

    /// Change the parse options used for subsequent object resolution.
    pub fn set_options(&mut self, options: ParseOptions) {
        self.storage.options = options;
    }

    /// See [`Storage::scan`].
    pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
        self.storage.scan()
    }

    /// Access the logger.
    pub fn log(&self) -> &L {
        &self.storage.log
    }

    /// The version string from the `%PDF-x.y` header.
    pub fn version(&self) -> Result<String> {
        self.storage.version()
    }
}
671
/// The PDF trailer dictionary, decoded into typed fields.
#[derive(Object, ObjectWrite, DataSize)]
pub struct Trailer {
    /// Size of the cross-reference table (set on save to `refs.len() + 2`).
    #[pdf(key = "Size")]
    pub size: i32,

    /// Byte offset of the previous xref section, for incrementally
    /// updated files.
    #[pdf(key = "Prev")]
    pub prev_trailer_pos: Option<i32>,

    /// The document catalog.
    #[pdf(key = "Root")]
    pub root: RcRef<Catalog>,

    /// Encryption dictionary, present when the document is encrypted.
    #[pdf(key = "Encrypt")]
    pub encrypt_dict: Option<RcRef<CryptDict>>,

    /// Document information dictionary.
    #[pdf(key = "Info", indirect)]
    pub info_dict: Option<InfoDict>,

    /// File identifier strings; the first element seeds encryption keys.
    #[pdf(key = "ID")]
    pub id: Vec<PdfString>,
}
692
693