// pdf/file.rs

//! The entry point of the type-safe PDF functionality: loading, resolving,
//! updating and saving PDF files.
2use std::marker::PhantomData;
3use std::collections::HashMap;
4use std::sync::{Arc, Mutex};
5use std::path::Path;
6use std::io::Write;
7
8use crate as pdf;
9use crate::error::*;
10use crate::object::*;
11use crate::primitive::{Primitive, Dictionary, PdfString};
12use crate::backend::Backend;
13use crate::any::*;
14use crate::parser::{Lexer, parse_with_lexer};
15use crate::parser::{parse_indirect_object, parse, ParseFlags};
16use crate::xref::{XRef, XRefTable, XRefInfo};
17use crate::crypt::Decoder;
18use crate::crypt::CryptDict;
19use crate::enc::{StreamFilter, decode};
20use std::ops::Range;
21use datasize::DataSize;
22
23#[cfg(feature="cache")]
24pub use globalcache::{ValueSize, sync::SyncCache};
25
/// A typed reference to an object slot that has been reserved in the
/// cross-reference table but not yet given a value (see `Updater::promise`).
///
/// `#[must_use]`: dropping it without fulfilling leaks the reserved id.
#[must_use]
pub struct PromisedRef<T> {
    // The reserved (object number, generation) pair.
    inner:      PlainRef,
    // Zero-sized marker tying the reservation to the object type `T`.
    _marker:    PhantomData<T>
}
31impl<T> PromisedRef<T> {
32    pub fn get_inner(&self) -> PlainRef {
33        self.inner
34    }
35    pub fn get_ref(&self) -> Ref<T> {
36        Ref::new(self.inner)
37    }
38}
39
/// Keyed cache abstraction used by [`Storage`] for decoded objects and
/// decoded stream data.
pub trait Cache<T: Clone> {
    /// Return the value cached under `key`, or run `compute`, store the
    /// result and return it.
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T;
    /// Drop all cached entries.
    fn clear(&self);
}
/// A no-op [`Cache`]: nothing is ever stored, every lookup recomputes.
pub struct NoCache;
impl<T: Clone> Cache<T> for NoCache {
    fn get_or_compute(&self, _key: PlainRef, compute: impl FnOnce() -> T) -> T {
        // No storage, so always recompute.
        compute()
    }
    fn clear(&self) {}
}
51
// Adapter: a shared `SyncCache` (from the `globalcache` crate) acts as a
// `Cache` when the "cache" feature is enabled.
#[cfg(feature="cache")]
impl<T: Clone + ValueSize + Send + 'static> Cache<T> for Arc<SyncCache<PlainRef, T>> {
    fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T {
        self.get(key, compute)
    }
    fn clear(&self) {
        // Explicit deref so we call `SyncCache::clear` instead of recursing
        // into this trait method.
        (**self).clear()
    }
}
61
/// Hooks for observing object accesses; all default implementations do nothing.
pub trait Log {
    /// Called just before an object is resolved from the backend.
    fn load_object(&self, _r: PlainRef) {}
    /// Called whenever an object is requested through `Resolve::get`.
    fn log_get(&self, _r: PlainRef) {}
}
/// A [`Log`] that ignores every event.
pub struct NoLog;
impl Log for NoLog {}
68
/// Backing store of a PDF file: the raw backend plus caches, pending
/// changes, the cross-reference table and (for encrypted files) a decoder.
pub struct Storage<B, OC, SC, L> {
    // objects identical to those in the backend
    cache: OC,
    stream_cache: SC,

    // objects that differ from the backend
    changes:    HashMap<ObjNr, (Primitive, GenNr)>,

    // Cross-reference table mapping object numbers to file locations.
    refs:       XRefTable,

    // Present when the file is encrypted; used to decrypt strings/streams.
    decoder:    Option<Decoder>,
    options:    ParseOptions,

    backend:    B,

    // Position of the PDF header in the file.
    start_offset: usize,

    log: L
}
89
90impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
91where
92    OC: Cache<Result<AnySync, Arc<PdfError>>>,
93    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
94    L: Log,
95{
96    pub fn empty(object_cache: OC, stream_cache: SC, log: L) -> Self {
97        Storage {
98            cache: object_cache,
99            stream_cache,
100            changes: HashMap::new(),
101            refs: XRefTable::new(0),
102            decoder: None,
103            options: ParseOptions::strict(),
104            backend: Vec::from(&b"%PDF-1.7\n"[..]),
105            start_offset: 0,
106            log
107        }
108    }
109}
110
impl<B, OC, SC, L> Storage<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Consume the storage and return the underlying backend.
    pub fn into_inner(self) -> B {
        self.backend
    }
    /// The version string from the file header, e.g. `"PDF-1.7"`:
    /// the 7 bytes following the `%` of the `%PDF-x.y` marker.
    pub fn version(&self) -> Result<String> {
        Ok(String::from_utf8(self.backend.read(self.start_offset+1..self.start_offset+8)?.to_owned())?)
    }
    /// A fresh resolver borrowing this storage.
    pub fn resolver(&self) -> impl Resolve + '_ {
        StorageResolver::new(self)
    }
    /// Wrap `backend` in a storage with the given caches and options.
    /// The xref table is left empty; call `load_storage_and_trailer` to fill it.
    pub fn with_cache(backend: B, options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
        // Locate the "%PDF" header; all file positions are relative to it.
        let start_offset = backend.locate_start_offset()?;

        Ok(Storage {
            start_offset,
            backend,
            refs: XRefTable::new(0),
            cache: object_cache,
            stream_cache,
            changes: HashMap::new(),
            decoder: None,
            options,
            log
        })
    }
    /// Read `range` from the backend, decrypt it if the file is encrypted,
    /// then apply each stream filter in order.
    fn decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
        let data = self.backend.read(range)?;

        let mut data = Vec::from(data);
        if let Some(ref decoder) = self.decoder {
            data = Vec::from(t!(decoder.decrypt(id, &mut data)));
        }
        for filter in filters {
            data = t!(decode(&data, filter), filter);
        }
        Ok(data.into())
    }

    /// Like `load_storage_and_trailer_password` with an empty password.
    pub fn load_storage_and_trailer(&mut self) -> Result<Dictionary> {
        self.load_storage_and_trailer_password(b"")
    }

    /// Parse the cross-reference table and trailer, and — if the trailer has
    /// an "Encrypt" entry — set up the decryption machinery from `password`.
    pub fn load_storage_and_trailer_password(&mut self, password: &[u8]) -> Result<Dictionary> {

        let resolver = StorageResolver::new(self);
        let (refs, trailer) = t!(self.backend.read_xref_table_and_trailer(self.start_offset, &resolver));
        self.refs = refs;

        if let Some(crypt) = trailer.get("Encrypt") {
            // The first element of the trailer's "ID" array seeds key derivation.
            let key = trailer
                .get("ID")
                .ok_or(PdfError::MissingEntry {
                    typ: "Trailer",
                    field: "ID".into(),
                })?
                .as_array()?
                .get(0)
                .ok_or(PdfError::MissingEntry {
                    typ: "Trailer",
                    field: "ID[0]".into()
                })?
                .as_string()?
                .as_bytes();

            let resolver = StorageResolver::new(self);
            let dict = CryptDict::from_primitive(crypt.clone(), &resolver)?;

            self.decoder = Some(t!(Decoder::from_password(&dict, key, password)));
            // The encryption dictionary itself is never encrypted; remember
            // its id so it can be skipped during decryption.
            if let Primitive::Reference(reference) = crypt {
                self.decoder.as_mut().unwrap().encrypt_indirect_object = Some(*reference);
            }
            // Likewise remember the catalog's Metadata stream, which may be
            // stored unencrypted.
            if let Some(Primitive::Reference(catalog_ref)) = trailer.get("Root") {
                let resolver = StorageResolver::new(self);
                let catalog = t!(t!(resolver.resolve(*catalog_ref)).resolve(&resolver)?.into_dictionary());
                if let Some(Primitive::Reference(metadata_ref)) = catalog.get("Metadata") {
                    self.decoder.as_mut().unwrap().metadata_indirect_object = Some(*metadata_ref);
                }
            }
        }
        Ok(trailer)
    }
    /// Sequentially scan the file body for indirect objects and trailer
    /// dictionaries, without relying on the xref table. Useful for
    /// damaged files.
    pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
        // NOTE(review): these unwraps panic on files where the xref offset or
        // the read fails — consider surfacing the error instead.
        let xref_offset = self.backend.locate_xref_offset().unwrap();
        let slice = self.backend.read(self.start_offset .. xref_offset).unwrap();
        let mut lexer = Lexer::with_offset(slice, 0);
        
        // Skip over a classic xref table until the "trailer" keyword.
        fn skip_xref(lexer: &mut Lexer) -> Result<()> {
            while lexer.next()? != "trailer" {

            }
            Ok(())
        }

        let resolver = StorageResolver::new(self);
        std::iter::from_fn(move || {
            loop {
                let pos = lexer.get_pos();
                match parse_indirect_object(&mut lexer, &resolver, self.decoder.as_ref(), ParseFlags::all()) {
                    Ok((r, p)) => return Some(Ok(ScanItem::Object(r, p))),
                    // End of the scanned region: stop the iterator.
                    Err(e) if e.is_eof() => return None,
                    Err(e) => {
                        // Not an indirect object here; rewind and check for
                        // xref/startxref keywords before giving up.
                        lexer.set_pos(pos);
                        if let Ok(s) = lexer.next() {
                            debug!("next: {:?}", String::from_utf8_lossy(s.as_slice()));
                            match &*s {
                                b"xref" => {
                                    if let Err(e) = skip_xref(&mut lexer) {
                                        return Some(Err(e));
                                    }
                                    if let Ok(trailer) = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::DICT).and_then(|p| p.into_dictionary()) {
                                        return Some(Ok(ScanItem::Trailer(trailer)));
                                    }
                                }
                                // "startxref" is followed by one offset token;
                                // consume it and keep scanning.
                                b"startxref" if lexer.next().is_ok() => {
                                    continue;
                                }
                                _ => {}
                            }
                        }
                        return Some(Err(e));
                    }
                }
            }
        })
    }
    /// Resolve a reference to its primitive: pending changes win over the
    /// backend; backend objects are parsed from their recorded position or
    /// extracted from the containing object stream.
    fn resolve_ref(&self, r: PlainRef, flags: ParseFlags, resolve: &impl Resolve) -> Result<Primitive> {
        match self.changes.get(&r.id) {
            Some((p, _)) => Ok((*p).clone()),
            None => match t!(self.refs.get(r.id)) {
                // Plain object: parse directly at its byte offset.
                XRef::Raw {pos, ..} => {
                    let mut lexer = Lexer::with_offset(t!(self.backend.read(self.start_offset + pos ..)), self.start_offset + pos);
                    let p = t!(parse_indirect_object(&mut lexer, resolve, self.decoder.as_ref(), flags)).1;
                    Ok(p)
                }
                // Object stored inside an object stream.
                XRef::Stream {stream_id, index} => {
                    if !flags.contains(ParseFlags::STREAM) {
                        return Err(PdfError::PrimitiveNotAllowed { found: ParseFlags::STREAM, allowed: flags });
                    }
                    // use get to cache the object stream
                    let obj_stream = resolve.get::<ObjectStream>(Ref::from_id(stream_id))?;

                    let (data, range) = t!(obj_stream.get_object_slice(index, resolve));
                    let slice = data.get(range.clone()).ok_or_else(|| other!("invalid range {:?}, but only have {} bytes", range, data.len()))?;
                    parse(slice, resolve, flags)
                }
                XRef::Free {..} => err!(PdfError::FreeObject {obj_nr: r.id}),
                XRef::Promised => unimplemented!(),
                XRef::Invalid => err!(PdfError::NullRef {obj_nr: r.id}),
            }
        }
    }
}
269
/// Item yielded by [`Storage::scan`]: either an indirect object or a
/// trailer dictionary encountered while scanning the file sequentially.
pub enum ScanItem {
    Object(PlainRef, Primitive),
    Trailer(Dictionary)
}
274
/// `Resolve` implementation borrowing a [`Storage`].
///
/// `chain` records the references currently being resolved through this
/// resolver so that reference cycles can be detected (see `get`).
struct StorageResolver<'a, B, OC, SC, L> {
    storage: &'a Storage<B, OC, SC, L>,
    chain: Mutex<Vec<PlainRef>>,
}
impl<'a, B, OC, SC, L> StorageResolver<'a, B, OC, SC, L> {
    /// Wrap `storage` with an empty resolution chain.
    pub fn new(storage: &'a Storage<B, OC, SC, L>) -> Self {
        StorageResolver {
            storage,
            chain: Mutex::new(vec![])
        }
    }
}
287
/// Minimal scope guard: invokes the wrapped closure when dropped,
/// so cleanup runs on every exit path of the enclosing scope.
struct Defer<F>(F) where F: FnMut();

impl<F> Drop for Defer<F>
where
    F: FnMut(),
{
    fn drop(&mut self) {
        let callback = &mut self.0;
        callback();
    }
}
294
impl<'a, B, OC, SC, L> Resolve for StorageResolver<'a, B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log
{
    /// Resolve `r` to a primitive via the storage (pending changes first,
    /// then the xref table — see `Storage::resolve_ref`).
    fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, _depth: usize) -> Result<Primitive> {
        let storage = self.storage;
        storage.log.load_object(r);

        storage.resolve_ref(r, flags, self)
    }

    /// Resolve and decode `r` as a `T`, memoized through the object cache.
    fn get<T: Object+DataSize>(&self, r: Ref<T>) -> Result<RcRef<T>> {
        let key = r.get_inner();
        self.storage.log.log_get(key);
        
        // Cycle detection: refuse to resolve a reference that is already
        // being resolved further up this call chain.
        {
            debug!("get {key:?} as {}", std::any::type_name::<T>());
            let mut chain = self.chain.lock().unwrap();
            if chain.contains(&key) {
                bail!("Recursive reference");
            }
            chain.push(key);
        }
        // Pop `key` from the chain again when this function returns,
        // whichever exit path is taken.
        let _defer = Defer(|| {
            let mut chain = self.chain.lock().unwrap();
            assert_eq!(chain.pop(), Some(key));
        });
        
        let res = self.storage.cache.get_or_compute(key, || {
            match self.resolve(key).and_then(|p| T::from_primitive(p, self)) {
                Ok(obj) => Ok(AnySync::new(Shared::new(obj))),
                Err(e) => {
                    // Re-resolve only to log the primitive that failed to decode.
                    let p = self.resolve(key);
                    warn!("failed to decode {p:?} as {}", std::any::type_name::<T>());
                    Err(Arc::new(e))
                }
            }
        });
        match res {
            Ok(any) => {
                match any.downcast() {
                    Ok(val) => Ok(RcRef::new(key, val)),
                    Err(_) => {
                        // The cache held this object decoded as a different
                        // type; re-parse it as `T` instead.
                        let p = self.resolve(key)?;
                        Ok(RcRef::new(key, T::from_primitive(p, self)?.into()))
                    }
                }
            }
            Err(e) => Err(PdfError::Shared { source: e.clone()}),
        }
    }
    fn options(&self) -> &ParseOptions {
        &self.storage.options
    }
    /// Raw stream bytes: decrypted if necessary, but with no filters applied.
    fn stream_data(&self, id: PlainRef, range: Range<usize>) -> Result<Arc<[u8]>> {
        self.storage.decode(id, range, &[])
    }

    /// Fully decoded stream data, memoized through the stream cache.
    fn get_data_or_decode(&self, id: PlainRef, range: Range<usize>, filters: &[StreamFilter]) -> Result<Arc<[u8]>> {
        self.storage.stream_cache.get_or_compute(id, || self.storage.decode(id, range, filters).map_err(Arc::new))
        .map_err(|e| e.into())
    }
}
361
impl<B, OC, SC, L> Updater for Storage<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Allocate a fresh object number for `obj` and record it as a pending
    /// change (generation 0). Nothing is written until `save`.
    fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
        let id = self.refs.len() as u64;
        self.refs.push(XRef::Promised);
        let primitive = obj.to_primitive(self)?;
        self.changes.insert(id, (primitive, 0));
        let rc = Shared::new(obj);
        let r = PlainRef { id, gen: 0 };
        
        Ok(RcRef::new(r, rc))
    }
    /// Replace the object at `old` with `obj` as a pending change.
    ///
    /// Objects living inside object streams cannot be updated in place and
    /// are re-created under a new id. If both the pending change and the new
    /// value are dictionaries, the new entries are merged into the old ones.
    fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
        use std::collections::hash_map::Entry;

        let r = match self.refs.get(old.id)? {
            // NOTE(review): updating a free or invalid entry panics — verify
            // callers never pass such references.
            XRef::Free { .. } => panic!(),
            XRef::Raw { gen_nr, .. } => PlainRef { id: old.id, gen: gen_nr },
            XRef::Stream { .. } => return self.create(obj),
            XRef::Promised => PlainRef { id: old.id, gen: 0 },
            XRef::Invalid => panic!()
        };
        let primitive = obj.to_primitive(self)?;
        match self.changes.entry(old.id) {
            Entry::Vacant(e) => {
                e.insert((primitive, r.gen));
            }
            // Merge dictionary updates instead of overwriting wholesale.
            Entry::Occupied(mut e) => match (e.get_mut(), primitive) {
                ((Primitive::Dictionary(ref mut dict), _), Primitive::Dictionary(new)) => {
                    dict.append(new);
                }
                (old, new) => {
                    *old = (new, r.gen);
                }
            }
        }
        let rc = Shared::new(obj);
        
        Ok(RcRef::new(r, rc))
    }

    /// Reserve an object number without providing a value yet.
    fn promise<T: Object>(&mut self) -> PromisedRef<T> {
        let id = self.refs.len() as u64;
        
        self.refs.push(XRef::Promised);
        
        PromisedRef {
            inner: PlainRef {
                id,
                gen: 0
            },
            _marker:    PhantomData
        }
    }
    
    /// Provide the value for a previously promised object number.
    fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
        self.update(promise.inner, obj)
    }
}
426
impl<OC, SC, L> Storage<Vec<u8>, OC, SC, L>
where
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log
{
    /// Append all pending changes plus a new cross-reference stream to the
    /// in-memory backend (incremental update) and return the file bytes.
    ///
    /// `trailer` is re-read from the written dictionary afterwards, so it
    /// reflects the saved state.
    pub fn save(&mut self, trailer: &mut Trailer) -> Result<&[u8]> {
        // writing the trailer generates another id for the info dictionary
        trailer.size = (self.refs.len() + 2) as _;
        let trailer_dict = trailer.to_dict(self)?;
        
        // Reserve an object id for the xref stream before writing anything.
        let xref_promise = self.promise::<Stream<XRefInfo>>();

        // Write changed objects in ascending id order for a deterministic file.
        let mut changes: Vec<_> = self.changes.iter().collect();
        changes.sort_unstable_by_key(|&(id, _)| id);

        for &(&id, &(ref primitive, gen)) in changes.iter() {
            let pos = self.backend.len();
            // Record where this object was written.
            self.refs.set(id, XRef::Raw { pos: pos as _, gen_nr: gen });
            writeln!(self.backend, "{} {} obj", id, gen)?;
            primitive.serialize(&mut self.backend)?;
            writeln!(self.backend, "endobj")?;
        }

        let xref_pos = self.backend.len();
        self.refs.set(xref_promise.get_inner().id, XRef::Raw { pos: xref_pos, gen_nr: 0 });
        // only write up to the xref stream obj id
        let stream = self.refs.write_stream(xref_promise.get_inner().id as usize + 1)?;

        writeln!(self.backend, "{} {} obj", xref_promise.get_inner().id, 0)?;
        let mut xref_and_trailer = stream.to_pdf_stream(&mut NoUpdate)?;
        // Merge the trailer entries into the xref stream's dictionary, as the
        // xref stream doubles as the trailer.
        for (k, v) in trailer_dict.iter() {
            xref_and_trailer.info.insert(k.clone(), v.clone());
        }

        xref_and_trailer.serialize(&mut self.backend)?;
        writeln!(self.backend, "endobj")?;

        let _ = self.fulfill(xref_promise, stream)?;

        // Point the reader at the new xref stream and terminate the file.
        write!(self.backend, "\nstartxref\n{}\n%%EOF", xref_pos).unwrap();

        // update trailer which may have change now.
        self.cache.clear();
        *trailer = Trailer::from_dict(trailer_dict, &self.resolver())?;

        Ok(&self.backend)
    }
}
476
/// Default object cache: shared, thread-safe map from reference to decoded object.
#[cfg(feature="cache")]
pub type ObjectCache = Arc<SyncCache<PlainRef, Result<AnySync, Arc<PdfError>>>>;
/// Default stream cache: shared, thread-safe map from reference to decoded data.
#[cfg(feature="cache")]
pub type StreamCache = Arc<SyncCache<PlainRef, Result<Arc<[u8]>, Arc<PdfError>>>>;
/// A [`File`] using the default caches and no logging.
#[cfg(feature="cache")]
pub type CachedFile<B> = File<B, ObjectCache, StreamCache, NoLog>;
483
/// A parsed PDF file: the underlying [`Storage`] plus its decoded trailer.
pub struct File<B, OC, SC, L> {
    storage:        Storage<B, OC, SC, L>,
    pub trailer:    Trailer,
}
// `File` implements `Updater` by delegating every operation to its storage.
impl<B, OC, SC, L> Updater for File<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>> {
        self.storage.create(obj)
    }
    fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>> {
        self.storage.update(old, obj)
    }
    fn promise<T: Object>(&mut self) -> PromisedRef<T> {
        self.storage.promise()
    }
    fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>> {
        self.storage.fulfill(promise, obj)
    }
}
508
impl<OC, SC, L> File<Vec<u8>, OC, SC, L>
where
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Serialize pending changes (via `Storage::save`) and write the whole
    /// file to `path`.
    pub fn save_to(&mut self, path: impl AsRef<Path>) -> Result<()> {
        std::fs::write(path, self.storage.save(&mut self.trailer)?)?;
        Ok(())
    }
    
    /// Serialize pending changes and return the complete file bytes.
    pub fn save(&mut self) -> Result<Vec<u8>> {
        Ok(self.storage.save(&mut self.trailer)?.to_vec())
    }
}
524
525
/// Builder for opening a [`File`]: caches, logger, password and parse options.
pub struct FileOptions<'a, OC, SC, L> {
    oc: OC,
    sc: SC,
    log: L,
    // Password tried when the file turns out to be encrypted.
    password: &'a [u8],
    parse_options: ParseOptions,
}
533impl FileOptions<'static, NoCache, NoCache, NoLog> {
534    pub fn uncached() -> Self {
535        FileOptions {
536            oc: NoCache,
537            sc: NoCache,
538            password: b"",
539            parse_options: ParseOptions::strict(),
540            log: NoLog,
541        }
542    }
543}
544
#[cfg(feature="cache")]
impl FileOptions<'static, ObjectCache, StreamCache, NoLog> {
    /// Builder using the default shared caches, no logging, an empty
    /// password and strict parsing.
    pub fn cached() -> Self {
        FileOptions {
            oc: SyncCache::new(),
            sc: SyncCache::new(),
            password: b"",
            parse_options: ParseOptions::strict(),
            log: NoLog
        }
    }
}
557impl<'a, OC, SC, L> FileOptions<'a, OC, SC, L>
558where
559    OC: Cache<Result<AnySync, Arc<PdfError>>>,
560    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
561    L: Log,
562{
563    pub fn password(self, password: &'a [u8]) -> FileOptions<'a, OC, SC, L> {
564        FileOptions {
565            password,
566            .. self
567        }
568    }
569    pub fn cache<O, S>(self, oc: O, sc: S) -> FileOptions<'a, O, S, L> {
570        let FileOptions { oc: _, sc: _, password, parse_options, log } = self;
571        FileOptions {
572            oc,
573            sc,
574            password,
575            parse_options,
576            log,
577        }
578    }
579    pub fn log<Log>(self, log: Log) -> FileOptions<'a, OC, SC, Log> {
580        let FileOptions { oc, sc, password, parse_options, .. } = self;
581        FileOptions {
582            oc,
583            sc,
584            password,
585            parse_options,
586            log,
587        }
588    }
589    pub fn parse_options(self, parse_options: ParseOptions) -> Self {
590        FileOptions { parse_options, .. self }
591    }
592
593    /// open a file
594    pub fn open(self, path: impl AsRef<Path>) -> Result<File<Vec<u8>, OC, SC, L>> {
595        let data = std::fs::read(path)?;
596        self.load(data)
597    }
598    pub fn storage(self) -> Storage<Vec<u8>, OC, SC, L> {
599        let FileOptions { oc, sc, log, .. } = self;
600        Storage::empty(oc, sc, log)
601    }
602
603    /// load data from the given backend
604    pub fn load<B: Backend>(self, backend: B) -> Result<File<B, OC, SC, L>> {
605        let FileOptions { oc, sc, password, parse_options, log } = self;
606        File::load_data(backend, password, parse_options, oc, sc, log)
607    }
608}
609
610
impl<B, OC, SC, L> File<B, OC, SC, L>
where
    B: Backend,
    OC: Cache<Result<AnySync, Arc<PdfError>>>,
    SC: Cache<Result<Arc<[u8]>, Arc<PdfError>>>,
    L: Log,
{
    /// Parse `backend` into a `File`: build the storage, read xref table and
    /// trailer (decrypting with `password` if necessary), then decode the
    /// trailer dictionary into a [`Trailer`].
    fn load_data(backend: B, password: &[u8], options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result<Self> {
        let mut storage = Storage::with_cache(backend, options, object_cache, stream_cache, log)?;
        let trailer = storage.load_storage_and_trailer_password(password)?;

        let resolver = StorageResolver::new(&storage);
        let trailer = t!(Trailer::from_primitive(
            Primitive::Dictionary(trailer),
            &resolver,
        ));
        Ok(File { storage, trailer })
    }
    /// Assemble a `File` from already-built parts.
    pub fn new(storage: Storage<B, OC, SC, L>, trailer: Trailer) -> Self {
        File { storage, trailer }
    }
    /// A fresh resolver borrowing this file's storage.
    pub fn resolver(&self) -> impl Resolve + '_ {
        StorageResolver::new(&self.storage)
    }

    /// The document catalog referenced by the trailer's "Root" entry.
    pub fn get_root(&self) -> &Catalog {
        &self.trailer.root
    }

    /// Iterate over all pages in document order.
    pub fn pages(&self) -> impl Iterator<Item=Result<PageRc>> + '_ {
        (0 .. self.num_pages()).map(move |n| self.get_page(n))
    }
    /// Number of pages, as recorded in the page tree root.
    pub fn num_pages(&self) -> u32 {
        self.trailer.root.pages.count
    }

    /// Look up page `n` (zero-based) in the page tree.
    pub fn get_page(&self, n: u32) -> Result<PageRc> {
        let resolver = StorageResolver::new(&self.storage);
        self.trailer.root.pages.page(&resolver, n)
    }

    /// Store `catalog` as a new object and make it the document root.
    pub fn update_catalog(&mut self, catalog: Catalog) -> Result<()> {
        self.trailer.root = self.create(catalog)?;
        Ok(())
    }

    /// Replace the parse options used by subsequent resolutions.
    pub fn set_options(&mut self, options: ParseOptions) {
        self.storage.options = options;
    }

    /// Sequentially scan the file for objects and trailers
    /// (see `Storage::scan`).
    pub fn scan(&self) -> impl Iterator<Item = Result<ScanItem>> + '_ {
        self.storage.scan()
    }

    /// Access the logger passed in at construction.
    pub fn log(&self) -> &L {
        &self.storage.log
    }

    /// the version string in the file header.
    /// if the version field in the Catalog is set, this should be used instead.
    pub fn version(&self) -> Result<String> {
        self.storage.version()
    }
}
675
/// The PDF file trailer (ISO 32000, "File Trailer"): points at the document
/// catalog and carries bookkeeping entries for the cross-reference table.
#[derive(Object, ObjectWrite, DataSize)]
pub struct Trailer {
    /// Total number of entries in the cross-reference table.
    #[pdf(key = "Size")]
    pub size:               i32,

    /// Byte offset of the previous xref section, for incrementally updated files.
    #[pdf(key = "Prev")]
    pub prev_trailer_pos:   Option<i32>,

    /// The document catalog ("Root").
    #[pdf(key = "Root")]
    pub root:               RcRef<Catalog>,

    /// Encryption dictionary, present when the file is encrypted.
    #[pdf(key = "Encrypt")]
    pub encrypt_dict:       Option<RcRef<CryptDict>>,

    /// Document information dictionary (title, author, ...).
    #[pdf(key = "Info", indirect)]
    pub info_dict:          Option<InfoDict>,

    /// File identifier — per the spec, usually a pair of byte strings.
    #[pdf(key = "ID")]
    pub id:                 Vec<PdfString>,
}
696
697/*
698pub struct XRefStream {
699    pub data: Vec<u8>,
700    pub info: XRefInfo,
701}
702
703impl Object for XRefStream {
704    fn serialize<W: io::Write>(&self, _out: &mut W) -> io::Result<()> {
705        unimplemented!();
706    }
707    fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> {
708        let stream = p.to_stream(resolve)?;
709        let info = XRefInfo::from_primitive(Primitive::Dictionary (stream.info), resolve)?;
710        let data = stream.data.clone();
711        Ok(XRefStream {
712            data: data,
713            info: info,
714        })
715    }
716}
717*/