// metaemu_state/paged.rs
1use fugue::ir::{Address, AddressSpace};
2
3use std::mem::take;
4use std::ops::Range;
5use std::sync::Arc;
6
7use iset::IntervalMap;
8use thiserror::Error;
9use ustr::Ustr;
10
11use crate::chunked::{self, ChunkState};
12use crate::flat::{self, FlatState};
13use crate::traits::{State, StateOps, StateValue};
14
15#[derive(Debug, Error, Clone)]
16pub enum Error {
17    #[error("unmapped virtual address at {address}")]
18    UnmappedAddress { address: Address, size: usize },
19    #[error("overlapped access from {address} byte access at {size}")]
20    OverlappedAccess { address: Address, size: usize },
21    #[error("overlapped mapping of {size} bytes from {address}")]
22    OverlappedMapping { address: Address, size: usize },
23    #[error(transparent)]
24    Backing(flat::Error),
25    #[error(transparent)]
26    Chunked(chunked::Error),
27}
28
impl Error {
    /// Extracts the `(address, size)` pair of the faulting access described
    /// by this error.
    ///
    /// # Panics
    ///
    /// Panics if the error variant does not describe an access fault (the
    /// final catch-all arm).
    pub fn access(&self) -> (Address, usize) {
        match self {
            Self::UnmappedAddress { address, size }
            | Self::OverlappedAccess { address, size }
            | Self::OverlappedMapping { address, size } => (*address, *size),
            Self::Backing(
                flat::Error::OOBRead { address, size } | flat::Error::OOBWrite { address, size },
            ) => (*address, *size),
            Self::Backing(flat::Error::AccessViolation { address, size, .. }) => {
                // AccessViolation stores its address in a representation that
                // needs conversion into `Address`.
                (address.into(), *size)
            }
            Self::Chunked(
                chunked::Error::Backing(
                    flat::Error::OOBRead { address, size }
                    | flat::Error::OOBWrite { address, size },
                )
                | chunked::Error::AccessUnmanaged { address, size }
                | chunked::Error::HeapOverflow { address, size },
            ) => (*address, *size),
            Self::Chunked(chunked::Error::Backing(flat::Error::AccessViolation {
                address,
                size,
                ..
            })) => (address.into(), *size),
            _ => panic!("error is not an access violation"),
        }
    }

    /// Wraps a `flat::Error` as `Self::Backing`, rebasing the address it
    /// carries by `base`.
    ///
    /// NOTE(review): callers in `StateOps` pass the *translated* (segment-
    /// local backing) address as `base`; this assumes the addresses inside
    /// `flat::Error` are relative to the start of that flat state — confirm
    /// against `flat`'s error construction.
    fn backing(base: Address, e: flat::Error) -> Self {
        Self::Backing(match e {
            flat::Error::OOBRead { address, size } => flat::Error::OOBRead {
                address: address + base,
                size,
            },
            flat::Error::OOBWrite { address, size } => flat::Error::OOBWrite {
                address: address + base,
                size,
            },
            flat::Error::AccessViolation {
                address,
                access,
                size,
            } => flat::Error::AccessViolation {
                address: address + base,
                access,
                size,
            },
        })
    }
}
80
/// A region of the paged address space.
#[derive(Debug, Clone)]
pub enum Segment<T: StateValue> {
    // A window into the shared backing `FlatState`, starting at `offset`.
    Static { name: Ustr, offset: usize },
    // A dynamically managed (chunk-allocated) mapping with its own backing.
    Mapping { name: Ustr, backing: ChunkState<T> },
    // A fixed-size mapping with its own private flat backing.
    StaticMapping { name: Ustr, backing: FlatState<T> },
}
87
/// A shared reference to a mapped segment's backing, as returned by
/// [`Segment::as_mapping`].
#[derive(Debug, Clone)]
pub enum MappingRef<'a, T: StateValue> {
    // Chunk-managed (dynamic) mapping.
    Dynamic(&'a ChunkState<T>),
    // Privately backed static mapping.
    Static(&'a FlatState<T>),
}
93
/// A reference to a mapped segment's backing, as returned by
/// [`Segment::as_mapping_mut`].
///
/// NOTE(review): both variants hold *shared* references even though the
/// name — and `as_mapping_mut`, which builds this from `&mut` borrows —
/// suggest mutable access was intended (`&mut T` silently coerces to `&T`
/// at the construction sites). Confirm whether these should be `&'a mut`,
/// which would also require dropping the `Clone` derive.
#[derive(Debug, Clone)]
pub enum MappingMut<'a, T: StateValue> {
    Dynamic(&'a ChunkState<T>),
    Static(&'a FlatState<T>),
}
99
100impl<T: StateValue> Segment<T> {
101    pub fn new<S: AsRef<str>>(name: S, offset: usize) -> Self {
102        Self::Static {
103            name: Ustr::from(name.as_ref()),
104            offset,
105        }
106    }
107
108    pub fn mapping<S: AsRef<str>>(name: S, mapping: ChunkState<T>) -> Self {
109        Self::Mapping {
110            name: Ustr::from(name.as_ref()),
111            backing: mapping,
112        }
113    }
114
115    pub fn static_mapping<S: AsRef<str>>(name: S, mapping: FlatState<T>) -> Self {
116        Self::StaticMapping {
117            name: Ustr::from(name.as_ref()),
118            backing: mapping,
119        }
120    }
121
122    pub fn is_static(&self) -> bool {
123        matches!(self, Self::Static { .. })
124    }
125
126    pub fn is_mapping(&self) -> bool {
127        matches!(self, Self::Mapping { .. })
128    }
129
130    pub fn as_mapping(&self) -> Option<MappingRef<T>> {
131        match self {
132            Self::Mapping { ref backing, .. } => Some(MappingRef::Dynamic(backing)),
133            Self::StaticMapping { ref backing, .. } => Some(MappingRef::Static(backing)),
134            _ => None,
135        }
136    }
137
138    pub fn as_mapping_mut(&mut self) -> Option<MappingMut<T>> {
139        match self {
140            Self::Mapping {
141                ref mut backing, ..
142            } => Some(MappingMut::Dynamic(backing)),
143            Self::StaticMapping {
144                ref mut backing, ..
145            } => Some(MappingMut::Static(backing)),
146            _ => None,
147        }
148    }
149
150    pub fn name(&self) -> &str {
151        match self {
152            Self::Static { name, .. }
153            | Self::Mapping { name, .. }
154            | Self::StaticMapping { name, .. } => name,
155        }
156    }
157
158    pub fn fork(&self) -> Self {
159        match self {
160            Self::Static { .. } => self.clone(),
161            Self::Mapping { name, backing } => Self::Mapping {
162                name: name.clone(),
163                backing: backing.fork(),
164            },
165            Self::StaticMapping { name, backing } => Self::StaticMapping {
166                name: name.clone(),
167                backing: backing.fork(),
168            },
169        }
170    }
171
172    pub fn restore(&mut self, other: &Self) {
173        match (self, other) {
174            (
175                Self::Static { name, offset },
176                Self::Static {
177                    name: rname,
178                    offset: roffset,
179                },
180            ) => {
181                if name != rname || offset != roffset {
182                    panic!("attempting to restore segment `{}` at {} from incompatible segment `{}` at {}",
183                           name,
184                           offset,
185                           rname,
186                           roffset
187                    );
188                }
189            }
190            (
191                Self::Mapping { name, backing },
192                Self::Mapping {
193                    name: rname,
194                    backing: rbacking,
195                },
196            ) => {
197                if name != rname
198                    || backing.base_address() != rbacking.base_address()
199                    || backing.len() != rbacking.len()
200                {
201                    panic!("attempting to restore segment `{}` at {} of size {} from incompatible segment `{}` at {} of size {}",
202                           name,
203                           backing.base_address(),
204                           backing.len(),
205                           rname,
206                           rbacking.base_address(),
207                           rbacking.len(),
208                    );
209                }
210
211                backing.restore(rbacking);
212            }
213            (
214                Self::StaticMapping { name, backing },
215                Self::StaticMapping {
216                    name: rname,
217                    backing: rbacking,
218                },
219            ) => {
220                if name != rname || backing.len() != rbacking.len() {
221                    panic!("attempting to restore segment `{}` of size {} from incompatible segment `{}` of size {}",
222                           name,
223                           backing.len(),
224                           rname,
225                           rbacking.len(),
226                    );
227                }
228
229                backing.restore(rbacking);
230            }
231            (slf, oth) => panic!(
232                "attempting to restore segment `{}` from segment `{}` which have different kinds",
233                slf.name(),
234                oth.name()
235            ),
236        }
237    }
238}
239
/// A paged memory model: an interval map of [`Segment`]s layered over a
/// shared flat backing state.
#[derive(Debug, Clone)]
pub struct PagedState<T: StateValue> {
    // Maps virtual address ranges to the segment servicing them.
    segments: IntervalMap<Address, Segment<T>>,
    // Shared backing storage, addressed via `Segment::Static` offsets.
    inner: FlatState<T>,
}
245
// Identity `AsRef`, so APIs taking `impl AsRef<PagedState<T>>` accept the
// state directly.
impl<T: StateValue> AsRef<Self> for PagedState<T> {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}
252
// Identity `AsMut`, mirroring the `AsRef` impl above for mutable contexts.
impl<T: StateValue> AsMut<Self> for PagedState<T> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
259
260impl<T: StateValue> PagedState<T> {
261    pub fn new(
262        mapping: impl IntoIterator<Item = (Range<Address>, Segment<T>)>,
263        space: Arc<AddressSpace>,
264        size: usize,
265    ) -> Self {
266        Self::from_parts(mapping, FlatState::new(space, size))
267    }
268
269    pub fn from_parts(
270        mapping: impl IntoIterator<Item = (Range<Address>, Segment<T>)>,
271        backing: FlatState<T>,
272    ) -> Self {
273        Self {
274            segments: IntervalMap::from_iter(mapping.into_iter().map(|(r, s)| (r.start..r.end, s))),
275            inner: backing,
276        }
277    }
278
279    pub fn static_mapping<S, A>(
280        &mut self,
281        name: S,
282        base_address: A,
283        size: usize,
284    ) -> Result<(), Error>
285    where
286        S: AsRef<str>,
287        A: Into<Address>,
288    {
289        let base_address = base_address.into();
290        let range = base_address..base_address + size; // TODO: error for zero-size
291
292        if self.segments.has_overlap(range.clone()) {
293            return Err(Error::OverlappedMapping {
294                address: base_address,
295                size,
296            });
297        }
298
299        self.segments.insert(
300            range,
301            Segment::static_mapping(name, FlatState::new(self.inner.address_space(), size)),
302        );
303        Ok(())
304    }
305
306    pub fn mapping<S, A>(&mut self, name: S, base_address: A, size: usize) -> Result<(), Error>
307    where
308        S: AsRef<str>,
309        A: Into<Address>,
310    {
311        let base_address = base_address.into();
312        let range = base_address..base_address + size; // TODO: error for zero-size
313
314        if self.segments.has_overlap(range.clone()) {
315            return Err(Error::OverlappedMapping {
316                address: base_address,
317                size,
318            });
319        }
320
321        self.segments.insert(
322            range,
323            Segment::mapping(
324                name,
325                ChunkState::new(self.inner.address_space(), base_address, size),
326            ),
327        );
328        Ok(())
329    }
330
331    pub fn segments(&self) -> &IntervalMap<Address, Segment<T>> {
332        &self.segments
333    }
334
335    pub fn mappings(&self) -> impl Iterator<Item = &ChunkState<T>> {
336        self.segments.values(..).filter_map(|v| {
337            if let Segment::Mapping { backing, .. } = v {
338                Some(backing)
339            } else {
340                None
341            }
342        })
343    }
344
345    pub fn mapping_for<A>(&self, address: A) -> Option<MappingRef<T>>
346    where
347        A: Into<Address>,
348    {
349        let address = address.into();
350        if address + 1usize < address {
351            return None;
352        }
353        self.segments
354            .values_overlap(address)
355            .next()
356            .and_then(|e| e.as_mapping())
357    }
358
359    pub fn mapping_for_mut<A>(&mut self, address: A) -> Option<MappingMut<T>>
360    where
361        A: Into<Address>,
362    {
363        let address = address.into();
364        if address + 1usize < address {
365            return None;
366        }
367        self.segments
368            .values_overlap_mut(address)
369            .next()
370            .and_then(|e| e.as_mapping_mut())
371    }
372
373    pub fn inner(&self) -> &FlatState<T> {
374        &self.inner
375    }
376
377    pub fn inner_mut(&mut self) -> &mut FlatState<T> {
378        &mut self.inner
379    }
380
381    pub fn address_space(&self) -> Arc<AddressSpace> {
382        self.inner.address_space()
383    }
384
385    pub fn address_space_ref(&self) -> &AddressSpace {
386        self.inner.address_space_ref()
387    }
388}
389
impl<T: StateValue> PagedState<T> {
    /// Resolves `address` to the flat state that services it and invokes
    /// `f(flat, translated_address, access_size)`.
    ///
    /// # Errors
    ///
    /// * `UnmappedAddress` — no segment covers `address`, or `address` is
    ///   the maximal address (any access would wrap).
    /// * `OverlappedAccess` — the access runs past the segment's end.
    /// * `Chunked` — a dynamic mapping rejected the translation.
    #[inline(always)]
    pub fn with_flat<'a, A, F, O: 'a>(
        &'a self,
        address: A,
        access_size: usize,
        f: F,
    ) -> Result<O, Error>
    where
        A: Into<Address>,
        F: FnOnce(&'a FlatState<T>, Address, usize) -> Result<O, Error>,
    {
        let address = address.into();
        // Wrap-around guard: `address + 1` comparing below `address` means
        // `address` is the maximal value and cannot begin a valid access.
        if address + 1usize < address {
            return Err(Error::UnmappedAddress {
                address,
                size: access_size,
            });
        }
        if let Some((interval, value)) = self.segments.overlap(address).next() {
            if address + access_size > interval.end {
                // FIXME: checked arith. (`address + access_size` may itself
                // wrap for huge sizes, defeating this bounds check.)
                return Err(Error::OverlappedAccess {
                    address,
                    size: access_size,
                });
            }

            match value {
                Segment::Mapping { ref backing, .. } => {
                    // Dynamic segment: translate through the chunk allocator
                    // into its private flat backing.
                    let translated = backing
                        .translate_checked(address, access_size)
                        .map_err(Error::Chunked)?;
                    f(
                        backing.inner(),
                        Address::from(translated as u64),
                        access_size,
                    )
                }
                Segment::StaticMapping { ref backing, .. } => {
                    // Private flat backing: rebase relative to segment start.
                    let translated = address - interval.start;
                    f(backing, translated, access_size)
                }
                Segment::Static { offset, .. } => {
                    // Shared backing: rebase onto the segment's offset.
                    let address = (address - interval.start) + *offset;
                    f(&self.inner, address, access_size)
                }
            }
        } else {
            Err(Error::UnmappedAddress {
                address,
                size: access_size,
            })
        }
    }

    /// Mutable counterpart of [`Self::with_flat`]; same translation and
    /// error behavior, but `f` receives `&mut FlatState`.
    #[inline(always)]
    pub fn with_flat_mut<'a, A, F, O: 'a>(
        &'a mut self,
        address: A,
        access_size: usize,
        f: F,
    ) -> Result<O, Error>
    where
        A: Into<Address>,
        F: FnOnce(&'a mut FlatState<T>, Address, usize) -> Result<O, Error>,
    {
        let address = address.into();
        // Wrap-around guard, as in `with_flat`.
        if address + 1usize < address {
            return Err(Error::UnmappedAddress {
                address,
                size: access_size,
            });
        }
        if let Some((interval, value)) = self.segments.overlap_mut(address).next() {
            if address + access_size > interval.end {
                // Same unchecked-arithmetic caveat as in `with_flat`.
                return Err(Error::OverlappedAccess {
                    address,
                    size: access_size,
                });
            }
            match value {
                Segment::Mapping {
                    ref mut backing, ..
                } => {
                    let translated = backing
                        .translate_checked(address, access_size)
                        .map_err(Error::Chunked)?;
                    f(
                        backing.inner_mut(),
                        Address::from(translated as u64),
                        access_size,
                    )
                }
                Segment::StaticMapping {
                    ref mut backing, ..
                } => {
                    let translated = address - interval.start;
                    f(backing, translated, access_size)
                }
                Segment::Static { offset, .. } => {
                    let address = (address - interval.start) + *offset;
                    f(&mut self.inner, address, access_size)
                }
            }
        } else {
            Err(Error::UnmappedAddress {
                address,
                size: access_size,
            })
        }
    }

    /// Like [`Self::with_flat`], but the access size is implied: for
    /// static segments it spans from `address` to the end of the segment;
    /// for dynamic mappings the whole mapping length is used.
    ///
    /// NOTE(review): for the `Mapping` arm, passing the full mapping
    /// length to `translate_checked` presumably only succeeds when
    /// `address` is at (or near) the mapping base — confirm against
    /// `ChunkState::translate_checked`.
    #[inline(always)]
    pub fn with_flat_from<'a, A, F, O: 'a>(&'a self, address: A, f: F) -> Result<O, Error>
    where
        A: Into<Address>,
        F: FnOnce(&'a FlatState<T>, Address, usize) -> Result<O, Error>,
    {
        let address = address.into();
        // Wrap-around guard; errors report a nominal 1-byte access.
        if address + 1usize < address {
            return Err(Error::UnmappedAddress {
                address,
                size: 1,
            });
        }
        if let Some((interval, value)) = self.segments.overlap(address).next() {
            match value {
                Segment::Mapping { ref backing, .. } => {
                    // TODO: Chunked::available_len (view whole allocation)
                    let access_size = backing.len(); // FIXME: should this be -1 due to the red-zone?
                    let translated = backing
                        .translate_checked(address, access_size)
                        .map_err(Error::Chunked)?;
                    f(
                        backing.inner(),
                        Address::from(translated as u64),
                        access_size,
                    )
                }
                Segment::StaticMapping { ref backing, .. } => {
                    // Remaining bytes from `address` to the segment's end.
                    let max_access_size =
                        usize::from(interval.end - interval.start);
                    let address = address - interval.start;

                    let access_size = max_access_size - usize::from(address);

                    f(backing, address, access_size)
                }
                Segment::Static { offset, .. } => {
                    // Remaining bytes, rebased onto the shared backing.
                    let max_access_size =
                        usize::from(interval.end - interval.start);
                    let offset_in = address - interval.start;

                    let address = offset_in + *offset;
                    let access_size = max_access_size - usize::from(offset_in);

                    f(&self.inner, address, access_size)
                }
            }
        } else {
            Err(Error::UnmappedAddress { address, size: 1 })
        }
    }

    /// Borrows all values from `address` to the end of its segment (or
    /// mapping — see [`Self::with_flat_from`]).
    pub fn view_values_from<A>(&self, address: A) -> Result<&[T], Error>
    where
        A: Into<Address>,
    {
        self.with_flat_from(address, |inner, address, n| {
            inner
                .view_values(address, n)
                .map_err(|e| Error::backing(address, e))
        })
    }

    /// Returns the interval and segment covering `address`.
    ///
    /// # Errors
    ///
    /// `UnmappedAddress` (with a nominal size of 1) when `address` is the
    /// maximal address or no segment covers it.
    pub fn segment_bounds<A>(&self, address: A) -> Result<(Range<Address>, &Segment<T>), Error>
    where
        A: Into<Address>,
    {
        let address = address.into();
        if address + 1usize < address {
            return Err(Error::UnmappedAddress {
                address,
                size: 1usize,
            });
        }
        self.segments
            .overlap(address)
            .next()
            .ok_or_else(|| Error::UnmappedAddress {
                address,
                size: 1usize,
            })
    }
}
586
impl<V: StateValue> State for PagedState<V> {
    type Error = Error;

    /// Deep-copies the paged state; every segment is forked so the copy
    /// shares no mutable backing with `self`.
    fn fork(&self) -> Self {
        Self {
            segments: self.segments.iter(..).map(|(i, v)| (i, v.fork())).collect(),
            inner: self.inner.fork(),
        }
    }

    /// Restores this state from `other` (typically an earlier fork).
    ///
    /// Segments are matched by interval: a segment whose interval is also
    /// present in `other` is restored from it; segments absent from
    /// `other` are dropped.
    ///
    /// NOTE(review): segments present only in `other` are NOT re-added
    /// here — confirm that callers only ever restore toward a snapshot
    /// whose segment map is a superset-compatible ancestor.
    fn restore(&mut self, other: &Self) {
        self.inner.restore(&other.inner);

        // Take the map by value so entries can be rebuilt in place.
        let segments = take(&mut self.segments);
        self.segments = segments
            .into_iter(..)
            .filter_map(|(i, mut v)| {
                if let Some(vo) = other.segments.get(i.clone()) {
                    v.restore(vo);
                    Some((i, v))
                } else {
                    None
                }
            })
            .collect();
    }
}
614
615impl<V: StateValue> StateOps for PagedState<V> {
616    type Value = V;
617
618    fn copy_values<F, T>(&mut self, from: F, to: T, size: usize) -> Result<(), Error>
619    where
620        F: Into<Address>,
621        T: Into<Address>,
622    {
623        let from = from.into();
624        let to = to.into();
625
626        // TODO: can we avoid the intermediate allocation?
627
628        let vals = self.view_values(from, size)?.to_vec();
629        let view = self.view_values_mut(to, size)?;
630
631        for (d, s) in view.iter_mut().zip(vals.into_iter()) {
632            *d = s;
633        }
634
635        Ok(())
636    }
637
638    fn get_values<A>(&self, address: A, values: &mut [Self::Value]) -> Result<(), Error>
639    where
640        A: Into<Address>,
641    {
642        let address = address.into();
643        let n = values.len();
644
645        self.with_flat(address, n, |inner, address, _size| {
646            inner
647                .get_values(address, values)
648                .map_err(|e| Error::backing(address, e))
649        })
650    }
651
652    fn view_values<A>(&self, address: A, n: usize) -> Result<&[Self::Value], Error>
653    where
654        A: Into<Address>,
655    {
656        let address = address.into();
657        self.with_flat(address, n, |inner, address, n| {
658            inner
659                .view_values(address, n)
660                .map_err(|e| Error::backing(address, e))
661        })
662    }
663
664    fn view_values_mut<A>(&mut self, address: A, n: usize) -> Result<&mut [Self::Value], Error>
665    where
666        A: Into<Address>,
667    {
668        let address = address.into();
669        self.with_flat_mut(address, n, |inner, address, n| {
670            inner
671                .view_values_mut(address, n)
672                .map_err(|e| Error::backing(address, e))
673        })
674    }
675
676    fn set_values<A>(&mut self, address: A, values: &[Self::Value]) -> Result<(), Error>
677    where
678        A: Into<Address>,
679    {
680        let address = address.into();
681        let n = values.len();
682        self.with_flat_mut(address, n, |inner, address, _size| {
683            inner
684                .set_values(address, values)
685                .map_err(|e| Error::backing(address, e))
686        })
687    }
688
689    fn len(&self) -> usize {
690        // what to do here? sum of all sizes?
691        self.inner.len() + self.mappings().map(|m| m.len()).sum::<usize>()
692    }
693}