// ratsat/alloc.rs

/*****************************************************************************************[alloc.rs]
Copyright (c) 2008-2010, Niklas Sorensson (MiniSat)
Copyright (c) 2018-2018, Masaki Hara

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**************************************************************************************************/
20
21use std::cmp;
22use std::fmt;
23use std::marker::PhantomData;
24use std::ops;
25
/// Bump-style region allocator backed by a single `Vec<T>`, handing out
/// `u32`-sized `Ref<T>` handles instead of pointers (MiniSat-style).
#[derive(Debug)]
pub struct RegionAllocator<T: Copy> {
    // Backing storage; allocations are appended at the end and addressed
    // by their offset into this vector.
    vec: Vec<T>,
    // Total element count handed back via `free`; the storage itself is not
    // reclaimed here — presumably callers use this to decide when to GC.
    wasted: usize,
}
31
32impl<T: Copy + Default> RegionAllocator<T> {
33    pub fn new(start_cap: u32) -> Self {
34        Self {
35            vec: Vec::with_capacity(start_cap as usize),
36            wasted: 0,
37        }
38    }
39    pub fn len(&self) -> u32 {
40        self.vec.len() as u32
41    }
42    pub fn wasted(&self) -> u32 {
43        self.wasted as u32
44    }
45    pub fn alloc(&mut self, size: u32) -> Ref<T> {
46        debug_assert!(size > 0);
47        let r = Ref(self.vec.len() as u32, PhantomData);
48        self.vec.extend((0..size).map(|_| T::default()));
49        r
50    }
51    pub fn free(&mut self, size: u32) {
52        self.wasted += size as usize;
53    }
54    pub fn subslice(&self, r: Ref<T>, len: u32) -> &[T] {
55        &self.vec[r.0 as usize..r.0 as usize + len as usize]
56    }
57    pub fn subslice_mut(&mut self, r: Ref<T>, len: u32) -> &mut [T] {
58        &mut self.vec[r.0 as usize..r.0 as usize + len as usize]
59    }
60}
61
62impl<T: Copy> ops::Index<Ref<T>> for RegionAllocator<T> {
63    type Output = T;
64    fn index(&self, index: Ref<T>) -> &Self::Output {
65        &self.vec[index.0 as usize]
66    }
67}
68impl<T: Copy> ops::IndexMut<Ref<T>> for RegionAllocator<T> {
69    fn index_mut(&mut self, index: Ref<T>) -> &mut Self::Output {
70        &mut self.vec[index.0 as usize]
71    }
72}
73
/// Typed index into a `RegionAllocator<T>` region, wrapping a raw `u32`
/// offset. `PhantomData<fn(T) -> T>` ties the handle to its element type
/// without owning a `T`, so `Ref` stays `Copy` regardless of `T`.
#[derive(Clone, Copy)]
pub struct Ref<T: Copy>(u32, PhantomData<fn(T) -> T>);
76
77impl<T: Copy> fmt::Debug for Ref<T> {
78    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
79        f.debug_tuple("Ref").field(&self.0).finish()
80    }
81}
82impl<T: Copy> PartialEq for Ref<T> {
83    fn eq(&self, rhs: &Self) -> bool {
84        self.0 == rhs.0
85    }
86}
87impl<T: Copy> Eq for Ref<T> {}
88impl<T: Copy> PartialOrd for Ref<T> {
89    fn partial_cmp(&self, rhs: &Self) -> Option<cmp::Ordering> {
90        PartialOrd::partial_cmp(&self.0, &rhs.0)
91    }
92}
93impl<T: Copy> Ord for Ref<T> {
94    fn cmp(&self, rhs: &Self) -> cmp::Ordering {
95        Ord::cmp(&self.0, &rhs.0)
96    }
97}
98impl<T: Copy> Default for Ref<T> {
99    fn default() -> Self {
100        Ref(0, PhantomData)
101    }
102}
103
impl<T: Copy> Ref<T> {
    /// Sentinel "undefined" handle: all bits set (`!0`, i.e. `u32::MAX`),
    /// used in place of a null pointer.
    pub const UNDEF: Self = Ref(!0, PhantomData);
}
107
108impl<T: Copy> ops::Add<u32> for Ref<T> {
109    type Output = Ref<T>;
110    fn add(self, rhs: u32) -> Self::Output {
111        Ref(self.0 + rhs, PhantomData)
112    }
113}