// vm_allocator/address_allocator.rs

1// Copyright © 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2// Copyright © 2022 Alibaba Cloud. All rights reserved.
3// Copyright © 2019 Intel Corporation. All Rights Reserved.
4// SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause
5
6//! Provides allocation and releasing strategy for memory slots.
7//!
//! This module implements allocation strategies for memory slots in an
//! address space (for example MMIO and PIO).
10
11use crate::allocation_engine::IntervalTree;
12use crate::{AllocPolicy, Constraint, Error, RangeInclusive, Result};
13
14// Internal representation of AddressAllocator. Contains the managed address
15// space represented through an instance of RangeInclusive. The address
16// allocator also contains a node that represents the root of the interval tree
17// used for memory slots management. The reason we chose to use an interval tree
18// is that the average complexity for deletion and insertion is O(log N) and for
19// searching a node is O(N).
20/// An address space allocator.
21///
22/// The `AddressAllocator` manages an address space by exporting functionality to reserve and
23/// free address ranges based on a user defined [Allocation Policy](enum.AllocPolicy.html).
24#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
25#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
26pub struct AddressAllocator {
27    // Address space that we want to manage.
28    address_space: RangeInclusive,
29    // Internal representation of the managed address space. Each node in the
30    // tree will represent a memory location and can have two states either
31    // `NodeState::Free` or `NodeState::Allocated`.
32    interval_tree: IntervalTree,
33}
34
35impl AddressAllocator {
36    /// Creates a new instance of AddressAllocator that will be used to manage
37    /// the allocation and release of memory slots from the managed address
38    /// space.
39    pub fn new(base: u64, size: u64) -> std::result::Result<Self, Error> {
40        let end = base
41            .checked_add(size.checked_sub(1).ok_or(Error::Underflow)?)
42            .ok_or(Error::Overflow)?;
43        let aux_range = RangeInclusive::new(base, end)?;
44        Ok(AddressAllocator {
45            address_space: aux_range,
46            interval_tree: IntervalTree::new(aux_range),
47        })
48    }
49
50    /// Allocates a new aligned memory slot. Returns the allocated range in case of success.
51    ///
52    /// When the `ExactMatch` policy is used, the start address MUST be aligned to the
53    /// alignment passed as a parameter.
54    ///
55    /// # Arguments:
56    /// - `size`: size to allocate.
57    /// - `alignment`: alignment to be used for the allocated resources.
58    ///            Valid alignments are a power of 2.
59    /// - `policy`: allocation policy.
60    pub fn allocate(
61        &mut self,
62        size: u64,
63        alignment: u64,
64        policy: AllocPolicy,
65    ) -> Result<RangeInclusive> {
66        let constraint = Constraint::new(size, alignment, policy)?;
67        self.interval_tree.allocate(constraint)
68    }
69
70    /// Deletes the specified memory slot or returns `ResourceNotAvailable` if
71    /// the node was not allocated before.
72    pub fn free(&mut self, key: &RangeInclusive) -> Result<()> {
73        self.interval_tree.free(key)
74    }
75
76    /// First address of the allocator.
77    pub fn base(&self) -> u64 {
78        self.address_space.start()
79    }
80
81    /// Last address of the allocator.
82    pub fn end(&self) -> u64 {
83        self.address_space.end()
84    }
85}
86
#[cfg(test)]
mod tests {
    use super::*;

    // Regression test: `ExactMatch` must still validate the requested size
    // and the alignment of the requested start address.
    #[test]
    fn test_regression_exact_match_length_check() {
        let mut pool = AddressAllocator::new(0x0, 0x2000).unwrap();
        let res = pool
            .allocate(0x1000, 0x1000, AllocPolicy::ExactMatch(0x1000))
            .unwrap();
        // A zero-sized request is rejected regardless of policy.
        assert_eq!(
            pool.allocate(0x0, 0x1000, AllocPolicy::FirstMatch)
                .unwrap_err(),
            Error::InvalidSize(0x0)
        );
        // An `ExactMatch` start address that is not aligned to the requested
        // alignment is rejected.
        assert_eq!(
            pool.allocate(0x1000, 0x1000, AllocPolicy::ExactMatch(0x3))
                .unwrap_err(),
            Error::UnalignedAddress
        );
        assert_eq!(res, RangeInclusive::new(0x1000, 0x1FFF).unwrap());
        // Address 0x0 is aligned, free, and large enough — allocation succeeds.
        let res = pool
            .allocate(0x1000, 0x1000, AllocPolicy::ExactMatch(0x0))
            .unwrap();
        assert_eq!(res, RangeInclusive::new(0x0, 0x0FFF).unwrap());
    }

    // `base + size - 1` past u64::MAX must be reported as Overflow.
    #[test]
    fn test_new_fails_overflow() {
        assert_eq!(
            AddressAllocator::new(u64::MAX, 0x100).unwrap_err(),
            Error::Overflow
        );
    }

    // A zero-sized address space cannot form an inclusive range: Underflow.
    #[test]
    fn test_new_fails_size_zero() {
        assert_eq!(
            AddressAllocator::new(0x1000, 0x0).unwrap_err(),
            Error::Underflow
        );
    }

    // Alignment 0 is invalid (valid alignments are powers of two).
    #[test]
    fn test_allocate_fails_alignment_zero() {
        let mut pool = AddressAllocator::new(0x1000, 0x10000).unwrap();
        assert_eq!(
            pool.allocate(0x100, 0, AllocPolicy::FirstMatch)
                .unwrap_err(),
            Error::InvalidAlignment
        );
    }

    // A non-power-of-two alignment (200) is invalid.
    #[test]
    fn test_allocate_fails_alignment_non_power_of_two() {
        let mut pool = AddressAllocator::new(0x1000, 0x10000).unwrap();
        assert_eq!(
            pool.allocate(0x100, 200, AllocPolicy::FirstMatch)
                .unwrap_err(),
            Error::InvalidAlignment
        );
    }

    // After a LastMatch allocation consumes the upper half, a request larger
    // than the remaining free space fails, while a smaller one succeeds.
    #[test]
    fn test_allocate_fails_not_enough_space() {
        let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
        assert_eq!(
            pool.allocate(0x800, 0x100, AllocPolicy::LastMatch).unwrap(),
            RangeInclusive::new(0x1800, 0x1FFF).unwrap()
        );
        // Only 0x800 bytes remain free; a 0x900 request cannot be satisfied.
        assert_eq!(
            pool.allocate(0x900, 0x100, AllocPolicy::FirstMatch)
                .unwrap_err(),
            Error::ResourceNotAvailable
        );
        assert_eq!(
            pool.allocate(0x400, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1000, 0x13FF).unwrap()
        );
    }

    // FirstMatch allocations respect alignment: each slot starts at the first
    // suitably aligned free address.
    #[test]
    fn test_allocate_with_alignment_first_ok() {
        let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
        assert_eq!(
            pool.allocate(0x110, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1000, 0x110F).unwrap()
        );
        // Next aligned address after 0x110F is 0x1200.
        assert_eq!(
            pool.allocate(0x100, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1200, 0x12FF).unwrap()
        );
        assert_eq!(
            pool.allocate(0x10, 0x100, AllocPolicy::FirstMatch).unwrap(),
            RangeInclusive::new(0x1300, 0x130F).unwrap()
        );
    }

    // LastMatch allocations walk down from the top of the address space,
    // keeping each slot start aligned.
    #[test]
    fn test_allocate_with_alignment_last_ok() {
        let mut pool_reverse = AddressAllocator::new(0x1000, 0x10000).unwrap();
        assert_eq!(
            pool_reverse
                .allocate(0x110, 0x100, AllocPolicy::LastMatch)
                .unwrap(),
            RangeInclusive::new(0x10E00, 0x10F0F).unwrap()
        );
        assert_eq!(
            pool_reverse
                .allocate(0x100, 0x100, AllocPolicy::LastMatch)
                .unwrap(),
            RangeInclusive::new(0x10D00, 0x10DFF).unwrap()
        );
        assert_eq!(
            pool_reverse
                .allocate(0x10, 0x100, AllocPolicy::LastMatch)
                .unwrap(),
            RangeInclusive::new(0x10C00, 0x10C0F).unwrap()
        );
    }

    // An `ExactMatch` request into a free gap fails when the gap is too small
    // and succeeds when the request fits.
    #[test]
    fn test_allocate_address_not_enough_space() {
        let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
        // First range is [0x1000:0x17FF]
        assert_eq!(
            pool.allocate(0x800, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1000, 0x17FF).unwrap()
        );
        // Second range is [0x1A00:0x1BFF]
        assert_eq!(
            pool.allocate(0x200, 0x100, AllocPolicy::ExactMatch(0x1A00))
                .unwrap(),
            RangeInclusive::new(0x1A00, 0x1BFF).unwrap()
        );
        // There is 0x200 between the first 2 ranges.
        // We ask for an available address but the range is too big.
        assert_eq!(
            pool.allocate(0x800, 0x100, AllocPolicy::ExactMatch(0x1800))
                .unwrap_err(),
            Error::ResourceNotAvailable
        );
        // We ask for an available address, with a small enough range.
        assert_eq!(
            pool.allocate(0x100, 0x100, AllocPolicy::ExactMatch(0x1800))
                .unwrap(),
            RangeInclusive::new(0x1800, 0x18FF).unwrap()
        );
    }

    // A freed range becomes available again and the exact same range can be
    // re-allocated.
    #[test]
    fn test_tree_allocate_address_free_and_realloc() {
        let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
        assert_eq!(
            pool.allocate(0x800, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1000, 0x17FF).unwrap()
        );

        let _ = pool.free(&RangeInclusive::new(0x1000, 0x17FF).unwrap());
        assert_eq!(
            pool.allocate(0x800, 0x100, AllocPolicy::FirstMatch)
                .unwrap(),
            RangeInclusive::new(0x1000, 0x17FF).unwrap()
        );
    }

    // An aligned allocation may leave a one-address hole at the start of the
    // space; that hole must remain allocatable.
    #[test]
    fn test_allow_range_size_one_left() {
        let mut pool = AddressAllocator::new(1, 1000).unwrap();
        // Alignment 2 pushes the start from 1 to 2, leaving address 1 free.
        assert_eq!(
            pool.allocate(10, 2, AllocPolicy::FirstMatch).unwrap(),
            RangeInclusive::new(2, 11).unwrap()
        );
        assert_eq!(
            pool.allocate(1, 1, AllocPolicy::FirstMatch).unwrap(),
            RangeInclusive::new(1, 1).unwrap()
        );
    }

    // Failed allocations and invalid frees leave the allocator usable.
    #[test]
    fn test_allocate_address_fail_free_and_realloc() {
        let mut pool = AddressAllocator::new(0x0, 0x1000).unwrap();
        // First allocation fails: the request is larger than the whole space.
        assert_eq!(
            pool.allocate(0x2000, 0x100, AllocPolicy::FirstMatch)
                .unwrap_err(),
            Error::ResourceNotAvailable
        );
        // We try to free a range that was not allocated.
        assert_eq!(
            pool.free(&RangeInclusive::new(0x1200, 0x3200).unwrap())
                .unwrap_err(),
            Error::ResourceNotAvailable
        );
        // Now we try an allocation that should succeed.
        assert_eq!(
            pool.allocate(0x4FE, 0x100, AllocPolicy::ExactMatch(0x500))
                .unwrap(),
            RangeInclusive::new(0x500, 0x9FD).unwrap()
        );
        assert!(pool
            .free(&RangeInclusive::new(0x500, 0x9FD).unwrap())
            .is_ok());
    }
}
296}