1use crate::StorageKind;
13
14use super::{MemoryDescription, StorageError};
15use offset_allocator::{Allocation, Allocator};
16use std::{
17 any::Any,
18 sync::{Arc, Mutex},
19};
20
/// Errors produced by [`ArenaAllocator`] construction and allocation.
#[derive(Debug, thiserror::Error)]
pub enum ArenaError {
    /// The requested page size is not a power of two (zero included).
    #[error("Page size must be a power of 2")]
    PageSizeNotAligned,

    /// The underlying page allocator could not satisfy the request.
    #[error("Allocation failed")]
    AllocationFailed,

    /// The page count does not fit into the `u32` the page allocator expects.
    #[error("Failed to convert pages to u32")]
    PagesNotConvertible,

    /// An error surfaced from the storage layer.
    #[error("Storage error: {0}")]
    StorageError(#[from] StorageError),
}
36
/// Page-granular arena allocator layered over a single storage region.
///
/// Cloning is cheap: clones share the same backing storage and the same
/// free-page state (both fields are behind `Arc`s).
#[derive(Clone)]
pub struct ArenaAllocator<S: MemoryDescription> {
    // Backing memory region that all buffers are carved out of.
    storage: Arc<S>,
    // Page-level free-space tracker; shared with every live `ArenaBuffer`
    // so buffers can return their pages on drop.
    allocator: Arc<Mutex<Allocator>>,
    // Size of one page in bytes; `new` guarantees this is a power of two.
    page_size: u64,
}
49
50impl<S: MemoryDescription> std::fmt::Debug for ArenaAllocator<S> {
51 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
52 write!(
53 f,
54 "ArenaAllocator {{ storage: {:?}, page_size: {} }}",
55 self.storage, self.page_size
56 )
57 }
58}
59
/// A page-aligned slice of an arena's storage handed out by
/// [`ArenaAllocator::allocate`]; returns its pages to the arena on drop.
pub struct ArenaBuffer<S: MemoryDescription> {
    // Byte offset of this buffer from the start of the backing storage
    // (a multiple of the arena's page size).
    offset: usize,
    // Absolute address: storage base address plus `offset`.
    address: usize,
    // Size the caller asked for; the reservation is rounded up to pages.
    requested_size: usize,
    // Backing storage, kept alive for as long as this buffer exists.
    storage: Arc<S>,
    // Handle identifying the reserved pages, needed to free them.
    allocation: Allocation,
    // Shared free-space tracker; `free` is called on it in `Drop`.
    allocator: Arc<Mutex<Allocator>>,
}
74
75impl<S: MemoryDescription> ArenaAllocator<S> {
76 pub fn new(storage: S, page_size: usize) -> std::result::Result<Self, ArenaError> {
85 let storage = Arc::new(storage);
86
87 if !page_size.is_power_of_two() {
88 return Err(ArenaError::PageSizeNotAligned);
89 }
90
91 let pages = storage.size() / page_size;
94
95 let allocator = Allocator::new(
96 pages
97 .try_into()
98 .map_err(|_| ArenaError::PagesNotConvertible)?,
99 );
100
101 let allocator = Arc::new(Mutex::new(allocator));
102
103 Ok(Self {
104 storage,
105 allocator,
106 page_size: page_size as u64,
107 })
108 }
109
110 pub fn allocate(&self, size: usize) -> std::result::Result<ArenaBuffer<S>, ArenaError> {
112 let size = size as u64;
113 let pages = size.div_ceil(self.page_size);
114
115 let allocation = self
116 .allocator
117 .lock()
118 .unwrap()
119 .allocate(pages.try_into().map_err(|_| ArenaError::AllocationFailed)?)
120 .ok_or(ArenaError::AllocationFailed)?;
121
122 let offset = allocation.offset as u64 * self.page_size;
123 let address = self.storage.addr() + offset as usize;
124
125 debug_assert!(address + size as usize <= self.storage.addr() + self.storage.size());
126
127 Ok(ArenaBuffer {
128 offset: offset as usize,
129 address,
130 requested_size: size as usize,
131 allocation,
132 storage: self.storage.clone(),
133 allocator: self.allocator.clone(),
134 })
135 }
136}
137
138impl<S: MemoryDescription> std::fmt::Debug for ArenaBuffer<S> {
139 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
140 write!(
141 f,
142 "ArenaBuffer {{ addr: {}, size: {}, kind: {:?}, allocator: {:p} }}",
143 self.address,
144 self.requested_size,
145 self.storage.storage_kind(),
146 Arc::as_ptr(&self.storage)
147 )
148 }
149}
150
151impl<S: MemoryDescription + 'static> MemoryDescription for ArenaBuffer<S> {
152 fn addr(&self) -> usize {
153 self.address
154 }
155 fn size(&self) -> usize {
156 self.requested_size
157 }
158 fn storage_kind(&self) -> StorageKind {
159 self.storage.storage_kind()
160 }
161 fn as_any(&self) -> &dyn Any {
162 self
163 }
164 fn nixl_descriptor(&self) -> Option<NixlDescriptor> {
165 if let Some(mut descriptor) = self.storage.nixl_descriptor() {
166 descriptor.addr = self.addr() as u64;
167 descriptor.size = self.size();
168 Some(descriptor)
169 } else {
170 None
171 }
172 }
173}
174
175use super::nixl::{NixlCompatible, NixlDescriptor, RegisteredView};
177
impl<S> ArenaBuffer<S>
where
    S: MemoryDescription + NixlCompatible,
{
    /// Builds a NIXL descriptor for this buffer from the raw parameters of
    /// the backing storage (base pointer, memory type, device id), with the
    /// address advanced by this buffer's page offset.
    ///
    /// NOTE(review): this inherent method shadows the
    /// `MemoryDescription::nixl_descriptor` trait method implemented on the
    /// same type; both appear to compute the same address by different
    /// routes — confirm the shadowing is intended.
    pub fn nixl_descriptor(&self) -> Option<NixlDescriptor> {
        let (base_ptr, _base_size, mem_type, device_id) = self.storage.nixl_params();

        // SAFETY: `offset` was derived from an allocation made against this
        // same storage, so `base_ptr + offset` should stay within the
        // storage region — assumes `nixl_params` returns the region's base
        // pointer; TODO confirm against the NixlCompatible contract.
        let buffer_ptr = unsafe { base_ptr.add(self.offset) };

        Some(NixlDescriptor {
            addr: buffer_ptr as u64,
            size: self.requested_size,
            mem_type,
            device_id,
        })
    }
}
200
201impl<S> ArenaBuffer<S>
202where
203 S: MemoryDescription + RegisteredView,
204{
205 pub fn agent_name(&self) -> &str {
209 self.storage.agent_name()
210 }
211
212 pub fn registered_descriptor(&self) -> NixlDescriptor {
214 let base_descriptor = self.storage.descriptor();
215
216 NixlDescriptor {
218 addr: base_descriptor.addr + self.offset as u64,
219 size: self.requested_size,
220 mem_type: base_descriptor.mem_type,
221 device_id: base_descriptor.device_id,
222 }
223 }
224}
225
226impl<S: MemoryDescription> Drop for ArenaBuffer<S> {
227 fn drop(&mut self) {
228 self.allocator.lock().unwrap().free(self.allocation);
229 }
230}
231
#[cfg(test)]
mod tests {
    use super::*;
    use crate::SystemStorage;

    const PAGE_SIZE: usize = 4096;
    const PAGE_COUNT: usize = 10;
    const TOTAL_STORAGE_SIZE: usize = PAGE_SIZE * PAGE_COUNT;

    /// Fresh arena backed by `PAGE_COUNT` pages of system memory.
    fn create_allocator() -> ArenaAllocator<SystemStorage> {
        let backing = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        ArenaAllocator::new(backing, PAGE_SIZE).unwrap()
    }

    #[test]
    fn test_arena_allocator_new_success() {
        // A power-of-two page size is accepted.
        let backing = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        let result = ArenaAllocator::new(backing, PAGE_SIZE);
        assert!(result.is_ok());
    }

    #[test]
    fn test_arena_allocator_new_invalid_page_size() {
        // A non-power-of-two page size is rejected up front.
        let backing = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        let result = ArenaAllocator::new(backing, PAGE_SIZE + 1);
        assert!(result.is_err());
        assert!(matches!(result, Err(ArenaError::PageSizeNotAligned)));
    }

    #[test]
    fn test_allocate_single_buffer() {
        // The first allocation starts at the very base of the storage.
        let arena = create_allocator();
        let wanted = PAGE_SIZE * 2;
        let result = arena.allocate(wanted);
        assert!(result.is_ok());
        let buf = result.unwrap();
        assert_eq!(buf.size(), wanted);
        assert_eq!(buf.addr(), arena.storage.addr());
    }

    #[test]
    fn test_allocate_multiple_buffers() {
        // Consecutive allocations are laid out back to back.
        let arena = create_allocator();

        let first_size = PAGE_SIZE * 2;
        let first = arena.allocate(first_size);
        assert!(first.is_ok());
        let first = first.unwrap();
        assert_eq!(first.size(), first_size);
        assert_eq!(first.addr(), arena.storage.addr());

        let second_size = PAGE_SIZE * 3;
        let second = arena.allocate(second_size);
        assert!(second.is_ok());
        let second = second.unwrap();
        assert_eq!(second.size(), second_size);
        assert_eq!(second.addr(), arena.storage.addr() + first_size);
    }

    #[test]
    fn test_allocate_exact_size() {
        // The whole arena can be handed out as a single buffer.
        let arena = create_allocator();
        let result = arena.allocate(TOTAL_STORAGE_SIZE);
        assert!(result.is_ok());
        let buf = result.unwrap();
        assert_eq!(buf.size(), TOTAL_STORAGE_SIZE);
    }

    #[test]
    fn test_allocate_too_large() {
        // A request beyond the arena's total capacity fails cleanly.
        let arena = create_allocator();
        let result = arena.allocate(TOTAL_STORAGE_SIZE + PAGE_SIZE);
        assert!(result.is_err());
        assert!(matches!(result, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_buffer_drop_and_reallocate() {
        // Dropping a buffer returns its pages, so the same region can be
        // handed out again.
        let arena = create_allocator();
        let wanted = PAGE_SIZE * 6;

        {
            let first = arena.allocate(wanted).unwrap();
            assert_eq!(first.size(), wanted);
            assert_eq!(first.addr(), arena.storage.addr());
        }

        let second = arena.allocate(wanted);
        assert!(second.is_ok());
        let second = second.unwrap();
        assert_eq!(second.size(), wanted);
        assert_eq!(second.addr(), arena.storage.addr());
    }

    #[test]
    fn test_allocate_fill_and_fail() {
        // Two half-sized buffers exhaust the arena; a third request fails.
        let arena = create_allocator();
        let half = TOTAL_STORAGE_SIZE / 2;

        let first = arena.allocate(half).unwrap();
        assert_eq!(first.size(), half);

        let second = arena.allocate(half).unwrap();
        assert_eq!(second.size(), half);
        assert_eq!(second.addr(), arena.storage.addr() + half);

        let third = arena.allocate(PAGE_SIZE);
        assert!(third.is_err());
        assert!(matches!(third, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_allocate_non_page_aligned_single_byte() {
        // Sub-page requests succeed and report the requested size.
        let arena = create_allocator();
        let buf = arena.allocate(1).unwrap();
        assert_eq!(buf.size(), 1);
    }

    #[test]
    fn test_allocate_non_page_aligned_almost_full_page() {
        let arena = create_allocator();
        let buf = arena.allocate(PAGE_SIZE - 1).unwrap();
        assert_eq!(buf.size(), PAGE_SIZE - 1);
    }

    #[test]
    fn test_allocate_non_page_aligned_just_over_one_page() {
        let arena = create_allocator();
        let buf = arena.allocate(PAGE_SIZE + 1).unwrap();
        assert_eq!(buf.size(), PAGE_SIZE + 1);
    }

    #[test]
    fn test_allocate_half_plus_one_byte_twice_exhausts_arena() {
        // One byte over half the arena rounds up to an extra page, so a
        // second identical request cannot fit.
        let arena = create_allocator();
        let wanted = (PAGE_COUNT / 2 * PAGE_SIZE) + 1;

        let first = arena.allocate(wanted);
        assert!(first.is_ok(), "First allocation should succeed");
        let first = first.unwrap();
        assert_eq!(first.size(), wanted);
        let pages_used = (wanted as u64).div_ceil(arena.page_size);
        assert_eq!(pages_used, (PAGE_COUNT / 2 + 1) as u64);

        let second = arena.allocate(wanted);
        assert!(
            second.is_err(),
            "Second allocation should fail due to insufficient pages"
        );
        assert!(matches!(second, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_fill_with_non_aligned_and_fail() {
        // Each PAGE_SIZE + 1 request consumes two pages; after PAGE_COUNT/2
        // of them every page is reserved and even one more byte must fail.
        let arena = create_allocator();
        let each = PAGE_SIZE + 1;
        let rounds = PAGE_COUNT / 2;
        let mut held = Vec::with_capacity(rounds);

        for round in 0..rounds {
            let attempt = arena.allocate(each);
            assert!(attempt.is_ok(), "Allocation {} should succeed", round + 1);
            let buf = attempt.unwrap();
            assert_eq!(buf.size(), each);
            held.push(buf);
        }

        let last = arena.allocate(1);
        assert!(
            last.is_err(),
            "Final allocation of 1 byte should fail as arena is full"
        );
        assert!(matches!(
            last,
            Err(ArenaError::AllocationFailed)
        ));
    }
}