1use crate::StorageKind;
13
14use super::{MemoryDescriptor, StorageError};
15use offset_allocator::{Allocation, Allocator};
16use std::{
17 any::Any,
18 sync::{Arc, Mutex},
19};
20
/// Errors produced by [`ArenaAllocator`] construction and allocation.
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum ArenaError {
    /// The requested page size was not a power of two.
    #[error("Page size must be a power of 2")]
    PageSizeNotAligned,

    /// No contiguous run of free pages was large enough for the request.
    #[error("Allocation failed")]
    AllocationFailed,

    /// The page count did not fit into the allocator's `u32` page index.
    #[error("Failed to convert pages to u32")]
    PagesNotConvertible,

    /// An error bubbled up from the underlying storage layer.
    #[error("Storage error: {0}")]
    StorageError(#[from] StorageError),
}
37
/// Page-granular arena allocator over a contiguous [`MemoryDescriptor`] region.
///
/// Cloning is cheap: clones share the same backing storage and the same
/// page allocator (guarded by a mutex), so buffers from any clone return
/// their pages to the one shared arena.
#[derive(Clone)]
pub struct ArenaAllocator<S: MemoryDescriptor> {
    // Backing memory region that buffers are carved out of.
    storage: Arc<S>,
    // Page-level offset allocator; shared by all clones and by live buffers.
    allocator: Arc<Mutex<Allocator>>,
    // Page size in bytes; always a power of two (validated in `new`).
    page_size: u64,
}
50
51impl<S: MemoryDescriptor> std::fmt::Debug for ArenaAllocator<S> {
52 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
53 write!(
54 f,
55 "ArenaAllocator {{ storage: {:?}, page_size: {} }}",
56 self.storage, self.page_size
57 )
58 }
59}
60
/// A sub-range of an arena's storage, returned to the arena when dropped.
pub struct ArenaBuffer<S: MemoryDescriptor> {
    // Byte offset of this buffer from the start of the backing storage.
    offset: usize,
    // Absolute address: storage base address plus `offset`.
    address: usize,
    // Size the caller asked for; the reserved span is rounded up to pages.
    requested_size: usize,
    // Keeps the backing storage alive for as long as the buffer exists.
    storage: Arc<S>,
    // Handle used to hand the reserved pages back to the allocator.
    allocation: Allocation,
    // Shared page allocator; used by `Drop` to free `allocation`.
    allocator: Arc<Mutex<Allocator>>,
}
81
82impl<S: MemoryDescriptor> ArenaAllocator<S> {
83 pub fn new(storage: S, page_size: usize) -> std::result::Result<Self, ArenaError> {
92 let storage = Arc::new(storage);
93
94 if !page_size.is_power_of_two() {
95 return Err(ArenaError::PageSizeNotAligned);
96 }
97
98 let pages = storage.size() / page_size;
101
102 let allocator = Allocator::new(
103 pages
104 .try_into()
105 .map_err(|_| ArenaError::PagesNotConvertible)?,
106 );
107
108 let allocator = Arc::new(Mutex::new(allocator));
109
110 Ok(Self {
111 storage,
112 allocator,
113 page_size: page_size as u64,
114 })
115 }
116
117 pub fn allocate(&self, size: usize) -> std::result::Result<ArenaBuffer<S>, ArenaError> {
123 let size = size as u64;
124 let pages = size.div_ceil(self.page_size);
125
126 let allocation = self
127 .allocator
128 .lock()
129 .unwrap()
130 .allocate(pages.try_into().map_err(|_| ArenaError::AllocationFailed)?)
131 .ok_or(ArenaError::AllocationFailed)?;
132
133 let offset = allocation.offset as u64 * self.page_size;
134 let address = self.storage.addr() + offset as usize;
135
136 debug_assert!(address + size as usize <= self.storage.addr() + self.storage.size());
137
138 Ok(ArenaBuffer {
139 offset: offset as usize,
140 address,
141 requested_size: size as usize,
142 allocation,
143 storage: self.storage.clone(),
144 allocator: self.allocator.clone(),
145 })
146 }
147}
148
149impl<S: MemoryDescriptor> std::fmt::Debug for ArenaBuffer<S> {
150 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
151 write!(
152 f,
153 "ArenaBuffer {{ addr: {}, size: {}, kind: {:?}, allocator: {:p} }}",
154 self.address,
155 self.requested_size,
156 self.storage.storage_kind(),
157 Arc::as_ptr(&self.storage)
158 )
159 }
160}
161
162impl<S: MemoryDescriptor + 'static> MemoryDescriptor for ArenaBuffer<S> {
163 fn addr(&self) -> usize {
164 self.address
165 }
166 fn size(&self) -> usize {
167 self.requested_size
168 }
169 fn storage_kind(&self) -> StorageKind {
170 self.storage.storage_kind()
171 }
172 fn as_any(&self) -> &dyn Any {
173 self
174 }
175 fn nixl_descriptor(&self) -> Option<NixlDescriptor> {
176 if let Some(mut descriptor) = self.storage.nixl_descriptor() {
177 descriptor.addr = self.addr() as u64;
178 descriptor.size = self.size();
179 Some(descriptor)
180 } else {
181 None
182 }
183 }
184}
185
186use super::nixl::{NixlCompatible, NixlDescriptor, RegisteredView};
188
impl<S> ArenaBuffer<S>
where
    S: MemoryDescriptor + NixlCompatible,
{
    /// Builds a NIXL descriptor covering this buffer's sub-range of the
    /// backing storage, using the storage's raw NIXL parameters.
    ///
    /// NOTE(review): this inherent method shadows the `MemoryDescriptor` trait
    /// method of the same name and derives the address from `nixl_params()`
    /// rather than `self.addr()` — confirm the two sources agree.
    pub fn nixl_descriptor(&self) -> Option<NixlDescriptor> {
        let (base_ptr, _base_size, mem_type, device_id) = self.storage.nixl_params();

        // SAFETY: `offset` was produced by the arena allocator for this same
        // storage, so `base_ptr + offset` stays within the storage region —
        // assuming `nixl_params` returns the region's base pointer (TODO confirm).
        let buffer_ptr = unsafe { base_ptr.add(self.offset) };

        Some(NixlDescriptor {
            addr: buffer_ptr as u64,
            size: self.requested_size,
            mem_type,
            device_id,
        })
    }
}
211
212impl<S> ArenaBuffer<S>
213where
214 S: MemoryDescriptor + RegisteredView,
215{
216 pub fn agent_name(&self) -> &str {
220 self.storage.agent_name()
221 }
222
223 pub fn registered_descriptor(&self) -> NixlDescriptor {
225 let base_descriptor = self.storage.descriptor();
226
227 NixlDescriptor {
229 addr: base_descriptor.addr + self.offset as u64,
230 size: self.requested_size,
231 mem_type: base_descriptor.mem_type,
232 device_id: base_descriptor.device_id,
233 }
234 }
235}
236
237impl<S: MemoryDescriptor> Drop for ArenaBuffer<S> {
238 fn drop(&mut self) {
239 self.allocator.lock().unwrap().free(self.allocation);
240 }
241}
242
#[cfg(test)]
mod tests {
    use super::*;
    use crate::SystemStorage;

    const PAGE_SIZE: usize = 4096;
    const PAGE_COUNT: usize = 10;
    const TOTAL_STORAGE_SIZE: usize = PAGE_SIZE * PAGE_COUNT;

    /// Builds a fresh ten-page arena backed by system memory.
    fn create_allocator() -> ArenaAllocator<SystemStorage> {
        let storage = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        ArenaAllocator::new(storage, PAGE_SIZE).unwrap()
    }

    #[test]
    fn test_arena_allocator_new_success() {
        let storage = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        assert!(ArenaAllocator::new(storage, PAGE_SIZE).is_ok());
    }

    #[test]
    fn test_arena_allocator_new_invalid_page_size() {
        // A non-power-of-two page size must be rejected at construction.
        let storage = SystemStorage::new(TOTAL_STORAGE_SIZE).unwrap();
        let result = ArenaAllocator::new(storage, PAGE_SIZE + 1);
        assert!(result.is_err());
        assert!(matches!(result, Err(ArenaError::PageSizeNotAligned)));
    }

    #[test]
    fn test_allocate_single_buffer() {
        let allocator = create_allocator();
        let wanted = PAGE_SIZE * 2;
        let result = allocator.allocate(wanted);
        assert!(result.is_ok());
        let buffer = result.unwrap();
        assert_eq!(buffer.size(), wanted);
        // The first allocation starts at the base of the storage region.
        assert_eq!(buffer.addr(), allocator.storage.addr());
    }

    #[test]
    fn test_allocate_multiple_buffers() {
        let allocator = create_allocator();

        let first_size = PAGE_SIZE * 2;
        let first = allocator.allocate(first_size).unwrap();
        assert_eq!(first.size(), first_size);
        assert_eq!(first.addr(), allocator.storage.addr());

        // The second allocation is placed immediately after the first.
        let second_size = PAGE_SIZE * 3;
        let second = allocator.allocate(second_size).unwrap();
        assert_eq!(second.size(), second_size);
        assert_eq!(second.addr(), allocator.storage.addr() + first_size);
    }

    #[test]
    fn test_allocate_exact_size() {
        // Claiming the entire arena in one allocation must succeed.
        let allocator = create_allocator();
        let buffer = allocator.allocate(TOTAL_STORAGE_SIZE).unwrap();
        assert_eq!(buffer.size(), TOTAL_STORAGE_SIZE);
    }

    #[test]
    fn test_allocate_too_large() {
        // Requests beyond the arena's capacity must fail cleanly.
        let allocator = create_allocator();
        let result = allocator.allocate(TOTAL_STORAGE_SIZE + PAGE_SIZE);
        assert!(result.is_err());
        assert!(matches!(result, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_buffer_drop_and_reallocate() {
        let allocator = create_allocator();
        let wanted = PAGE_SIZE * 6;

        {
            let first = allocator.allocate(wanted).unwrap();
            assert_eq!(first.size(), wanted);
            assert_eq!(first.addr(), allocator.storage.addr());
        } // Dropping `first` returns its pages to the arena.

        // The freed pages are reusable, and at the same base address.
        let second = allocator.allocate(wanted).unwrap();
        assert_eq!(second.size(), wanted);
        assert_eq!(second.addr(), allocator.storage.addr());
    }

    #[test]
    fn test_allocate_fill_and_fail() {
        let allocator = create_allocator();
        let half = TOTAL_STORAGE_SIZE / 2;

        let first = allocator.allocate(half).unwrap();
        assert_eq!(first.size(), half);

        let second = allocator.allocate(half).unwrap();
        assert_eq!(second.size(), half);
        assert_eq!(second.addr(), allocator.storage.addr() + half);

        // Arena is now full; even a single page must be refused.
        let third = allocator.allocate(PAGE_SIZE);
        assert!(third.is_err());
        assert!(matches!(third, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_allocate_non_page_aligned_single_byte() {
        let allocator = create_allocator();
        let buffer = allocator.allocate(1).unwrap();
        assert_eq!(buffer.size(), 1);
    }

    #[test]
    fn test_allocate_non_page_aligned_almost_full_page() {
        let allocator = create_allocator();
        let buffer = allocator.allocate(PAGE_SIZE - 1).unwrap();
        assert_eq!(buffer.size(), PAGE_SIZE - 1);
    }

    #[test]
    fn test_allocate_non_page_aligned_just_over_one_page() {
        let allocator = create_allocator();
        let buffer = allocator.allocate(PAGE_SIZE + 1).unwrap();
        assert_eq!(buffer.size(), PAGE_SIZE + 1);
    }

    #[test]
    fn test_allocate_half_plus_one_byte_twice_exhausts_arena() {
        let allocator = create_allocator();
        // One byte more than half the arena rounds up to six of ten pages.
        let allocation_size = (PAGE_COUNT / 2 * PAGE_SIZE) + 1;

        let first = allocator.allocate(allocation_size);
        assert!(first.is_ok(), "First allocation should succeed");
        let buffer = first.unwrap();
        assert_eq!(buffer.size(), allocation_size);
        let pages_for_first_alloc = (allocation_size as u64).div_ceil(allocator.page_size);
        assert_eq!(pages_for_first_alloc, (PAGE_COUNT / 2 + 1) as u64);

        // Only four pages remain, so a second six-page request must fail.
        let second = allocator.allocate(allocation_size);
        assert!(
            second.is_err(),
            "Second allocation should fail due to insufficient pages"
        );
        assert!(matches!(second, Err(ArenaError::AllocationFailed)));
    }

    #[test]
    fn test_fill_with_non_aligned_and_fail() {
        let allocator = create_allocator();
        // Each request spans two pages (one full page plus one byte).
        let single_alloc_size = PAGE_SIZE + 1;
        let num_possible_allocs = PAGE_COUNT / 2;
        let mut allocated_buffers = Vec::with_capacity(num_possible_allocs);

        for i in 0..num_possible_allocs {
            let result = allocator.allocate(single_alloc_size);
            assert!(result.is_ok(), "Allocation {} should succeed", i + 1);
            let buffer = result.unwrap();
            assert_eq!(buffer.size(), single_alloc_size);
            allocated_buffers.push(buffer);
        }

        // Every page is now reserved; nothing more fits.
        let final_alloc_result = allocator.allocate(1);
        assert!(
            final_alloc_result.is_err(),
            "Final allocation of 1 byte should fail as arena is full"
        );
        assert!(matches!(
            final_alloc_result,
            Err(ArenaError::AllocationFailed)
        ));
    }
}