framealloc/allocators/
handles.rs

1//! Handle-based allocation with relocation support.
2//!
3//! Provides stable handles that remain valid even when the underlying
4//! memory is relocated for defragmentation.
5
6use std::alloc::{alloc, dealloc, Layout};
7use std::marker::PhantomData;
8use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};
9
10use crate::sync::mutex::Mutex;
11
/// Generation counter for handle validation.
///
/// Bumped every time a slot is reused so that handles into the slot's
/// previous occupant can be detected as stale.
type Generation = u32;
14
15/// A stable handle to allocated memory.
16///
17/// Handles remain valid across relocations. The actual pointer
18/// is resolved at access time.
19#[derive(Debug)]
20pub struct Handle<T> {
21    index: u32,
22    generation: Generation,
23    _marker: PhantomData<T>,
24}
25
26// Manual implementations to avoid T: Copy/Clone bounds
27impl<T> Copy for Handle<T> {}
28
29impl<T> Clone for Handle<T> {
30    fn clone(&self) -> Self {
31        *self
32    }
33}
34
35impl<T> PartialEq for Handle<T> {
36    fn eq(&self, other: &Self) -> bool {
37        self.index == other.index && self.generation == other.generation
38    }
39}
40
41impl<T> Eq for Handle<T> {}
42
43impl<T> std::hash::Hash for Handle<T> {
44    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
45        self.index.hash(state);
46        self.generation.hash(state);
47    }
48}
49
50impl<T> Handle<T> {
51    /// Create a dangling handle (for default initialization).
52    pub const fn dangling() -> Self {
53        Self {
54            index: u32::MAX,
55            generation: 0,
56            _marker: PhantomData,
57        }
58    }
59
60    /// Check if this is a dangling/invalid handle.
61    pub fn is_dangling(&self) -> bool {
62        self.index == u32::MAX
63    }
64
65    /// Get the raw index (for debugging).
66    pub fn raw_index(&self) -> u32 {
67        self.index
68    }
69
70    /// Get the generation (for debugging).
71    pub fn raw_generation(&self) -> u32 {
72        self.generation
73    }
74}
75
impl<T> Default for Handle<T> {
    /// The default handle is the dangling handle (refers to nothing).
    fn default() -> Self {
        Self::dangling()
    }
}
81
82
83/// Internal slot for tracking allocations.
84struct Slot {
85    /// Pointer to allocated memory
86    ptr: *mut u8,
87    /// Size of allocation
88    size: usize,
89    /// Current generation
90    generation: Generation,
91    /// Whether this slot is in use
92    in_use: bool,
93    /// Whether the memory can be relocated
94    relocatable: bool,
95    /// Relocation callback (called after move)
96    on_relocate: Option<Box<dyn Fn(*mut u8, *mut u8) + Send + Sync>>,
97}
98
99impl Slot {
100    #[allow(dead_code)]
101    fn empty() -> Self {
102        Self {
103            ptr: std::ptr::null_mut(),
104            size: 0,
105            generation: 0,
106            in_use: false,
107            relocatable: true,
108            on_relocate: None,
109        }
110    }
111}
112
113/// Handle-based allocator with relocation support.
114pub struct HandleAllocator {
115    /// Allocation slots
116    slots: Mutex<Vec<Slot>>,
117    
118    /// Free slot indices
119    free_list: Mutex<Vec<u32>>,
120    
121    /// Total allocated bytes
122    total_allocated: AtomicU64,
123    
124    /// Number of active handles
125    active_count: AtomicU32,
126    
127    /// Number of relocations performed
128    relocation_count: AtomicU64,
129}
130
131impl HandleAllocator {
132    /// Create a new handle allocator.
133    pub fn new() -> Self {
134        Self {
135            slots: Mutex::new(Vec::with_capacity(1024)),
136            free_list: Mutex::new(Vec::new()),
137            total_allocated: AtomicU64::new(0),
138            active_count: AtomicU32::new(0),
139            relocation_count: AtomicU64::new(0),
140        }
141    }
142
143    /// Allocate memory and return a handle.
144    pub fn alloc<T>(&self) -> Option<Handle<T>> {
145        self.alloc_with_options::<T>(true, None)
146    }
147
148    /// Allocate with options.
149    pub fn alloc_with_options<T>(
150        &self,
151        relocatable: bool,
152        on_relocate: Option<Box<dyn Fn(*mut u8, *mut u8) + Send + Sync>>,
153    ) -> Option<Handle<T>> {
154        let size = std::mem::size_of::<T>();
155        let align = std::mem::align_of::<T>();
156        
157        let layout = Layout::from_size_align(size, align).ok()?;
158        let ptr = unsafe { alloc(layout) };
159        
160        if ptr.is_null() {
161            return None;
162        }
163
164        let (index, generation) = self.allocate_slot(ptr, size, relocatable, on_relocate);
165        
166        self.total_allocated.fetch_add(size as u64, Ordering::Relaxed);
167        self.active_count.fetch_add(1, Ordering::Relaxed);
168
169        Some(Handle {
170            index,
171            generation,
172            _marker: PhantomData,
173        })
174    }
175
176    /// Allocate a slot for the given pointer.
177    fn allocate_slot(
178        &self,
179        ptr: *mut u8,
180        size: usize,
181        relocatable: bool,
182        on_relocate: Option<Box<dyn Fn(*mut u8, *mut u8) + Send + Sync>>,
183    ) -> (u32, Generation) {
184        let mut free_list = self.free_list.lock();
185        let mut slots = self.slots.lock();
186
187        if let Some(index) = free_list.pop() {
188            let slot = &mut slots[index as usize];
189            slot.ptr = ptr;
190            slot.size = size;
191            slot.generation = slot.generation.wrapping_add(1);
192            slot.in_use = true;
193            slot.relocatable = relocatable;
194            slot.on_relocate = on_relocate;
195            (index, slot.generation)
196        } else {
197            let index = slots.len() as u32;
198            slots.push(Slot {
199                ptr,
200                size,
201                generation: 1,
202                in_use: true,
203                relocatable,
204                on_relocate,
205            });
206            (index, 1)
207        }
208    }
209
210    /// Free a handle.
211    pub fn free<T>(&self, handle: Handle<T>) {
212        if handle.is_dangling() {
213            return;
214        }
215
216        let mut slots = self.slots.lock();
217        let mut free_list = self.free_list.lock();
218
219        if let Some(slot) = slots.get_mut(handle.index as usize) {
220            if slot.in_use && slot.generation == handle.generation {
221                let layout = Layout::from_size_align(slot.size, 1).expect("Invalid layout");
222                unsafe {
223                    dealloc(slot.ptr, layout);
224                }
225                
226                self.total_allocated.fetch_sub(slot.size as u64, Ordering::Relaxed);
227                self.active_count.fetch_sub(1, Ordering::Relaxed);
228                
229                slot.ptr = std::ptr::null_mut();
230                slot.in_use = false;
231                slot.on_relocate = None;
232                free_list.push(handle.index);
233            }
234        }
235    }
236
237    /// Resolve a handle to a pointer.
238    ///
239    /// Returns None if the handle is invalid or has been freed.
240    pub fn resolve<T>(&self, handle: Handle<T>) -> Option<*const T> {
241        if handle.is_dangling() {
242            return None;
243        }
244
245        let slots = self.slots.lock();
246        slots.get(handle.index as usize).and_then(|slot| {
247            if slot.in_use && slot.generation == handle.generation {
248                Some(slot.ptr as *const T)
249            } else {
250                None
251            }
252        })
253    }
254
255    /// Resolve a handle to a mutable pointer.
256    pub fn resolve_mut<T>(&self, handle: Handle<T>) -> Option<*mut T> {
257        if handle.is_dangling() {
258            return None;
259        }
260
261        let slots = self.slots.lock();
262        slots.get(handle.index as usize).and_then(|slot| {
263            if slot.in_use && slot.generation == handle.generation {
264                Some(slot.ptr as *mut T)
265            } else {
266                None
267            }
268        })
269    }
270
271    /// Check if a handle is valid.
272    pub fn is_valid<T>(&self, handle: Handle<T>) -> bool {
273        if handle.is_dangling() {
274            return false;
275        }
276
277        let slots = self.slots.lock();
278        slots.get(handle.index as usize).map_or(false, |slot| {
279            slot.in_use && slot.generation == handle.generation
280        })
281    }
282
283    /// Pin a handle to prevent relocation.
284    pub fn pin<T>(&self, handle: Handle<T>) {
285        if handle.is_dangling() {
286            return;
287        }
288
289        let mut slots = self.slots.lock();
290        if let Some(slot) = slots.get_mut(handle.index as usize) {
291            if slot.in_use && slot.generation == handle.generation {
292                slot.relocatable = false;
293            }
294        }
295    }
296
297    /// Unpin a handle to allow relocation.
298    pub fn unpin<T>(&self, handle: Handle<T>) {
299        if handle.is_dangling() {
300            return;
301        }
302
303        let mut slots = self.slots.lock();
304        if let Some(slot) = slots.get_mut(handle.index as usize) {
305            if slot.in_use && slot.generation == handle.generation {
306                slot.relocatable = true;
307            }
308        }
309    }
310
311    /// Perform defragmentation by relocating memory.
312    ///
313    /// This is a simple compaction that moves relocatable allocations
314    /// to reduce fragmentation. Returns the number of relocations performed.
315    pub fn defragment(&self) -> usize {
316        let mut slots = self.slots.lock();
317        let mut relocations = 0;
318
319        // Simple strategy: try to compact smaller allocations together
320        // This is a basic implementation - production would be more sophisticated
321        
322        let relocatable: Vec<usize> = slots
323            .iter()
324            .enumerate()
325            .filter(|(_, s)| s.in_use && s.relocatable)
326            .map(|(i, _)| i)
327            .collect();
328
329        for idx in relocatable {
330            let slot = &mut slots[idx];
331            
332            // Try to allocate new memory
333            let layout = match Layout::from_size_align(slot.size, 16) {
334                Ok(l) => l,
335                Err(_) => continue,
336            };
337            
338            let new_ptr = unsafe { alloc(layout) };
339            if new_ptr.is_null() {
340                continue;
341            }
342
343            // Copy data
344            unsafe {
345                std::ptr::copy_nonoverlapping(slot.ptr, new_ptr, slot.size);
346            }
347
348            let old_ptr = slot.ptr;
349            
350            // Call relocation callback if set
351            if let Some(ref callback) = slot.on_relocate {
352                callback(old_ptr, new_ptr);
353            }
354
355            // Free old memory
356            unsafe {
357                dealloc(old_ptr, layout);
358            }
359
360            slot.ptr = new_ptr;
361            relocations += 1;
362        }
363
364        self.relocation_count.fetch_add(relocations as u64, Ordering::Relaxed);
365        relocations
366    }
367
368    /// Get total allocated bytes.
369    pub fn total_allocated(&self) -> u64 {
370        self.total_allocated.load(Ordering::Relaxed)
371    }
372
373    /// Get number of active handles.
374    pub fn active_count(&self) -> u32 {
375        self.active_count.load(Ordering::Relaxed)
376    }
377
378    /// Get total relocations performed.
379    pub fn relocation_count(&self) -> u64 {
380        self.relocation_count.load(Ordering::Relaxed)
381    }
382
383    /// Get statistics about the handle allocator.
384    pub fn stats(&self) -> HandleAllocatorStats {
385        let slots = self.slots.lock();
386        let free_list = self.free_list.lock();
387        
388        let relocatable_count = slots.iter().filter(|s| s.in_use && s.relocatable).count();
389        let pinned_count = slots.iter().filter(|s| s.in_use && !s.relocatable).count();
390
391        HandleAllocatorStats {
392            total_allocated: self.total_allocated.load(Ordering::Relaxed),
393            active_handles: self.active_count.load(Ordering::Relaxed),
394            total_slots: slots.len(),
395            free_slots: free_list.len(),
396            relocatable_count,
397            pinned_count,
398            relocation_count: self.relocation_count.load(Ordering::Relaxed),
399        }
400    }
401}
402
impl Default for HandleAllocator {
    /// Equivalent to [`HandleAllocator::new`].
    fn default() -> Self {
        Self::new()
    }
}
408
// SAFETY: HandleAllocator uses internal synchronization — all slot state
// sits behind `Mutex`es and the counters are atomics; the raw `Slot`
// pointers are only touched under those mutexes, and relocation callbacks
// are required to be `Send + Sync`. The manual impls are needed because
// the raw pointers in `Slot` make the type `!Send`/`!Sync` by default.
unsafe impl Send for HandleAllocator {}
unsafe impl Sync for HandleAllocator {}
412
/// Point-in-time statistics snapshot for a [`HandleAllocator`].
#[derive(Debug, Clone, Default)]
pub struct HandleAllocatorStats {
    /// Total live allocated bytes.
    pub total_allocated: u64,
    /// Number of active (allocated, not yet freed) handles.
    pub active_handles: u32,
    /// Total number of slots ever created (in use + reusable).
    pub total_slots: usize,
    /// Number of slots currently on the free list.
    pub free_slots: usize,
    /// Number of in-use allocations eligible for relocation.
    pub relocatable_count: usize,
    /// Number of in-use allocations pinned against relocation.
    pub pinned_count: usize,
    /// Total relocations performed over the allocator's lifetime.
    pub relocation_count: u64,
}
431
/// RAII guard for pinning a handle.
///
/// Pins the handle on construction and unpins it on drop, so the backing
/// memory cannot be moved by `defragment` while the guard lives.
pub struct PinGuard<'a, T> {
    /// Allocator that owns the pinned slot.
    allocator: &'a HandleAllocator,
    /// The handle being pinned.
    handle: Handle<T>,
}
437
impl<'a, T> PinGuard<'a, T> {
    /// Create a new pin guard; pins `handle` immediately.
    ///
    /// NOTE(review): pinning is a plain boolean flag on the slot, not a
    /// counter — overlapping guards for the same handle will unpin it as
    /// soon as the first guard drops.
    pub fn new(allocator: &'a HandleAllocator, handle: Handle<T>) -> Self {
        allocator.pin(handle);
        Self { allocator, handle }
    }

    /// Get the pinned handle (handles are `Copy`).
    pub fn handle(&self) -> Handle<T> {
        self.handle
    }

    /// Resolve to a pointer (valid for lifetime of guard, since the
    /// pinned slot cannot be relocated). `None` if the handle is stale.
    pub fn get(&self) -> Option<*const T> {
        self.allocator.resolve(self.handle)
    }

    /// Resolve to a mutable pointer. `None` if the handle is stale.
    pub fn get_mut(&self) -> Option<*mut T> {
        self.allocator.resolve_mut(self.handle)
    }
}
460
impl<'a, T> Drop for PinGuard<'a, T> {
    /// Unpin the handle, making its memory relocatable again.
    fn drop(&mut self) {
        self.allocator.unpin(self.handle);
    }
}
466
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_basic_alloc_free() {
        let a = HandleAllocator::new();
        let h: Handle<u64> = a.alloc().unwrap();

        assert!(a.is_valid(h));

        // Round-trip a value through the resolved pointers.
        unsafe {
            *a.resolve_mut(h).unwrap() = 42;
            assert_eq!(*a.resolve(h).unwrap(), 42);
        }

        a.free(h);
        assert!(!a.is_valid(h));
    }

    #[test]
    fn test_generation_invalidation() {
        let a = HandleAllocator::new();

        let first: Handle<u64> = a.alloc().unwrap();
        a.free(first);
        let second: Handle<u64> = a.alloc().unwrap();

        // The freed slot is reused: same index, bumped generation.
        assert_eq!(first.raw_index(), second.raw_index());
        assert_ne!(first.raw_generation(), second.raw_generation());

        // Only the newer handle validates.
        assert!(!a.is_valid(first));
        assert!(a.is_valid(second));
    }

    #[test]
    fn test_pin_unpin() {
        let a = HandleAllocator::new();
        let h: Handle<u64> = a.alloc().unwrap();

        a.pin(h);
        let pinned = a.stats();
        assert_eq!(pinned.pinned_count, 1);
        assert_eq!(pinned.relocatable_count, 0);

        a.unpin(h);
        let unpinned = a.stats();
        assert_eq!(unpinned.pinned_count, 0);
        assert_eq!(unpinned.relocatable_count, 1);
    }

    #[test]
    fn test_dangling_handle() {
        let a = HandleAllocator::new();
        let h: Handle<u64> = Handle::dangling();

        assert!(h.is_dangling());
        assert!(!a.is_valid(h));
        assert!(a.resolve(h).is_none());
    }
}