#![no_std]
#![feature(alloc_error_handler)]
#![feature(allocator_api)]
extern crate alloc;
use crate::nt::{ExFreePool, MEMORY_CACHING_TYPE::MmCached};
use alloc::alloc::handle_alloc_error;
use core::{
alloc::{AllocError, Allocator, GlobalAlloc, Layout},
ptr::NonNull,
};
use nt::{MmAllocateContiguousMemorySpecifyCacheNode, MmFreeContiguousMemory, MM_ANY_NODE_OK};
#[cfg(feature = "no-exec")]
use nt::NonPagedPoolNx;
#[cfg(not(feature = "no-exec"))]
use nt::NonPagedPool;
use winapi::shared::ntdef::PHYSICAL_ADDRESS;
#[doc(hidden)] pub mod nt;
/// Four-byte pool tag handed to `ExAllocatePoolWithTag` so this crate's
/// allocations can be attributed in tools such as PoolMon.
///
/// `from_ne_bytes` stores the bytes in memory exactly in the order written
/// here (`t`, `s`, `u`, `R`).
/// NOTE(review): C drivers conventionally write the tag reversed (e.g. the
/// multi-char literal `'tsuR'`) so it *displays* as "Rust"; a byte string is
/// not reversed, so this may show up as "tsuR" in pool viewers — confirm the
/// intended on-screen tag.
#[cfg(feature = "pool-tag")]
const POOL_TAG: u32 = u32::from_ne_bytes([b't', b's', b'u', b'R']);
/// Zero-sized handle to the kernel's non-paged pool.
///
/// Implements both `GlobalAlloc` (so it can be installed with
/// `#[global_allocator]`) and the nightly `Allocator` trait (for collections
/// that take an explicit allocator). Being a stateless unit struct it is
/// trivially `Copy`, `Debug`, and `Default` in addition to `Clone`.
#[derive(Clone, Copy, Debug, Default)]
pub struct KernelAlloc;
/// Zero-sized handle to a physically contiguous memory allocator
/// (`MmAllocateContiguousMemorySpecifyCacheNode`), exposed through the
/// nightly `Allocator` trait.
///
/// Stateless unit struct, so it is trivially `Copy`, `Debug`, and `Default`
/// in addition to `Clone`.
#[derive(Clone, Copy, Debug, Default)]
pub struct PhysicalAllocator;
unsafe impl GlobalAlloc for KernelAlloc {
    /// Allocates `layout.size()` bytes from the non-paged pool
    /// (`NonPagedPoolNx` under the `no-exec` feature, tagged with `POOL_TAG`
    /// under the `pool-tag` feature).
    ///
    /// Per the `GlobalAlloc` contract this returns a null pointer on failure
    /// instead of calling `handle_alloc_error` here: the `alloc` crate invokes
    /// the registered error handler itself on the infallible paths, and
    /// returning null keeps fallible APIs such as `Vec::try_reserve` working
    /// rather than aborting.
    ///
    /// NOTE(review): pool allocations are only guaranteed 16-byte alignment
    /// for sub-page sizes; layouts requesting a larger alignment may receive
    /// mis-aligned memory — confirm callers' alignment requirements.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        #[cfg(feature = "no-exec")]
        let pool_type = NonPagedPoolNx;
        #[cfg(not(feature = "no-exec"))]
        let pool_type = NonPagedPool;

        #[cfg(feature = "pool-tag")]
        let pool = nt::ExAllocatePoolWithTag(pool_type, layout.size(), POOL_TAG);
        #[cfg(not(feature = "pool-tag"))]
        let pool = nt::ExAllocatePool(pool_type, layout.size());

        // A null `pool` falls straight through: null is the failure signal.
        pool as _
    }

    /// Returns `ptr` to the pool. `ExFreePool` accepts allocations made with
    /// or without a tag, so no feature gate is needed here.
    unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
        ExFreePool(ptr as _);
    }
}
unsafe impl Allocator for PhysicalAllocator {
    /// Allocates `layout.size()` bytes of physically contiguous, cached
    /// memory anywhere in the physical address space (lowest = 0,
    /// highest = -1, i.e. no upper bound, no boundary restriction) on any
    /// NUMA node.
    ///
    /// NOTE(review): contiguous allocations are presumably page-aligned, so
    /// alignments up to a page should hold; confirm before relying on larger
    /// alignments.
    ///
    /// # Errors
    /// Returns `AllocError` when
    /// `MmAllocateContiguousMemorySpecifyCacheNode` returns null.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // The `Allocator` contract requires zero-sized requests to succeed;
        // hand out a dangling, well-aligned pointer without calling the OS.
        if layout.size() == 0 {
            // SAFETY: `layout.align()` is never zero, so the pointer is
            // non-null, and it is aligned by construction.
            let dangling = unsafe { NonNull::new_unchecked(layout.align() as *mut u8) };
            return Ok(NonNull::slice_from_raw_parts(dangling, 0));
        }

        // `zeroed` already clears every field; only `highest` needs a
        // distinct value (-1 = no upper physical-address bound).
        let boundary: PHYSICAL_ADDRESS = unsafe { core::mem::zeroed() };
        let lowest: PHYSICAL_ADDRESS = unsafe { core::mem::zeroed() };
        let mut highest: PHYSICAL_ADDRESS = unsafe { core::mem::zeroed() };
        unsafe { *(highest.QuadPart_mut()) = -1 };

        let memory = unsafe {
            MmAllocateContiguousMemorySpecifyCacheNode(
                layout.size(),
                lowest,
                highest,
                boundary,
                MmCached,
                MM_ANY_NODE_OK,
            )
        } as *mut u8;

        // Null return means the request could not be satisfied.
        let memory = NonNull::new(memory).ok_or(AllocError)?;
        Ok(NonNull::slice_from_raw_parts(memory, layout.size()))
    }

    /// Frees memory previously returned by [`Self::allocate`].
    ///
    /// # Safety
    /// `ptr` must have been obtained from `allocate` on this allocator with
    /// the same `layout`.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" never touched the OS; the dangling
        // pointer handed out above must not be passed to the kernel.
        if layout.size() != 0 {
            MmFreeContiguousMemory(ptr.cast().as_ptr());
        }
    }
}
unsafe impl Allocator for KernelAlloc {
    /// Allocates `layout.size()` bytes from the non-paged pool, honoring the
    /// same `no-exec` and `pool-tag` features as the `GlobalAlloc` impl
    /// (the `pool-tag` feature was previously ignored here, making tagged
    /// and untagged allocations inconsistent between the two impls).
    ///
    /// NOTE(review): pool allocations are only guaranteed 16-byte alignment
    /// for sub-page sizes; confirm callers' alignment requirements.
    ///
    /// # Errors
    /// Returns `AllocError` when the pool allocation returns null.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // The `Allocator` contract requires zero-sized requests to succeed;
        // hand out a dangling, well-aligned pointer without calling the OS.
        if layout.size() == 0 {
            // SAFETY: `layout.align()` is never zero, so the pointer is
            // non-null, and it is aligned by construction.
            let dangling = unsafe { NonNull::new_unchecked(layout.align() as *mut u8) };
            return Ok(NonNull::slice_from_raw_parts(dangling, 0));
        }

        #[cfg(feature = "no-exec")]
        let pool_type = NonPagedPoolNx;
        #[cfg(not(feature = "no-exec"))]
        let pool_type = NonPagedPool;

        #[cfg(feature = "pool-tag")]
        let memory =
            unsafe { nt::ExAllocatePoolWithTag(pool_type, layout.size(), POOL_TAG) } as *mut u8;
        #[cfg(not(feature = "pool-tag"))]
        let memory = unsafe { nt::ExAllocatePool(pool_type, layout.size()) } as *mut u8;

        // Null return means the pool is exhausted.
        let memory = NonNull::new(memory).ok_or(AllocError)?;
        Ok(NonNull::slice_from_raw_parts(memory, layout.size()))
    }

    /// Returns `ptr` to the pool. `ExFreePool` accepts allocations made with
    /// or without a tag, so no feature gate is needed here.
    ///
    /// # Safety
    /// `ptr` must have been obtained from `allocate` on this allocator with
    /// the same `layout`.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" never touched the OS; the dangling
        // pointer handed out above must not be passed to the kernel.
        if layout.size() != 0 {
            ExFreePool(ptr.as_ptr() as _);
        }
    }
}