1use core::ops::Deref;
10
/// Opaque error returned by [`JitAlloc`] operations.
///
/// Carries no detail: the underlying allocator's failure cause is discarded
/// at the `map_err(|_| JitAllocError)` call sites in this file.
#[derive(Debug)]
pub struct JitAllocError;
15
/// Access mode requested via [`JitAlloc::protect_jit_memory`].
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum ProtectJitAccess {
    /// Read + write access.
    ReadWrite = 0,
    /// Read + execute access.
    ReadExecute = 1,
}
24
/// Minimal allocator interface for executable (JIT) memory.
///
/// Allocations are returned as a pointer pair `(rx, rw)` addressing the same
/// underlying memory — presumably read-execute and read-write respectively
/// (inferred from the `rx_ptr` naming below; confirm against implementors).
pub trait JitAlloc {
    /// Allocates `size` bytes of JIT memory, returning the `(rx, rw)`
    /// pointer pair on success.
    fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError>;

    /// Releases an allocation previously returned by [`JitAlloc::alloc`].
    ///
    /// # Safety
    /// `rx_ptr` must be the read-execute pointer of a live allocation made
    /// by this same allocator, and it must not be used again afterwards.
    unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError>;

    /// Switches JIT memory protection between writable and executable.
    ///
    /// # Safety
    /// Caller must uphold the concrete implementation's requirements. Note
    /// that the bundled implementations in this file ignore `ptr`/`size`
    /// and change protection more broadly.
    unsafe fn protect_jit_memory(&self, ptr: *const u8, size: usize, access: ProtectJitAccess);

    /// Flushes the instruction cache for `size` bytes starting at `rx_ptr`;
    /// required after writing code before executing it on architectures
    /// whose instruction caches are not coherent with data writes.
    ///
    /// # Safety
    /// `rx_ptr..rx_ptr + size` must denote memory obtained from this
    /// allocator.
    unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize);
}
64
65impl<J: JitAlloc> JitAlloc for &J {
66 fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
67 (*self).alloc(size)
68 }
69
70 unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
71 (*self).release(rx_ptr)
72 }
73
74 #[inline(always)]
75 unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
76 (*self).flush_instruction_cache(rx_ptr, size);
77 }
78
79 #[inline(always)]
80 unsafe fn protect_jit_memory(&self, ptr: *const u8, size: usize, access: ProtectJitAccess) {
81 (*self).protect_jit_memory(ptr, size, access);
82 }
83}
84
85#[cfg(not(feature = "no_std"))]
86impl<J: JitAlloc> JitAlloc for std::sync::LazyLock<J> {
87 fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
88 self.deref().alloc(size)
89 }
90
91 unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
92 self.deref().release(rx_ptr)
93 }
94
95 unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
96 self.deref().flush_instruction_cache(rx_ptr, size);
97 }
98
99 unsafe fn protect_jit_memory(&self, ptr: *const u8, size: usize, access: ProtectJitAccess) {
100 self.deref().protect_jit_memory(ptr, size, access);
101 }
102}
103
/// `no_std` counterpart of the `LazyLock` impl, backed by `spin`'s lazy cell.
#[cfg(feature = "no_std")]
impl<J: JitAlloc, R: spin::RelaxStrategy> JitAlloc for spin::lazy::Lazy<J, fn() -> J, R> {
    fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
        Deref::deref(self).alloc(size)
    }

    unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
        Deref::deref(self).release(rx_ptr)
    }

    unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
        Deref::deref(self).flush_instruction_cache(rx_ptr, size);
    }

    unsafe fn protect_jit_memory(&self, ptr: *const u8, size: usize, access: ProtectJitAccess) {
        Deref::deref(self).protect_jit_memory(ptr, size, access);
    }
}
122
/// Zero-sized, freely copyable handle to the process-wide JIT allocator.
///
/// The backing implementation is feature-dependent: `bundled_jit_alloc`
/// uses the built-in global allocator in this file, while `custom_jit_alloc`
/// delegates to a provider registered with the `global_jit_alloc!` macro.
#[cfg(any(feature = "bundled_jit_alloc", feature = "custom_jit_alloc"))]
// `Debug` added: public API types should be debuggable, and the derive is
// free on a unit struct.
#[derive(Debug, Default, Clone, Copy)]
pub struct GlobalJitAlloc;
137
138#[cfg(feature = "bundled_jit_alloc")]
139mod bundled_jit_alloc {
140 use jit_allocator::JitAllocator;
141
142 use super::*;
143
144 #[inline(always)]
145 fn convert_access(access: ProtectJitAccess) -> jit_allocator::ProtectJitAccess {
146 match access {
147 ProtectJitAccess::ReadExecute => jit_allocator::ProtectJitAccess::ReadExecute,
148 ProtectJitAccess::ReadWrite => jit_allocator::ProtectJitAccess::ReadWrite,
149 }
150 }
151
152 fn flush_instruction_cache(rx_ptr: *const u8, size: usize) {
153 #[cfg(all(target_arch = "arm", target_os = "linux"))]
154 unsafe {
155 const __ARM_NR_CACHEFLUSH: i32 = 0x0f0002;
156 libc::syscall(
157 __ARM_NR_CACHEFLUSH,
158 rx_ptr as usize as u64,
159 (rx_ptr as usize + size) as u64,
160 0,
161 );
162 return;
163 }
164 #[allow(unreachable_code)]
165 jit_allocator::flush_instruction_cache(rx_ptr, size);
166 }
167
    /// Single-threaded shared allocator: `RefCell` supplies the interior
    /// mutability that `alloc`/`release` need (`&mut JitAllocator`).
    ///
    /// `borrow_mut` panics if the allocator is already borrowed, so these
    /// methods must not be re-entered; `RefCell` is also not `Sync`.
    impl JitAlloc for core::cell::RefCell<JitAllocator> {
        fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
            // The underlying allocator's error detail is intentionally dropped.
            self.borrow_mut().alloc(size).map_err(|_| JitAllocError)
        }

        unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
            self.borrow_mut().release(rx_ptr).map_err(|_| JitAllocError)
        }

        // Flushing needs no allocator state, so no borrow is taken.
        #[inline(always)]
        unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
            flush_instruction_cache(rx_ptr, size);
        }

        // `ptr`/`size` are unused: `jit_allocator::protect_jit_memory`
        // presumably switches protection more broadly than one range —
        // confirm against the jit_allocator crate docs.
        #[inline(always)]
        unsafe fn protect_jit_memory(
            &self,
            _ptr: *const u8,
            _size: usize,
            access: ProtectJitAccess,
        ) {
            jit_allocator::protect_jit_memory(convert_access(access));
        }
    }
192
    /// Thread-safe shared allocator guarded by an `RwLock`.
    ///
    /// `alloc`/`release` require `&mut JitAllocator`, so both take the
    /// *write* lock; the read/write distinction buys nothing here and a
    /// `Mutex` (below) is equivalent in practice.
    #[cfg(not(feature = "no_std"))]
    impl JitAlloc for std::sync::RwLock<JitAllocator> {
        fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
            // `unwrap`: panics if the lock is poisoned by an earlier panic.
            self.write().unwrap().alloc(size).map_err(|_| JitAllocError)
        }

        unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
            self.write().unwrap().release(rx_ptr).map_err(|_| JitAllocError)
        }

        // Flushing needs no allocator state, so the lock is never taken.
        #[inline(always)]
        unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
            flush_instruction_cache(rx_ptr, size);
        }

        // `ptr`/`size` unused — see the note on the `RefCell` impl above
        // (same for this impl): protection is switched via the crate-level
        // jit_allocator call, not per-range.
        #[inline(always)]
        unsafe fn protect_jit_memory(
            &self,
            _ptr: *const u8,
            _size: usize,
            access: ProtectJitAccess,
        ) {
            jit_allocator::protect_jit_memory(convert_access(access));
        }
    }
218
219 #[cfg(not(feature = "no_std"))]
220 impl JitAlloc for std::sync::Mutex<JitAllocator> {
221 fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
222 self.lock().unwrap().alloc(size).map_err(|_| JitAllocError)
223 }
224
225 unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
226 self.lock().unwrap().release(rx_ptr).map_err(|_| JitAllocError)
227 }
228
229 #[inline(always)]
230 unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
231 flush_instruction_cache(rx_ptr, size);
232 }
233
234 #[inline(always)]
235 unsafe fn protect_jit_memory(
236 &self,
237 _ptr: *const u8,
238 _size: usize,
239 access: ProtectJitAccess,
240 ) {
241 jit_allocator::protect_jit_memory(convert_access(access));
242 }
243 }
244
    // Lazily-created process-wide allocator shared by every `GlobalJitAlloc`
    // handle (populated on first use in `use_alloc`). `no_std` builds guard
    // it with a spin mutex; std builds use the OS mutex.
    #[cfg(feature = "no_std")]
    static GLOBAL_JIT_ALLOC: spin::Mutex<Option<alloc::boxed::Box<JitAllocator>>> =
        spin::Mutex::new(None);
    #[cfg(not(feature = "no_std"))]
    static GLOBAL_JIT_ALLOC: std::sync::Mutex<Option<Box<JitAllocator>>> =
        std::sync::Mutex::new(None);
251
    impl super::GlobalJitAlloc {
        /// Runs `action` against the process-wide allocator, creating the
        /// allocator on first use while the lock is held.
        fn use_alloc<T>(&self, action: impl FnOnce(&mut JitAllocator) -> T) -> T {
            // Lock acquisition differs by feature: the spin mutex cannot
            // poison, while the std mutex panics here if poisoned.
            #[cfg(feature = "no_std")]
            let mut maybe_alloc = GLOBAL_JIT_ALLOC.lock();
            #[cfg(not(feature = "no_std"))]
            let mut maybe_alloc = GLOBAL_JIT_ALLOC.lock().unwrap();

            // Default-configured allocator, built lazily under the lock.
            let alloc = maybe_alloc.get_or_insert_with(|| JitAllocator::new(Default::default()));
            action(alloc)
        }
    }
263
    /// Bundled-backend implementation: `alloc`/`release` lock the shared
    /// global allocator via `use_alloc`; the remaining operations need no
    /// allocator state and skip the lock entirely.
    impl JitAlloc for super::GlobalJitAlloc {
        fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
            self.use_alloc(|a| a.alloc(size)).map_err(|_| JitAllocError)
        }

        unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
            self.use_alloc(|a| a.release(rx_ptr)).map_err(|_| JitAllocError)
        }

        #[inline(always)]
        unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
            flush_instruction_cache(rx_ptr, size);
        }

        // `ptr`/`size` unused: protection is switched via the crate-level
        // jit_allocator call rather than per-range.
        #[inline(always)]
        unsafe fn protect_jit_memory(
            &self,
            _ptr: *const u8,
            _size: usize,
            access: ProtectJitAccess,
        ) {
            jit_allocator::protect_jit_memory(convert_access(access));
        }
    }
288
    /// Per-thread JIT allocation: each thread owns an independent allocator,
    /// avoiding any cross-thread locking.
    #[cfg(not(feature = "no_std"))]
    mod thread_jit_alloc {
        use core::{cell::UnsafeCell, marker::PhantomData};

        use jit_allocator::JitAllocator;

        #[allow(unused_imports)]
        use super::*;

        thread_local! {
            // One default-configured allocator per thread. Only the owning
            // thread can reach it (thread-local + `ThreadJitAlloc` below is
            // neither `Send` nor `Sync`).
            static THREAD_JIT_ALLOC: UnsafeCell<Box<JitAllocator>> =
                UnsafeCell::new(JitAllocator::new(Default::default()));
        }

        /// Handle to the current thread's JIT allocator.
        ///
        /// The `PhantomData<*mut ()>` field makes this type `!Send + !Sync`
        /// (raw pointers are neither), pinning every handle to the thread
        /// whose allocator it uses.
        #[derive(Default, Clone)]
        pub struct ThreadJitAlloc(PhantomData<*mut ()>);

        impl JitAlloc for ThreadJitAlloc {
            fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
                // SAFETY: the cell is thread-local and the handle is
                // `!Send`/`!Sync`, so no other reference to the allocator
                // exists while this exclusive borrow is alive. Assumes
                // `JitAllocator::alloc` does not re-enter this thread-local
                // — TODO confirm.
                THREAD_JIT_ALLOC
                    .with(|a| unsafe { &mut *a.get() }.alloc(size))
                    .map_err(|_| JitAllocError)
            }

            unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
                // SAFETY: same exclusive-access argument as in `alloc`.
                THREAD_JIT_ALLOC
                    .with(|a| unsafe { &mut *a.get() }.release(rx_ptr))
                    .map_err(|_| JitAllocError)
            }

            // Flushing needs no allocator state; no thread-local access.
            #[inline(always)]
            unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
                flush_instruction_cache(rx_ptr, size);
            }

            // `ptr`/`size` unused: protection is switched via the
            // crate-level jit_allocator call rather than per-range.
            #[inline(always)]
            unsafe fn protect_jit_memory(
                &self,
                _ptr: *const u8,
                _size: usize,
                access: ProtectJitAccess,
            ) {
                jit_allocator::protect_jit_memory(convert_access(access));
            }
        }
    }
338 #[cfg(not(feature = "no_std"))]
339 pub use thread_jit_alloc::*;
340}
341#[cfg(feature = "bundled_jit_alloc")]
342pub use bundled_jit_alloc::*;
343
#[macro_export]
#[cfg(any(feature = "custom_jit_alloc", feature = "build-docs"))]
#[cfg_attr(feature = "build-docs", doc(cfg(feature = "custom_jit_alloc")))]
/// Registers the global JIT allocator provider used by `GlobalJitAlloc`
/// when the `custom_jit_alloc` feature is enabled.
///
/// Accepts either a path to a static implementing `JitAlloc` (first arm —
/// paths are tried before the general expression arm) or an expression
/// evaluating to `&'static dyn JitAlloc`. Expands to the `#[no_mangle]`
/// symbol that the `custom_jit_alloc` module declares `extern`; invoking
/// the macro more than once in a binary yields a duplicate-symbol link
/// error.
macro_rules! global_jit_alloc {
    ($static_var:path) => {
        #[no_mangle]
        extern "Rust" fn _closure_ffi__global_jit_alloc(
        ) -> &'static dyn $crate::jit_alloc::JitAlloc {
            &$static_var
        }
    };
    ($provider:expr) => {
        #[no_mangle]
        extern "Rust" fn _closure_ffi__global_jit_alloc(
        ) -> &'static dyn $crate::jit_alloc::JitAlloc {
            $provider
        }
    };
}
381#[cfg(feature = "custom_jit_alloc")]
382pub use global_jit_alloc;
383
/// User-provided global allocator: every `GlobalJitAlloc` operation is
/// forwarded to the provider registered with `global_jit_alloc!`.
#[cfg(feature = "custom_jit_alloc")]
mod custom_jit_alloc {
    use super::{GlobalJitAlloc, JitAlloc, JitAllocError, ProtectJitAccess};

    extern "Rust" {
        // Resolved at link time to the `#[no_mangle]` function emitted by
        // the `global_jit_alloc!` macro in downstream code.
        fn _closure_ffi__global_jit_alloc() -> &'static dyn JitAlloc;
    }

    /// Fetches the user-registered allocator.
    fn get_global_jit_alloc() -> &'static dyn JitAlloc {
        // SAFETY: the symbol is produced by `global_jit_alloc!` with this
        // exact signature; if no provider is registered the program fails
        // to link rather than reaching this call.
        unsafe { _closure_ffi__global_jit_alloc() }
    }

    impl JitAlloc for GlobalJitAlloc {
        fn alloc(&self, size: usize) -> Result<(*const u8, *mut u8), JitAllocError> {
            get_global_jit_alloc().alloc(size)
        }

        unsafe fn release(&self, rx_ptr: *const u8) -> Result<(), JitAllocError> {
            get_global_jit_alloc().release(rx_ptr)
        }

        unsafe fn flush_instruction_cache(&self, rx_ptr: *const u8, size: usize) {
            get_global_jit_alloc().flush_instruction_cache(rx_ptr, size);
        }

        unsafe fn protect_jit_memory(&self, ptr: *const u8, size: usize, access: ProtectJitAccess) {
            get_global_jit_alloc().protect_jit_memory(ptr, size, access);
        }
    }
}