// comet/internal/stack_bounds.rs
/// Stack bounds for conservative stack marking.
///
/// `origin` is the highest address of the stack and `bound` the lowest;
/// because stacks grow downwards on all supported platforms, live stack
/// memory lies in the half-open range `[bound, origin)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct StackBounds {
    /// Highest (start) address of the stack.
    pub origin: *mut u8,
    /// Lowest (end) address of the stack.
    pub bound: *mut u8,
}
8
#[cfg(any(target_os = "macos", target_os = "ios"))]
impl StackBounds {
    /// Queries the stack bounds of `thread` via the Darwin pthread APIs.
    ///
    /// On Darwin, `pthread_get_stackaddr_np` returns the *highest* address
    /// of the thread's stack, and the stack grows downwards, so the lowest
    /// valid address is `origin - size`.
    ///
    /// # Safety
    /// `thread` must be a valid, live pthread handle.
    pub unsafe fn new_thread_stack_bounds(thread: libc::pthread_t) -> Self {
        let origin = libc::pthread_get_stackaddr_np(thread).cast::<u8>();
        let size = libc::pthread_get_stacksize_np(thread);
        // Fixed: the stack grows *down* from `origin`, so the bound lies
        // `size` bytes below it. The previous `origin.add(size)` pointed
        // past the top of the stack and produced an inverted range.
        let bound = origin.sub(size);
        Self { origin, bound }
    }

    /// Stack bounds of the calling thread.
    pub fn current_thread_stack_bounds() -> Self {
        unsafe { Self::new_thread_stack_bounds(thread_self() as _) }
    }
}
24
25#[cfg(all(unix, not(any(target_os = "macos", target_os = "ios"))))]
26impl StackBounds {
27    #[cfg(target_os = "openbsd")]
28    unsafe fn new_thread_stack_bounds(thread: libc::pthread_t) -> Self {
29        let mut stack: libc::stack_t = core::mem::MaybeUninit::zeroed().assume_init();
30        libc::pthread_stackseg_np(thread, &mut stack);
31        let origin = stack.ss_sp;
32        let bound = stack.origin.sub(stack.ss_size);
33        return Self {
34            origin: origin.cast(),
35            bound: bound.cast(),
36        };
37    }
38
39    #[cfg(not(target_os = "openbsd"))]
40    unsafe fn new_thread_stack_bounds(thread: libc::pthread_t) -> Self {
41        let mut bound = core::ptr::null_mut::<libc::c_void>();
42        let mut stack_size = 0;
43        let mut sattr: libc::pthread_attr_t = core::mem::MaybeUninit::zeroed().assume_init();
44        libc::pthread_attr_init(&mut sattr);
45        #[cfg(any(target_os = "freebsd", target_os = "netbsd"))]
46        {
47            libc::pthread_attr_get_np(thread, &mut sattr);
48        }
49        #[cfg(not(any(target_os = "freebsd", target_os = "netbsd")))]
50        {
51            libc::pthread_getattr_np(thread, &mut sattr);
52        }
53        let _rc = libc::pthread_attr_getstack(&sattr, &mut bound, &mut stack_size);
54        libc::pthread_attr_destroy(&mut sattr);
55        let origin = bound.add(stack_size);
56        Self {
57            bound: bound.cast(),
58            origin: origin.cast(),
59        }
60    }
61
62    pub fn current_thread_stack_bounds() -> Self {
63        unsafe { Self::new_thread_stack_bounds(thread_self() as _) }
64    }
65}
66#[allow(dead_code)]
67pub(crate) fn thread_self() -> u64 {
68    #[cfg(windows)]
69    unsafe {
70        extern "C" {
71            fn GetCurrentThreadId() -> u32;
72        }
73        GetCurrentThreadId() as u64
74    }
75    #[cfg(unix)]
76    unsafe {
77        libc::pthread_self() as u64
78    }
79}
#[cfg(windows)]
impl StackBounds {
    /// Computes the calling thread's stack bounds by walking the virtual
    /// memory regions of the stack reservation with `VirtualQuery`.
    ///
    /// # Safety
    /// Performs raw Win32 calls and must run on the thread whose bounds
    /// are wanted: it locates the stack by querying the address of one of
    /// its own locals.
    pub unsafe fn current_thread_stack_bounds_internal() -> Self {
        use winapi::um::memoryapi::*;
        use winapi::um::winnt::*;
        // Query the region containing `stack_origin` itself (a stack
        // local) to find the committed part of this thread's stack.
        // MEMORY_BASIC_INFORMATION is a plain C struct; zeroed init is ok.
        let mut stack_origin: MEMORY_BASIC_INFORMATION =
            core::mem::MaybeUninit::zeroed().assume_init();
        VirtualQuery(
            &mut stack_origin as *mut MEMORY_BASIC_INFORMATION as *mut _,
            &mut stack_origin,
            core::mem::size_of::<MEMORY_BASIC_INFORMATION>(),
        );

        // Highest stack address: the end of the committed region we are in.
        let origin = stack_origin
            .BaseAddress
            .cast::<u8>()
            .add(stack_origin.RegionSize as _);
        // The stack on Windows consists out of three parts (uncommitted memory, a guard page and present
        // committed memory). The 3 regions have different BaseAddresses but all have the same AllocationBase
        // since they are all from the same VirtualAlloc. The 3 regions are laid out in memory (from high to
        // low) as follows:
        //
        //    High |-------------------|  -----
        //         | committedMemory   |    ^
        //         |-------------------|    |
        //         | guardPage         | reserved memory for the stack
        //         |-------------------|    |
        //         | uncommittedMemory |    v
        //    Low  |-------------------|  ----- <--- stackOrigin.AllocationBase
        //
        // See http://msdn.microsoft.com/en-us/library/ms686774%28VS.85%29.aspx for more information.

        // Region at the very bottom of the reservation (its AllocationBase):
        // the uncommitted memory.
        let mut uncommitted_memory: MEMORY_BASIC_INFORMATION =
            core::mem::MaybeUninit::zeroed().assume_init();
        VirtualQuery(
            stack_origin.AllocationBase as *mut _,
            &mut uncommitted_memory,
            core::mem::size_of::<MEMORY_BASIC_INFORMATION>(),
        );
        // Region immediately above the uncommitted part: the guard page.
        let mut guard_page: MEMORY_BASIC_INFORMATION =
            core::mem::MaybeUninit::zeroed().assume_init();
        VirtualQuery(
            uncommitted_memory
                .BaseAddress
                .cast::<u8>()
                .add(uncommitted_memory.RegionSize as _)
                .cast(),
            &mut guard_page,
            core::mem::size_of::<MEMORY_BASIC_INFORMATION>(),
        );
        // Lowest address of the entire reservation.
        let end_of_stack = stack_origin.AllocationBase as *mut u8;
        // NOTE(review): the bound skips only `guard_page.RegionSize` above
        // the allocation base, not the uncommitted region's size as well —
        // confirm against the upstream (WebKit StackBounds) computation
        // whether `uncommitted_memory.RegionSize` should also be added.
        let bound = end_of_stack.add(guard_page.RegionSize as _);
        Self {
            origin: origin as *mut u8,
            bound,
        }
    }

    /// Stack bounds of the calling thread.
    pub fn current_thread_stack_bounds() -> Self {
        unsafe { Self::current_thread_stack_bounds_internal() }
    }
}