use std::{
fmt,
marker::PhantomData,
thread::{AccessError, LocalKey},
};
/// A zero-sized, copyable token proving the holder is inside a thread-local
/// access scope; borrows obtained through it live for `'a`.
///
/// The `PhantomData<*const &'a ()>` marker carries the lifetime and — because
/// raw pointers are neither `Send` nor `Sync` — keeps the token pinned to the
/// thread whose locals it grants access to (asserted in the test module).
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct LocalScope<'a>(PhantomData<*const &'a ()>);
impl<'a> fmt::Debug for LocalScope<'a> {
    /// Renders as the bare name `LocalScope`; the zero-sized marker field
    /// carries no information worth printing.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("LocalScope")
    }
}
/// Runs `f` with a freshly minted [`LocalScope`] token.
///
/// The higher-ranked bound (`for<'a> FnOnce(LocalScope<'a>) -> R`) forces the
/// closure to work for *every* lifetime, so references borrowed through the
/// scope cannot escape into `R`.
pub fn local_scope<F, R>(f: F) -> R
where
    F: for<'a> FnOnce(LocalScope<'a>) -> R,
{
    // SAFETY: the token only exists for the duration of this call, and the
    // rank-2 bound on `F` keeps the scope lifetime from leaking into `R`.
    let scope = unsafe { LocalScope::new_unchecked() };
    f(scope)
}
/// Re-borrows `tls` at the caller-chosen lifetime `'a` by round-tripping
/// through a raw pointer. This deliberately erases the real borrow's lifetime;
/// it is only sound when the caller (see `local_scope`) guarantees `'a` does
/// not outlive the storage behind `tls`.
#[inline(always)]
fn unsafe_tls_callback<'a, T>(tls: &T) -> &'a T {
    let raw: *const T = tls;
    // SAFETY: `raw` came from a valid reference and the scope machinery
    // around this helper keeps `'a` within the storage's lifetime.
    unsafe { &*raw }
}
impl<'a> LocalScope<'a> {
/// Creates a scope token out of thin air.
///
/// # Safety
///
/// The caller must guarantee that every reference handed out through
/// [`Self::access`]/[`Self::try_access`] (which borrow for `'a`) is dropped
/// before the underlying thread-local storage is destroyed. Prefer
/// [`local_scope`], which chooses `'a` so borrows cannot escape the closure.
pub const unsafe fn new_unchecked() -> Self {
Self(PhantomData)
}
/// Borrows `target`'s value on the current thread for the scope lifetime `'a`.
///
/// # Errors
///
/// Returns [`AccessError`] when the key's value is inaccessible — e.g. when
/// called from within the value's own destructor (exercised by the
/// `re_entrant` test), mirroring `LocalKey::try_with`.
pub fn try_access<T>(self, target: &'static LocalKey<T>) -> Result<&'a T, AccessError> {
// The fn-pointer callback extends the borrow from the `try_with` closure
// scope to `'a`; sound because `'a` is confined by the scope machinery.
target.try_with(unsafe_tls_callback)
}
/// Like [`Self::try_access`], but panics where that would return an error
/// (mirroring `LocalKey::with`); `#[track_caller]` points the panic at the
/// call site.
#[track_caller]
pub fn access<T>(self, target: &'static LocalKey<T>) -> &'a T {
target.with(unsafe_tls_callback)
}
}
#[cfg(test)]
mod test {
// Compile-time guarantees: the token must never cross threads, and it must
// stay zero-sized so passing it around is free.
static_assertions::assert_not_impl_any!(LocalScope<'static>: Send, Sync);
static_assertions::assert_eq_size!(LocalScope<'static>, ());
use crate::*;
use std::{
cell::Cell,
sync::atomic::{AtomicUsize, Ordering},
thread::spawn,
};
// Opening a scope inside a thread-local value's own destructor must not
// grant access to that value (`try_access` errors), and the destructor must
// run exactly once.
#[test]
fn re_entrant() {
static DID_RUN_DESTRUCTOR: AtomicUsize = AtomicUsize::new(0);
thread_local! {
static MY_THING: MyThing = MyThing;
}
struct MyThing;
impl Drop for MyThing {
fn drop(&mut self) {
local_scope(|sc| {
DID_RUN_DESTRUCTOR.fetch_add(1, Ordering::Relaxed);
assert!(
sc.try_access(&MY_THING).is_err(),
"Can't access self while in destructor"
)
})
}
}
// Spawn a fresh thread so its exit triggers the TLS destructor; joining
// guarantees the destructor has run before the final assertion.
spawn(|| {
local_scope(|s| {
let _ = s.try_access(&MY_THING).expect("Testing, should be defined");
})
})
.join()
.unwrap();
assert_eq!(DID_RUN_DESTRUCTOR.load(Ordering::Relaxed), 1);
}
// A single scope can borrow from two different keys at the same time —
// something plain `LocalKey::with` closures cannot express directly.
#[test]
fn swap() {
fn swap_local_cells<T>(a: &'static LocalKey<Cell<T>>, b: &'static LocalKey<Cell<T>>) {
local_scope(|s| s.access(a).swap(s.access(b)))
}
thread_local! {
static A: Cell<u8> = Cell::new(0);
static B: Cell<u8> = Cell::new(1);
}
swap_local_cells(&A, &B);
assert_eq!(A.with(|x| x.get()), 1);
assert_eq!(B.with(|x| x.get()), 0);
}
}