use core::alloc::Layout;
use core::ptr::{self, NonNull};
#[cfg(feature = "bitflags")]
pub use bitflags;
#[cfg(not(feature = "bitflags"))]
pub mod bitflags {
    //! Minimal stand-in for the `bitflags` crate, compiled only when the
    //! `bitflags` Cargo feature is disabled. It reproduces just the API
    //! surface that generated bindings rely on.

    /// Declares a flags newtype over an integer representation `$repr`.
    ///
    /// Supported subset of the real `bitflags!` macro: per-flag
    /// associated constants, `empty()`, `from_bits_retain()`, `bits()`,
    /// and the bitwise `|`, `&`, `^` operators.
    #[macro_export]
    macro_rules! bitflags {
        (
            $(#[$attr:meta])*
            $vis:vis struct $name:ident : $repr:ty {
                $(
                    $(#[$flag_attr:meta])*
                    const $flag:ident = $val:expr;
                )*
            }
        ) => {
            $(#[$attr])*
            $vis struct $name {
                bits: $repr,
            }
            impl $name {
                $(
                    $(#[$flag_attr])*
                    $vis const $flag: Self = Self { bits: $val };
                )*
                // Flags value with no bits set.
                $vis fn empty() -> Self {
                    Self { bits: 0 }
                }
                // Wraps raw bits without validating them, mirroring the
                // real crate's `from_bits_retain`.
                $vis fn from_bits_retain(bits: $repr) -> Self {
                    Self { bits }
                }
                // Returns the underlying raw bits.
                $vis fn bits(&self) -> $repr {
                    self.bits
                }
            }
            impl core::ops::BitOr<$name> for $name {
                type Output = Self;
                fn bitor(self, rhs: $name) -> $name {
                    Self { bits: self.bits | rhs.bits }
                }
            }
            impl core::ops::BitAnd<$name> for $name {
                type Output = Self;
                fn bitand(self, rhs: $name) -> $name {
                    Self { bits: self.bits & rhs.bits }
                }
            }
            impl core::ops::BitXor<$name> for $name {
                type Output = Self;
                fn bitxor(self, rhs: $name) -> $name {
                    Self { bits: self.bits ^ rhs.bits }
                }
            }
        };
    }
    // Re-export so `crate::bitflags::bitflags!` resolves the same way
    // whether the shim or the real crate is in use.
    pub use crate::bitflags;
}
#[cfg(not(target_env = "p2"))]
mod wit_bindgen_cabi_realloc;
/// Forces the `cabi_realloc` export to survive linking on wasm
/// (non-`p2`) targets; a no-op everywhere else.
///
/// Taking the address of the `extern "C"` symbol and storing it in a
/// `#[used]` static creates a reference the linker cannot strip, so
/// the function defined in `wit_bindgen_cabi_realloc` stays in the
/// final binary even though nothing calls it directly from Rust.
pub fn maybe_link_cabi_realloc() {
    #[cfg(all(target_family = "wasm", not(target_env = "p2")))]
    {
        // Declaration only; the definition lives in the
        // `wit_bindgen_cabi_realloc` module / this crate's export.
        unsafe extern "C" {
            fn cabi_realloc(
                old_ptr: *mut u8,
                old_len: usize,
                align: usize,
                new_len: usize,
            ) -> *mut u8;
        }
        // `#[used]` keeps this static — and therefore the function it
        // points at — alive through dead-code elimination and linking.
        #[used]
        static _NAME_DOES_NOT_MATTER: unsafe extern "C" fn(
            *mut u8,
            usize,
            usize,
            usize,
        ) -> *mut u8 = cabi_realloc;
    }
}
/// Canonical-ABI `realloc` implementation backed by Rust's global
/// allocator.
///
/// Behavior by case:
/// * `old_len == 0 && new_len == 0`: returns `align` itself as a
///   dangling, well-aligned, non-null pointer (callers of the
///   canonical ABI never dereference a zero-length allocation).
/// * `old_len == 0`: fresh allocation of `new_len` bytes.
/// * otherwise: resizes the existing block via `realloc`.
///
/// Never returns null: on allocation failure debug builds report
/// through `handle_alloc_error`, release builds trap.
///
/// # Safety
///
/// `old_ptr`/`old_len`/`align` must describe an allocation previously
/// produced by this function (or be the zero-length case), and `align`
/// must be a valid `Layout` alignment (non-zero power of two) — the
/// layouts below are constructed with `from_size_align_unchecked`.
#[cfg(not(target_env = "p2"))]
pub unsafe fn cabi_realloc(
    old_ptr: *mut u8,
    old_len: usize,
    align: usize,
    new_len: usize,
) -> *mut u8 {
    use alloc::alloc::{Layout, alloc as allocate, handle_alloc_error, realloc};
    // Deferred init: exactly one branch below assigns `layout`, and the
    // error path at the bottom reuses whichever one was chosen.
    let layout;
    let ptr = unsafe {
        if old_len == 0 {
            if new_len == 0 {
                // Zero-sized "allocation": any well-aligned non-null
                // pointer suffices, and `align` is the smallest one.
                return align as *mut u8;
            }
            // SAFETY: caller guarantees `align` is valid and the size
            // does not overflow when rounded up (see `# Safety`).
            layout = Layout::from_size_align_unchecked(new_len, align);
            allocate(layout)
        } else {
            debug_assert_ne!(new_len, 0, "non-zero old_len requires non-zero new_len!");
            // SAFETY: this layout must match the original allocation,
            // which the caller guarantees (see `# Safety`).
            layout = Layout::from_size_align_unchecked(old_len, align);
            realloc(old_ptr, layout, new_len)
        }
    };
    if ptr.is_null() {
        // The canonical ABI has no way to report failure, so abort.
        if cfg!(debug_assertions) {
            handle_alloc_error(layout);
        } else {
            // Release builds trap directly, avoiding the code size of
            // the `handle_alloc_error` machinery.
            #[cfg(target_arch = "wasm32")]
            core::arch::wasm32::unreachable();
            #[cfg(not(target_arch = "wasm32"))]
            unreachable!();
        }
    }
    return ptr;
}
/// Runs `__wasm_call_ctors` at most once per instance.
///
/// Exported wasm functions may be invoked before the module's
/// constructors have run, so generated bindings call this at every
/// entry point; only the first call actually invokes the ctors.
///
/// Improvements over a `static mut bool` flag:
/// * an `AtomicBool` avoids unsynchronized access to a mutable static
///   (UB-prone and linted against in newer editions);
/// * the flag is flipped *before* calling the ctors, so a reentrant
///   call made from within a constructor cannot recurse.
#[cfg(target_arch = "wasm32")]
pub fn run_ctors_once() {
    use core::sync::atomic::{AtomicBool, Ordering};
    static RUN: AtomicBool = AtomicBool::new(false);
    // Relaxed is sufficient: this guards a single flag and the targets
    // in question are single-threaded wasm — TODO confirm if threaded
    // wasm targets are ever supported here.
    if !RUN.swap(true, Ordering::Relaxed) {
        // Synthesized by the linker; calls all static constructors.
        unsafe extern "C" {
            fn __wasm_call_ctors();
        }
        // SAFETY: the swap above guarantees this runs at most once,
        // which is the contract of `__wasm_call_ctors`.
        unsafe {
            __wasm_call_ctors();
        }
    }
}
#[cfg(feature = "async")]
pub mod async_support;
/// Guard that owns a heap allocation and frees it on drop unless
/// `forget` is called; used to scope temporary ABI buffers.
pub struct Cleanup {
    // Start of the allocation. Never null: zero-sized requests are
    // handled in `Cleanup::new` without constructing a `Cleanup`.
    ptr: NonNull<u8>,
    // Layout the allocation was made with; required for `dealloc`.
    layout: Layout,
}

// SAFETY: `Cleanup` is the sole owner of its allocation and only
// touches the memory in `drop`, so moving it to, or sharing it with,
// another thread cannot race with other access through it.
unsafe impl Send for Cleanup {}
unsafe impl Sync for Cleanup {}
impl Cleanup {
    /// Allocates `layout` bytes, returning the raw pointer together
    /// with a guard that frees it on drop.
    ///
    /// A zero-sized layout yields `(null, None)` — nothing to free.
    /// On allocation failure this diverges via `handle_alloc_error`.
    pub fn new(layout: Layout) -> (*mut u8, Option<Cleanup>) {
        use alloc::alloc;
        if layout.size() == 0 {
            return (ptr::null_mut(), None);
        }
        // SAFETY: `layout` has a non-zero size, checked just above.
        let raw = unsafe { alloc::alloc(layout) };
        // `handle_alloc_error` diverges, so on the `None` arm we never
        // produce a pointer.
        let ptr = NonNull::new(raw).unwrap_or_else(|| alloc::handle_alloc_error(layout));
        (ptr.as_ptr(), Some(Cleanup { ptr, layout }))
    }

    /// Disarms the guard, deliberately leaking the allocation because
    /// ownership has been transferred elsewhere.
    pub fn forget(self) {
        core::mem::forget(self);
    }
}
impl Drop for Cleanup {
    /// Poisons the buffer and returns it to the global allocator.
    fn drop(&mut self) {
        // SAFETY: `ptr`/`layout` describe a live, non-zero-sized
        // allocation created in `Cleanup::new`, and `drop` runs at
        // most once, so it is freed exactly once here.
        unsafe {
            // Fill with 0xff before freeing so any use-after-free in
            // generated bindings reads obviously-poisoned bytes rather
            // than plausible stale data. `write_bytes` replaces the
            // original per-byte loop; it compiles to a single memset
            // and states the intent directly.
            ptr::write_bytes(self.ptr.as_ptr(), 0xff, self.layout.size());
            alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout);
        }
    }
}