use crate::convert::{FromWasmAbi, IntoWasmAbi, WasmAbi, WasmRet};
use crate::describe::inform;
use crate::JsValue;
use core::borrow::{Borrow, BorrowMut};
use core::cell::{Cell, UnsafeCell};
use core::convert::Infallible;
use core::ops::{Deref, DerefMut};
#[cfg(target_feature = "atomics")]
use core::sync::atomic::{AtomicU8, Ordering};
use wasm_bindgen_shared::tys::FUNCTION;

use alloc::alloc::{alloc, dealloc, realloc, Layout};
use alloc::rc::Rc;
use once_cell::unsync::Lazy;

pub extern crate alloc;
pub extern crate core;
#[cfg(feature = "std")]
pub extern crate std;

pub mod marker;

pub use wasm_bindgen_macro::BindgenedStruct;

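// Casts a value between two wasm-bindgen ABI types. The inner
// `breaks_if_inlined` shim writes a descriptor for the cast (via `inform` and
// the `describe` impls) and passes it to `__wbindgen_describe_cast`; the
// wasm-bindgen CLI is expected to pick this descriptor up during
// post-processing and emit the matching conversion. `#[inline(never)]` keeps
// the shim a distinct function so that association remains possible.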
pub fn wbg_cast<From: IntoWasmAbi, To: FromWasmAbi>(value: From) -> To {
    #[inline(never)]
    #[cfg_attr(wasm_bindgen_unstable_test_coverage, coverage(off))]
    unsafe extern "C" fn breaks_if_inlined<From: IntoWasmAbi, To: FromWasmAbi>(
        prim1: <From::Abi as WasmAbi>::Prim1,
        prim2: <From::Abi as WasmAbi>::Prim2,
        prim3: <From::Abi as WasmAbi>::Prim3,
        prim4: <From::Abi as WasmAbi>::Prim4,
    ) -> WasmRet<To::Abi> {
        inform(FUNCTION);
        inform(0);
        inform(1);
        From::describe();
        To::describe();
        To::describe();
        core::ptr::read(super::__wbindgen_describe_cast(
            breaks_if_inlined::<From, To> as _,
            &(prim1, prim2, prim3, prim4) as *const _ as _,
        ) as _)
    }

    let (prim1, prim2, prim3, prim4) = value.into_abi().split();

    unsafe { To::from_abi(breaks_if_inlined::<From, To>(prim1, prim2, prim3, prim4).join()) }
}

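// Reserved slots in the JS value heap. Indices `JSIDX_OFFSET..JSIDX_RESERVED`
// hold the singletons `undefined`, `null`, `true` and `false`; ordinary heap
// entries start at `JSIDX_RESERVED`, and the JS glue has to agree on this
// layout.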
pub(crate) const JSIDX_OFFSET: u32 = 128;
pub(crate) const JSIDX_UNDEFINED: u32 = JSIDX_OFFSET;
pub(crate) const JSIDX_NULL: u32 = JSIDX_OFFSET + 1;
pub(crate) const JSIDX_TRUE: u32 = JSIDX_OFFSET + 2;
pub(crate) const JSIDX_FALSE: u32 = JSIDX_OFFSET + 3;
pub(crate) const JSIDX_RESERVED: u32 = JSIDX_OFFSET + 4;

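// Newtype for statics that are logically thread-local. Without the `atomics`
// target feature the Wasm module is single-threaded, so the unconditional
// `Sync`/`Send` impls below are sound; with `atomics` enabled the wrapped
// statics are instead declared `#[thread_local]` (see `GLOBAL_EXNDATA`).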
pub(crate) struct ThreadLocalWrapper<T>(pub(crate) T);

#[cfg(not(target_feature = "atomics"))]
unsafe impl<T> Sync for ThreadLocalWrapper<T> {}

#[cfg(not(target_feature = "atomics"))]
unsafe impl<T> Send for ThreadLocalWrapper<T> {}

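// `no_std`-friendly stand-in for `std::cell::LazyCell`, backed by
// `once_cell::unsync::Lazy` and made `Send`/`Sync` through
// `ThreadLocalWrapper` on single-threaded builds.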
pub struct LazyCell<T, F = fn() -> T>(ThreadLocalWrapper<Lazy<T, F>>);

impl<T, F> LazyCell<T, F> {
    pub const fn new(init: F) -> LazyCell<T, F> {
        Self(ThreadLocalWrapper(Lazy::new(init)))
    }
}

impl<T, F: FnOnce() -> T> LazyCell<T, F> {
    pub fn force(this: &Self) -> &T {
        &this.0 .0
    }
}

impl<T> Deref for LazyCell<T> {
    type Target = T;

    fn deref(&self) -> &T {
        ::once_cell::unsync::Lazy::force(&self.0 .0)
    }
}

#[cfg(not(target_feature = "atomics"))]
pub use LazyCell as LazyLock;

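// With `atomics` enabled, a small hand-rolled `std::sync::LazyLock`
// equivalent: `state` moves UNINIT -> INITIALIZING -> INIT, the CAS winner
// runs the initializer and publishes the value with a `Release` store, and
// losers spin by re-loading `state` until it reads INIT.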
#[cfg(target_feature = "atomics")]
pub struct LazyLock<T, F = fn() -> T> {
    state: AtomicU8,
    data: UnsafeCell<Data<T, F>>,
}

#[cfg(target_feature = "atomics")]
enum Data<T, F> {
    Value(T),
    Init(F),
}

#[cfg(target_feature = "atomics")]
impl<T, F> LazyLock<T, F> {
    const STATE_UNINIT: u8 = 0;
    const STATE_INITIALIZING: u8 = 1;
    const STATE_INIT: u8 = 2;

    pub const fn new(init: F) -> LazyLock<T, F> {
        Self {
            state: AtomicU8::new(Self::STATE_UNINIT),
            data: UnsafeCell::new(Data::Init(init)),
        }
    }
}

#[cfg(target_feature = "atomics")]
impl<T> Deref for LazyLock<T> {
    type Target = T;

    fn deref(&self) -> &T {
        let mut state = self.state.load(Ordering::Acquire);

        loop {
            match state {
                Self::STATE_INIT => {
                    let Data::Value(value) = (unsafe { &*self.data.get() }) else {
                        unreachable!()
                    };
                    return value;
                }
                Self::STATE_UNINIT => {
                    if let Err(new_state) = self.state.compare_exchange_weak(
                        Self::STATE_UNINIT,
                        Self::STATE_INITIALIZING,
                        Ordering::Acquire,
                        Ordering::Relaxed,
                    ) {
                        state = new_state;
                        continue;
                    }

                    let data = unsafe { &mut *self.data.get() };
                    let Data::Init(init) = data else {
                        unreachable!()
                    };
                    *data = Data::Value(init());
                    self.state.store(Self::STATE_INIT, Ordering::Release);
                    state = Self::STATE_INIT;
                }
                Self::STATE_INITIALIZING => {
                    state = self.state.load(Ordering::Acquire);
                }
                _ => unreachable!(),
            }
        }
    }
}

#[cfg(target_feature = "atomics")]
unsafe impl<T, F: Sync> Sync for LazyLock<T, F> {}

#[cfg(target_feature = "atomics")]
unsafe impl<T, F: Send> Send for LazyLock<T, F> {}

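// Helpers behind `JsThreadLocal`. Both variants rely on an `init` function
// being in scope at the expansion site (the proc-macro generated code is
// presumed to provide it). Single-threaded builds share one lazily
// initialised static; `atomics` builds place the static in `#[thread_local]`
// storage and hand out a per-thread pointer through a closure.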
#[macro_export]
#[doc(hidden)]
#[cfg(not(target_feature = "atomics"))]
macro_rules! __wbindgen_thread_local {
    ($wasm_bindgen:tt, $actual_ty:ty) => {{
        static _VAL: $wasm_bindgen::__rt::LazyCell<$actual_ty> =
            $wasm_bindgen::__rt::LazyCell::new(init);
        $wasm_bindgen::JsThreadLocal { __inner: &_VAL }
    }};
}

#[macro_export]
#[doc(hidden)]
#[cfg(target_feature = "atomics")]
#[allow_internal_unstable(thread_local)]
macro_rules! __wbindgen_thread_local {
    ($wasm_bindgen:tt, $actual_ty:ty) => {{
        #[thread_local]
        static _VAL: $wasm_bindgen::__rt::LazyCell<$actual_ty> =
            $wasm_bindgen::__rt::LazyCell::new(init);
        $wasm_bindgen::JsThreadLocal {
            __inner: || unsafe { $wasm_bindgen::__rt::LazyCell::force(&_VAL) as *const $actual_ty },
        }
    }};
}

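// Wraps macro-generated items in `#[coverage(off)]` when the unstable
// coverage config is active, and otherwise passes the item through untouched.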
#[macro_export]
#[doc(hidden)]
#[cfg(not(wasm_bindgen_unstable_test_coverage))]
macro_rules! __wbindgen_coverage {
    ($item:item) => {
        $item
    };
}

#[macro_export]
#[doc(hidden)]
#[cfg(wasm_bindgen_unstable_test_coverage)]
#[allow_internal_unstable(coverage_attribute)]
macro_rules! __wbindgen_coverage {
    ($item:item) => {
        #[coverage(off)]
        $item
    };
}

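// Guard for pointers handed back in from the JS side: a null pointer is
// turned into a thrown JS error instead of undefined behaviour.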
#[inline]
pub fn assert_not_null<T>(s: *mut T) {
    if s.is_null() {
        throw_null();
    }
}

#[cold]
#[inline(never)]
fn throw_null() -> ! {
    super::throw_str("null pointer passed to rust");
}

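// `RefCell` look-alike whose borrow violations throw a JS exception (via
// `borrow_fail`) rather than panicking. `borrow` counts shared borrows;
// `usize::MAX` marks an active mutable borrow.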
pub struct WasmRefCell<T: ?Sized> {
    borrow: Cell<usize>,
    value: UnsafeCell<T>,
}

impl<T: ?Sized> WasmRefCell<T> {
    pub fn new(value: T) -> WasmRefCell<T>
    where
        T: Sized,
    {
        WasmRefCell {
            value: UnsafeCell::new(value),
            borrow: Cell::new(0),
        }
    }

    pub fn get_mut(&mut self) -> &mut T {
        unsafe { &mut *self.value.get() }
    }

    pub fn borrow(&self) -> Ref<'_, T> {
        unsafe {
            if self.borrow.get() == usize::MAX {
                borrow_fail();
            }
            self.borrow.set(self.borrow.get() + 1);
            Ref {
                value: &*self.value.get(),
                borrow: &self.borrow,
            }
        }
    }

    pub fn borrow_mut(&self) -> RefMut<'_, T> {
        unsafe {
            if self.borrow.get() != 0 {
                borrow_fail();
            }
            self.borrow.set(usize::MAX);
            RefMut {
                value: &mut *self.value.get(),
                borrow: &self.borrow,
            }
        }
    }

    pub fn into_inner(self) -> T
    where
        T: Sized,
    {
        self.value.into_inner()
    }
}

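// RAII guards mirroring `core::cell::{Ref, RefMut}`: dropping a `Ref`
// decrements the shared-borrow count, dropping a `RefMut` resets it to zero.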
pub struct Ref<'b, T: ?Sized + 'b> {
    value: &'b T,
    borrow: &'b Cell<usize>,
}

impl<T: ?Sized> Deref for Ref<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        self.value
    }
}

impl<T: ?Sized> Borrow<T> for Ref<'_, T> {
    #[inline]
    fn borrow(&self) -> &T {
        self.value
    }
}

impl<T: ?Sized> Drop for Ref<'_, T> {
    fn drop(&mut self) {
        self.borrow.set(self.borrow.get() - 1);
    }
}

pub struct RefMut<'b, T: ?Sized + 'b> {
    value: &'b mut T,
    borrow: &'b Cell<usize>,
}

impl<T: ?Sized> Deref for RefMut<'_, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        self.value
    }
}

impl<T: ?Sized> DerefMut for RefMut<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        self.value
    }
}

impl<T: ?Sized> Borrow<T> for RefMut<'_, T> {
    #[inline]
    fn borrow(&self) -> &T {
        self.value
    }
}

impl<T: ?Sized> BorrowMut<T> for RefMut<'_, T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut T {
        self.value
    }
}

impl<T: ?Sized> Drop for RefMut<'_, T> {
    fn drop(&mut self) {
        self.borrow.set(0);
    }
}

fn borrow_fail() -> ! {
    super::throw_str(
        "recursive use of an object detected which would lead to \
         unsafe aliasing in rust",
    );
}

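// Owning borrow guards: `RcRef`/`RcRefMut` hold the backing
// `Rc<WasmRefCell<T>>` next to the guard so the borrow stays valid for as
// long as the struct lives. The `'static` lifetime on the inner guard is only
// sound because the `Rc` is stored alongside it and the guard itself is never
// exposed.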
pub struct RcRef<T: ?Sized + 'static> {
    ref_: Ref<'static, T>,
    _rc: Rc<WasmRefCell<T>>,
}

impl<T: ?Sized> RcRef<T> {
    pub fn new(rc: Rc<WasmRefCell<T>>) -> Self {
        let ref_ = unsafe { (*Rc::as_ptr(&rc)).borrow() };
        Self { _rc: rc, ref_ }
    }
}

impl<T: ?Sized> Deref for RcRef<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.ref_
    }
}

impl<T: ?Sized> Borrow<T> for RcRef<T> {
    #[inline]
    fn borrow(&self) -> &T {
        &self.ref_
    }
}

pub struct RcRefMut<T: ?Sized + 'static> {
    ref_: RefMut<'static, T>,
    _rc: Rc<WasmRefCell<T>>,
}

impl<T: ?Sized> RcRefMut<T> {
    pub fn new(rc: Rc<WasmRefCell<T>>) -> Self {
        let ref_ = unsafe { (*Rc::as_ptr(&rc)).borrow_mut() };
        Self { _rc: rc, ref_ }
    }
}

impl<T: ?Sized> Deref for RcRefMut<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.ref_
    }
}

impl<T: ?Sized> DerefMut for RcRefMut<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        &mut self.ref_
    }
}

impl<T: ?Sized> Borrow<T> for RcRefMut<T> {
    #[inline]
    fn borrow(&self) -> &T {
        &self.ref_
    }
}

impl<T: ?Sized> BorrowMut<T> for RcRefMut<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut T {
        &mut self.ref_
    }
}

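// Allocator entry points exported to the generated JS glue so it can copy
// data (strings, slices, boxed values) into linear memory. A zero-sized
// request returns `align` as a well-aligned, non-null dangling pointer.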
#[no_mangle]
pub extern "C" fn __wbindgen_malloc(size: usize, align: usize) -> *mut u8 {
    if let Ok(layout) = Layout::from_size_align(size, align) {
        unsafe {
            if layout.size() > 0 {
                let ptr = alloc(layout);
                if !ptr.is_null() {
                    return ptr;
                }
            } else {
                return align as *mut u8;
            }
        }
    }

    malloc_failure();
}

#[no_mangle]
pub unsafe extern "C" fn __wbindgen_realloc(
    ptr: *mut u8,
    old_size: usize,
    new_size: usize,
    align: usize,
) -> *mut u8 {
    debug_assert!(old_size > 0);
    debug_assert!(new_size > 0);
    if let Ok(layout) = Layout::from_size_align(old_size, align) {
        let ptr = realloc(ptr, layout, new_size);
        if !ptr.is_null() {
            return ptr;
        }
    }
    malloc_failure();
}

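// Allocation-failure path: a readable JS error in debug builds, otherwise an
// abort or `unreachable`, presumably to keep the error path small in release
// builds.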
#[cold]
fn malloc_failure() -> ! {
    cfg_if::cfg_if! {
        if #[cfg(debug_assertions)] {
            super::throw_str("invalid malloc request")
        } else if #[cfg(feature = "std")] {
            std::process::abort();
        } else if #[cfg(all(
            target_arch = "wasm32",
            any(target_os = "unknown", target_os = "none")
        ))] {
            core::arch::wasm32::unreachable();
        } else {
            unreachable!()
        }
    }
}

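// Freeing a zero-sized allocation is a no-op, matching the dangling pointer
// that `__wbindgen_malloc` returns when `size == 0`.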
#[no_mangle]
pub unsafe extern "C" fn __wbindgen_free(ptr: *mut u8, size: usize, align: usize) {
    if size == 0 {
        return;
    }
    let layout = Layout::from_size_align_unchecked(size, align);
    dealloc(ptr, layout);
}

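// Appears to exist only to force a reference to `crate::link::link_intrinsics`
// from generated code so the memory intrinsics above are not stripped at link
// time.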
#[cfg_attr(wasm_bindgen_unstable_test_coverage, coverage(off))]
pub fn link_mem_intrinsics() {
    crate::link::link_intrinsics();
}

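// Per-thread slot recording the most recent JS exception as
// `[is_set, heap_index]`: the JS glue stores a caught exception with
// `__wbindgen_exn_store`, and `take_last_exception` drains the slot into a
// `Result`.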
#[cfg_attr(target_feature = "atomics", thread_local)]
static GLOBAL_EXNDATA: ThreadLocalWrapper<Cell<[u32; 2]>> = ThreadLocalWrapper(Cell::new([0; 2]));

#[no_mangle]
pub unsafe extern "C" fn __wbindgen_exn_store(idx: u32) {
    debug_assert_eq!(GLOBAL_EXNDATA.0.get()[0], 0);
    GLOBAL_EXNDATA.0.set([1, idx]);
}

pub fn take_last_exception() -> Result<(), super::JsValue> {
    let ret = if GLOBAL_EXNDATA.0.get()[0] == 1 {
        Err(super::JsValue::_new(GLOBAL_EXNDATA.0.get()[1]))
    } else {
        Ok(())
    };
    GLOBAL_EXNDATA.0.set([0, 0]);
    ret
}

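// Normalises the return value of an exported function into
// `Result<JsValue, JsValue>`; intended for the macro-generated export shims.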
pub trait IntoJsResult {
    fn into_js_result(self) -> Result<JsValue, JsValue>;
}

impl IntoJsResult for () {
    fn into_js_result(self) -> Result<JsValue, JsValue> {
        Ok(JsValue::undefined())
    }
}

impl<T: Into<JsValue>> IntoJsResult for T {
    fn into_js_result(self) -> Result<JsValue, JsValue> {
        Ok(self.into())
    }
}

impl<T: Into<JsValue>, E: Into<JsValue>> IntoJsResult for Result<T, E> {
    fn into_js_result(self) -> Result<JsValue, JsValue> {
        match self {
            Ok(e) => Ok(e.into()),
            Err(e) => Err(e.into()),
        }
    }
}

impl<E: Into<JsValue>> IntoJsResult for Result<(), E> {
    fn into_js_result(self) -> Result<JsValue, JsValue> {
        match self {
            Ok(()) => Ok(JsValue::undefined()),
            Err(e) => Err(e.into()),
        }
    }
}

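// Return-value handling for `#[wasm_bindgen(start)]` functions: `()` is a
// no-op and an `Err` is rethrown as a JS value.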
pub trait Start {
    fn start(self);
}

impl Start for () {
    #[inline]
    fn start(self) {}
}

impl<E: Into<JsValue>> Start for Result<(), E> {
    #[inline]
    fn start(self) {
        if let Err(e) = self {
            crate::throw_val(e.into());
        }
    }
}

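// Autoref-based specialisation for `main`'s return type: the
// `&mut &mut MainWrapper<_>` impls are preferred where they apply (unit,
// `Infallible`, or an error convertible to `JsValue`), and the single
// `&mut MainWrapper<_>` impl is the fallback that formats the error with
// `{:?}` before throwing.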
pub struct MainWrapper<T>(pub Option<T>);

pub trait Main {
    fn __wasm_bindgen_main(&mut self);
}

impl Main for &mut &mut MainWrapper<()> {
    #[inline]
    fn __wasm_bindgen_main(&mut self) {}
}

impl Main for &mut &mut MainWrapper<Infallible> {
    #[inline]
    fn __wasm_bindgen_main(&mut self) {}
}

impl<E: Into<JsValue>> Main for &mut &mut MainWrapper<Result<(), E>> {
    #[inline]
    fn __wasm_bindgen_main(&mut self) {
        if let Err(e) = self.0.take().unwrap() {
            crate::throw_val(e.into());
        }
    }
}

impl<E: core::fmt::Debug> Main for &mut MainWrapper<Result<(), E>> {
    #[inline]
    fn __wasm_bindgen_main(&mut self) {
        if let Err(e) = self.0.take().unwrap() {
            crate::throw_str(&alloc::format!("{:?}", e));
        }
    }
}

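// `const` helpers for concatenating byte slices at compile time, presumably
// used when generated statics assemble descriptor data: `flat_len` computes
// the total length and `flat_byte_slices` copies the slices into one array.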
pub const fn flat_len<T, const SIZE: usize>(slices: [&[T]; SIZE]) -> usize {
    let mut len = 0;
    let mut i = 0;
    while i < slices.len() {
        len += slices[i].len();
        i += 1;
    }
    len
}

pub const fn flat_byte_slices<const RESULT_LEN: usize, const SIZE: usize>(
    slices: [&[u8]; SIZE],
) -> [u8; RESULT_LEN] {
    let mut result = [0; RESULT_LEN];

    let mut slice_index = 0;
    let mut result_offset = 0;

    while slice_index < slices.len() {
        let mut i = 0;
        let slice = slices[slice_index];
        while i < slice.len() {
            result[result_offset] = slice[i];
            i += 1;
            result_offset += 1;
        }
        slice_index += 1;
    }

    result
}

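// Encodes a `u32` as exactly five LEB128-style bytes: the continuation bit is
// set on the first four bytes regardless of the value, so the encoded length
// never varies.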
pub const fn encode_u32_to_fixed_len_bytes(value: u32) -> [u8; 5] {
    let mut result: [u8; 5] = [0; 5];
    let mut i = 0;
    while i < 4 {
        result[i] = ((value >> (7 * i)) | 0x80) as u8;
        i += 1;
    }
    result[4] = (value >> (7 * 4)) as u8;
    result
}