1use std::{
2 cell::RefCell,
3 iter::FilterMap,
4 marker::PhantomData,
5 mem,
6 ops::Index,
7 sync::atomic::{AtomicUsize, Ordering},
8 vec::IntoIter,
9};
10
11use crate::{Lock, Lockable, ReadSignal, SendSync, Sendable, Shared, SharedSignal, Signal};
12
/// Marker trait implemented for every (sized) type, so the arena can store
/// heterogeneous allocations behind a single `*mut dyn Item` pointer type.
trait Item {}
impl<T> Item for T {}
15
/// Arena that owns heap allocations tied to a scope's lifetime `'a`.
///
/// Values of arbitrary types are leaked into raw `Box` pointers on
/// allocation and freed together (in reverse insertion order) by
/// `dispose` or when the arena itself is dropped.
#[derive(Default, Debug)]
struct ScopeArena<'a> {
    // Raw pointers produced by `Box::into_raw`; `RefCell` provides interior
    // mutability so allocation works through a shared `&self`.
    items: RefCell<Vec<*mut (dyn Item + 'a)>>,
}
20
// SAFETY(review): this asserts `ScopeArena` may be shared across threads even
// though it contains a `RefCell` (which is `!Sync`). Presumably the
// "multithread" configuration guarantees external synchronization around all
// arena access — verify against the callers before relying on this.
#[cfg(feature = "multithread")]
unsafe impl<'a> Sync for ScopeArena<'a> {}
24
impl<'a> ScopeArena<'a> {
    /// Allocates `item` in the arena and returns a mutable reference valid
    /// for the arena lifetime `'a`.
    ///
    /// This is the safe entry point: `T: 'static` means the value cannot
    /// borrow other arena data, so drop order during disposal cannot dangle.
    pub fn alloc_static<T: Sendable + 'static>(&self, item: T) -> &'a mut T {
        let item = Box::into_raw(Box::new(item));
        self.items.borrow_mut().push(item);
        // SAFETY: the pointer was just produced by `Box::into_raw` and is only
        // freed by `dispose`/`Drop`; the caller must stop using the reference
        // before then (enforced by the scope machinery around this arena).
        unsafe { &mut *item }
    }

    /// Allocates `item`, which may itself borrow data of lifetime `'a`.
    ///
    /// # Safety
    /// The caller must guarantee the returned reference (and anything `item`
    /// borrows) is not used after the arena is disposed. Items are dropped in
    /// reverse insertion order, so later allocations may safely borrow
    /// earlier ones, but not vice versa.
    pub unsafe fn alloc<T: Sendable + 'a>(&self, item: T) -> &'a mut T {
        let item = Box::into_raw(Box::new(item));
        self.items.borrow_mut().push(item);
        &mut *item
    }

    /// Drops every allocated item and empties the arena.
    ///
    /// # Safety
    /// All references previously handed out by `alloc`/`alloc_static` must no
    /// longer be in use.
    pub unsafe fn dispose(&self) {
        let mut items = self.items.borrow_mut();
        Self::dispose_inner(&mut items);
    }

    /// Frees items in reverse insertion order, then clears the list so a
    /// subsequent call (e.g. from `Drop` after an explicit `dispose`) is a
    /// harmless no-op rather than a double free.
    unsafe fn dispose_inner(items: &mut Vec<*mut (dyn Item + 'a)>) {
        for &item in items.iter().rev() {
            // SAFETY: each pointer came from `Box::into_raw` and has not been
            // freed yet — the list is cleared immediately after this loop.
            unsafe { Box::from_raw(item) };
        }
        items.clear();
    }
}
59
impl<'a> Drop for ScopeArena<'a> {
    /// Frees any remaining allocations. If `dispose` already ran, the item
    /// list is empty and this does nothing.
    fn drop(&mut self) {
        // `get_mut` avoids the `RefCell` runtime borrow check — we have
        // exclusive access in `drop`.
        let items = self.items.get_mut();
        unsafe { Self::dispose_inner(items) };
    }
}
66
/// A sparse vector: items get stable `usize` indices, removal leaves a
/// vacant slot (`None`), and vacant slots are recycled by later inserts via
/// a free list.
#[derive(Clone, Debug)]
struct Sparse<T> {
    // Backing storage; `None` marks a vacant slot.
    items: Vec<Option<T>>,
    // Indices of vacant slots available for reuse by `insert`.
    free: Vec<usize>,
}

impl<T> Sparse<T> {
    /// Creates an empty `Sparse` without allocating.
    pub const fn new() -> Self {
        Self {
            items: Vec::new(),
            free: Vec::new(),
        }
    }

    /// Returns a reference to the item at `index`, or `None` if the slot is
    /// vacant or out of range.
    pub fn get(&self, index: usize) -> Option<&T> {
        self.items.get(index)?.as_ref()
    }

    /// Inserts `item`, reusing a vacant slot when one exists, and returns its
    /// index. The index stays valid until the item is `remove`d.
    pub fn insert(&mut self, item: T) -> usize {
        if let Some(index) = self.free.pop() {
            self.items[index] = Some(item);
            index
        } else {
            let index = self.items.len();
            self.items.push(Some(item));
            index
        }
    }

    /// Removes and returns the item at `index`; returns `None` if the slot
    /// is already vacant or out of range (previously this panicked on an
    /// out-of-range index).
    pub fn remove(&mut self, index: usize) -> Option<T> {
        let item = self.items.get_mut(index)?.take();
        // Only recycle the slot when something was actually removed.
        // Pushing unconditionally (the previous behavior) could place the
        // same index on the free list twice, making a later `insert`
        // silently overwrite a live item.
        if item.is_some() {
            self.free.push(index);
        }
        item
    }

    /// Iterates over occupied slots in index order.
    pub fn iter(&self) -> impl Iterator<Item = &T> {
        self.items.iter().filter_map(|item| item.as_ref())
    }
}
106
impl<T> Default for Sparse<T> {
    /// Equivalent to [`Sparse::new`]: an empty map with no allocations.
    fn default() -> Self {
        Self::new()
    }
}
112
impl<T> Index<usize> for Sparse<T> {
    type Output = T;

    /// Panics if `index` is out of range or the slot is vacant; use
    /// [`Sparse::get`] for a non-panicking lookup.
    fn index(&self, index: usize) -> &Self::Output {
        self.items[index].as_ref().unwrap()
    }
}
120
impl<T> IntoIterator for Sparse<T> {
    type Item = T;
    // `fn` pointer (not a closure) so the adaptor type is nameable here.
    type IntoIter = FilterMap<IntoIter<Option<T>>, fn(Option<T>) -> Option<T>>;

    /// Consumes the map, yielding only the occupied slots in index order.
    fn into_iter(self) -> Self::IntoIter {
        self.items.into_iter().filter_map(|item| item)
    }
}
129
/// Backing storage for a reactive scope: its arena, its parent link, and its
/// heap-allocated children. Always lives behind a stable heap address
/// (`Box::into_raw`) so `&'a RawScope<'a>` references stay valid.
#[derive(Debug)]
struct RawScope<'a> {
    arena: ScopeArena<'a>,

    #[allow(dead_code)]
    parent: Option<&'a RawScope<'a>>,

    // Non-zero while someone holds a drop lock; a locked scope (or one with a
    // locked descendant) must not be disposed.
    drop_lock: AtomicUsize,

    // Owned child scopes, keyed by the index `Sparse::insert` returned so a
    // `ScopeDisposer` can remove exactly its own child later.
    children: RefCell<Sparse<*mut RawScope<'a>>>,

    // Makes `'a` invariant so scopes cannot be coerced to shorter lifetimes.
    marker: PhantomData<&'a mut &'a ()>,
}
149
impl<'a> RawScope<'a> {
    /// Creates a root scope with no parent.
    fn new() -> Self {
        Self {
            arena: ScopeArena::default(),
            parent: None,
            drop_lock: AtomicUsize::new(0),
            children: RefCell::new(Sparse::new()),
            marker: PhantomData,
        }
    }

    /// Creates a scope nested inside `parent`.
    fn child(parent: &'a RawScope<'a>) -> Self {
        Self {
            arena: ScopeArena::default(),
            parent: Some(parent),
            drop_lock: AtomicUsize::new(0),
            children: RefCell::new(Sparse::new()),
            marker: PhantomData,
        }
    }

    /// Registers a heap-allocated child and returns its slot index, which a
    /// `ScopeDisposer` later uses to remove and free exactly this child.
    fn push_child(&self, child: *mut RawScope<'a>) -> usize {
        let mut children = self.children.borrow_mut();
        children.insert(child)
    }

    /// Returns `true` if this scope or any descendant holds a drop lock.
    fn is_drop_locked(&self) -> bool {
        if self.drop_lock.load(Ordering::Acquire) > 0 {
            return true;
        }

        self.is_child_scopes_drop_locked()
    }

    /// Returns `true` if the child at `index` exists and is drop-locked
    /// (recursively). A missing child is treated as unlocked.
    fn is_child_scope_drop_locked(&self, index: usize) -> bool {
        let children = self.children.borrow();
        if let Some(&child) = children.get(index) {
            // SAFETY(review): child pointers are owned by this scope and are
            // only freed via `dispose`, which removes them from `children`
            // first — so a pointer obtained here should still be live.
            let child = unsafe { &*child };
            child.is_drop_locked()
        } else {
            false
        }
    }

    /// Returns `true` if any child scope is drop-locked (checked recursively).
    fn is_child_scopes_drop_locked(&self) -> bool {
        self.children.borrow().iter().any(|&child| {
            // SAFETY(review): same invariant as above — children are live
            // until removed from this list.
            let child = unsafe { &*child };
            child.is_drop_locked()
        })
    }

    /// Disposes all children depth-first, then this scope's own arena.
    ///
    /// # Safety
    /// No references into this scope's arena or any child scope may be used
    /// afterwards. `mem::take` empties the child list first, so the `Drop`
    /// impl (which calls this again) will not free the children twice.
    unsafe fn dispose(&self) {
        let mut children = self.children.borrow_mut();

        for child in mem::take(&mut *children).into_iter() {
            // Reclaim ownership of the child allocation and run its disposal.
            let cx = Box::from_raw(child);
            cx.dispose();
        }

        self.arena.dispose();
    }
}
212
impl<'a> Drop for RawScope<'a> {
    /// Disposes children and arena on drop. Safe to run after an explicit
    /// `dispose`: both the child list and the arena are emptied by disposal,
    /// so the second pass is a no-op.
    fn drop(&mut self) {
        unsafe { self.dispose() };
    }
}
218
/// A scope whose bound equals its own lifetime — the common case.
pub type Scope<'a> = BoundedScope<'a, 'a>;

/// A reactive scope handle. `'a` is the scope's own lifetime; `'b: 'a` is an
/// outer bound that lets child closures reference data from the parent scope.
/// Cheap to copy: it is just a shared reference into the heap-owned
/// [`RawScope`].
#[derive(Clone, Copy, Debug)]
pub struct BoundedScope<'a, 'b: 'a> {
    raw: &'a RawScope<'a>,
    marker: PhantomData<&'b ()>,
}

// SAFETY(review): `RawScope` contains `RefCell`s and raw pointers, so these
// impls assert thread safety the compiler cannot see. Presumably upheld by
// the same external synchronization as `ScopeArena`'s `Sync` impl — verify.
unsafe impl<'a, 'b> Send for BoundedScope<'a, 'b> {}
unsafe impl<'a, 'b> Sync for BoundedScope<'a, 'b> {}
229
impl<'a> Scope<'a> {
    /// Creates a root scope, runs `f` inside it (untracked, so signal reads
    /// in `f` do not register dependencies), and returns a disposer that
    /// owns the scope's heap allocation.
    #[must_use = "not calling `dispose` will leak memory"]
    pub fn new(f: impl FnOnce(Scope<'a>) + 'a) -> ScopeDisposer<'a> {
        let raw = Box::into_raw(Box::new(RawScope::new()));
        let scope = Scope {
            raw: unsafe { &*raw },
            marker: PhantomData,
        };
        super::effect::untrack(|| f(scope));
        ScopeDisposer::root(raw)
    }

    /// Increments the drop lock, preventing disposal of this scope (and its
    /// ancestors' checks from succeeding) until released.
    pub(crate) fn drop_lock(&self) {
        self.raw.drop_lock.fetch_add(1, Ordering::AcqRel);
    }

    /// Releases one drop lock previously taken with [`Self::drop_lock`].
    pub(crate) fn release_drop_lock(&self) {
        self.raw.drop_lock.fetch_sub(1, Ordering::AcqRel);
    }

    /// Runs `f` in a fresh root scope and disposes it immediately afterwards.
    pub fn immediate(f: impl FnOnce(Scope<'a>) + 'a) {
        let disposer = Self::new(f);

        // SAFETY(review): assumes nothing keeps references into the scope
        // after `f` returns; no drop-lock check is performed here.
        unsafe { disposer.dispose() };
    }

    /// Creates a child scope, runs `f` inside it, and returns a disposer that
    /// can later remove and free just that child.
    pub fn child(self, f: impl for<'b> FnOnce(BoundedScope<'b, 'a>)) -> ScopeDisposer<'a> {
        let raw = Box::into_raw(Box::new(RawScope::child(self.raw)));
        let index = self.raw.push_child(raw);
        let scope = Scope {
            raw: unsafe { &*raw },
            marker: PhantomData,
        };
        f(scope);
        ScopeDisposer::child(self.raw, index)
    }

    /// Allocates `item` in this scope's arena; freed when the scope is
    /// disposed.
    pub fn alloc<T: Sendable + 'static>(&self, item: T) -> &'a T {
        self.raw.arena.alloc_static(item)
    }

    /// Like [`Self::alloc`] but allows `item` to borrow scope data.
    ///
    /// # Safety
    /// See [`ScopeArena::alloc`]: the value must not dangle during disposal.
    pub unsafe fn alloc_unsafe<T: Sendable + 'a>(self, item: T) -> &'a T {
        self.raw.arena.alloc(item)
    }

    /// Mutable variant of [`Self::alloc`].
    pub fn alloc_mut<T: Sendable + 'static>(&self, item: T) -> &'a mut T {
        self.raw.arena.alloc_static(item)
    }

    /// Mutable variant of [`Self::alloc_unsafe`].
    ///
    /// # Safety
    /// See [`ScopeArena::alloc`].
    pub unsafe fn alloc_mut_unsafe<T: Sendable + 'a>(self, item: T) -> &'a mut T {
        self.raw.arena.alloc(item)
    }

    /// Creates a signal owned by this scope.
    pub fn signal<T: SendSync + 'static>(self, value: T) -> &'a Signal<T> {
        self.alloc(Signal::new(value))
    }

    /// Runs `f` without tracking signal reads as dependencies.
    pub fn untrack<T>(self, f: impl FnOnce() -> T) -> T {
        super::effect::untrack(f)
    }

    /// Registers a reactive effect in this scope; see the effect module for
    /// scheduling semantics.
    #[track_caller]
    pub fn effect(self, f: impl FnMut() + Sendable + 'a) {
        super::effect::create_effect(self, f);
    }

    /// Registers an effect that gets a fresh child scope on every run; the
    /// previous run's scope is disposed first (unless drop-locked, in which
    /// case it is intentionally leaked rather than freed while in use).
    #[track_caller]
    pub fn effect_scoped(self, mut f: impl for<'b> FnMut(BoundedScope<'b, 'a>) + Sendable + 'a) {
        let mut disposer = None::<ScopeDisposer<'a>>;
        self.effect(move || {
            if let Some(disposer) = disposer.take() {
                if !disposer.is_drop_locked() {
                    unsafe { disposer.dispose() };
                } else {
                    tracing::trace!("scope is drop locked, leaking disposer");
                }
            }

            disposer = Some(self.child(|cx| {
                f(cx);
            }));
        });
    }

    /// Creates a read-only signal that re-computes `f` whenever the signals
    /// `f` reads change.
    #[track_caller]
    pub fn memo<T: SendSync + 'static>(
        self,
        mut f: impl FnMut() -> T + Sendable + 'a,
    ) -> &'a ReadSignal<T> {
        // Lazily created on the effect's first run so the initial value comes
        // from `f` itself.
        let signal = Shared::new(Lock::new(None::<&'a Signal<T>>));

        self.effect({
            let signal = signal.clone();
            move || {
                let value = f();
                // NOTE(review): two separate `lock_mut` calls per branch;
                // each temporary guard drops at the end of its statement, so
                // this cannot self-deadlock, but one guard would suffice.
                if signal.lock_mut().is_some() {
                    signal.lock_mut().unwrap().set(value);
                } else {
                    *signal.lock_mut() = Some(self.signal(value));
                }
            }
        });

        // Relies on `effect` running `f` once synchronously during creation
        // so the signal is populated here — TODO confirm with effect module.
        let signal = signal.lock_mut().unwrap();
        signal
    }

    /// Two-way binds two signals: whichever one changed since the last run
    /// propagates its value to the other (changes to `a` win ties).
    #[track_caller]
    pub fn bind<T: Clone + PartialEq + SendSync + 'static>(
        self,
        signal_a: &'a Signal<T>,
        signal_b: &'a Signal<T>,
    ) {
        // Last value seen, used to detect which side changed.
        let prev = self.alloc(Lock::new(signal_a.cloned_untracked()));

        self.effect(move || {
            let a = signal_a.cloned();
            let b = signal_b.cloned();
            let mut prev = prev.lock_mut();

            if *prev != a {
                *prev = a.clone();
                signal_b.set(a);
            } else if *prev != b {
                *prev = b.clone();
                signal_a.set(b);
            }
        });
    }

    /// Creates a shared signal driven by `f`, which runs in a fresh child
    /// scope on each re-run (see [`Self::effect_scoped`]).
    #[track_caller]
    pub fn dynamic<T: SendSync + 'static>(
        self,
        mut f: impl FnMut(BoundedScope<'_, 'a>) -> T + Sendable + 'a,
    ) -> SharedSignal<T> {
        let signal = self.alloc(Lock::new(None::<SharedSignal<T>>));

        self.effect_scoped(move |cx| {
            let value = f(cx);

            if signal.lock_mut().is_some() {
                signal.lock_mut().as_ref().unwrap().set(value);
            } else {
                *signal.lock_mut() = Some(SharedSignal::new(value));
            }
        });

        // Relies on the effect running synchronously once during creation —
        // TODO confirm with effect module.
        signal.lock_mut().as_ref().unwrap().clone()
    }
}
442
/// How a disposer locates the scope it owns.
#[derive(Debug)]
enum ScopeDisposerInner<'a> {
    /// A root scope: the disposer owns the raw heap pointer directly.
    Root {
        raw: *mut RawScope<'a>,
    },
    /// A child scope: located through the parent's child list by slot index.
    Child {
        parent: &'a RawScope<'a>,
        index: usize,
    },
}

// SAFETY(review): asserts the raw pointer / parent reference may move across
// threads; presumably sound because the disposer has exclusive ownership of
// the scope it refers to — verify against usage.
unsafe impl<'a> Send for ScopeDisposerInner<'a> {}

/// Owns a scope's allocation; call [`ScopeDisposer::dispose`] to free it.
/// Dropping the disposer without disposing leaks the scope.
#[derive(Debug)]
pub struct ScopeDisposer<'a> {
    inner: ScopeDisposerInner<'a>,
}
461
impl<'a> ScopeDisposer<'a> {
    /// Disposer for a root scope that owns `raw` outright.
    fn root(raw: *mut RawScope<'a>) -> Self {
        Self {
            inner: ScopeDisposerInner::Root { raw },
        }
    }

    /// Disposer for the child registered at `index` in `parent`'s child list.
    fn child(parent: &'a RawScope<'a>, index: usize) -> Self {
        Self {
            inner: ScopeDisposerInner::Child { parent, index },
        }
    }

    /// Returns `true` if the scope this disposer owns is currently
    /// drop-locked and must not be disposed. Root scopes report `false`
    /// unconditionally.
    fn is_drop_locked(&self) -> bool {
        match self.inner {
            ScopeDisposerInner::Root { .. } => false,
            ScopeDisposerInner::Child { parent, index } => parent.is_child_scope_drop_locked(index),
        }
    }

    /// Frees the scope: children depth-first, then its arena allocations.
    ///
    /// # Safety
    /// No references into the scope (arena allocations, signals, child
    /// scopes) may be used afterwards. Callers should check
    /// `is_drop_locked` first if the scope may be in use.
    pub unsafe fn dispose(self) {
        match self.inner {
            ScopeDisposerInner::Root { raw } => {
                // Reclaim ownership of the root allocation; its `Drop` runs
                // `RawScope::dispose` a second time harmlessly (lists are
                // already emptied).
                let cx = Box::from_raw(raw);
                cx.dispose();
            }
            ScopeDisposerInner::Child { parent, index } => {
                // Detach from the parent first so the parent cannot free the
                // same child again during its own disposal.
                let mut children = parent.children.borrow_mut();
                let child = children.remove(index).unwrap();
                let cx = Box::from_raw(child);
                cx.dispose();
            }
        }
    }
}
503
#[cfg(test)]
mod tests {
    use super::*;

    /// An effect re-runs when a signal it read is written, and sees the new
    /// value.
    #[test]
    fn test_signal() {
        Scope::immediate(|cx| {
            let signal = cx.signal(0);

            // Scope-allocated cell the effect writes into, so the test can
            // observe that the effect ran.
            let cell = cx.alloc(Lock::new(0));
            cx.effect(move || {
                *cell.lock_mut() = *signal.get();
            });
            signal.set(1);

            assert_eq!(*cell.lock_mut(), 1);
        });
    }

    /// A memo recomputes from its source signal after a write.
    #[test]
    fn test_memo() {
        Scope::immediate(|cx| {
            let signal = cx.signal(0);

            let memo = cx.memo(|| *signal.get() + 1);
            signal.set(1);

            assert_eq!(*memo, 2);
        });
    }
}