1use crate::{read::Readable, write_impls, ReadableRef, Signal};
2use crate::{read_impls, GlobalMemo, ReadableExt, WritableExt};
3use crate::{CopyValue, Writable};
4use std::{
5 cell::RefCell,
6 ops::Deref,
7 sync::{atomic::AtomicBool, Arc},
8};
9
10use dioxus_core::{
11 current_scope_id, spawn_isomorphic, IntoAttributeValue, IntoDynNode, ReactiveContext, ScopeId,
12 Subscribers,
13};
14use futures_util::StreamExt;
15use generational_box::{AnyStorage, BorrowResult, UnsyncStorage};
16
/// Bookkeeping shared between a memo's reactive context and its handle:
/// tracks whether the cached value is stale and owns the closure that
/// recomputes it.
struct UpdateInformation<T> {
    // Set to `true` by the reactive-context callback when a dependency
    // changes; cleared once the memo has been recomputed.
    dirty: Arc<AtomicBool>,
    // The recompute closure; runs the user function inside the memo's
    // reactive context and returns the fresh value.
    callback: RefCell<Box<dyn FnMut() -> T>>,
}
21
#[doc = include_str!("../docs/memo.md")]
#[doc(alias = "Selector")]
#[doc(alias = "UseMemo")]
#[doc(alias = "Memorize")]
pub struct Memo<T> {
    // Signal holding the most recently computed value; subscribers of the
    // memo are subscribers of this signal.
    inner: Signal<T>,
    // Shared dirty flag + recompute closure (see `UpdateInformation`).
    update: CopyValue<UpdateInformation<T>>,
}
30
impl<T> Memo<T> {
    /// Create a new memo that re-runs `f` whenever a signal read inside `f`
    /// changes, caching the result. Subscribers are only notified when the
    /// recomputed value differs (`PartialEq`) from the cached one.
    #[track_caller]
    pub fn new(f: impl FnMut() -> T + 'static) -> Self
    where
        T: PartialEq + 'static,
    {
        Self::new_with_location(f, std::panic::Location::caller())
    }

    /// Like [`Memo::new`], but with an explicit caller location (used for
    /// diagnostics) instead of relying on `#[track_caller]`.
    pub fn new_with_location(
        mut f: impl FnMut() -> T + 'static,
        location: &'static std::panic::Location<'static>,
    ) -> Self
    where
        T: PartialEq + 'static,
    {
        // Flag shared with the reactive context's callback: set when any
        // dependency changes, cleared after the memo recomputes.
        let dirty = Arc::new(AtomicBool::new(false));
        let (tx, mut rx) = futures_channel::mpsc::unbounded();

        let callback = {
            let dirty = dirty.clone();
            move || {
                dirty.store(true, std::sync::atomic::Ordering::Relaxed);
                // Wake the recompute task below. A send error only means the
                // task has already been dropped, so it is ignored.
                let _ = tx.unbounded_send(());
            }
        };
        let rc = ReactiveContext::new_with_callback(callback, current_scope_id(), location);

        // Running `f` inside the reactive context records every signal it
        // reads as a dependency of this memo.
        let mut recompute = move || rc.reset_and_run_in(&mut f);
        // Compute the initial value eagerly so the signal starts populated.
        let value = recompute();
        let recompute = RefCell::new(Box::new(recompute) as Box<dyn FnMut() -> T>);
        let update = CopyValue::new(UpdateInformation {
            dirty,
            callback: recompute,
        });
        let state: Signal<T> = Signal::new_with_caller(value, location);

        let memo = Memo {
            inner: state,
            update,
        };

        spawn_isomorphic(async move {
            while rx.next().await.is_some() {
                // Drain any queued wake-ups so a burst of dependency changes
                // results in a single recompute instead of one per change.
                while rx.try_next().is_ok() {}
                memo.recompute();
            }
        });

        memo
    }

    /// Create a global memo that is constructed lazily from `constructor` on
    /// first access.
    #[track_caller]
    pub const fn global(constructor: fn() -> T) -> GlobalMemo<T>
    where
        T: PartialEq + 'static,
    {
        GlobalMemo::new(constructor)
    }

    /// Re-run the memo's closure; if the result differs from the cached value,
    /// write it to the inner signal (notifying subscribers). Always clears the
    /// dirty flag afterwards.
    #[tracing::instrument(skip(self))]
    fn recompute(&self)
    where
        T: PartialEq + 'static,
    {
        let mut update_copy = self.update;
        let update_write = update_copy.write();
        // Peek (not read) so recomputing does not create new subscriptions.
        let peak = self.inner.peek();
        let new_value = (update_write.callback.borrow_mut())();
        if new_value != *peak {
            // Drop the peek borrow before writing to avoid a borrow conflict
            // on the inner signal.
            drop(peak);
            let mut copy = self.inner;
            copy.set(new_value);
        }
        // Clear the dirty flag only after the recompute completed.
        update_write
            .dirty
            .store(false, std::sync::atomic::Ordering::Relaxed);
    }

    /// The scope the memo was created in (the scope of the inner signal).
    pub fn origin_scope(&self) -> ScopeId
    where
        T: 'static,
    {
        self.inner.origin_scope()
    }

    /// The generational-box id of the inner signal's storage slot.
    pub fn id(&self) -> generational_box::GenerationalBoxId
    where
        T: 'static,
    {
        self.inner.id()
    }
}
157
impl<T> Readable for Memo<T>
where
    T: PartialEq,
{
    type Target = T;
    type Storage = UnsyncStorage;

    /// Read the memo's value, first recomputing it if a dependency changed
    /// since the last read, and subscribe the current reactive context (if
    /// any) to future updates.
    #[track_caller]
    fn try_read_unchecked(
        &self,
    ) -> Result<ReadableRef<'static, Self>, generational_box::BorrowError>
    where
        T: 'static,
    {
        let read = self.inner.inner.try_read_unchecked()?;

        // Atomically take the dirty flag; if it was set we must recompute
        // before handing out the value so callers never observe stale data.
        let needs_update = self
            .update
            .read()
            .dirty
            .swap(false, std::sync::atomic::Ordering::Relaxed);
        let result = if needs_update {
            // Release the read borrow first: `recompute` may write to the
            // inner signal, which would conflict with an outstanding read.
            drop(read);
            self.recompute();
            self.inner.inner.try_read_unchecked()
        } else {
            Ok(read)
        };
        // Reading a memo inside a reactive context subscribes that context to
        // the memo's subscriber list, just like reading a plain signal.
        if let Ok(read) = &result {
            if let Some(reactive_context) = ReactiveContext::current() {
                tracing::trace!("Subscribing to the reactive context {}", reactive_context);
                reactive_context.subscribe(read.subscribers.clone());
            }
        }
        result.map(|read| <UnsyncStorage as AnyStorage>::map(read, |v| &v.value))
    }

    /// Borrow the current value without subscribing and without recomputing;
    /// the returned value may be stale if a dependency changed since the last
    /// read.
    #[track_caller]
    fn try_peek_unchecked(&self) -> BorrowResult<ReadableRef<'static, Self>>
    where
        T: 'static,
    {
        self.inner.try_peek_unchecked()
    }

    /// The subscriber list of the underlying signal.
    fn subscribers(&self) -> Subscribers
    where
        T: 'static,
    {
        self.inner.subscribers()
    }
}
216
217impl<T: 'static + PartialEq> Writable for Memo<T> {
218 type WriteMetadata = <Signal<T> as Writable>::WriteMetadata;
219
220 fn try_write_unchecked(
221 &self,
222 ) -> Result<crate::WritableRef<'static, Self>, generational_box::BorrowMutError>
223 where
224 Self::Target: 'static,
225 {
226 self.inner.try_write_unchecked()
227 }
228}
229
230impl<T> IntoAttributeValue for Memo<T>
231where
232 T: Clone + IntoAttributeValue + PartialEq + 'static,
233{
234 fn into_value(self) -> dioxus_core::AttributeValue {
235 self.with(|f| f.clone().into_value())
236 }
237}
238
239impl<T> IntoDynNode for Memo<T>
240where
241 T: Clone + IntoDynNode + PartialEq + 'static,
242{
243 fn into_dyn_node(self) -> dioxus_core::DynamicNode {
244 self().into_dyn_node()
245 }
246}
247
248impl<T: 'static> PartialEq for Memo<T> {
249 fn eq(&self, other: &Self) -> bool {
250 self.inner == other.inner
251 }
252}
253
impl<T: Clone> Deref for Memo<T>
where
    T: PartialEq + 'static,
{
    // Dereferencing a memo yields a callable, so `memo()` returns a clone of
    // the current value.
    type Target = dyn Fn() -> T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: delegated to `ReadableExt::deref_impl`; presumably sound for
        // any `Readable` handle like `Memo` — the invariant lives with that
        // helper, not here. NOTE(review): confirm against `deref_impl`'s
        // documented safety contract.
        unsafe { ReadableExt::deref_impl(self) }
    }
}
264
// Generate the crate's standard read/write convenience trait impls for
// `Memo` (presumably Debug/Display-style forwarding — see the macro
// definitions in this crate for the exact set).
read_impls!(Memo<T> where T: PartialEq);
write_impls!(Memo<T> where T: PartialEq);
267
268impl<T> Clone for Memo<T> {
269 fn clone(&self) -> Self {
270 *self
271 }
272}
273
// `Memo` stores only handle types, so the handle itself is freely copyable
// regardless of whether `T` is `Copy`.
impl<T> Copy for Memo<T> {}