// incremental/incr.rs

1use std::cell::{Cell, RefCell};
2use std::fmt;
3use std::hash::Hash;
4use std::rc::{Rc, Weak};
5
6use super::kind::{self, Kind};
7use super::node::{ErasedNode, Incremental, Input, Node, NodeId};
8use super::scope::{BindScope, Scope};
9use crate::boxes::{new_unsized, SmallBox};
10use crate::incrsan::NotObserver;
11use crate::node_update::OnUpdateHandler;
12use crate::{Cutoff, NodeRef, NodeUpdate, Observer, Value, ValueInternal, WeakState};
13
/// A handle to one node in the incremental computation graph, producing
/// values of type `T`.
///
/// Cloning is cheap: it only copies the shared handle to the underlying
/// node (see the `Clone` impl below).
#[derive(Debug)]
#[must_use = "Incr<T> must be observed (.observe()) to be part of a computation."]
pub struct Incr<T> {
    // Shared, reference-counted handle to the underlying graph node.
    pub(crate) node: Input<T>,
}
19
20impl<T> Clone for Incr<T> {
21    fn clone(&self) -> Self {
22        Self {
23            node: self.node.clone(),
24        }
25    }
26}
27
28impl<T> From<Input<T>> for Incr<T> {
29    fn from(node: Input<T>) -> Self {
30        Self { node }
31    }
32}
33
34impl<T> PartialEq for Incr<T> {
35    fn eq(&self, other: &Self) -> bool {
36        self.ptr_eq(other)
37    }
38}
39
// Node-identity equality (see `PartialEq` above) is reflexive, so `Eq` holds.
impl<T> Eq for Incr<T> {}
41
42impl<T> Hash for Incr<T> {
43    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
44        self.node.id().hash(state)
45    }
46}
47
48impl<T> Incr<T> {
49    /// Allows an extra T2, because you might be comparing two incrs inside a map2 constructor.
50    pub(crate) fn ptr_eq<T2>(&self, other: &Incr<T2>) -> bool {
51        crate::rc_thin_ptr_eq_t2(&self.node, &other.node)
52    }
53    pub fn weak(&self) -> WeakIncr<T> {
54        WeakIncr(Rc::downgrade(&self.node))
55    }
56    pub fn set_graphviz_user_data(&self, data: impl fmt::Debug + NotObserver + 'static) {
57        self.node.set_graphviz_user_data(Box::new(data))
58    }
59    pub fn with_graphviz_user_data(self, data: impl fmt::Debug + NotObserver + 'static) -> Self {
60        self.node.set_graphviz_user_data(Box::new(data));
61        self
62    }
63    pub fn state(&self) -> WeakState {
64        self.node.state().public_weak()
65    }
66}
67
/// Type for use in weak hash maps of incrementals
///
/// Holds only a `Weak` reference to the node, so it does not keep the
/// computation alive; use [WeakIncr::upgrade] to get a strong [Incr] back.
#[derive(Debug)]
pub struct WeakIncr<T>(pub(crate) Weak<dyn Incremental<T>>);
71impl<T> WeakIncr<T> {
72    pub fn ptr_eq(&self, other: &Self) -> bool {
73        self.0.ptr_eq(&other.0)
74    }
75    pub fn upgrade(&self) -> Option<Incr<T>> {
76        self.0.upgrade().map(Incr::from)
77    }
78    pub fn strong_count(&self) -> usize {
79        self.0.strong_count()
80    }
81    pub fn weak_count(&self) -> usize {
82        self.0.weak_count()
83    }
84}
85
86impl<T> Clone for WeakIncr<T> {
87    fn clone(&self) -> Self {
88        Self(self.0.clone())
89    }
90}
91
impl<T: Value> Incr<T> {
    /// A convenience function for taking a function of type `fn(Incr<T>) -> Incr<R>` and
    /// applying it to self. This enables you to put your own functions
    /// into the middle of a chain of method calls on Incr.
    #[inline]
    pub fn pipe<R>(&self, f: impl FnOnce(Incr<T>) -> Incr<R>) -> Incr<R> {
        // clones are cheap.
        f(self.clone())
    }

    /// Like [Incr::pipe], forwarding one extra argument to `f`.
    pub fn pipe1<R, A1>(&self, f: impl FnOnce(Incr<T>, A1) -> Incr<R>, arg1: A1) -> Incr<R> {
        f(self.clone(), arg1)
    }

    /// Like [Incr::pipe], forwarding two extra arguments to `f`.
    pub fn pipe2<R, A1, A2>(
        &self,
        f: impl FnOnce(Incr<T>, A1, A2) -> Incr<R>,
        arg1: A1,
        arg2: A2,
    ) -> Incr<R> {
        f(self.clone(), arg1, arg2)
    }

    /// A simple variation on [Incr::map] that tells you how many
    /// times the incremental has recomputed before this time.
    pub fn enumerate<R, F>(&self, mut f: F) -> Incr<R>
    where
        R: Value,
        F: FnMut(usize, &T) -> R + 'static + NotObserver,
    {
        // The counter lives in the closure and is bumped after each call,
        // so `f` sees the number of *previous* recomputations.
        let mut counter = 0;
        self.map(move |x| {
            let v = f(counter, x);
            counter += 1;
            v
        })
    }

    /// Turn two incrementals into a tuple incremental.
    /// Aka `both` in OCaml. This is named `zip` to match [Option::zip] and [Iterator::zip].
    pub fn zip<T2: Value>(&self, other: &Incr<T2>) -> Incr<(T, T2)> {
        // Constant-fold: if both inputs are constants, produce a constant
        // tuple directly instead of allocating a map2 node.
        if let Some(a) = self.node.constant() {
            if let Some(b) = other.node.constant() {
                return self.state().constant((a.clone(), b.clone()));
            }
        }
        self.map2(other, |a, b| (a.clone(), b.clone()))
    }

    /// A variant of map whose closure borrows its output out of the input
    /// value (e.g. projecting a field) rather than producing a new value.
    pub fn map_ref<F, R: Value>(&self, f: F) -> Incr<R>
    where
        F: for<'a> Fn(&'a T) -> &'a R + 'static + NotObserver,
    {
        let state = self.node.state();
        let node = Node::create_rc::<R>(
            state.weak(),
            state.current_scope(),
            Kind::MapRef(kind::MapRefNode {
                input: self.node.packed(),
                // Starts flagged as changed, so the first stabilisation
                // produces a value.
                did_change: true.into(),
                // Type-erased mapper: downcast the erased input back to T and
                // re-erase the borrowed R.
                mapper: Box::new(move |x: &dyn ValueInternal| {
                    let x = x.as_any().downcast_ref::<T>().unwrap();
                    f(x) as &dyn ValueInternal
                }),
            }),
        );
        Incr { node }
    }

    /// A version of map that gives you a (weak) reference to the map node you're making, in the
    /// closure.
    ///
    /// Useful for advanced usage where you want to add manual dependencies with the
    /// [crate::expert] constructs.
    pub fn map_cyclic<R: Value>(
        &self,
        mut cyclic: impl FnMut(WeakIncr<R>, &T) -> R + 'static + NotObserver,
    ) -> Incr<R> {
        // `new_cyclic` hands us a Weak pointer to the node before it is fully
        // constructed, so the mapper closure can refer to the node it lives on.
        let node = Rc::<Node>::new_cyclic(move |node_weak| {
            let f = {
                let weak = WeakIncr(node_weak.clone());
                move |t: &dyn ValueInternal| -> SmallBox<dyn ValueInternal> {
                    let t = t.as_any().downcast_ref::<T>().unwrap();
                    new_unsized!(cyclic(weak.clone(), t))
                }
            };
            let mapper = kind::MapNode {
                input: self.node.packed(),
                mapper: RefCell::new(new_unsized!(f)),
            };
            let state = self.node.state();
            let mut node = Node::create::<R>(
                state.weak(),
                state.current_scope.borrow().clone(),
                Kind::Map(mapper),
            );
            // The node keeps a weak back-pointer to itself.
            node.weak_self = node_weak.clone();
            node
        });
        // Register the finished node with the scope it was created in
        // (Node::create_rc presumably does this for the other constructors —
        // here we built the Rc manually, so do it explicitly).
        node.created_in.add_node(node.clone());

        Incr { node }
    }

    /// A version of [Incr::map] that allows reuse of the old
    /// value. You can use it to produce a new value. The main
    /// use case is avoiding allocation.
    ///
    /// The return type of the closure is `(R, bool)`. The boolean
    /// value is a replacement for the [Cutoff] system, because
    /// the [Cutoff] functions require access to an old value and
    /// a new value. With [Incr::map_with_old], you must figure out yourself
    /// (without relying on PartialEq, for example) whether the
    /// incremental node should propagate its changes.
    ///
    pub fn map_with_old<R, F>(&self, mut f: F) -> Incr<R>
    where
        R: Value,
        F: FnMut(Option<R>, &T) -> (R, bool) + 'static + NotObserver,
    {
        let state = self.node.state();
        let node = Node::create_rc::<R>(
            state.weak(),
            state.current_scope(),
            Kind::MapWithOld(kind::MapWithOld {
                input: self.node.packed(),
                mapper: RefCell::new(new_unsized!(
                    move |opt_r: Option<SmallBox<dyn ValueInternal>>, x: &dyn ValueInternal| {
                        // Recover the typed previous value (if any) and the
                        // typed input from their erased forms.
                        let opt_r = opt_r.and_then(|x: SmallBox<dyn ValueInternal>| {
                            crate::boxes::downcast_inner::<R>(x)
                        });
                        let x = x.as_any().downcast_ref::<T>().unwrap();
                        let (r, b) = f(opt_r, x);
                        // `b` is the caller's "did change" flag described above.
                        (new_unsized!(r), b)
                    },
                )),
            }),
        );
        Incr { node }
    }

    /// A version of bind that includes a copy of the [crate::IncrState] (as [crate::WeakState])
    /// to help you construct new incrementals within the bind.
    pub fn binds<F, R>(&self, mut f: F) -> Incr<R>
    where
        R: Value,
        F: FnMut(&WeakState, &T) -> Incr<R> + 'static + NotObserver,
    {
        let cloned = self.node.state().public_weak();
        self.bind(move |value: &T| f(&cloned, value))
    }

    /// The dynamism primitive: `f` maps the current value of `self` to a new
    /// `Incr<R>`, and the returned node tracks whichever incremental `f`
    /// produced most recently. Internally this builds a `BindNode` plus two
    /// graph nodes: `lhs_change` (watches `self`) and `main` (the result).
    pub fn bind<F, R>(&self, mut f: F) -> Incr<R>
    where
        R: Value,
        F: FnMut(&T) -> Incr<R> + 'static + NotObserver,
    {
        let state = self.node.state();
        // `new_cyclic` lets the rhs scope hold a weak pointer back to the
        // BindNode being constructed.
        let bind = Rc::new_cyclic(|weak| kind::BindNode {
            lhs: self.node.packed(),
            mapper: RefCell::new(new_unsized!(
                move |any_ref: &dyn ValueInternal| -> NodeRef {
                    let downcast = any_ref
                        .as_any()
                        .downcast_ref::<T>()
                        .expect("Type mismatch in bind function");
                    f(downcast).node.packed()
                },
            )),
            rhs: RefCell::new(None),
            // Nodes created while `f` runs are attributed to this bind's scope.
            rhs_scope: Scope::Bind(weak.clone() as Weak<dyn BindScope>).into(),
            all_nodes_created_on_rhs: RefCell::new(vec![]),
            // Placeholder back-pointers; filled in below once the two nodes exist.
            lhs_change: RefCell::new(Weak::<Node>::new()),
            id_lhs_change: Cell::new(NodeId(0)),
            main: RefCell::new(Weak::<Node>::new()),
        });
        let lhs_change = Node::create_rc::<()>(
            state.weak(),
            state.current_scope(),
            Kind::BindLhsChange { bind: bind.clone() },
        );
        let main = Node::create_rc::<R>(
            state.weak(),
            state.current_scope(),
            Kind::BindMain {
                bind: bind.clone(),
                lhs_change: lhs_change.packed(),
            },
        );
        {
            // Wire the BindNode's weak back-pointers now that both nodes exist.
            let mut bind_lhs_change = bind.lhs_change.borrow_mut();
            let mut bind_main = bind.main.borrow_mut();
            *bind_lhs_change = lhs_change.weak();
            *bind_main = main.weak();
            bind.id_lhs_change.set(lhs_change.id);
        }

        /* We set [lhs_change] to never cutoff so that whenever [lhs] changes, [main] is
        recomputed.  This is necessary to handle cases where [f] returns an existing stable
        node, in which case the [lhs_change] would be the only thing causing [main] to be
        stale. */
        Incremental::<()>::set_cutoff(&*lhs_change, Cutoff::Never);
        Incr { node: main }
    }

    /// Creates an observer for this incremental.
    ///
    /// Observers are the primary way to get data out of the computation.
    /// Their creation and lifetime inform Incremental which parts of the
    /// computation graph are necessary, such that if you create many
    /// variables and computations based on them, but only hook up some of
    /// that to an observer, only the parts transitively necessary to
    /// supply the observer with values are queued to be recomputed.
    ///
    /// That means, without an observer, `var.set(new_value)` does essentially
    /// nothing, even if you have created incrementals like
    /// `var.map(...).bind(...).map(...)`. In this fashion, you can safely
    /// set up computation graphs before you need them, or refuse to dismantle
    /// them, knowing the expensive computations they contain will not
    /// grace the CPU until they're explicitly put back under the purview
    /// of an Observer.
    ///
    /// Calling this multiple times on the same node produces multiple
    /// observers. Only one is necessary to keep a part of a computation
    /// graph alive and ticking.
    pub fn observe(&self) -> Observer<T> {
        let incr = self.clone();
        let internal = incr.node.state().observe(incr);
        Observer::new(internal)
    }

    /// Sets the cutoff function that determines (if it returns true)
    /// whether to stop (cut off) propagating changes through the graph.
    /// Note that this method can be called on `Var` as well as any
    /// other [Incr].
    ///
    /// The default is [Cutoff::PartialEq]. So if your values do not change,
    /// they will cut off propagation. There is a bound on all T in
    /// `Incr<T>` used in incremental-rs, all values you pass around
    /// must be PartialEq.
    ///
    /// You can also supply your own comparison function. This will chiefly
    /// be useful for types like `Rc<T>`, not to avoid T: PartialEq (you
    /// can't avoid that) but rather to avoid comparing a large structure
    /// and simply compare the allocation's pointer value instead.
    /// In that case, you can:
    ///
    /// ```rust
    /// use std::rc::Rc;
    /// use incremental::{IncrState, Cutoff};
    /// let incr = IncrState::new();
    /// let var = incr.var(Rc::new(5));
    /// var.set_cutoff(Cutoff::Fn(Rc::ptr_eq));
    /// // but note that doing this will now cause the change below
    /// // to propagate, whereas before it would not as the two
    /// // numbers are == equal:
    /// var.set(Rc::new(5));
    /// ```
    ///
    pub fn set_cutoff(&self, cutoff: Cutoff<T>) {
        self.node.set_cutoff(cutoff);
    }

    /// A shorthand for using [Incr::set_cutoff] with a function pointer,
    /// i.e. with [Cutoff::Fn]. Most comparison functions, including
    /// closures, can be cast to a function pointer because they won't
    /// capture any values.
    ///
    /// ```rust
    /// use incremental::IncrState;
    /// let incr = IncrState::new();
    /// let var = incr.var(5);
    /// var.set_cutoff_fn(|a, b| a == b);
    /// var.set_cutoff_fn(i32::eq);
    /// var.set_cutoff_fn(PartialEq::eq);
    /// ```
    pub fn set_cutoff_fn(&self, cutoff_fn: fn(&T, &T) -> bool) {
        self.node.set_cutoff(Cutoff::Fn(cutoff_fn));
    }

    /// A shorthand for using [Incr::set_cutoff] with [Cutoff::FnBoxed] and
    /// a closure that may capture its environment and mutate its captures.
    ///
    /// ```rust
    /// use std::{cell::Cell, rc::Rc};
    /// use incremental::IncrState;
    /// let incr = IncrState::new();
    /// let var = incr.var(5);
    /// let capture = Rc::new(Cell::new(false));
    /// var.set_cutoff_fn_boxed(move |_, _| capture.get());
    /// ```
    pub fn set_cutoff_fn_boxed<F>(&self, cutoff_fn: F)
    where
        F: FnMut(&T, &T) -> bool + Clone + 'static + NotObserver,
    {
        self.node.set_cutoff(Cutoff::FnBoxed(Box::new(cutoff_fn)));
    }

    /// Writes a graphviz (dot) rendering of the graph reachable from this
    /// node to the named file. Panics if rendering/writing fails.
    pub fn save_dot_to_file(&self, named: &str) {
        super::node::save_dot_to_file(&mut core::iter::once(self.node.erased()), named).unwrap()
    }

    /// Returns an incremental with the value of `self` that additionally
    /// depends on `on` (keeping `on` alive and necessary). The cutoff is
    /// arranged (see `preserve_cutoff`) so the output's propagation mirrors
    /// whether `self` itself changed.
    pub fn depend_on<T2: Value>(&self, on: &Incr<T2>) -> Incr<T> {
        let output = self.map2(on, |a, _| a.clone());
        preserve_cutoff(self, &output);
        output
    }

    /// Registers a handler that is called with [NodeUpdate] events for this
    /// node. The handler is stamped with the current stabilisation number so
    /// it only fires for updates from now on.
    pub fn on_update(&self, f: impl FnMut(NodeUpdate<&T>) + 'static + NotObserver) {
        let state = self.node.state();
        let now = state.stabilisation_num.get();
        let handler = OnUpdateHandler::new(now, new_unsized!(f));
        self.node.add_on_update_handler(handler);
    }
}
407
408fn preserve_cutoff<T: Value, O: Value>(input: &Incr<T>, output: &Incr<O>) {
409    let input_ = input.weak();
410    let output_ = output.weak();
411    output.set_cutoff_fn_boxed(move |_, _| {
412        if let Some((i, o)) = input_.upgrade().zip(output_.upgrade()) {
413            i.node.changed_at() == o.node.changed_at()
414        } else {
415            panic!("preserve_cutoff input or output was deallocated")
416        }
417    })
418}