1#![deny(missing_docs)]
2#![deny(warnings)]
3
4extern crate bit_set;
135extern crate either;
136extern crate parking_lot;
137extern crate take_mut;
138
139use std::num::NonZeroUsize;
140use std::ops::DerefMut;
141use std::sync::atomic::{AtomicUsize, Ordering};
142use std::sync::Arc;
143
144use bit_set::BitSet;
145use either::Either;
146use parking_lot::Mutex;
147
/// A calculation that can be evaluated on demand to produce a versioned value.
///
/// Implementations cache their result and use the version number to decide
/// whether downstream nodes need to recompute.
pub trait Calc {
    /// The type of value this calculation produces.
    type Value;

    /// Records `dep` as a node id that must be marked dirty when this
    /// calculation's inputs change.
    // NOTE(review): `seen` is unused by every impl in this file; presumably it
    // guards against revisiting shared nodes in the generated funcs.rs — confirm.
    fn add_dep(&mut self, seen: &mut BitSet, dep: NonZeroUsize);

    /// Evaluates the calculation, clearing this node's entry from `dirty` as
    /// needed, and returns the value together with its version stamp.
    fn eval(&mut self, dirty: &mut BitSet) -> (NonZeroUsize, Self::Value);
}
185
/// A thread-safe monotonically increasing counter that hands out
/// `NonZeroUsize` values (node ids and version stamps).
struct Counter(AtomicUsize);

impl Counter {
    /// Creates a counter whose first `next()` call returns `first_value`.
    pub fn new(first_value: NonZeroUsize) -> Self {
        Counter(AtomicUsize::new(first_value.get()))
    }

    /// Returns the current value and advances the counter by one.
    ///
    /// `fetch_add` yields the pre-increment value, which is non-zero because
    /// the counter starts at a non-zero value and only ever increases.
    ///
    /// # Panics
    ///
    /// Panics if the counter has wrapped around to zero. (The previous
    /// implementation used `new_unchecked` here, which would have been
    /// undefined behavior on wraparound.)
    pub fn next(&self) -> NonZeroUsize {
        let next = self.0.fetch_add(1, Ordering::SeqCst);
        NonZeroUsize::new(next).expect("Counter overflowed")
    }
}
198
/// State shared by every node that belongs to the same `Graph`.
struct GraphInner {
    // Ids of nodes whose cached values may be stale and need re-evaluation.
    dirty: Mutex<BitSet>,
    // Allocator for unique node ids, starting at FIRST_ID.
    next_id: Counter,
}
203
// Version stamp used for values that can never change (consts and forced lazies).
// SAFETY: 1 is non-zero.
const CONST_VERSION: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(1) };
// First version handed out by a graph's version counter; starts above
// CONST_VERSION so that any source update outranks constant values.
// SAFETY: 2 is non-zero.
const FIRST_VERSION: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(2) };
// First node id allocated by a graph.
// SAFETY: 1 is non-zero.
const FIRST_ID: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(1) };
207
/// A node in a calculation graph, pairing a calculation with the graph state
/// it participates in.
#[derive(Clone)]
pub struct Node<C> {
    // The calculation that produces this node's value.
    calc: C,
    // Shared graph state; `None` for graph-less nodes (see `const_` and
    // `lazy`), which can never be invalidated.
    graph: Option<Arc<GraphInner>>,
}
216
217pub fn const_<T>(value: T) -> Node<Const<T>> {
219 Node {
220 calc: Const(value),
221 graph: None,
222 }
223}
224
225pub fn lazy<T, F: FnOnce() -> T>(f: F) -> Node<Lazy<T, F>> {
227 Node {
228 calc: Lazy(Either::Right(f)),
229 graph: None,
230 }
231}
232
233fn with_graph<T>(graph: &Option<Arc<GraphInner>>, f: impl FnOnce(&mut BitSet) -> T) -> T {
234 let mut dirty = graph.as_ref().map(|graph| graph.dirty.lock());
235 let dirty = dirty.as_mut().map(DerefMut::deref_mut);
236 let mut no_dirty = BitSet::new();
237 f(dirty.unwrap_or(&mut no_dirty))
238}
239
240impl<C: Calc> Node<C>
241where
242 C::Value: Clone,
243{
244 pub fn get_mut(&mut self) -> C::Value {
246 let calc = &mut self.calc;
247 with_graph(&self.graph, move |dirty| calc.eval(dirty).1)
248 }
249}
250
251pub type SharedNode<C> = Node<Arc<Mutex<C>>>;
253
254impl<C: Calc> Node<C> {
255 pub fn shared(self) -> SharedNode<C> {
257 let calc = Arc::new(Mutex::new(self.calc));
258 Node {
259 calc,
260 graph: self.graph,
261 }
262 }
263}
264
265pub type BoxNode<T> = Node<Box<Calc<Value = T> + Send>>;
267
268impl<C: Calc + Send + 'static> Node<C> {
269 pub fn boxed(self) -> BoxNode<C::Value> {
276 let calc: Box<Calc<Value = C::Value> + Send> = Box::new(self.calc);
277 Node {
278 calc,
279 graph: self.graph,
280 }
281 }
282}
283
284impl<C: Calc> SharedNode<C> {
285 pub fn get(&self) -> C::Value {
287 with_graph(&self.graph, move |dirty| self.calc.lock().eval(dirty).1)
288 }
289}
290
291impl<T: Clone> Node<Const<T>> {
292 pub fn get(&self) -> T {
294 self.calc.0.clone()
295 }
296}
297
298impl<T: Clone> Node<Source<T>> {
299 pub fn get(&self) -> T {
301 self.calc.inner.lock().value.clone()
302 }
303}
304
impl<T> Node<Source<T>> {
    /// Applies `updater` to the stored value. `updater` returns `true` if it
    /// changed the value; only then is a fresh version assigned and every
    /// dependent node marked dirty.
    pub fn update(&self, updater: impl FnOnce(&mut T) -> bool) {
        // Lock order matters: take the graph's dirty set first, then the
        // source's inner state. Source nodes always carry a graph, so the
        // unwrap cannot fail.
        let mut dirty = self.graph.as_ref().unwrap().dirty.lock();
        let mut inner = self.calc.inner.lock();
        if !updater(&mut inner.value) {
            return;
        }

        inner.version = self.calc.next_version.next();
        dirty.union_with(&inner.deps);
    }

    /// Replaces the stored value unconditionally, invalidating dependents.
    pub fn set(&self, value: T) {
        self.update(move |value_cell| {
            *value_cell = value;
            true
        })
    }
}
326
327fn alloc_id(graph: &Arc<GraphInner>) -> NonZeroUsize {
328 let id = graph.next_id.next();
329 graph.dirty.lock().insert(id.get());
330 id
331}
332
/// Mutable state behind a `Source` node.
struct SourceInner<T> {
    // Version stamp of the current value.
    version: NonZeroUsize,
    // The current value.
    value: T,
    // Ids of nodes to mark dirty whenever the value changes.
    deps: BitSet,
}
338
/// A mutable input calculation whose value is changed from outside the graph
/// via `Node::<Source<T>>::set`/`update`.
#[derive(Clone)]
pub struct Source<T> {
    // Current value, its version, and the ids of dependent nodes.
    inner: Arc<Mutex<SourceInner<T>>>,
    // Shared version allocator; a new version is taken on every real update.
    next_version: Arc<Counter>,
}
345
346impl<T: Clone> Calc for Source<T> {
347 type Value = T;
348
349 fn add_dep(&mut self, _seen: &mut BitSet, dep: NonZeroUsize) {
350 self.inner.lock().deps.insert(dep.get());
351 }
352
353 fn eval(&mut self, _dirty: &mut BitSet) -> (NonZeroUsize, T) {
354 let inner = self.inner.lock();
355 (inner.version, inner.value.clone())
356 }
357}
358
359pub struct Const<T>(T);
361
362impl<T: Clone> Calc for Const<T> {
363 type Value = T;
364
365 fn add_dep(&mut self, _seen: &mut BitSet, _dep: NonZeroUsize) {}
366
367 fn eval(&mut self, _dirty: &mut BitSet) -> (NonZeroUsize, T) {
368 (CONST_VERSION, self.0.clone())
369 }
370}
371
372pub struct Lazy<T, F>(Either<T, F>);
374
375impl<T: Clone, F: FnOnce() -> T> Calc for Lazy<T, F> {
376 type Value = T;
377
378 fn add_dep(&mut self, _seen: &mut BitSet, _dep: NonZeroUsize) {}
379
380 fn eval(&mut self, _dirty: &mut BitSet) -> (NonZeroUsize, T) {
381 take_mut::take(&mut self.0, |value_or_f| match value_or_f {
382 Either::Left(value) => Either::Left(value),
383 Either::Right(f) => Either::Left(f()),
384 });
385
386 match &self.0 {
387 Either::Left(value) => (CONST_VERSION, value.clone()),
388 Either::Right(_) => unreachable!(),
389 }
390 }
391}
392
/// A pass-through calculation that invokes a callback whenever its precedent
/// produces a value with a new version.
pub struct Inspect<C, F> {
    // Callback invoked with a reference to each newly versioned value.
    f: F,
    // Version of the last value the callback saw; 0 means "none yet".
    last_version: usize,
    // The upstream calculation being observed.
    prec: C,
}
399
400impl<C: Calc, F: FnMut(&C::Value)> Calc for Inspect<C, F> {
401 type Value = C::Value;
402
403 fn add_dep(&mut self, seen: &mut BitSet<u32>, dep: NonZeroUsize) {
404 self.prec.add_dep(seen, dep)
405 }
406
407 fn eval(&mut self, dirty: &mut BitSet<u32>) -> (NonZeroUsize, C::Value) {
408 let (version, value) = self.prec.eval(dirty);
409 if version.get() > self.last_version {
410 self.last_version = version.get();
411 (self.f)(&value);
412 }
413
414 (version, value)
415 }
416}
417
418impl<C: Calc> Node<C> {
419 pub fn inspect<F: FnMut(&C::Value)>(self, f: F) -> Node<Inspect<C, F>> {
421 Node {
422 calc: Inspect {
423 f,
424 last_version: 0,
425 prec: self.calc,
426 },
427 graph: self.graph,
428 }
429 }
430}
431
/// Shared evaluation routine for memoizing function nodes.
///
/// `id` is the node's graph id (`None` for graph-less nodes), `value_cell`
/// caches the last `(version, value)` pair, `f1` evaluates the precedent
/// calculations and `f2` combines their values into this node's value.
// NOTE(review): presumably called from the generated `map`/`zip*` calcs in
// funcs.rs — confirm against that file.
fn eval_func<A, T: Clone + PartialEq>(
    dirty: &mut BitSet,
    id: Option<NonZeroUsize>,
    value_cell: &mut Option<(NonZeroUsize, T)>,
    f1: impl FnOnce(&mut BitSet) -> (NonZeroUsize, A),
    f2: impl FnOnce(A) -> T,
) -> (NonZeroUsize, T) {
    if let Some(id) = id {
        let id = id.get();
        if dirty.contains(id) {
            // Possibly stale: clear our dirty flag and fall through to
            // re-check the precedents.
            dirty.remove(id);
        } else {
            // Not dirty, so the cache is valid. The cell is populated because
            // a node only leaves the dirty set after its first evaluation.
            let (version, value) = value_cell.as_ref().unwrap();
            return (*version, value.clone());
        }
    } else if let Some((version, value)) = value_cell.as_ref() {
        // Graph-less nodes can never be invalidated once evaluated.
        return (*version, value.clone());
    }

    let (prec_version, precs) = f1(dirty);

    if let Some((version, value)) = value_cell {
        if prec_version > *version {
            let new_value = f2(precs);

            if new_value != *value {
                *version = prec_version;
                *value = new_value.clone();
                return (prec_version, new_value);
            }
            // New value equals the old one: keep the old version so
            // downstream nodes see no change (early cut-off).
        }

        (*version, value.clone())
    } else {
        // First evaluation: compute and populate the cache.
        let value = f2(precs);
        *value_cell = Some((prec_version, value.clone()));
        (prec_version, value)
    }
}
471
/// Shared evaluation routine for in-place ("update"-style) function nodes,
/// where the cached value is mutated rather than rebuilt.
///
/// `id` is the node's graph id (`None` for graph-less nodes), `version_cell`
/// is `None` until the first evaluation, `value_cell` holds the value being
/// updated in place, `f1` evaluates the precedents and `f2` folds their
/// result into `value_cell`, returning `true` if it changed the value.
fn eval_update<A, T: Clone>(
    dirty: &mut BitSet,
    id: Option<NonZeroUsize>,
    version_cell: &mut Option<NonZeroUsize>,
    value_cell: &mut T,
    f1: impl FnOnce(&mut BitSet) -> (NonZeroUsize, A),
    f2: impl FnOnce(&mut T, A) -> bool,
) -> (NonZeroUsize, T) {
    if let Some(id) = id {
        let id = id.get();
        if dirty.contains(id) {
            // Possibly stale: clear our dirty flag and re-check precedents.
            dirty.remove(id);
        } else {
            // Not dirty, so the cache is valid; the version exists because a
            // node only leaves the dirty set after its first evaluation.
            let version = version_cell.as_ref().unwrap();
            return (*version, value_cell.clone());
        }
    } else if let Some(version) = version_cell.as_ref() {
        // Graph-less nodes can never be invalidated once evaluated.
        return (*version, value_cell.clone());
    }

    let (prec_version, precs) = f1(dirty);

    if let Some(version) = version_cell {
        if prec_version > *version {
            if f2(value_cell, precs) {
                *version = prec_version;
                return (prec_version, value_cell.clone());
            }
            // f2 reported no change: keep the old version (early cut-off).
        }

        (*version, value_cell.clone())
    } else {
        // First evaluation: f2's return value is irrelevant because there is
        // no previous value to compare against.
        f2(value_cell, precs);
        *version_cell = Some(prec_version);
        (prec_version, value_cell.clone())
    }
}
509
510impl<C: Calc> Calc for Arc<Mutex<C>> {
512 type Value = C::Value;
513
514 fn add_dep(&mut self, seen: &mut BitSet, dep: NonZeroUsize) {
515 self.lock().add_dep(seen, dep)
516 }
517
518 fn eval(&mut self, dirty: &mut BitSet) -> (NonZeroUsize, C::Value) {
519 self.lock().eval(dirty)
520 }
521}
522
523impl<T> Calc for Box<Calc<Value = T> + Send> {
525 type Value = T;
526
527 fn add_dep(&mut self, seen: &mut BitSet, dep: NonZeroUsize) {
528 (**self).add_dep(seen, dep)
529 }
530
531 fn eval(&mut self, dirty: &mut BitSet) -> (NonZeroUsize, T) {
532 (**self).eval(dirty)
533 }
534}
535
/// A calculation graph: allocates node ids and version stamps and tracks
/// which nodes are dirty. Cloning shares the same underlying graph.
#[derive(Clone)]
pub struct Graph {
    // Shared dirty set and id allocator used by all nodes of this graph.
    inner: Arc<GraphInner>,
    // Allocator for version stamps, shared with every `Source` of this graph.
    next_version: Arc<Counter>,
}
542
543impl Graph {
544 pub fn new() -> Self {
546 Graph {
547 inner: Arc::new(GraphInner {
548 dirty: Mutex::new(BitSet::new()),
549 next_id: Counter::new(FIRST_ID),
550 }),
551 next_version: Arc::new(Counter::new(FIRST_VERSION)),
552 }
553 }
554
555 pub fn source<T>(&self, value: T) -> Node<Source<T>> {
557 let version = self.next_version.next();
558
559 let inner = SourceInner {
560 deps: BitSet::new(),
561 version,
562 value,
563 };
564
565 let calc = Source {
566 inner: Arc::new(Mutex::new(inner)),
567 next_version: self.next_version.clone(),
568 };
569
570 Node {
571 calc,
572 graph: Some(self.inner.clone()),
573 }
574 }
575}
576
577include!(concat!(env!("OUT_DIR"), "/funcs.rs"));
578
#[test]
fn test_nodes_are_send_sync() {
    // Identity helper that only compiles if `value` is Send + Sync.
    fn assert_send_sync<T: Send + Sync>(value: T) -> T {
        value
    }

    let graph = assert_send_sync(Graph::new());
    let c = const_("const");
    let l = lazy(|| "lazy");
    let s = assert_send_sync(graph.source("source".to_owned()));

    // `zip3` comes from the generated funcs.rs; the combined node must also
    // be Send + Sync.
    let mut m = assert_send_sync(Node::zip3(c, l, s.clone(), |a, b, c| {
        format!("{a} {b} {c}", a = a, b = b, c = c)
    }));

    assert_eq!("const lazy source", m.get_mut());

    // A source update must propagate through the zipped node.
    s.update(|text| {
        *text += "2";
        true
    });

    assert_eq!("const lazy source2", m.get_mut());
}
603
#[test]
fn test_source() {
    // A source reflects whatever value was most recently set.
    let graph = Graph::new();
    let node = graph.source(1);

    assert_eq!(1, node.get());
    node.set(2);
    assert_eq!(2, node.get());
}
614
#[test]
fn test_const() {
    // A constant node simply returns its value.
    let node = const_("hello");
    assert_eq!("hello", node.get());
}
621
#[test]
fn test_lazy() {
    // Only the lazy node that is actually evaluated runs its closure.
    let mut evaluated = lazy(|| "hello");
    let _never_run = lazy(|| unreachable!());

    assert_eq!("hello", evaluated.get_mut());
}
629
#[test]
fn test_inspect() {
    let graph = Graph::new();
    let source = graph.source(1);
    let inspect_count = AtomicUsize::new(0);

    // Count how many times the inspect callback observes a fresh value.
    let mut map = source.clone().map(|n| n * n).inspect(|_| {
        inspect_count.fetch_add(1, Ordering::SeqCst);
    });

    // Nothing runs until the node is first evaluated.
    assert_eq!(0, inspect_count.load(Ordering::SeqCst));

    assert_eq!(1, map.get_mut());
    assert_eq!(1, inspect_count.load(Ordering::SeqCst));

    // Setting the source alone does not evaluate (evaluation is lazy)...
    source.set(2);
    assert_eq!(1, inspect_count.load(Ordering::SeqCst));

    // ...but the next evaluation sees the new value exactly once.
    assert_eq!(4, map.get_mut());
    assert_eq!(2, inspect_count.load(Ordering::SeqCst));

    // Re-setting the same value bumps the version, but the mapped value is
    // unchanged, so the callback must not fire again (early cut-off).
    source.set(2);
    assert_eq!(2, inspect_count.load(Ordering::SeqCst));

    assert_eq!(4, map.get_mut());
    assert_eq!(2, inspect_count.load(Ordering::SeqCst));
}
657
#[test]
fn test_map() {
    // Updates to a source flow through zipped and mapped nodes.
    let graph = Graph::new();
    let source = graph.source(1);
    let c = const_(2);
    let product = source.clone().zip(c, |n, c| n * c);
    let mut negated = product.map(|m| -m);

    assert_eq!(-2, negated.get_mut());

    source.set(2);

    assert_eq!(-4, negated.get_mut());
}
672
#[test]
fn test_map_cache() {
    let graph = Graph::new();
    let source = graph.source("hello");
    let c = const_::<usize>(1);
    // One counter per mapping closure, to observe how often each recomputes.
    let calc_count1 = AtomicUsize::new(0);
    let calc_count2 = AtomicUsize::new(0);
    let calc_count3 = AtomicUsize::new(0);

    let calc_counts = || {
        (
            calc_count1.load(Ordering::SeqCst),
            calc_count2.load(Ordering::SeqCst),
            calc_count3.load(Ordering::SeqCst),
        )
    };

    // map1 = length of the source string; shared so it can feed map3 twice.
    let map1 = source
        .clone()
        .map(|s| {
            calc_count1.fetch_add(1, Ordering::SeqCst);
            s.len()
        })
        .shared();

    // map2 = byte at index `c` of the source string.
    let map2 = Node::zip(source.clone(), c, |s, c| {
        calc_count2.fetch_add(1, Ordering::SeqCst);
        s.as_bytes()[c] as usize
    });

    // map3 depends on map1 twice plus map2.
    let mut map3 = Node::zip3(map1.clone(), map2, map1, |x, y, z| {
        calc_count3.fetch_add(1, Ordering::SeqCst);
        x + y + z
    });

    // Nothing has been evaluated yet.
    assert_eq!((0, 0, 0), calc_counts());

    // First evaluation runs every closure exactly once (map1 is shared, so
    // its closure runs once despite feeding map3 twice).
    assert_eq!(111, map3.get_mut());
    assert_eq!((1, 1, 1), calc_counts());

    // "jello": same length and same byte at index 1, so map1/map2 recompute
    // but produce equal values — map3's closure is cut off and does not run.
    source.set("jello");

    assert_eq!(111, map3.get_mut());
    assert_eq!((2, 2, 1), calc_counts());

    // "jollo": the byte at index 1 changes, so map3 must recompute too.
    source.set("jollo");

    assert_eq!(121, map3.get_mut());
    assert_eq!((3, 3, 2), calc_counts());
}
723
#[test]
fn test_map_lazy() {
    // A mapped node that is never read must never run its closure, even
    // while its source is read and updated.
    let graph = Graph::new();
    let source = graph.source(1);
    let _map = source.clone().map(|_| unreachable!());

    assert_eq!(1, source.get());

    source.set(2);

    assert_eq!(2, source.get());
}