1use crate::sse::fast_op_alloc::{DefaultFastOpAllocator, FastOpAllocator};
2use crate::sse::qmc_ising::IsingManager;
3use crate::sse::qmc_runner::QmcManager;
4use crate::sse::qmc_traits::*;
5use crate::sse::qmc_types::{Leg, OpSide};
6use crate::util::allocator::Factory;
7use crate::util::bondcontainer::BondContainer;
8#[cfg(feature = "serialize")]
9use serde::{Deserialize, Serialize};
10use smallvec::{smallvec, SmallVec};
11use std::cmp::{min, Reverse};
12use std::collections::BinaryHeap;
13use std::ops::{Index, IndexMut};
14
/// An op with inline storage for two variables (the common two-site bond case).
pub type FastOp = BasicOp<SmallVec<[usize; 2]>, SmallVec<[bool; 2]>>;
/// The default op manager, backed by [`FastOp`].
pub type FastOps = FastOpsTemplate<FastOp>;
/// Linked-list node wrapping a [`FastOp`].
pub type FastOpNode = FastOpNodeTemplate<FastOp>;

/// An op with inline storage for `N` variables.
#[cfg(feature = "const_generics")]
pub type FastOpN<const N: usize> = BasicOp<SmallVec<[usize; N]>, SmallVec<[bool; N]>>;
/// Op manager for ops spanning up to `N` variables inline.
#[cfg(feature = "const_generics")]
pub type FastOpsN<const N: usize> = FastOpsTemplate<FastOpN<N>>;
/// Node type for [`FastOpsN`], with `N` inline per-variable links.
#[cfg(feature = "const_generics")]
pub type FastOpNodeN<const N: usize> = FastOpNodeTemplate<FastOpN<N>, SmallVec<[Option<PRel>; N]>>;
31
/// An op container storing ops in a vector of optional nodes, each node doubly
/// linked both along imaginary time (`p`) and along each variable's op list.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub struct FastOpsTemplate<O: Op, ALLOC: FastOpAllocator = DefaultFastOpAllocator> {
    /// One slot per imaginary-time position; `None` marks an identity slot.
    pub(crate) ops: Vec<Option<FastOpNodeTemplate<O>>>,
    /// Number of non-identity ops currently stored.
    pub(crate) n: usize,
    /// First and last occupied `p` positions, if any op exists.
    pub(crate) p_ends: Option<(usize, usize)>,
    /// Per-variable first and last (position, relative-index) entries.
    pub(crate) var_ends: Vec<Option<(PRel, PRel)>>,

    /// Optional per-bond op counts (present when constructed with `nbonds`).
    bond_counters: Option<Vec<usize>>,

    /// Allocator used to recycle scratch vectors/heaps.
    alloc: ALLOC,
}
50
51impl<O: Op + Clone, ALLOC: FastOpAllocator> FastOpsTemplate<O, ALLOC> {
52 pub fn new_from_nvars_and_nbonds_and_alloc(
55 nvars: usize,
56 nbonds: Option<usize>,
57 alloc: ALLOC,
58 ) -> Self {
59 Self {
60 ops: vec![],
61 n: 0,
62 p_ends: None,
63 var_ends: vec![None; nvars],
64 bond_counters: nbonds.map(|nbonds| vec![0; nbonds]),
65 alloc,
66 }
67 }
68
69 pub fn new_from_nvars_and_nbonds(nvars: usize, nbonds: Option<usize>) -> Self {
71 Self::new_from_nvars_and_nbonds_and_alloc(nvars, nbonds, Default::default())
72 }
73
74 pub fn new_from_nvars(nvars: usize) -> Self {
76 Self::new_from_nvars_and_nbonds(nvars, None)
77 }
78
79 pub fn new_from_ops<I>(nvars: usize, ps_and_ops: I) -> Self
81 where
82 I: IntoIterator<Item = (usize, O)>,
83 {
84 let mut man = Self::new_from_nvars(nvars);
85 man.clear_and_install_ops(ps_and_ops);
86 man
87 }
88
89 fn clear_and_install_ops<I>(&mut self, ps_and_ops: I)
90 where
91 I: IntoIterator<Item = (usize, O)>,
92 {
93 let nvars = self.var_ends.len();
94 let ps_and_ops = ps_and_ops.into_iter().collect::<Vec<_>>();
95 if ps_and_ops.is_empty() {
96 return;
97 }
98 let opslen = ps_and_ops.iter().map(|(p, _)| p).max().unwrap() + 1;
99 self.ops.clear();
100 self.ops.resize_with(opslen, || None);
101 self.var_ends.clear();
102 self.var_ends.resize_with(nvars, || None);
103 self.p_ends = None;
104
105 let last_p: Option<usize> = None;
106 let mut last_vars: Vec<Option<usize>> = self.get_instance();
107 let mut last_rels: Vec<Option<usize>> = self.get_instance();
108 last_vars.resize(nvars, None);
109 last_rels.resize(nvars, None);
110
111 let (last_p, last_vars, last_rels) = ps_and_ops.into_iter().fold(
112 (last_p, last_vars, last_rels),
113 |(last_p, mut last_vars, mut last_rels), (p, op)| {
114 assert!(last_p.map(|last_p| p > last_p).unwrap_or(true));
115
116 if let Some(last_p) = last_p {
117 let last_node = self.ops[last_p].as_mut().unwrap();
118 last_node.next_p = Some(p);
119 } else {
120 self.p_ends = Some((p, p))
121 }
122
123 let previous_for_vars = op
124 .get_vars()
125 .iter()
126 .cloned()
127 .enumerate()
128 .map(|(relv, v)| {
129 let last_tup = last_vars[v].zip(last_rels[v]);
130 if let Some((last_p, last_rel)) = last_tup {
131 let node = self.ops[last_p].as_mut().unwrap();
132 node.next_for_vars[last_rel] = Some(PRel { p, relv });
133 } else {
134 let end = PRel { p, relv };
135 self.var_ends[v] = Some((end, end));
136 }
137 last_vars[v] = Some(p);
138 last_rels[v] = Some(relv);
139 last_tup.map(PRel::from)
140 })
141 .collect();
142 let n_opvars = op.get_vars().len();
143 let mut node =
144 FastOpNodeTemplate::<O>::new(op, previous_for_vars, smallvec![None; n_opvars]);
145 node.previous_p = last_p;
146 self.ops[p] = Some(node);
147 self.n += 1;
148
149 (Some(p), last_vars, last_rels)
150 },
151 );
152 if let Some((_, p_end)) = self.p_ends.as_mut() {
153 *p_end = last_p.unwrap()
154 }
155 self.var_ends
156 .iter_mut()
157 .zip(
158 last_vars
159 .iter()
160 .cloned()
161 .zip(last_rels.iter().cloned())
162 .map(|(a, b)| a.zip(b)),
163 )
164 .for_each(|(ends, last_v)| {
165 if let Some((_, v_end)) = ends {
166 let (last_p, last_relv) = last_v.unwrap();
167 v_end.p = last_p;
168 v_end.relv = last_relv;
169 }
170 });
171 self.return_instance(last_vars);
172 self.return_instance(last_rels);
173 }
174}
175
/// Per-variable links (one `Option<PRel>` per op variable) with inline room for two.
type LinkVars = SmallVec<[Option<PRel>; 2]>;
177
/// A node in the op container: the op plus doubly-linked-list pointers along
/// imaginary time (`previous_p`/`next_p`) and along each variable's op list.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub struct FastOpNodeTemplate<O: Op, LV = LinkVars>
where
    LV: Index<usize, Output = Option<PRel>> + IndexMut<usize, Output = Option<PRel>>,
{
    /// The stored op.
    pub(crate) op: O,
    /// Previous occupied position in imaginary time, if any.
    pub(crate) previous_p: Option<usize>,
    /// Next occupied position in imaginary time, if any.
    pub(crate) next_p: Option<usize>,
    /// For each of the op's variables, the previous op touching that variable.
    pub(crate) previous_for_vars: LV,
    /// For each of the op's variables, the next op touching that variable.
    pub(crate) next_for_vars: LV,
}
191
192impl<O: Op, LV> FastOpNodeTemplate<O, LV>
193where
194 LV: Index<usize, Output = Option<PRel>> + IndexMut<usize, Output = Option<PRel>>,
195{
196 fn new(op: O, previous_for_vars: LV, next_for_vars: LV) -> Self {
197 Self {
200 op,
201 previous_p: None,
202 next_p: None,
203 previous_for_vars,
204 next_for_vars,
205 }
206 }
207}
208
209impl<O: Op + Clone, LV> OpNode<O> for FastOpNodeTemplate<O, LV>
210where
211 LV: Index<usize, Output = Option<PRel>> + IndexMut<usize, Output = Option<PRel>>,
212{
213 fn get_op(&self) -> O {
214 self.op.clone()
215 }
216
217 fn get_op_ref(&self) -> &O {
218 &self.op
219 }
220
221 fn get_op_mut(&mut self) -> &mut O {
222 &mut self.op
223 }
224}
225
/// Scratch state threaded through subsection mutations: for each tracked
/// (sub)variable, the most recent op position/relative index seen below the
/// current `p`, plus an optional var<->subvar mapping.
#[derive(Debug)]
pub struct FastOpMutateArgs {
    /// Last occupied op position (any tracked variable) seen so far.
    last_p: Option<usize>,
    /// Per-subvariable: position of the last op touching that variable.
    last_vars: Vec<Option<usize>>,
    /// Per-subvariable: relative index of the variable within that last op.
    last_rels: Vec<Option<usize>>,
    /// `(var -> subvar, subvar -> var)` lookup tables; `None` means identity.
    subvar_mapping: Option<(Vec<usize>, Vec<usize>)>,
    /// Count of tracked variables whose last-op info is still missing.
    unfilled: usize,
}
240
241impl FastOpMutateArgs {
242 fn new<Fact>(nvars: usize, vars: Option<&[usize]>, fact: &mut Fact) -> Self
243 where
244 Fact: Factory<Vec<usize>> + Factory<Vec<Option<usize>>>,
245 {
246 let last_p: Option<usize> = None;
247 let mut last_vars: Vec<Option<usize>> = fact.get_instance();
248 let mut last_rels: Vec<Option<usize>> = fact.get_instance();
249 let last_size = vars.map(|vars| vars.len()).unwrap_or(nvars);
250 last_vars.resize(last_size, None);
251 last_rels.resize(last_size, None);
252
253 FastOpMutateArgs {
254 last_p,
255 last_vars,
256 last_rels,
257 subvar_mapping: vars.map(|vars| {
258 let mut vars_to_subvars: Vec<usize> = fact.get_instance();
259 let mut subvars_to_vars: Vec<usize> = fact.get_instance();
260 vars_to_subvars.resize(nvars, std::usize::MAX);
261 subvars_to_vars.extend_from_slice(vars);
262 subvars_to_vars
263 .iter()
264 .enumerate()
265 .for_each(|(i, v)| vars_to_subvars[*v] = i);
266 (vars_to_subvars, subvars_to_vars)
267 }),
268 unfilled: 0,
269 }
270 }
271}
272
273impl MutateArgs for FastOpMutateArgs {
274 type SubvarIndex = usize;
275
276 fn n_subvars(&self) -> usize {
277 self.last_vars.len()
278 }
279
280 fn subvar_to_var(&self, index: Self::SubvarIndex) -> usize {
281 match &self.subvar_mapping {
282 None => index,
283 Some((_, subvars)) => subvars[index],
284 }
285 }
286
287 fn var_to_subvar(&self, var: usize) -> Option<Self::SubvarIndex> {
288 match &self.subvar_mapping {
289 None => Some(var),
290 Some((allvars, _)) => {
291 let i = allvars[var];
292 if i == std::usize::MAX {
293 None
294 } else {
295 Some(i)
296 }
297 }
298 }
299 }
300}
301
302impl<O: Op + Clone, ALLOC: FastOpAllocator> DiagonalSubsection for FastOpsTemplate<O, ALLOC> {
303 type Args = FastOpMutateArgs;
304
    /// Apply the mutation `f` at position `p` and rewire the linked lists.
    ///
    /// `f` returns `None` to leave the slot untouched, `Some(None)` to delete
    /// the op at `p`, or `Some(Some(op))` to insert/replace. `args` must carry,
    /// for every variable the old/new op touches, the last op position strictly
    /// below `p`; it is advanced past `p` before returning.
    fn mutate_p<T, F>(&mut self, f: F, p: usize, t: T, mut args: Self::Args) -> (T, Self::Args)
    where
        F: Fn(&Self, Option<&Self::Op>, T) -> (Option<Option<Self::Op>>, T),
    {
        let op_ref = self.get_pth(p);
        let (new_op, t) = f(self, op_ref, t);

        // If a change is requested, edit the internal state.
        if let Some(new_op) = new_op {
            // All touched variables (old and new) must be tracked by args.
            debug_assert!(
                {
                    op_ref
                        .map(|op| {
                            op.get_vars()
                                .iter()
                                .all(|v| args.var_to_subvar(*v).is_some())
                        })
                        .unwrap_or(true)
                        && new_op
                            .as_ref()
                            .map(|op| {
                                op.get_vars()
                                    .iter()
                                    .all(|v| args.var_to_subvar(*v).is_some())
                            })
                            .unwrap_or(true)

                },
                "Trying to mutate from or into an op which spans variables not prepared in the args."
            );

            let old_op_node = self.ops[p].take();

            // Fast path: same variables means all links can be reused as-is.
            let same_vars = new_op
                .as_ref()
                .and_then(|new_op| {
                    old_op_node
                        .as_ref()
                        .map(|node| node.op.get_vars() == new_op.get_vars())
                })
                .unwrap_or(false);
            if same_vars {
                // Can safely unwrap: same_vars implies both are Some.
                let new_op = new_op.unwrap();
                let old_op_node = old_op_node.unwrap();
                let mut node_ref = FastOpNodeTemplate::new(
                    new_op,
                    old_op_node.previous_for_vars,
                    old_op_node.next_for_vars,
                );
                node_ref.previous_p = old_op_node.previous_p;
                node_ref.next_p = old_op_node.next_p;
                if let Some(bond_counters) = self.bond_counters.as_mut() {
                    let bond = old_op_node.op.get_bond();
                    bond_counters[bond] -= 1;
                    let bond = node_ref.op.get_bond();
                    bond_counters[bond] += 1;
                }
                self.ops[p] = Some(node_ref);
            } else {
                // Slow path: unlink the old node (if any) then splice the new
                // node (if any) into the time and per-variable lists.
                if let Some(node_ref) = old_op_node {
                    debug_assert_eq!(args.last_p, node_ref.previous_p);

                    // Bridge the imaginary-time list over the removed node.
                    if let Some(last_p) = args.last_p {
                        let node = self.ops[last_p].as_mut().unwrap();
                        node.next_p = node_ref.next_p;
                    } else {
                        self.p_ends = if let Some((head, tail)) = self.p_ends {
                            debug_assert_eq!(head, p);
                            node_ref.next_p.map(|new_head| {
                                debug_assert_ne!(p, tail);
                                (new_head, tail)
                            })
                        } else {
                            unreachable!()
                        }
                    }
                    let next_p_node = node_ref.next_p.and_then(|p| self.ops[p].as_mut());
                    if let Some(next_p_node) = next_p_node {
                        next_p_node.previous_p = args.last_p
                    } else {
                        self.p_ends = if let Some((head, tail)) = self.p_ends {
                            debug_assert_eq!(tail, p);
                            node_ref.previous_p.map(|new_tail| {
                                debug_assert_ne!(head, p);
                                (head, new_tail)
                            })
                        } else {
                            None
                        }
                    }

                    // Bridge each variable's list over the removed node.
                    let vars = node_ref.op.get_vars();
                    vars.iter().cloned().enumerate().for_each(|(relv, v)| {
                        let subvar = args.var_to_subvar(v).unwrap();
                        debug_assert_eq!(
                            args.last_vars[subvar],
                            node_ref.previous_for_vars[relv].map(|prel| prel.p),
                            "P position in args must match op being removed"
                        );
                        debug_assert_eq!(
                            args.last_rels[subvar],
                            node_ref.previous_for_vars[relv].map(|prel| prel.relv),
                            "Relative var in args must match op being removed"
                        );

                        if let Some((prev_p_for_v, prev_rel_indx)) =
                            args.last_vars[subvar].zip(args.last_rels[subvar])
                        {
                            let prev_p_for_v_ref = self.ops[prev_p_for_v].as_mut().unwrap();
                            prev_p_for_v_ref.next_for_vars[prev_rel_indx] =
                                node_ref.next_for_vars[relv];
                        } else {
                            // Removed node was this variable's head.
                            self.var_ends[v] = if let Some((head, tail)) = self.var_ends[v] {
                                debug_assert_eq!(head, PRel { p, relv });
                                node_ref.next_for_vars[relv].map(|new_head| {
                                    debug_assert_ne!(tail, PRel { p, relv });
                                    (new_head, tail)
                                })
                            } else {
                                unreachable!()
                            }
                        }

                        if let Some(PRel {
                            p: next_p_for_v,
                            relv: next_rel_index,
                        }) = node_ref.next_for_vars[relv]
                        {
                            let next_p_for_v_ref = self.ops[next_p_for_v].as_mut().unwrap();
                            next_p_for_v_ref.previous_for_vars[next_rel_index] = args.last_vars
                                [subvar]
                                .zip(args.last_rels[subvar])
                                .map(PRel::from);
                        } else {
                            // Removed node was this variable's tail.
                            self.var_ends[v] = if let Some((head, tail)) = self.var_ends[v] {
                                debug_assert_eq!(tail, PRel { p, relv });
                                node_ref.previous_for_vars[relv].map(|new_tail| {
                                    debug_assert_ne!(head, PRel { p, relv });
                                    (head, new_tail)
                                })
                            } else {
                                None
                            }
                        }
                    });
                    self.n -= 1;
                    if let Some(bond_counters) = self.bond_counters.as_mut() {
                        let bond = node_ref.op.get_bond();
                        bond_counters[bond] -= 1;
                    }
                }

                if let Some(new_op) = new_op {
                    // Compute the (previous, next) PRel for each of the new
                    // op's variables from the args' last-seen positions.
                    let (prevs, nexts): (LinkVars, LinkVars) = new_op
                        .get_vars()
                        .iter()
                        .cloned()
                        .map(|v| -> (Option<PRel>, Option<PRel>) {
                            let subvar = args.var_to_subvar(v).unwrap();
                            let prev_p_and_rel = args.last_vars[subvar]
                                .zip(args.last_rels[subvar])
                                .map(PRel::from);

                            let next_p_and_rel = if let Some(prev_p_for_v) = args.last_vars[subvar]
                            {
                                // Found previous op for this var: its successor
                                // becomes the new op's successor.
                                let prev_node_for_v = self.ops[prev_p_for_v].as_ref().unwrap();
                                let indx = prev_node_for_v.op.index_of_var(v).unwrap();
                                prev_node_for_v.next_for_vars[indx]
                            } else if let Some((head, _)) = self.var_ends[v] {
                                // No previous op: the old head follows the new op.
                                debug_assert_eq!(prev_p_and_rel, None);
                                Some(head)
                            } else {
                                None
                            };

                            (prev_p_and_rel, next_p_and_rel)
                        })
                        .unzip();

                    // Wire predecessors (or variable heads) to the new op.
                    prevs
                        .iter()
                        .zip(new_op.get_vars().iter())
                        .enumerate()
                        .for_each(|(relv, (prev, v))| {
                            if let Some(prel) = prev {
                                let prev_p = prel.p;
                                let prev_rel = prel.relv;
                                let prev_node = self.ops[prev_p].as_mut().unwrap();
                                debug_assert_eq!(prev_node.get_op_ref().get_vars()[prev_rel], *v);
                                prev_node.next_for_vars[prev_rel] = Some(PRel::from((p, relv)));
                            } else {
                                self.var_ends[*v] = if let Some((head, tail)) = self.var_ends[*v] {
                                    debug_assert!(head.p >= p);
                                    Some((PRel { p, relv }, tail))
                                } else {
                                    Some((PRel { p, relv }, PRel { p, relv }))
                                }
                            }
                        });

                    // Wire successors (or variable tails) to the new op.
                    nexts
                        .iter()
                        .zip(new_op.get_vars().iter())
                        .enumerate()
                        .for_each(|(relv, (next, v))| {
                            if let Some(PRel {
                                p: next_p,
                                relv: next_rel,
                            }) = next
                            {
                                let next_node = self.ops[*next_p].as_mut().unwrap();
                                debug_assert_eq!(next_node.get_op_ref().get_vars()[*next_rel], *v);
                                next_node.previous_for_vars[*next_rel] = Some(PRel { p, relv });
                            } else {
                                self.var_ends[*v] = if let Some((head, tail)) = self.var_ends[*v] {
                                    debug_assert!(tail.p <= p);
                                    Some((head, PRel { p, relv }))
                                } else {
                                    Some((PRel { p, relv }, PRel { p, relv }))
                                }
                            }
                        });

                    // Splice the new node into the imaginary-time list.
                    let mut node_ref = FastOpNodeTemplate::new(new_op, prevs, nexts);
                    node_ref.previous_p = args.last_p;
                    node_ref.next_p = if let Some(last_p) = args.last_p {
                        let last_p_node = self.ops[last_p].as_ref().unwrap();
                        last_p_node.next_p
                    } else if let Some((head, _)) = self.p_ends {
                        Some(head)
                    } else {
                        None
                    };

                    if let Some(prev) = node_ref.previous_p {
                        let prev_node = self.ops[prev].as_mut().unwrap();
                        prev_node.next_p = Some(p);
                    } else {
                        self.p_ends = if let Some((head, tail)) = self.p_ends {
                            debug_assert!(head >= p);
                            Some((p, tail))
                        } else {
                            Some((p, p))
                        }
                    };

                    if let Some(next) = node_ref.next_p {
                        let next_node = self.ops[next].as_mut().unwrap();
                        next_node.previous_p = Some(p);
                    } else {
                        self.p_ends = if let Some((head, tail)) = self.p_ends {
                            debug_assert!(tail <= p);
                            Some((head, p))
                        } else {
                            Some((p, p))
                        }
                    };
                    if let Some(bond_counters) = self.bond_counters.as_mut() {
                        let bond = node_ref.op.get_bond();
                        bond_counters[bond] += 1;
                    }
                    self.ops[p] = Some(node_ref);
                    self.n += 1;
                }
            }
        }

        // Advance args past p: whatever now sits at p becomes the "last seen"
        // op for each of its tracked variables.
        if let Some(op) = self.ops[p].as_ref().map(|r| &r.op) {
            op.get_vars()
                .iter()
                .cloned()
                .enumerate()
                .for_each(|(relv, v)| {
                    if let Some(subvar) = args.var_to_subvar(v) {
                        args.last_vars[subvar] = Some(p);
                        args.last_rels[subvar] = Some(relv);
                    }
                });
            args.last_p = Some(p)
        }
        (t, args)
    }
610
611 fn mutate_subsection<T, F>(
612 &mut self,
613 pstart: usize,
614 pend: usize,
615 t: T,
616 f: F,
617 args: Option<Self::Args>,
618 ) -> T
619 where
620 F: Fn(&Self, Option<&Self::Op>, T) -> (Option<Option<Self::Op>>, T),
621 {
622 if pend > self.ops.len() {
623 self.ops.resize(pend, None)
624 };
625 let args = match args {
626 Some(args) => args,
627 None => {
628 let args = self.get_empty_args(SubvarAccess::All);
629 self.fill_args_at_p(pstart, args)
630 }
631 };
632 let (t, args) =
633 (pstart..pend).fold((t, args), |(t, args), p| self.mutate_p(&f, p, t, args));
634 self.return_args(args);
635
636 t
637 }
638
    /// Apply `f` to every existing op in `[pstart, pend)`, skipping identity
    /// slots. When args restrict the variables, only ops overlapping the
    /// tracked subvariables are visited (via a min-heap of upcoming positions);
    /// otherwise the imaginary-time links are followed directly.
    fn mutate_subsection_ops<T, F>(
        &mut self,
        pstart: usize,
        pend: usize,
        mut t: T,
        f: F,
        args: Option<Self::Args>,
    ) -> T
    where
        F: Fn(&Self, &Self::Op, usize, T) -> (Option<Option<Self::Op>>, T),
    {
        if pend > self.ops.len() {
            self.ops.resize(pend, None)
        };

        let mut args = match args {
            Some(args) => args,
            None => {
                let args = self.get_empty_args(SubvarAccess::All);
                self.fill_args_at_p(pstart, args)
            }
        };

        let last_vars = &mut args.last_vars;
        let last_rels = &mut args.last_rels;
        let subvars = args
            .subvar_mapping
            .as_ref()
            .map(|(_, subvars)| subvars.as_slice());

        if let Some(vars) = subvars {
            // Restricted case: min-heap (BinaryHeap of Reverse) of the next
            // position at which each tracked variable has an op.
            let mut p_heap: BinaryHeap<Reverse<usize>> = self.get_instance();

            // Seed the heap from each variable's successor of its last-seen
            // op, or from the variable's first op if none was seen yet.
            let prel_gen = last_vars
                .iter()
                .cloned()
                .zip(last_rels.iter().cloned())
                .map(|(a, b)| a.zip(b));
            vars.iter().cloned().zip(prel_gen).for_each(|(var, prel)| {
                if let Some((last_p, last_relv)) = prel {
                    let node = self.get_node_ref(last_p).unwrap();
                    let next_p = self.get_next_p_for_rel_var(last_relv, node);
                    debug_assert!(last_p < pstart);
                    debug_assert!(next_p.map(|next| next.p >= pstart).unwrap_or(true));

                    if let Some(PRel { p: next_p, .. }) = next_p {
                        p_heap.push(Reverse(next_p));
                    }
                } else if let Some((start, _)) = self.var_ends[var] {
                    debug_assert!(start.p >= pstart);
                    p_heap.push(Reverse(start.p));
                }
            });

            while let Some(p) = p_heap.pop().map(|rp| rp.0) {
                if p > pend {
                    break;
                }
                // An op spanning several tracked variables gets pushed once
                // per variable; drain the duplicates for this p.
                while p_heap
                    .peek()
                    .map(|rp| rp.0)
                    .map(|rp| rp <= p)
                    .unwrap_or(false)
                {
                    let popped = p_heap.pop();
                    debug_assert_eq!(Some(Reverse(p)), popped);
                }
                debug_assert!(p_heap.iter().all(|rp| rp.0 > p));

                let node = self.get_node_ref(p).unwrap();
                let op = node.get_op_ref();
                debug_assert!(op
                    .get_vars()
                    .iter()
                    .cloned()
                    .any(|v| args.var_to_subvar(v).is_some()));

                // Queue this op's successors for each tracked variable.
                op.get_vars()
                    .iter()
                    .cloned()
                    .enumerate()
                    .filter_map(|(relv, v)| args.var_to_subvar(v).map(|_| relv))
                    .for_each(|relv| {
                        if let Some(prel) = self.get_next_p_for_rel_var(relv, node) {
                            p_heap.push(Reverse(prel.p));
                        }
                    });
                // Since ops not overlapping the tracked vars may have been
                // skipped, resynchronize args.last_p with the graph.
                let last_p = self.get_previous_p(node);
                debug_assert!(
                    {
                        if last_p.is_none() {
                            args.last_p.is_none()
                        } else {
                            true
                        }
                    },
                    "The graph claims there are no prior ops but the args claim otherwise."
                );
                debug_assert!(
                    {
                        if last_p != args.last_p {
                            let last_node = self.get_node_ref(last_p.unwrap()).unwrap();
                            last_node
                                .get_op_ref()
                                .get_vars()
                                .iter()
                                .cloned()
                                .all(|v| args.var_to_subvar(v).is_none())
                        } else {
                            true
                        }
                    },
                    "Args and graph disagree on last_p even though last op overlaps with subvars."
                );

                args.last_p = last_p;

                let ret = self.mutate_p(|s, op, t| f(s, op.unwrap(), p, t), p, t, args);
                t = ret.0;
                args = ret.1;
            }
            self.return_instance(p_heap);
        } else {
            // Unrestricted case: walk the imaginary-time linked list.
            // NOTE(review): the closure's `pend` shadows the function argument
            // with the tail of p_ends, so `start > pend` compares head vs tail
            // of the list — confirm intent; the slice scan below still finds
            // the first occupied position correctly.
            let mut p = self.p_ends.and_then(|(start, pend)| {
                if pstart <= start {
                    Some(start)
                } else if start > pend {
                    None
                } else {
                    self.ops[pstart..]
                        .iter()
                        .enumerate()
                        .find(|(_, op)| op.is_some())
                        .map(|(x, _)| x + pstart)
                }
            });
            while let Some(node_p) = p {
                if node_p > pend {
                    break;
                } else {
                    let ret =
                        self.mutate_p(|s, op, t| f(s, op.unwrap(), node_p, t), node_p, t, args);
                    t = ret.0;
                    args = ret.1;

                    let node = self.ops[node_p].as_ref().unwrap();
                    p = node.next_p;
                }
            }
        }
        self.return_args(args);

        t
    }
809
810 fn get_empty_args(&mut self, vars: SubvarAccess<FastOpMutateArgs>) -> FastOpMutateArgs {
811 match vars {
812 SubvarAccess::All => {
813 let mut args = FastOpMutateArgs::new(self.get_nvars(), None, self);
814 args.unfilled = self.var_ends.iter().filter(|end| end.is_some()).count();
816 args
817 }
818 SubvarAccess::Varlist(vars) => {
819 let mut args = FastOpMutateArgs::new(self.get_nvars(), Some(vars), self);
820 args.unfilled = vars.iter().filter(|v| self.var_ends[**v].is_some()).count();
822 args
823 }
824 SubvarAccess::Args(mut args) => {
825 args.unfilled = (0..args.n_subvars())
827 .filter(|subvar| {
828 let v = args.subvar_to_var(*subvar);
829 args.last_vars[*subvar].is_none() && self.var_ends[v].is_some()
830 })
831 .count();
832 args
833 }
834 }
835 }
836
    /// Populate `empty_args` with the last-op info for every tracked variable
    /// below position `p`, scanning ops via `iter_ops_above_p` until every
    /// `unfilled` entry has been resolved.
    fn fill_args_at_p(&self, p: usize, empty_args: Self::Args) -> Self::Args {
        let args = empty_args;
        if args.unfilled > 0 {
            self.iter_ops_above_p(
                p,
                args,
                // Called per op below p: record the first (highest) op seen
                // for each still-unfilled tracked variable.
                |p, node, mut args| {
                    if args.last_p.is_none() {
                        args.last_p = Some(p);
                    }
                    node.get_op_ref()
                        .get_vars()
                        .iter()
                        .cloned()
                        .enumerate()
                        .for_each(|(relv, v)| {
                            let subvar = args.var_to_subvar(v);
                            if let Some(subvar) = subvar {
                                if args.last_vars[subvar].is_none() {
                                    debug_assert_eq!(args.last_rels[subvar], None);
                                    args.last_vars[subvar] = Some(p);
                                    args.last_rels[subvar] = Some(relv);
                                    args.unfilled -= 1;
                                }
                            }
                        });
                    // Stop scanning once everything is filled.
                    let cont = args.unfilled > 0;
                    (args, cont)
                },
                // Wrap-around case: fill remaining entries from each op's
                // stored previous_for_vars links.
                |node, mut args| {
                    node.get_op_ref()
                        .get_vars()
                        .iter()
                        .zip(node.previous_for_vars.iter())
                        .filter_map(|(v, prel)| prel.as_ref().map(|prel| (*v, prel)))
                        .for_each(|(v, prel)| {
                            let subvar = args.var_to_subvar(v);
                            if let Some(subvar) = subvar {
                                if args.last_vars[subvar].is_none() {
                                    debug_assert_eq!(args.last_rels[subvar], None);
                                    args.unfilled -= 1;
                                    args.last_vars[subvar] = Some(prel.p);
                                    args.last_rels[subvar] = Some(prel.relv);
                                }
                            }
                        });
                    args.last_p = node.previous_p;
                    let cont = args.unfilled > 0;
                    (args, cont)
                },
            )
        } else {
            args
        }
    }
895
    /// Populate `args` with last-op info below `p` for the listed `vars`,
    /// starting each search from the per-variable `hint` positions (each hint,
    /// if given, must point to an op containing that variable).
    fn fill_args_at_p_with_hint<It>(
        &self,
        p: usize,
        args: &mut Self::Args,
        vars: &[usize],
        hint: It,
    ) where
        It: IntoIterator<Item = Option<usize>>,
    {
        let psel = p;
        // Walk forward along a variable's (cyclic) op list from `pcheck` until
        // the hop pcheck -> next crosses `psel`, then record the op just below.
        let iter_and_set = |mut pcheck: usize,
                            var: usize,
                            mut relv: usize,
                            subvar: usize,
                            args: &mut Self::Args| {
            loop {
                debug_assert!(pcheck < p);
                let node = self
                    .get_node_ref(pcheck)
                    .expect("Gave a hint without an op");
                // No successor means we wrap around to the variable's head.
                let next = self
                    .get_next_p_for_rel_var(relv, node)
                    .unwrap_or_else(|| self.var_ends[var].unwrap().0);
                if p_crosses(pcheck, next.p, psel) {
                    if pcheck < psel {
                        args.last_vars[subvar] = Some(pcheck);
                        args.last_rels[subvar] = Some(relv);
                    }
                    break;
                } else {
                    pcheck = next.p;
                    relv = next.relv;
                }
            }
        };

        // Exact-hit case: an op sits at p itself, so its stored
        // previous_for_vars link gives the answer directly.
        let set_using = |phint: usize, relv: usize, subvar: usize, args: &mut Self::Args| {
            debug_assert_eq!(phint, p);
            let node = self.get_node_ref(phint).expect("Gave a hint without an op");
            let prel = node.previous_for_vars[relv];
            args.last_vars[subvar] = prel.map(|prel| prel.p);
            args.last_rels[subvar] = prel.map(|prel| prel.relv);
        };

        hint.into_iter()
            .zip(vars.iter().cloned())
            .enumerate()
            .for_each(|(subvar, (phint, var))| {
                debug_assert!(
                    phint
                        .map(|phint| self.get_node_ref(phint).is_some())
                        .unwrap_or(true),
                    "Hints must be to ops."
                );
                let var_start: Option<PRel> = self.var_ends[var].map(|(prel, _)| prel);
                let can_use_hint_iter = phint.map(|phint| phint < p).unwrap_or(false);
                let can_use_start_iter = var_start.map(|prel| prel.p < p).unwrap_or(false);
                // Prefer an exact hit at p (from the hint or the var's head).
                let use_exact: Option<PRel> = match (phint, var_start) {
                    (Some(phint), _) if phint == p => {
                        let node = self.get_node_ref(p).expect("Gave a hint without an op");
                        let relv = node
                            .get_op_ref()
                            .index_of_var(var)
                            .expect("Gave a hint with an op with the wrong variable.");
                        Some(PRel { p: phint, relv })
                    }
                    (_, Some(prel)) if prel.p == p => Some(prel),
                    _ => None,
                };

                if let Some(use_exact) = use_exact {
                    set_using(use_exact.p, use_exact.relv, subvar, args)
                } else {
                    // Otherwise iterate from the best available starting point.
                    match (phint, var_start) {
                        // No ops at all for this var: leave entry empty.
                        (None, None) => {}

                        (None, Some(prel)) => {
                            if can_use_start_iter {
                                iter_and_set(prel.p, var, prel.relv, subvar, args)
                            }
                        }

                        (Some(phint), Some(prel)) => {
                            let node = self.get_node_ref(phint).expect("Gave a hint without an op");
                            let relv = node
                                .get_op_ref()
                                .index_of_var(var)
                                .expect("Gave a hint with an op with the wrong variable.");
                            match (can_use_hint_iter, can_use_start_iter) {
                                (false, false) => {}
                                (true, false) => iter_and_set(phint, var, relv, subvar, args),
                                (false, true) => iter_and_set(prel.p, var, prel.relv, subvar, args),
                                // Both usable: start from the lower position.
                                (true, true) => {
                                    let prel = if phint < prel.p {
                                        PRel { p: phint, relv }
                                    } else {
                                        prel
                                    };
                                    iter_and_set(prel.p, var, prel.relv, subvar, args)
                                }
                            }
                        }

                        (Some(_), None) => {
                            panic!("Gave a hint for a variable with no ops!")
                        }
                    }
                }
            });

        // last_p is simply the nearest occupied slot below p.
        args.last_p = (0..psel).rev().find(|p| self.get_node_ref(*p).is_some());
    }
1017
1018 fn return_args(&mut self, args: Self::Args) {
1019 self.return_instance(args.last_vars);
1020 self.return_instance(args.last_rels);
1021 if let Some((vars_to_subvars, subvars_to_vars)) = args.subvar_mapping {
1022 self.return_instance(vars_to_subvars);
1023 self.return_instance(subvars_to_vars);
1024 }
1025 }
1026
    /// Fill `substate` with the state of `vars` as propagated to position `p`,
    /// starting from the base `state` and using per-variable `hint` positions
    /// to shortcut the search (each hint must point to an op containing the
    /// corresponding variable).
    fn get_propagated_substate_with_hint<It>(
        &self,
        p: usize,
        substate: &mut [bool],
        state: &[bool],
        vars: &[usize],
        hint: It,
    ) where
        It: IntoIterator<Item = Option<usize>>,
    {
        let psel = p;
        // Walk forward along a variable's (cyclic) op list from `pcheck` until
        // the hop pcheck -> next crosses `psel`; the last op strictly below
        // psel determines the propagated value via its outputs.
        let iter_and_set = |mut pcheck: usize,
                            var: usize,
                            mut relv: usize,
                            subvar: usize,
                            substate: &mut [bool]| {
            loop {
                debug_assert!(pcheck < p);
                let node = self
                    .get_node_ref(pcheck)
                    .expect("Gave a hint without an op");
                // No successor wraps around to the variable's head.
                let next = self
                    .get_next_p_for_rel_var(relv, node)
                    .unwrap_or_else(|| self.var_ends[var].unwrap().0);
                if p_crosses(pcheck, next.p, psel) {
                    if pcheck < next.p {
                        substate[subvar] = node.get_op_ref().get_outputs()[relv];
                    }
                    break;
                } else {
                    pcheck = next.p;
                    relv = next.relv;
                }
            }
        };

        // Exact hit at p: the op's input for this variable is the state there.
        let set_using = |phint: usize, relv: usize, subvar: usize, substate: &mut [bool]| {
            debug_assert_eq!(phint, p);
            let node = self.get_node_ref(phint).expect("Gave a hint without an op");
            substate[subvar] = node.get_op_ref().get_inputs()[relv];
        };

        hint.into_iter()
            .zip(vars.iter().cloned())
            .enumerate()
            .for_each(|(subvar, it)| {
                let (phint, var): (Option<usize>, usize) = it;
                debug_assert!(
                    phint
                        .map(|phint| self.get_node_ref(phint).is_some())
                        .unwrap_or(true),
                    "Hints must be to ops."
                );
                debug_assert!(
                    phint
                        .map(|phint| self
                            .get_node_ref(phint)
                            .unwrap()
                            .get_op_ref()
                            .index_of_var(var)
                            .is_some())
                        .unwrap_or(true),
                    "Hints must point to ops with relevant variable."
                );
                // Default: no op below p changes this var, use the base state.
                substate[subvar] = state[var];
                let var_start: Option<PRel> = self.var_ends[var].map(|(prel, _)| prel);
                // Rewind hints at or above p backwards until below p.
                let phint = phint.and_then(|mut phint: usize| {
                    let mut node = self.get_node_ref(phint).expect("Hints must be to ops");
                    let mut relv = node
                        .get_op_ref()
                        .index_of_var(var)
                        .expect("Hints must point to ops with relevant variables");
                    while phint >= p {
                        let prev = self.get_previous_p_for_rel_var(relv, node)?;
                        phint = prev.p;
                        relv = prev.relv;
                        node = self.get_node_ref(phint).expect("Hints must be to ops");
                    }
                    Some(phint)
                });
                let can_use_hint_iter = phint.map(|phint| phint < p).unwrap_or(false);
                let can_use_start_iter = var_start.map(|prel| prel.p < p).unwrap_or(false);
                let use_exact = match (phint, var_start) {
                    (Some(phint), _) if phint == p => {
                        let node = self.get_node_ref(p).expect("Gave a hint without an op");
                        let relv = node
                            .get_op_ref()
                            .index_of_var(var)
                            .expect("Gave a hint with an op with the wrong variable.");
                        Some(PRel { p: phint, relv })
                    }
                    (_, Some(prel)) if prel.p == p => Some(prel),
                    _ => None,
                };

                if let Some(use_exact) = use_exact {
                    set_using(use_exact.p, use_exact.relv, subvar, substate)
                } else {
                    match (phint, var_start) {
                        // No ops for this var: the base state stands.
                        (None, None) => {}

                        (None, Some(prel)) => {
                            if can_use_start_iter {
                                iter_and_set(prel.p, var, prel.relv, subvar, substate)
                            }
                        }

                        (Some(phint), Some(prel)) => {
                            let node = self.get_node_ref(phint).expect("Gave a hint without an op");
                            let relv = node
                                .get_op_ref()
                                .index_of_var(var)
                                .expect("Gave a hint with an op with the wrong variable.");
                            match (can_use_hint_iter, can_use_start_iter) {
                                (false, false) => {}
                                (true, false) => iter_and_set(phint, var, relv, subvar, substate),
                                (false, true) => {
                                    iter_and_set(prel.p, var, prel.relv, subvar, substate)
                                }
                                // Both usable: start from the lower position.
                                (true, true) => {
                                    let prel = if phint < prel.p {
                                        PRel { p: phint, relv }
                                    } else {
                                        prel
                                    };
                                    iter_and_set(prel.p, var, prel.relv, subvar, substate)
                                }
                            }
                        }

                        (Some(_), None) => {
                            panic!("Gave a hint for a variable with no ops!")
                        }
                    }
                }
            });
    }
1173}
1174
/// Whether moving forward (cyclically) from `pstart` to `pend` passes over `psel`.
///
/// For a forward interval (`pstart < pend`) this is membership in
/// `(pstart, pend]`; a wrapped interval (`pstart > pend`) covers the
/// complement of `(pend, pstart]`; equal endpoints cover everything.
fn p_crosses(pstart: usize, pend: usize, psel: usize) -> bool {
    use std::cmp::Ordering;
    match pstart.cmp(&pend) {
        // Forward interval: psel must fall in (pstart, pend].
        Ordering::Less => pstart < psel && psel <= pend,
        // Wrapped interval: everything except (pend, pstart].
        Ordering::Greater => psel <= pend || pstart < psel,
        // Degenerate interval covers the whole loop.
        Ordering::Equal => true,
    }
}
1183
impl<O: Op + Clone, ALLOC: FastOpAllocator> DiagonalUpdater for FastOpsTemplate<O, ALLOC> {
    /// Mutate every position in `[pstart, pend)`, including identity slots.
    fn mutate_ps<F, T>(&mut self, pstart: usize, pend: usize, t: T, f: F) -> T
    where
        F: Fn(&Self, Option<&Self::Op>, T) -> (Option<Option<Self::Op>>, T),
    {
        self.mutate_subsection(pstart, pend, t, f, None)
    }

    /// Mutate only the existing ops in `[pstart, pend)`.
    fn mutate_ops<F, T>(&mut self, pstart: usize, pend: usize, t: T, f: F) -> T
    where
        F: Fn(&Self, &Self::Op, usize, T) -> (Option<Option<Self::Op>>, T),
    {
        self.mutate_subsection_ops(pstart, pend, t, f, None)
    }

    /// Fold `f` over every slot (occupied or not) in the clamped range,
    /// short-circuiting on the first `Err`.
    fn try_iterate_ps<F, T, V>(&self, pstart: usize, pend: usize, t: T, f: F) -> Result<T, V>
    where
        F: Fn(&Self, Option<&Self::Op>, T) -> Result<T, V>,
    {
        self.ops[min(pstart, self.ops.len())..min(pend, self.ops.len())]
            .iter()
            .try_fold(t, |t, op| f(self, op.as_ref().map(|op| op.get_op_ref()), t))
    }

    /// Fold `f` over existing ops only, following the imaginary-time links,
    /// short-circuiting on the first `Err`.
    fn try_iterate_ops<F, T, V>(&self, pstart: usize, pend: usize, mut t: T, f: F) -> Result<T, V>
    where
        F: Fn(&Self, &Self::Op, usize, T) -> Result<T, V>,
    {
        // Find the first occupied position at or after pstart.
        // NOTE(review): the closure's `pend` shadows the function argument with
        // the tail of p_ends, so `start > pend` compares the list's head and
        // tail — confirm intent; the slice scan below still behaves correctly.
        let mut p = self.p_ends.and_then(|(start, pend)| {
            if pstart <= start {
                Some(start)
            } else if start > pend {
                None
            } else {
                self.ops[pstart..]
                    .iter()
                    .enumerate()
                    .find(|(_, op)| op.is_some())
                    .map(|(x, _)| x + pstart)
            }
        });
        while let Some(node_p) = p {
            if node_p > pend {
                break;
            } else {
                let node = self.ops[node_p].as_ref().unwrap();
                t = f(self, node.get_op_ref(), node_p, t)?;
                p = node.next_p;
            }
        }
        Ok(t)
    }
}
1239
// All heat-bath diagonal-update behavior comes from the trait's default methods.
impl<O: Op + Clone, ALLOC: FastOpAllocator> HeatBathDiagonalUpdater for FastOpsTemplate<O, ALLOC> {}
1241
impl<O: Op + Clone, ALLOC: FastOpAllocator> OpContainerConstructor for FastOpsTemplate<O, ALLOC> {
    // Build a container for `nvars` variables; delegates to `new_from_nvars`.
    fn new(nvars: usize) -> Self {
        Self::new_from_nvars(nvars)
    }

    // Build a container for `nvars` variables with per-bond op counters
    // allocated for `nbonds` bonds (enables the fast path in `get_count`).
    fn new_with_bonds(nvars: usize, nbonds: usize) -> Self {
        Self::new_from_nvars_and_nbonds(nvars, Some(nbonds))
    }
}
1251
1252impl<O: Op + Clone, ALLOC: FastOpAllocator> OpContainer for FastOpsTemplate<O, ALLOC> {
1253 type Op = O;
1254
1255 fn get_cutoff(&self) -> usize {
1256 self.ops.len()
1257 }
1258
1259 fn set_cutoff(&mut self, cutoff: usize) {
1260 if cutoff > self.ops.len() {
1261 self.ops.resize(cutoff, None)
1262 }
1263 }
1264
1265 fn get_n(&self) -> usize {
1266 self.n
1267 }
1268
1269 fn get_nvars(&self) -> usize {
1270 self.var_ends.len()
1271 }
1272
1273 fn get_pth(&self, p: usize) -> Option<&Self::Op> {
1274 if p < self.ops.len() {
1275 self.ops[p].as_ref().map(|opnode| &opnode.op)
1276 } else {
1277 None
1278 }
1279 }
1280
1281 fn get_count(&self, bond: usize) -> usize {
1282 self.bond_counters
1283 .as_ref()
1284 .map(|bc| bc.get(bond).copied().unwrap_or(0))
1285 .unwrap_or_else(|| {
1286 self.iterate_ops(0, self.get_cutoff(), 0, |_, op, _, count| {
1287 if op.get_bond() == bond {
1288 count + 1
1289 } else {
1290 count
1291 }
1292 })
1293 })
1294 }
1295
1296 fn itime_fold<F, T>(&self, state: &mut [bool], fold_fn: F, init: T) -> T
1297 where
1298 F: Fn(T, &[bool]) -> T,
1299 {
1300 let (t, _) = (0..self.get_cutoff()).fold((init, state), |(acc, state), p| {
1301 let t = fold_fn(acc, state);
1302 if let Some(op) = self.get_pth(p) {
1303 op.get_vars()
1304 .iter()
1305 .cloned()
1306 .enumerate()
1307 .for_each(|(relv, v)| {
1308 debug_assert_eq!(op.get_inputs()[relv], state[v]);
1309 state[v] = op.get_outputs()[relv];
1310 });
1311 }
1312 (t, state)
1313 });
1314 t
1315 }
1316}
1317
impl<O: Op + Clone, ALLOC: FastOpAllocator> LoopUpdater for FastOpsTemplate<O, ALLOC> {
    type Node = FastOpNodeTemplate<O>;

    // Node at position `p` (`None` if the slot is empty).
    // Panics if `p >= self.ops.len()`.
    fn get_node_ref(&self, p: usize) -> Option<&Self::Node> {
        self.ops[p].as_ref()
    }

    // Mutable node at position `p` (`None` if the slot is empty).
    // Panics if `p >= self.ops.len()`.
    fn get_node_mut(&mut self, p: usize) -> Option<&mut Self::Node> {
        self.ops[p].as_mut()
    }

    // Position of the first op in the op list, if any ops exist.
    fn get_first_p(&self) -> Option<usize> {
        self.p_ends.map(|(p, _)| p)
    }

    // Position of the last op in the op list, if any ops exist.
    fn get_last_p(&self) -> Option<usize> {
        self.p_ends.map(|(_, p)| p)
    }

    // First (position, relative index) entry for `var`, if any op acts on it.
    // Panics if `var >= self.var_ends.len()`.
    fn get_first_p_for_var(&self, var: usize) -> Option<PRel> {
        self.var_ends[var].map(|(start, _)| start)
    }

    // Last (position, relative index) entry for `var`, if any op acts on it.
    // Panics if `var >= self.var_ends.len()`.
    fn get_last_p_for_var(&self, var: usize) -> Option<PRel> {
        self.var_ends[var].map(|(_, end)| end)
    }

    // Previous op position in the op linked list.
    fn get_previous_p(&self, node: &Self::Node) -> Option<usize> {
        node.previous_p
    }

    // Next op position in the op linked list.
    fn get_next_p(&self, node: &Self::Node) -> Option<usize> {
        node.next_p
    }

    // Previous op acting on the same variable; `relvar` indexes into this
    // node's variable list.
    fn get_previous_p_for_rel_var(&self, relvar: usize, node: &Self::Node) -> Option<PRel> {
        node.previous_for_vars[relvar]
    }

    // Next op acting on the same variable; `relvar` indexes into this
    // node's variable list.
    fn get_next_p_for_rel_var(&self, relvar: usize, node: &Self::Node) -> Option<PRel> {
        node.next_for_vars[relvar]
    }

    // Position of the (n mod self.n)-th op counted from the head of the op
    // linked list. Panics when the container is empty: `n % self.n` divides
    // by zero and the `unwrap` on `p_ends` fails.
    fn get_nth_p(&self, n: usize) -> usize {
        let n = n % self.n;
        let init = self.p_ends.map(|(head, _)| head).unwrap();
        (0..n).fold(init, |p, _| self.ops[p].as_ref().unwrap().next_p.unwrap())
    }
}
1367
// The `Factory<T>` impls below let `FastOpsTemplate` hand out and reclaim
// scratch buffers of assorted element types. Every impl delegates straight to
// the internal `FastOpAllocator`, which owns the pooling/reuse policy.

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<bool>> for FastOpsTemplate<O, ALLOC> {
    fn get_instance(&mut self) -> Vec<bool> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<bool>) {
        self.alloc.return_instance(t)
    }
}

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<usize>> for FastOpsTemplate<O, ALLOC> {
    fn get_instance(&mut self) -> Vec<usize> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<usize>) {
        self.alloc.return_instance(t)
    }
}

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<Option<usize>>>
    for FastOpsTemplate<O, ALLOC>
{
    fn get_instance(&mut self) -> Vec<Option<usize>> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<Option<usize>>) {
        self.alloc.return_instance(t)
    }
}

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<OpSide>> for FastOpsTemplate<O, ALLOC> {
    fn get_instance(&mut self) -> Vec<OpSide> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<OpSide>) {
        self.alloc.return_instance(t)
    }
}
impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<Leg>> for FastOpsTemplate<O, ALLOC> {
    fn get_instance(&mut self) -> Vec<Leg> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<Leg>) {
        self.alloc.return_instance(t)
    }
}

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<Vec<f64>> for FastOpsTemplate<O, ALLOC> {
    fn get_instance(&mut self) -> Vec<f64> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: Vec<f64>) {
        self.alloc.return_instance(t)
    }
}
impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<BondContainer<usize>>
    for FastOpsTemplate<O, ALLOC>
{
    fn get_instance(&mut self) -> BondContainer<usize> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: BondContainer<usize>) {
        self.alloc.return_instance(t)
    }
}
impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<BondContainer<VarPos>>
    for FastOpsTemplate<O, ALLOC>
{
    fn get_instance(&mut self) -> BondContainer<VarPos> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: BondContainer<VarPos>) {
        self.alloc.return_instance(t)
    }
}

impl<O: Op + Clone, ALLOC: FastOpAllocator> Factory<BinaryHeap<Reverse<usize>>>
    for FastOpsTemplate<O, ALLOC>
{
    fn get_instance(&mut self) -> BinaryHeap<Reverse<usize>> {
        self.alloc.get_instance()
    }

    fn return_instance(&mut self, t: BinaryHeap<Reverse<usize>>) {
        self.alloc.return_instance(t)
    }
}
1462
// Cluster updates: the empty body opts in to the trait's default
// method implementations.
impl<O: Op + Clone, ALLOC: FastOpAllocator> ClusterUpdater for FastOpsTemplate<O, ALLOC> {}
1464
1465impl<O: Op + Clone, ALLOC: FastOpAllocator> RvbUpdater for FastOpsTemplate<O, ALLOC> {
1466 fn constant_ops_on_var(&self, var: usize, ps: &mut Vec<usize>) {
1467 let mut p_and_rel = self.get_first_p_for_var(var);
1468 while let Some(PRel {
1469 p: node_p,
1470 relv: node_relv,
1471 }) = p_and_rel
1472 {
1473 let node = self.get_node_ref(node_p).unwrap();
1474 debug_assert_eq!(node.get_op_ref().get_vars()[node_relv], var);
1475 if node.get_op_ref().is_constant() {
1476 ps.push(node_p);
1477 }
1478 p_and_rel = self.get_next_p_for_rel_var(node_relv, node);
1479 }
1480 }
1481
1482 fn spin_flips_on_var(&self, var: usize, ps: &mut Vec<usize>) {
1483 let mut p_and_rel = self.get_first_p_for_var(var);
1484 while let Some(PRel {
1485 p: node_p,
1486 relv: node_relv,
1487 }) = p_and_rel
1488 {
1489 let node = self.get_node_ref(node_p).unwrap();
1490 let op = node.get_op_ref();
1491 debug_assert_eq!(op.get_vars()[node_relv], var);
1492 if op.get_inputs()[node_relv] != op.get_outputs()[node_relv] {
1493 ps.push(node_p)
1494 };
1495 p_and_rel = self.get_next_p_for_rel_var(node_relv, node);
1496 }
1497 }
1498}
1499
// Marker impls: the empty bodies declare that `FastOpsTemplate` satisfies the
// Ising and generic QMC manager roles via the traits' default implementations.
impl<O: Op, ALLOC: FastOpAllocator> IsingManager for FastOpsTemplate<O, ALLOC> {}
impl<O: Op, ALLOC: FastOpAllocator> QmcManager for FastOpsTemplate<O, ALLOC> {}