1use std::fmt;
23use std::iter::DoubleEndedIterator;
24use std::ops;
25use std::slice;
26use std::u32;
27
28use intmap::{AsIndex, IntMap, IntSet};
29use alloc::{self, RegionAllocator};
30
/// A propositional variable.
///
/// Internally a 0-based `u32` index; the all-ones bit pattern is reserved
/// for [`Var::UNDEF`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Var(u32);

impl fmt::Debug for Var {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render the sentinel distinctly from real variables.
        if *self == Var::UNDEF {
            write!(f, "UNDEF")
        } else {
            write!(f, "Var({})", self.0)
        }
    }
}

impl Var {
    /// Sentinel standing for "no variable".
    pub const UNDEF: Var = Var(!0);

    /// Builds a variable from its raw 0-based index.
    pub(crate) fn from_idx(idx: u32) -> Self {
        debug_assert!(idx < u32::MAX / 2, "Var::from_idx: index too large");
        Var(idx)
    }

    /// Raw 0-based index of this variable.
    pub fn idx(&self) -> u32 {
        self.0
    }
}
54
/// Lets `Var` key the dense `IntMap`/`IntSet` containers (index = raw idx).
impl AsIndex for Var {
    fn as_index(self) -> usize {
        self.0 as usize
    }
    fn from_index(index: usize) -> Self {
        Var(index as u32)
    }
}
63
/// Dense map keyed by `Var`.
pub type VMap<V> = IntMap<Var, V>;
65
66#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
67pub struct Lit(u32);
68
69impl Lit {
70 pub const UNDEF: Lit = Lit(!1);
71 pub const ERROR: Lit = Lit(!0);
72 pub(crate) fn new(var: Var, sign: bool) -> Self {
73 Lit(var.0 * 2 + sign as u32)
74 }
75 pub(crate) fn from_idx(idx: u32) -> Self {
76 Lit(idx)
77 }
78 pub fn idx(&self) -> u32 {
79 self.0
80 }
81 pub fn sign(&self) -> bool {
82 (self.0 & 1) != 0
83 }
84 pub fn var(&self) -> Var {
85 Var(self.0 >> 1)
86 }
87}
88
89impl fmt::Debug for Lit {
90 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
91 if self.0 == !0 {
92 write!(f, "ERROR")
93 } else if self.0 == !1 {
94 write!(f, "UNDEF")
95 } else {
96 write!(f, "Lit({}, {})", self.0 / 2, (self.0 & 1) != 0)
97 }
98 }
99}
100
/// Negation: flips the sign bit, mapping a literal to its complement.
impl ops::Not for Lit {
    type Output = Self;
    fn not(self) -> Self {
        Lit(self.0 ^ 1)
    }
}
/// XOR with a bool: conditionally negates (`lit ^ true == !lit`, `lit ^ false == lit`).
impl ops::BitXor<bool> for Lit {
    type Output = Self;
    fn bitxor(self, rhs: bool) -> Self {
        Lit(self.0 ^ rhs as u32)
    }
}
impl ops::BitXorAssign<bool> for Lit {
    // In-place version of `^`.
    fn bitxor_assign(&mut self, rhs: bool) {
        *self = *self ^ rhs;
    }
}
118
/// Lets `Lit` key the dense `IntMap`/`IntSet` containers (index = packed encoding).
impl AsIndex for Lit {
    fn as_index(self) -> usize {
        self.0 as usize
    }
    fn from_index(index: usize) -> Self {
        Lit(index as u32)
    }
}
127
/// Dense map keyed by `Lit`.
pub type LMap<V> = IntMap<Lit, V>;
/// Dense set of `Lit`s.
pub type LSet = IntSet<Lit>;
130
/// A "lifted" boolean: true, false, or undefined.
///
/// Encoding (2 bits): 0 = TRUE, 1 = FALSE, 2 or 3 = UNDEF. Both undefined
/// bit patterns compare equal; some operations below can produce pattern 3.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy)]
pub struct lbool(u8);

impl fmt::Debug for lbool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            0 => write!(f, "TRUE"),
            1 => write!(f, "FALSE"),
            2 | 3 => write!(f, "UNDEF"),
            other => write!(f, "lbool({})", other),
        }
    }
}

impl Default for lbool {
    /// Defaults to the TRUE bit pattern (0).
    fn default() -> Self {
        lbool::TRUE
    }
}

impl lbool {
    pub const TRUE: lbool = lbool(0);
    pub const FALSE: lbool = lbool(1);
    pub const UNDEF: lbool = lbool(2);

    /// Wraps a raw 2-bit pattern.
    pub fn from_u8(v: u8) -> Self {
        debug_assert!(v == (v & 3), "lbool::from_u8: invalid value");
        lbool(v)
    }

    /// Lifts a plain bool: `true` becomes TRUE (0), `false` becomes FALSE (1).
    pub fn new(v: bool) -> Self {
        lbool((!v) as u8)
    }

    /// Raw bit pattern.
    pub fn to_u8(&self) -> u8 {
        self.0
    }
}

impl PartialEq for lbool {
    /// Identical patterns are equal; additionally any two undefined
    /// patterns (bit 1 set on both sides) compare equal.
    fn eq(&self, rhs: &Self) -> bool {
        self.0 == rhs.0 || (self.0 & rhs.0 & 2) != 0
    }
}

impl Eq for lbool {}

impl ops::BitXor<bool> for lbool {
    type Output = lbool;
    /// XOR with `true` negates (TRUE <-> FALSE); UNDEF stays undefined.
    fn bitxor(self, rhs: bool) -> Self {
        lbool(self.0 ^ rhs as u8)
    }
}
impl ops::BitXorAssign<bool> for lbool {
    fn bitxor_assign(&mut self, rhs: bool) {
        *self = *self ^ rhs;
    }
}

impl ops::BitAnd for lbool {
    type Output = Self;
    /// Three-valued AND via a packed truth table: the two operand patterns
    /// select a 2-bit entry inside the constant.
    fn bitand(self, rhs: Self) -> Self {
        const TABLE: u32 = 0xF7F755F4;
        let sel = (self.0 << 1) | (rhs.0 << 3);
        lbool(((TABLE >> sel) & 3) as u8)
    }
}
impl ops::BitAndAssign for lbool {
    fn bitand_assign(&mut self, rhs: Self) {
        *self = *self & rhs;
    }
}

impl ops::BitOr for lbool {
    type Output = Self;
    /// Three-valued OR via a packed truth table (same scheme as `&`).
    fn bitor(self, rhs: Self) -> Self {
        const TABLE: u32 = 0xFCFCF400;
        let sel = (self.0 << 1) | (rhs.0 << 3);
        lbool(((TABLE >> sel) & 3) as u8)
    }
}
impl ops::BitOrAssign for lbool {
    fn bitor_assign(&mut self, rhs: Self) {
        *self = *self | rhs;
    }
}
232
/// Read-only view of a clause stored in a `ClauseAllocator`.
#[derive(Debug, Clone, Copy)]
pub struct ClauseRef<'a> {
    header: ClauseHeader,       // packed metadata (mark/learnt/extra/reloced/size)
    data: &'a [ClauseData],     // the literals
    extra: Option<ClauseData>,  // activity (learnt) or abstraction word, if present
}
/// Mutable view of a clause stored in a `ClauseAllocator`.
#[derive(Debug)]
pub struct ClauseMut<'a> {
    header: &'a mut ClauseHeader,
    data: &'a mut [ClauseData],
    extra: Option<&'a mut ClauseData>,
}
245
/// Identity comparison: two `ClauseRef`s are equal when they point at the
/// same allocator storage, not when their literals happen to match.
impl<'a, 'b> PartialEq<ClauseRef<'b>> for ClauseRef<'a> {
    fn eq(&self, rhs: &ClauseRef<'b>) -> bool {
        self.data.as_ptr() == rhs.data.as_ptr()
    }
}
impl<'a> Eq for ClauseRef<'a> {}
252
impl<'a> ClauseRef<'a> {
    /// Two-bit GC mark from the header.
    pub fn mark(&self) -> u32 {
        self.header.mark()
    }
    /// True if this clause was learnt during search.
    pub fn learnt(&self) -> bool {
        self.header.learnt()
    }
    /// True if an extra word (activity/abstraction) is stored after the literals.
    pub fn has_extra(&self) -> bool {
        self.header.has_extra()
    }
    /// True if GC has moved this clause; its first data word then holds the
    /// forwarding `CRef` instead of a literal.
    pub fn reloced(&self) -> bool {
        self.header.reloced()
    }
    /// Number of literals.
    pub fn size(&self) -> u32 {
        self.data.len() as u32
    }
    /// Activity score. Only meaningful for learnt clauses, whose extra word
    /// is written as an f32 by the allocator.
    pub fn activity(&self) -> f32 {
        debug_assert!(self.has_extra());
        // SAFETY: extra word initialized by the allocator; learnt clauses
        // store it as f32 (see `alloc_with_learnt`).
        unsafe { self.extra.expect("no extra field").f32 }
    }
    /// Variable-set abstraction. Only meaningful for non-learnt clauses,
    /// whose extra word is written as a u32 by the allocator.
    pub fn abstraction(&self) -> u32 {
        debug_assert!(self.has_extra());
        // SAFETY: extra word initialized by the allocator; non-learnt
        // clauses store it as u32 (see `alloc_with_learnt`).
        unsafe { self.extra.expect("no extra field").u32 }
    }
    /// Forwarding reference of a relocated clause.
    pub fn relocation(&self) -> CRef {
        debug_assert!(self.reloced());
        // SAFETY: `ClauseMut::relocate` stored a CRef in the first data word.
        unsafe { self.data[0].cref }
    }
    /// Iterator over the literals.
    pub fn iter(&self) -> ClauseIter {
        ClauseIter(self.data.iter())
    }
}
impl<'a> ClauseMut<'a> {
    /// Two-bit GC mark from the header.
    pub fn mark(&self) -> u32 {
        self.header.mark()
    }
    /// True if this clause was learnt during search.
    pub fn learnt(&self) -> bool {
        self.header.learnt()
    }
    /// True if an extra word (activity/abstraction) follows the literals.
    pub fn has_extra(&self) -> bool {
        self.header.has_extra()
    }
    /// True if GC has moved this clause; `data[0]` then holds the forwarding ref.
    pub fn reloced(&self) -> bool {
        self.header.reloced()
    }
    /// Number of literals.
    pub fn size(&self) -> u32 {
        self.data.len() as u32
    }
    /// Sets the two-bit GC mark.
    pub fn set_mark(&mut self, mark: u32) {
        debug_assert!(mark < 4);
        self.header.set_mark(mark);
    }
    pub fn set_learnt(&mut self, learnt: bool) {
        self.header.set_learnt(learnt);
    }
    pub fn set_has_extra(&mut self, has_extra: bool) {
        self.header.set_has_extra(has_extra);
    }
    pub fn set_reloced(&mut self, reloced: bool) {
        self.header.set_reloced(reloced);
    }
    /// Activity score (learnt clauses; extra word written as f32 by the allocator).
    pub fn activity(&self) -> f32 {
        debug_assert!(self.has_extra());
        // SAFETY: extra word initialized by the allocator (f32 for learnt clauses).
        unsafe { self.extra.as_ref().expect("no extra field").f32 }
    }
    pub fn set_activity(&mut self, activity: f32) {
        debug_assert!(self.has_extra());
        // Writing a union field is safe; only reads require `unsafe`.
        self.extra.as_mut().expect("no extra field").f32 = activity;
    }
    /// Variable-set abstraction (non-learnt clauses; extra word written as u32).
    pub fn abstraction(&self) -> u32 {
        debug_assert!(self.has_extra());
        // SAFETY: extra word initialized by the allocator (u32 for problem clauses).
        unsafe { self.extra.as_ref().expect("no extra field").u32 }
    }
    pub fn set_abstraction(&mut self, abstraction: u32) {
        debug_assert!(self.has_extra());
        self.extra.as_mut().expect("no extra field").u32 = abstraction;
    }
    /// Forwarding reference of a relocated clause.
    pub fn relocation(&self) -> CRef {
        debug_assert!(self.reloced());
        // SAFETY: `relocate` stored a CRef in the first data word.
        unsafe { self.data[0].cref }
    }
    /// Marks this clause as relocated and records its new location in `data[0]`.
    pub fn relocate(mut self, c: CRef) {
        debug_assert!(!self.reloced());
        self.set_reloced(true);
        self.data[0].cref = c;
    }
    pub fn iter(&self) -> ClauseIter {
        ClauseIter(self.data.iter())
    }
    pub fn iter_mut(&mut self) -> ClauseIterMut {
        ClauseIterMut(self.data.iter_mut())
    }
    /// Shrinks the clause to `new_size` literals (at least 2): updates the
    /// header size, then copies the extra word (if any) down to index
    /// `new_size` so it again sits right after the last literal.
    pub fn shrink(self, new_size: u32) {
        debug_assert!(2 <= new_size);
        debug_assert!(new_size <= self.size());
        if new_size < self.size() {
            self.header.set_size(new_size);
            if let Some(extra) = self.extra {
                self.data[new_size as usize] = *extra;
            }
        }
    }
    /// Reborrows this mutable view as a read-only `ClauseRef`.
    pub fn as_clause_ref(&mut self) -> ClauseRef {
        ClauseRef {
            header: *self.header,
            data: self.data,
            extra: self.extra.as_mut().map(|extra| **extra),
        }
    }
}
363
/// Indexes the literals of the clause (panics when out of bounds).
impl<'a> ops::Index<u32> for ClauseRef<'a> {
    type Output = Lit;
    fn index(&self, index: u32) -> &Self::Output {
        // SAFETY: in-range data words of a live (non-relocated) clause hold
        // literals written by the allocator.
        unsafe { &self.data[index as usize].lit }
    }
}
impl<'a> ops::Index<u32> for ClauseMut<'a> {
    type Output = Lit;
    fn index(&self, index: u32) -> &Self::Output {
        // SAFETY: same invariant as for `ClauseRef`.
        unsafe { &self.data[index as usize].lit }
    }
}
impl<'a> ops::IndexMut<u32> for ClauseMut<'a> {
    fn index_mut(&mut self, index: u32) -> &mut Self::Output {
        // SAFETY: same invariant as above; callers may overwrite the literal.
        unsafe { &mut self.data[index as usize].lit }
    }
}
381
/// Borrowing iterator over a clause's literals.
#[derive(Debug, Clone)]
pub struct ClauseIter<'a>(slice::Iter<'a, ClauseData>);

impl<'a> Iterator for ClauseIter<'a> {
    type Item = &'a Lit;
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: clause data words are literals while the clause is live.
        self.0.next().map(|lit| unsafe { &lit.lit })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
impl<'a> DoubleEndedIterator for ClauseIter<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // SAFETY: same invariant as `next`.
        self.0.next_back().map(|lit| unsafe { &lit.lit })
    }
}
399
/// Mutably-borrowing iterator over a clause's literals.
#[derive(Debug)]
pub struct ClauseIterMut<'a>(slice::IterMut<'a, ClauseData>);

impl<'a> Iterator for ClauseIterMut<'a> {
    type Item = &'a mut Lit;
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: clause data words are literals while the clause is live.
        self.0.next().map(|lit| unsafe { &mut lit.lit })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
impl<'a> DoubleEndedIterator for ClauseIterMut<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        // SAFETY: same invariant as `next`.
        self.0.next_back().map(|lit| unsafe { &mut lit.lit })
    }
}
417
/// Region-based allocator for clause storage.
#[derive(Debug)]
pub struct ClauseAllocator {
    ra: RegionAllocator<ClauseData>,
    extra_clause_field: bool, // when set, every clause gets the extra word
}
/// One 32-bit cell of clause storage. Which variant is live depends on the
/// cell's position: first word = header, then literals, then (optionally)
/// an activity (`f32`) or abstraction (`u32`) word; `cref` is used for
/// GC forwarding references.
#[derive(Clone, Copy)]
pub union ClauseData {
    u32: u32,
    f32: f32,
    cref: CRef,
    header: ClauseHeader,
    lit: Lit,
}
431
impl Default for ClauseData {
    /// Zeroed cell (a valid bit pattern for every variant).
    fn default() -> Self {
        ClauseData { u32: 0 }
    }
}
impl fmt::Debug for ClauseData {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Print the raw bits; the live variant isn't known at this point.
        write!(f, "ClauseData({})", unsafe { self.u32 })
    }
}
442
/// Packed clause metadata, stored in the first word of every clause.
///
/// Bit layout (high to low): mark (2 bits), learnt (1), has_extra (1),
/// reloced (1), size (27 bits).
#[derive(Clone, Copy)]
pub struct ClauseHeader(u32);

impl fmt::Debug for ClauseHeader {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("ClauseHeader")
            .field("mark", &self.mark())
            .field("learnt", &self.learnt())
            .field("has_extra", &self.has_extra())
            .field("reloced", &self.reloced())
            .field("size", &self.size())
            .finish()
    }
}

impl ClauseHeader {
    const MARK_SHIFT: u32 = 30;
    const LEARNT_SHIFT: u32 = 29;
    const EXTRA_SHIFT: u32 = 28;
    const RELOCED_SHIFT: u32 = 27;
    const SIZE_MASK: u32 = (1 << 27) - 1;

    /// Packs all fields into a fresh header word.
    pub fn new(mark: u32, learnt: bool, has_extra: bool, reloced: bool, size: u32) -> Self {
        debug_assert!(mark < 4);
        debug_assert!(size < (1 << 27));
        let mut word = size;
        word |= mark << Self::MARK_SHIFT;
        word |= (learnt as u32) << Self::LEARNT_SHIFT;
        word |= (has_extra as u32) << Self::EXTRA_SHIFT;
        word |= (reloced as u32) << Self::RELOCED_SHIFT;
        ClauseHeader(word)
    }
    /// Two-bit GC mark.
    pub fn mark(&self) -> u32 {
        self.0 >> Self::MARK_SHIFT
    }
    /// Whether the clause was learnt during search.
    pub fn learnt(&self) -> bool {
        self.0 & (1 << Self::LEARNT_SHIFT) != 0
    }
    /// Whether an extra word follows the literals.
    pub fn has_extra(&self) -> bool {
        self.0 & (1 << Self::EXTRA_SHIFT) != 0
    }
    /// Whether the clause has been relocated by GC.
    pub fn reloced(&self) -> bool {
        self.0 & (1 << Self::RELOCED_SHIFT) != 0
    }
    /// Number of literals.
    pub fn size(&self) -> u32 {
        self.0 & Self::SIZE_MASK
    }
    pub fn set_mark(&mut self, mark: u32) {
        debug_assert!(mark < 4);
        self.0 = (self.0 & !(3 << Self::MARK_SHIFT)) | (mark << Self::MARK_SHIFT);
    }
    pub fn set_learnt(&mut self, learnt: bool) {
        self.0 = (self.0 & !(1 << Self::LEARNT_SHIFT)) | ((learnt as u32) << Self::LEARNT_SHIFT);
    }
    pub fn set_has_extra(&mut self, has_extra: bool) {
        self.0 = (self.0 & !(1 << Self::EXTRA_SHIFT)) | ((has_extra as u32) << Self::EXTRA_SHIFT);
    }
    pub fn set_reloced(&mut self, reloced: bool) {
        self.0 = (self.0 & !(1 << Self::RELOCED_SHIFT)) | ((reloced as u32) << Self::RELOCED_SHIFT);
    }
    pub fn set_size(&mut self, size: u32) {
        debug_assert!(size < (1 << 27));
        self.0 = (self.0 & !Self::SIZE_MASK) | size;
    }
}
505
506impl ClauseAllocator {
507 pub const UNIT_SIZE: u32 = 32;
508 pub fn with_start_cap(start_cap: u32) -> Self {
509 Self {
510 ra: RegionAllocator::new(start_cap),
511 extra_clause_field: false,
512 }
513 }
514 pub fn new() -> Self {
515 Self::with_start_cap(1024 * 1024)
516 }
517 pub fn len(&self) -> u32 {
518 self.ra.len()
519 }
520 pub fn wasted(&self) -> u32 {
521 self.ra.wasted()
522 }
523 pub fn alloc_with_learnt(&mut self, clause: &[Lit], learnt: bool) -> CRef {
524 let use_extra = learnt | self.extra_clause_field;
525 let cid = self.ra.alloc(1 + clause.len() as u32 + use_extra as u32);
526 self.ra[cid].header = ClauseHeader::new(0, learnt, use_extra, false, clause.len() as u32);
527 let clause_ptr = cid + 1;
528 for (i, &lit) in clause.iter().enumerate() {
529 self.ra[clause_ptr + i as u32].lit = lit;
530 }
531 if use_extra {
532 if learnt {
533 self.ra[clause_ptr + clause.len() as u32].f32 = 0.0;
534 } else {
535 let mut abstraction: u32 = 0;
536 for &lit in clause {
537 abstraction |= 1 << (lit.var().idx() & 31);
538 }
539 self.ra[clause_ptr + clause.len() as u32].u32 = abstraction;
540 }
541 }
542
543 cid
544 }
545 pub fn alloc(&mut self, clause: &[Lit]) -> CRef {
546 self.alloc_with_learnt(clause, false)
547 }
548 pub fn alloc_copy(&mut self, from: ClauseRef) -> CRef {
549 let use_extra = from.learnt() | self.extra_clause_field;
550 let cid = self.ra.alloc(1 + from.size() + use_extra as u32);
551 self.ra[cid].header = from.header;
552 unsafe { &mut self.ra[cid].header }.set_has_extra(use_extra);
554 for (i, &lit) in from.iter().enumerate() {
555 self.ra[cid + 1 + i as u32].lit = lit;
556 }
557 if use_extra {
558 if from.learnt() {
559 self.ra[cid + from.size()].f32 = unsafe { from.extra.unwrap().f32 };
560 } else {
561 self.ra[cid + from.size()].u32 = unsafe { from.extra.unwrap().u32 };
562 }
563 }
564 cid
565 }
566
567 pub fn free(&mut self, cr: CRef) {
568 let size = {
569 let c = self.get_ref(cr);
570 1 + c.size() + c.has_extra() as u32
571 };
572 self.ra.free(size);
573 }
574
575 pub fn free_amount(&mut self, size: u32) {
576 self.ra.free(size);
577 }
578
579 pub fn reloc(&mut self, cr: &mut CRef, to: &mut ClauseAllocator) {
580 let mut c = self.get_mut(*cr);
581
582 if c.reloced() {
583 *cr = c.relocation();
584 return;
585 }
586
587 *cr = to.alloc_copy(c.as_clause_ref());
588 c.relocate(*cr);
589 }
590
591 pub fn get_ref(&self, cr: CRef) -> ClauseRef {
592 let header = unsafe { self.ra[cr].header };
593 let has_extra = header.has_extra();
594 let size = header.size();
595
596 let data = self.ra.subslice(cr + 1, size);
597 let extra = if has_extra {
598 Some(self.ra[cr + 1 + size])
599 } else {
600 None
601 };
602 ClauseRef {
603 header,
604 data,
605 extra,
606 }
607 }
608 pub fn get_mut(&mut self, cr: CRef) -> ClauseMut {
609 let header = unsafe { self.ra[cr].header };
610 let has_extra = header.has_extra();
611 let size = header.size();
612 let len = 1 + size + has_extra as u32;
613
614 let subslice = self.ra.subslice_mut(cr, len);
615 let (subslice0, subslice) = subslice.split_at_mut(1);
616 let (subslice1, subslice2) = subslice.split_at_mut(size as usize);
617 ClauseMut {
618 header: unsafe { &mut subslice0[0].header },
619 data: subslice1,
620 extra: subslice2.first_mut(),
621 }
622 }
623}
624
/// Reference (offset) into a `ClauseAllocator`'s storage region.
pub type CRef = alloc::Ref<ClauseData>;
626
/// Predicate deciding whether a stored occurrence is dead and should be
/// dropped when an occurrence list is cleaned.
pub trait DeletePred<V> {
    /// Returns `true` if `v` refers to a deleted element.
    // Fix: named the previously-anonymous parameter (`&V`), a deprecated
    // 2015-edition form that is rejected by edition 2018+.
    fn deleted(&self, v: &V) -> bool;
}
630
/// Per-key occurrence lists with lazy deletion: lists are marked "dirty"
/// via `smudge` and dead entries are filtered out on the next lookup/clean.
#[derive(Debug, Clone)]
pub struct OccListsData<K: AsIndex, V> {
    occs: IntMap<K, Vec<V>>, // occurrence list per key
    dirty: IntMap<K, bool>,  // whether a key's list may hold deleted entries
    dirties: Vec<K>,         // keys waiting to be cleaned
}
637
638impl<K: AsIndex, V> OccListsData<K, V> {
639 pub fn new() -> Self {
640 Self {
641 occs: IntMap::new(),
642 dirty: IntMap::new(),
643 dirties: Vec::new(),
644 }
645 }
646 pub fn init(&mut self, idx: K) {
647 self.occs.reserve_default(idx);
648 self.occs[idx].clear();
649 self.dirty.reserve(idx, false);
650 }
651
652 pub fn promote<P: DeletePred<V>>(&mut self, pred: P) -> OccLists<K, V, P> {
653 OccLists {
654 data: self,
655 pred: pred,
656 }
657 }
658
659 pub fn lookup_mut_pred<P: DeletePred<V>>(&mut self, idx: K, pred: &P) -> &mut Vec<V> {
660 if self.dirty[idx] {
661 self.clean_pred(idx, pred);
662 }
663 &mut self.occs[idx]
664 }
665
666 pub fn clean_all_pred<P: DeletePred<V>>(&mut self, pred: &P) {
667 for &x in &self.dirties {
668 if self.dirty[x] {
670 self.occs[x].retain(|x| !pred.deleted(x));
672 self.dirty[x] = false;
673 }
674 }
675 self.dirties.clear();
676 }
677
678 pub fn clean_pred<P: DeletePred<V>>(&mut self, idx: K, pred: &P) {
679 self.occs[idx].retain(|x| !pred.deleted(x));
680 self.dirty[idx] = false;
681 }
682
683 pub fn smudge(&mut self, idx: K) {
684 if !self.dirty[idx] {
685 self.dirty[idx] = true;
686 self.dirties.push(idx);
687 }
688 }
689
690 pub fn clear(&mut self) {
691 self.occs.clear();
692 self.dirty.clear();
693 self.dirties.clear();
694 }
695
696 pub fn free(&mut self) {
697 self.occs.free();
698 self.dirty.free();
699 self.dirties.clear();
700 self.dirties.shrink_to_fit();
701 }
702}
703
/// Direct (non-cleaning) access to the occurrence list of `index`.
impl<K: AsIndex, V> ops::Index<K> for OccListsData<K, V> {
    type Output = Vec<V>;
    fn index(&self, index: K) -> &Self::Output {
        &self.occs[index]
    }
}
impl<K: AsIndex, V> ops::IndexMut<K> for OccListsData<K, V> {
    fn index_mut(&mut self, index: K) -> &mut Self::Output {
        &mut self.occs[index]
    }
}
715
/// An `OccListsData` bundled with its delete predicate, so lookups can
/// clean lazily without the caller passing the predicate each time.
pub struct OccLists<'a, K: AsIndex + 'a, V: 'a, P: DeletePred<V>> {
    data: &'a mut OccListsData<K, V>,
    pred: P,
}
720
impl<'a, K: AsIndex + 'a, V: 'a, P: DeletePred<V>> OccLists<'a, K, V, P> {
    /// List for `idx`, lazily cleaned with the bundled predicate.
    pub fn lookup_mut(&mut self, idx: K) -> &mut Vec<V> {
        self.data.lookup_mut_pred(idx, &self.pred)
    }

    /// Cleans every list smudged since the last sweep.
    pub fn clean_all(&mut self) {
        self.data.clean_all_pred(&self.pred)
    }

    /// Cleans the single list for `idx`.
    pub fn clean(&mut self, idx: K) {
        self.data.clean_pred(idx, &self.pred)
    }
}
734
/// Forwards the rest of the `OccListsData` API through the view.
impl<'a, K: AsIndex + 'a, V: 'a, P: DeletePred<V>> ops::Deref for OccLists<'a, K, V, P> {
    type Target = OccListsData<K, V>;
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl<'a, K: AsIndex + 'a, V: 'a, P: DeletePred<V>> ops::DerefMut for OccLists<'a, K, V, P> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.data
    }
}