#![allow(missing_docs)]
#![allow(dead_code)]
#[allow(unused_imports)]
use crate::prelude::*;
use core::cmp::Ordering;
/// Identifier for a term in the term graph.
pub type TermId = u32;
/// Identifier for a theory solver.
pub type TheoryId = u32;
/// Decision level in the solver's search tree (0 = root).
pub type DecisionLevel = u32;
/// Monotonically increasing logical clock used for cache freshness.
pub type Timestamp = u64;
/// A term paired with a polarity: `polarity == true` asserts the term,
/// `false` asserts its negation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Literal {
    pub term: TermId,
    pub polarity: bool,
}
impl Literal {
    /// The positive literal for `term`.
    pub fn positive(term: TermId) -> Self {
        Self { term, polarity: true }
    }
    /// The negative literal for `term`.
    pub fn negative(term: TermId) -> Self {
        Self { term, polarity: false }
    }
    /// The same term with the opposite polarity.
    pub fn negate(self) -> Self {
        Self { polarity: !self.polarity, ..self }
    }
}
/// A single assignment recorded on the propagation trail.
#[derive(Debug, Clone)]
pub struct PropagationEvent {
    /// The literal that was assigned.
    pub literal: Literal,
    /// Decision level at which the assignment was made.
    pub level: DecisionLevel,
    /// Logical clock value when the assignment was made.
    pub timestamp: Timestamp,
    /// Theory that produced the propagation.
    pub theory: TheoryId,
    /// Why the literal was assigned.
    pub reason: PropagationReason,
}
/// Why a literal was assigned.
#[derive(Debug, Clone)]
pub enum PropagationReason {
    /// A free choice made by the search, not a consequence.
    Decision,
    /// Forced because `clause` became unit under the current assignment.
    UnitPropagation {
        clause: ClauseId,
    },
    /// Deduced by a theory solver; `explanation` lists the implying literals.
    TheoryPropagation {
        explanation: Vec<Literal>,
    },
    /// Consequence of `lhs` and `rhs` being made equal.
    EqualityPropagation {
        lhs: TermId,
        rhs: TermId,
    },
}
/// Identifier for a clause.
pub type ClauseId = u32;
/// Ordering key for pending propagations. Greater compares as higher
/// priority: larger `level` wins, then larger `activity`, and remaining
/// ties prefer the entry from the *lower* decision level.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PropagationPriority {
    pub level: u32,
    pub activity: u32,
    pub decision_level: DecisionLevel,
}
impl Ord for PropagationPriority {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.level.cmp(&other.level) {
            Ordering::Equal => {}
            unequal => return unequal,
        }
        match self.activity.cmp(&other.activity) {
            Ordering::Equal => {}
            unequal => return unequal,
        }
        // Reversed on purpose: lower decision level ranks higher.
        other.decision_level.cmp(&self.decision_level)
    }
}
impl PartialOrd for PropagationPriority {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Queue entry pairing a propagation event with its scheduling priority.
/// Equality and ordering consider only `priority`, so the binary heap pops
/// the highest-priority entry first; `event` is opaque payload.
#[derive(Debug, Clone)]
struct PendingPropagation {
    event: PropagationEvent,
    priority: PropagationPriority,
}
impl PartialEq for PendingPropagation {
    fn eq(&self, other: &Self) -> bool {
        self.priority.eq(&other.priority)
    }
}
impl Eq for PendingPropagation {}
impl Ord for PendingPropagation {
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&self.priority, &other.priority)
    }
}
impl PartialOrd for PendingPropagation {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
/// The clauses and theories that must be revisited when a term's truth
/// value changes.
#[derive(Debug, Clone)]
pub struct WatchList {
    clauses: Vec<ClauseId>,
    theories: FxHashSet<TheoryId>,
}
impl WatchList {
    /// An empty watch list.
    fn new() -> Self {
        Self {
            clauses: vec![],
            theories: FxHashSet::default(),
        }
    }
    /// Registers a clause watcher. Duplicates are kept as-is.
    fn add_clause(&mut self, clause: ClauseId) {
        self.clauses.push(clause);
    }
    /// Registers a theory watcher (idempotent, backed by a set).
    fn add_theory(&mut self, theory: TheoryId) {
        self.theories.insert(theory);
    }
    /// All clause watchers, in registration order.
    fn watching_clauses(&self) -> &[ClauseId] {
        self.clauses.as_slice()
    }
    /// All theory watchers, in arbitrary order.
    fn watching_theories(&self) -> impl Iterator<Item = TheoryId> + '_ {
        self.theories.iter().copied()
    }
}
/// Chronological log of propagation events, partitioned into decision
/// levels so backtracking can discard whole levels at once.
#[derive(Debug, Clone)]
pub struct PropagationTrail {
    trail: Vec<PropagationEvent>,
    level_boundaries: Vec<usize>,
    current_level: DecisionLevel,
}
impl PropagationTrail {
    /// An empty trail at decision level 0.
    fn new() -> Self {
        Self {
            trail: Vec::new(),
            level_boundaries: vec![0],
            current_level: 0,
        }
    }
    /// Opens the next decision level; events added afterwards belong to it.
    fn push_level(&mut self) {
        self.current_level += 1;
        self.level_boundaries.push(self.trail.len());
    }
    /// Appends `event` at the current decision level.
    fn add_event(&mut self, event: PropagationEvent) {
        self.trail.push(event);
    }
    /// Discards every event recorded above `level` and returns them in
    /// chronological order. A no-op (empty result) when `level` is not
    /// strictly below the current level.
    fn backtrack(&mut self, level: DecisionLevel) -> Vec<PropagationEvent> {
        if level >= self.current_level {
            return Vec::new();
        }
        // level_boundaries[k] holds the trail index where level k starts,
        // so the first event to undo is the start of level `level + 1`.
        let cut = self
            .level_boundaries
            .get(level as usize + 1)
            .copied()
            .unwrap_or(self.trail.len());
        let undone = self.trail.split_off(cut);
        self.level_boundaries.truncate(level as usize + 1);
        self.current_level = level;
        undone
    }
    /// The decision level new events are recorded at.
    fn current_level(&self) -> DecisionLevel {
        self.current_level
    }
    /// All events on the trail, oldest first.
    fn trail(&self) -> &[PropagationEvent] {
        &self.trail
    }
    /// Resets to an empty trail at level 0.
    fn clear(&mut self) {
        self.trail.clear();
        self.level_boundaries.clear();
        self.level_boundaries.push(0);
        self.current_level = 0;
    }
}
/// Directed dependency edges between terms, indexed in both directions so
/// dependents and dependencies can each be enumerated cheaply.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    forward: FxHashMap<TermId, FxHashSet<TermId>>,
    backward: FxHashMap<TermId, FxHashSet<TermId>>,
}
impl DependencyGraph {
    /// An empty graph.
    fn new() -> Self {
        Self {
            forward: FxHashMap::default(),
            backward: FxHashMap::default(),
        }
    }
    /// Records an edge: `dependent` depends on `dependency`.
    fn add_dependency(&mut self, dependency: TermId, dependent: TermId) {
        self.forward.entry(dependency).or_default().insert(dependent);
        self.backward.entry(dependent).or_default().insert(dependency);
    }
    /// Terms that directly depend on `term`.
    fn get_dependents(&self, term: TermId) -> impl Iterator<Item = TermId> + '_ {
        self.forward.get(&term).into_iter().flatten().copied()
    }
    /// Terms that `term` directly depends on.
    fn get_dependencies(&self, term: TermId) -> impl Iterator<Item = TermId> + '_ {
        self.backward.get(&term).into_iter().flatten().copied()
    }
    /// Removes all edges.
    fn clear(&mut self) {
        self.forward.clear();
        self.backward.clear();
    }
}
/// Memoized theory-propagation results keyed by `(term, theory)`, each
/// stamped with the logical time at which it was stored.
#[derive(Debug, Clone)]
pub struct PropagationCache {
    cache: FxHashMap<(TermId, TheoryId), (Vec<Literal>, Timestamp)>,
    current_timestamp: Timestamp,
}
impl PropagationCache {
    /// An empty cache at logical time 0.
    fn new() -> Self {
        Self {
            cache: FxHashMap::default(),
            current_timestamp: 0,
        }
    }
    /// Advances the logical clock by one tick.
    fn tick(&mut self) {
        self.current_timestamp += 1;
    }
    /// The cached literals for `(term, theory)`, if present. The stored
    /// timestamp is not consulted here; staleness is handled by explicit
    /// invalidation.
    fn get(&self, term: TermId, theory: TheoryId) -> Option<&Vec<Literal>> {
        self.cache.get(&(term, theory)).map(|entry| &entry.0)
    }
    /// Stores `result` for `(term, theory)` stamped with the current time,
    /// replacing any previous entry.
    fn cache_result(&mut self, term: TermId, theory: TheoryId, result: Vec<Literal>) {
        let stamped = (result, self.current_timestamp);
        self.cache.insert((term, theory), stamped);
    }
    /// Drops the entry for `(term, theory)`, if any.
    fn invalidate(&mut self, term: TermId, theory: TheoryId) {
        self.cache.remove(&(term, theory));
    }
    /// Empties the cache and rewinds the clock to 0.
    fn clear(&mut self) {
        self.cache.clear();
        self.current_timestamp = 0;
    }
}
/// Tuning knobs for the propagation engine.
#[derive(Debug, Clone)]
pub struct PropagationConfig {
    /// Enable incremental re-propagation of affected terms.
    pub incremental: bool,
    /// Defer watcher notification until an assignment is applied.
    pub lazy: bool,
    /// Queue propagations by priority instead of applying them immediately.
    pub priority_based: bool,
    /// Cache theory propagation results per (term, theory).
    pub caching: bool,
    /// Maximum pending propagations in the queue.
    // NOTE(review): this limit is never enforced anywhere in this file —
    // confirm whether enforcement in `propagate` was intended.
    pub max_queue_size: usize,
    /// Maximum propagations applied per `process_queue` call.
    pub batch_size: usize,
    /// Maintain the term dependency graph.
    pub track_dependencies: bool,
}
impl Default for PropagationConfig {
    /// All features enabled; queue capped (nominally) at 100_000 entries,
    /// batches of 1000.
    fn default() -> Self {
        Self {
            incremental: true,
            lazy: true,
            priority_based: true,
            caching: true,
            max_queue_size: 100000,
            batch_size: 1000,
            track_dependencies: true,
        }
    }
}
/// Counters describing engine activity since construction or the last
/// `reset_stats`.
#[derive(Debug, Clone, Default)]
pub struct PropagationStats {
    /// Assignments actually applied.
    pub propagations: u64,
    /// Times a term's watch list was triggered.
    pub lazy_propagations: u64,
    /// Cache lookups that found an entry.
    pub cache_hits: u64,
    /// Cache lookups that found nothing.
    pub cache_misses: u64,
    /// Affected terms visited by `incremental_propagate`.
    pub incremental_propagations: u64,
    /// Completed `backtrack` calls.
    pub backtracks: u64,
    /// Dependency edges added through the engine.
    pub dependency_updates: u64,
}
/// Propagation engine combining priority queuing, per-term watch lists,
/// a backtrackable trail, dependency tracking, and result caching.
pub struct OptimizedPropagationEngine {
    /// Feature toggles and sizing knobs.
    config: PropagationConfig,
    /// Counters updated as the engine runs.
    stats: PropagationStats,
    /// Pending propagations, popped highest-priority first.
    queue: BinaryHeap<PendingPropagation>,
    /// Per-term watchers (clauses and theories).
    watch_lists: FxHashMap<TermId, WatchList>,
    /// Chronological record of applied assignments, by decision level.
    trail: PropagationTrail,
    /// Term dependency edges used for incremental propagation.
    dependencies: DependencyGraph,
    /// Memoized propagation results; also owns the logical clock.
    cache: PropagationCache,
    /// Logical time at which each term was assigned.
    assignment_timestamps: FxHashMap<TermId, Timestamp>,
    /// Current truth assignment per term.
    assignments: FxHashMap<TermId, bool>,
    /// Activity score per clause, used as a priority tie-breaker.
    clause_activities: FxHashMap<ClauseId, u32>,
    /// Multiplicative factor applied by `decay_activities`.
    activity_decay: f64,
}
impl OptimizedPropagationEngine {
    /// Theory ids `0..INVALIDATED_THEORY_IDS` are swept when invalidating
    /// all cached results for a term.
    // NOTE(review): cached entries for theory ids >= this bound are never
    // invalidated by `invalidate_cache_for_term` and can go stale — confirm
    // the real theory-id range or track registered theories explicitly.
    const INVALIDATED_THEORY_IDS: TheoryId = 10;

    /// Creates an engine with the default [`PropagationConfig`].
    pub fn new() -> Self {
        Self::with_config(PropagationConfig::default())
    }

    /// Creates an engine with an explicit configuration.
    pub fn with_config(config: PropagationConfig) -> Self {
        Self {
            config,
            stats: PropagationStats::default(),
            queue: BinaryHeap::new(),
            watch_lists: FxHashMap::default(),
            trail: PropagationTrail::new(),
            dependencies: DependencyGraph::new(),
            cache: PropagationCache::new(),
            assignment_timestamps: FxHashMap::default(),
            assignments: FxHashMap::default(),
            clause_activities: FxHashMap::default(),
            activity_decay: 0.95,
        }
    }

    /// Accumulated statistics.
    pub fn stats(&self) -> &PropagationStats {
        &self.stats
    }

    /// Current decision level (0 at the root).
    pub fn decision_level(&self) -> DecisionLevel {
        self.trail.current_level()
    }

    /// Opens a new decision level.
    pub fn push_decision_level(&mut self) {
        self.trail.push_level();
    }

    /// Undoes all assignments above `level`: removes them from the
    /// assignment maps and, when caching is enabled, invalidates cached
    /// results for each affected term.
    ///
    /// # Errors
    /// Returns an error when `level` is above the current decision level.
    pub fn backtrack(&mut self, level: DecisionLevel) -> Result<(), String> {
        if level > self.decision_level() {
            return Err("Cannot backtrack to future level".to_string());
        }
        let undone = self.trail.backtrack(level);
        for event in undone {
            self.assignments.remove(&event.literal.term);
            self.assignment_timestamps.remove(&event.literal.term);
            if self.config.caching {
                self.invalidate_cache_for_term(event.literal.term);
            }
        }
        self.stats.backtracks += 1;
        Ok(())
    }

    /// Registers `clause` as a watcher of `term`.
    pub fn add_watch(&mut self, term: TermId, clause: ClauseId) {
        self.watch_lists
            .entry(term)
            .or_insert_with(WatchList::new)
            .add_clause(clause);
    }

    /// Registers `theory` as a watcher of `term`.
    pub fn add_theory_watch(&mut self, term: TermId, theory: TheoryId) {
        self.watch_lists
            .entry(term)
            .or_insert_with(WatchList::new)
            .add_theory(theory);
    }

    /// Schedules the assignment of `literal` (or applies it immediately
    /// when priority queuing is disabled).
    ///
    /// # Errors
    /// Returns an error when the term is already assigned with the opposite
    /// polarity (a conflict). Re-asserting the same polarity is a no-op.
    pub fn propagate(
        &mut self,
        literal: Literal,
        theory: TheoryId,
        reason: PropagationReason,
        priority: u32,
    ) -> Result<(), String> {
        if let Some(&assigned_value) = self.assignments.get(&literal.term) {
            if assigned_value != literal.polarity {
                return Err("Conflict: literal already assigned with opposite polarity".to_string());
            }
            return Ok(());
        }
        // Tie-break equal priorities by the activity of the clause that
        // forced this propagation, when there is one. (Previously this
        // looked up the hard-coded clause id 0 regardless of the reason.)
        let activity = match &reason {
            PropagationReason::UnitPropagation { clause } => {
                self.clause_activities.get(clause).copied().unwrap_or(0)
            }
            _ => 0,
        };
        let event = PropagationEvent {
            literal,
            level: self.decision_level(),
            timestamp: self.cache.current_timestamp,
            theory,
            reason,
        };
        let pending = PendingPropagation {
            event,
            priority: PropagationPriority {
                level: priority,
                activity,
                decision_level: self.decision_level(),
            },
        };
        if self.config.priority_based {
            // NOTE(review): `config.max_queue_size` is not enforced here —
            // confirm whether unbounded queue growth is acceptable.
            self.queue.push(pending);
        } else {
            self.propagate_immediate(pending.event)?;
        }
        Ok(())
    }

    /// Applies an assignment: records it in the assignment maps and on the
    /// trail, advances the logical clock, and (when lazy propagation is
    /// enabled) notifies watchers of the term.
    fn propagate_immediate(&mut self, event: PropagationEvent) -> Result<(), String> {
        // Pull out the fields needed after `event` is moved onto the trail,
        // avoiding the clone the previous version made.
        let term = event.literal.term;
        self.assignments.insert(term, event.literal.polarity);
        self.assignment_timestamps.insert(term, event.timestamp);
        self.trail.add_event(event);
        self.stats.propagations += 1;
        self.cache.tick();
        if self.config.lazy {
            self.trigger_watches(term)?;
        }
        Ok(())
    }

    /// Applies up to `config.batch_size` pending propagations, highest
    /// priority first. Items beyond the batch stay queued; call repeatedly
    /// to drain the whole queue.
    pub fn process_queue(&mut self) -> Result<(), String> {
        let mut processed = 0;
        while let Some(pending) = self.queue.pop() {
            self.propagate_immediate(pending.event)?;
            processed += 1;
            if processed >= self.config.batch_size {
                break;
            }
        }
        Ok(())
    }

    /// Notifies the clause and theory watchers of `term`.
    // NOTE(review): the watcher loops are placeholders — nothing is
    // re-evaluated yet; only the lazy-propagation counter is advanced.
    fn trigger_watches(&mut self, term: TermId) -> Result<(), String> {
        if let Some(watch_list) = self.watch_lists.get(&term) {
            self.stats.lazy_propagations += 1;
            for &_clause_id in watch_list.watching_clauses() {}
            for _theory_id in watch_list.watching_theories() {}
        }
        Ok(())
    }

    /// Records that `dependent` depends on `dependency` (no-op when
    /// dependency tracking is disabled).
    pub fn add_dependency(&mut self, dependency: TermId, dependent: TermId) {
        if !self.config.track_dependencies {
            return;
        }
        self.dependencies.add_dependency(dependency, dependent);
        self.stats.dependency_updates += 1;
    }

    /// Terms that directly depend on `term`; empty when dependency
    /// tracking is disabled.
    pub fn get_affected_terms(&self, term: TermId) -> Vec<TermId> {
        if !self.config.track_dependencies {
            return Vec::new();
        }
        self.dependencies.get_dependents(term).collect()
    }

    /// Invalidates cached results for `term` and for its direct dependents
    /// under `theory`. No actual re-propagation is performed here yet.
    pub fn incremental_propagate(&mut self, term: TermId, theory: TheoryId) -> Result<(), String> {
        if !self.config.incremental {
            return Ok(());
        }
        if self.config.caching {
            self.invalidate_cache_for_term(term);
        }
        let affected: Vec<_> = self.get_affected_terms(term);
        for affected_term in affected {
            if self.config.caching {
                self.cache.invalidate(affected_term, theory);
            }
            self.stats.incremental_propagations += 1;
        }
        Ok(())
    }

    /// Drops every cached result for `term` across the fixed theory-id
    /// sweep range (see `INVALIDATED_THEORY_IDS`).
    fn invalidate_cache_for_term(&mut self, term: TermId) {
        for theory_id in 0..Self::INVALIDATED_THEORY_IDS {
            self.cache.invalidate(term, theory_id);
        }
    }

    /// Cached propagation result for `(term, theory)`, updating hit/miss
    /// statistics. Always `None` when caching is disabled.
    pub fn check_cache(&mut self, term: TermId, theory: TheoryId) -> Option<Vec<Literal>> {
        if !self.config.caching {
            return None;
        }
        if let Some(result) = self.cache.get(term, theory) {
            self.stats.cache_hits += 1;
            Some(result.clone())
        } else {
            self.stats.cache_misses += 1;
            None
        }
    }

    /// Stores a propagation result for later `check_cache` lookups (no-op
    /// when caching is disabled).
    pub fn cache_propagation(&mut self, term: TermId, theory: TheoryId, result: Vec<Literal>) {
        if !self.config.caching {
            return;
        }
        self.cache.cache_result(term, theory, result);
    }

    /// Increments the activity score of `clause`.
    pub fn bump_clause_activity(&mut self, clause: ClauseId) {
        let activity = self.clause_activities.entry(clause).or_insert(0);
        *activity += 1;
    }

    /// Scales every clause activity by `activity_decay`, truncating toward
    /// zero, so rarely-bumped clauses fade over time.
    pub fn decay_activities(&mut self) {
        for activity in self.clause_activities.values_mut() {
            *activity = (*activity as f64 * self.activity_decay) as u32;
        }
    }

    /// The current polarity assigned to `term`, if any.
    pub fn get_assignment(&self, term: TermId) -> Option<bool> {
        self.assignments.get(&term).copied()
    }

    /// Whether `term` currently has an assignment.
    pub fn is_assigned(&self, term: TermId) -> bool {
        self.assignments.contains_key(&term)
    }

    /// All applied propagation events, oldest first.
    pub fn trail(&self) -> &[PropagationEvent] {
        self.trail.trail()
    }

    /// Clears all solver state (queue, watches, trail, dependencies, cache,
    /// assignments, activities). Statistics are kept; use `reset_stats`.
    pub fn clear(&mut self) {
        self.queue.clear();
        self.watch_lists.clear();
        self.trail.clear();
        self.dependencies.clear();
        self.cache.clear();
        self.assignment_timestamps.clear();
        self.assignments.clear();
        self.clause_activities.clear();
    }

    /// Zeroes all statistics counters.
    pub fn reset_stats(&mut self) {
        self.stats = PropagationStats::default();
    }
}
impl Default for OptimizedPropagationEngine {
    /// Equivalent to [`OptimizedPropagationEngine::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Clause store with a two-watched-literal index for lazy propagation.
pub struct LazyPropagator {
    /// For each literal, the clauses currently watching it.
    watches: FxHashMap<Literal, Vec<ClauseId>>,
    /// Clause id -> the clause's literals.
    clauses: FxHashMap<ClauseId, Vec<Literal>>,
    /// Next id handed out by `add_clause`.
    next_clause_id: ClauseId,
}
impl LazyPropagator {
    /// An empty propagator; clause ids start at 0.
    pub fn new() -> Self {
        Self {
            watches: FxHashMap::default(),
            clauses: FxHashMap::default(),
            next_clause_id: 0,
        }
    }
    /// Stores `literals` as a new clause and returns its id. The first two
    /// literals become the watched pair; unit and empty clauses receive no
    /// watches.
    pub fn add_clause(&mut self, literals: Vec<Literal>) -> ClauseId {
        let clause_id = self.next_clause_id;
        self.next_clause_id += 1;
        if let [first, second, ..] = literals[..] {
            self.watches.entry(first).or_default().push(clause_id);
            self.watches.entry(second).or_default().push(clause_id);
        }
        self.clauses.insert(clause_id, literals);
        clause_id
    }
    /// Clauses watching the negation of `literal` — the ones that may have
    /// become unit or false once `literal` is assigned.
    pub fn propagate_assignment(
        &mut self,
        literal: Literal,
        _assignments: &FxHashMap<TermId, bool>,
    ) -> Vec<ClauseId> {
        self.watches
            .get(&literal.negate())
            .cloned()
            .unwrap_or_default()
    }
    /// Looks up a stored clause by id.
    pub fn get_clause(&self, clause_id: ClauseId) -> Option<&Vec<Literal>> {
        self.clauses.get(&clause_id)
    }
    /// Removes all clauses and watches and restarts ids at 0.
    pub fn clear(&mut self) {
        self.watches.clear();
        self.clauses.clear();
        self.next_clause_id = 0;
    }
}
impl Default for LazyPropagator {
    fn default() -> Self {
        Self::new()
    }
}
/// Tracks dirty terms through a dependency graph so only affected terms
/// are re-propagated after a change.
pub struct IncrementalPropagator {
    /// Directed term dependencies.
    dependencies: DependencyGraph,
    /// Terms whose propagation results are stale.
    dirty: FxHashSet<TermId>,
    /// Scratch buffer reused by `compute_propagation_order`.
    propagation_order: Vec<TermId>,
}
impl IncrementalPropagator {
    /// An empty propagator with no dependencies and nothing dirty.
    pub fn new() -> Self {
        Self {
            dependencies: DependencyGraph::new(),
            dirty: FxHashSet::default(),
            propagation_order: Vec::new(),
        }
    }
    /// Records that `dependent` must be recomputed whenever `dependency`
    /// changes.
    pub fn add_dependency(&mut self, dependency: TermId, dependent: TermId) {
        self.dependencies.add_dependency(dependency, dependent);
    }
    /// Marks `term` dirty along with everything that transitively depends
    /// on it. A worklist walks the full dependency closure; marking only
    /// direct dependents would leave the end of a chain 1 -> 2 -> 3 clean
    /// when 1 changes. Terms already marked are not revisited, so cyclic
    /// dependencies terminate.
    pub fn mark_dirty(&mut self, term: TermId) {
        let mut worklist = vec![term];
        while let Some(current) = worklist.pop() {
            // `insert` returns false for already-dirty terms; skip them.
            if self.dirty.insert(current) {
                worklist.extend(self.dependencies.get_dependents(current));
            }
        }
    }
    /// All currently dirty terms, in arbitrary order.
    pub fn get_dirty_terms(&self) -> impl Iterator<Item = TermId> + '_ {
        self.dirty.iter().copied()
    }
    /// Forgets all dirty marks.
    pub fn clear_dirty(&mut self) {
        self.dirty.clear();
    }
    /// Snapshot of the dirty set as a vector.
    // NOTE(review): despite the name, no topological ordering is applied —
    // the result is in arbitrary hash order. Confirm callers do not rely
    // on a dependency-respecting order.
    pub fn compute_propagation_order(&mut self) -> Vec<TermId> {
        self.propagation_order.clear();
        self.propagation_order.extend(self.dirty.iter().copied());
        self.propagation_order.clone()
    }
    /// Resets everything: dependencies, dirty set, and scratch buffer.
    pub fn clear(&mut self) {
        self.dependencies.clear();
        self.dirty.clear();
        self.propagation_order.clear();
    }
}
impl Default for IncrementalPropagator {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Constructor sets the term id and positive polarity.
    #[test]
    fn test_literal_creation() {
        let lit = Literal::positive(1);
        assert_eq!(lit.term, 1);
        assert!(lit.polarity);
    }

    // Negation keeps the term and flips the polarity.
    #[test]
    fn test_literal_negation() {
        let lit = Literal::positive(1);
        let neg = lit.negate();
        assert_eq!(neg.term, 1);
        assert!(!neg.polarity);
    }

    // A fresh engine starts at decision level 0.
    #[test]
    fn test_propagation_engine_creation() {
        let engine = OptimizedPropagationEngine::new();
        assert_eq!(engine.decision_level(), 0);
    }

    // Pushing a level increments the engine's decision level.
    #[test]
    fn test_decision_level() {
        let mut engine = OptimizedPropagationEngine::new();
        engine.push_decision_level();
        assert_eq!(engine.decision_level(), 1);
    }

    // Backtracking lowers the decision level to the requested target.
    #[test]
    fn test_backtrack() {
        let mut engine = OptimizedPropagationEngine::new();
        engine.push_decision_level();
        engine.push_decision_level();
        assert_eq!(engine.decision_level(), 2);
        engine.backtrack(1).expect("Backtrack failed");
        assert_eq!(engine.decision_level(), 1);
    }

    // A watch list records clause watchers.
    #[test]
    fn test_watch_list() {
        let mut watch_list = WatchList::new();
        watch_list.add_clause(1);
        watch_list.add_theory(0);
        assert_eq!(watch_list.watching_clauses().len(), 1);
    }

    // An event added after push_level lands on the trail.
    #[test]
    fn test_propagation_trail() {
        let mut trail = PropagationTrail::new();
        trail.push_level();
        let event = PropagationEvent {
            literal: Literal::positive(1),
            level: 1,
            timestamp: 0,
            theory: 0,
            reason: PropagationReason::Decision,
        };
        trail.add_event(event);
        assert_eq!(trail.trail().len(), 1);
    }

    // Backtracking to level 1 undoes only the level-2 event and keeps
    // the level-1 event on the trail.
    #[test]
    fn test_trail_backtrack() {
        let mut trail = PropagationTrail::new();
        trail.push_level();
        let event1 = PropagationEvent {
            literal: Literal::positive(1),
            level: 1,
            timestamp: 0,
            theory: 0,
            reason: PropagationReason::Decision,
        };
        trail.add_event(event1);
        trail.push_level();
        let event2 = PropagationEvent {
            literal: Literal::positive(2),
            level: 2,
            timestamp: 1,
            theory: 0,
            reason: PropagationReason::Decision,
        };
        trail.add_event(event2);
        let undone = trail.backtrack(1);
        assert_eq!(undone.len(), 1);
        assert_eq!(trail.trail().len(), 1);
    }

    // Both direct dependents of term 1 are enumerated.
    #[test]
    fn test_dependency_graph() {
        let mut deps = DependencyGraph::new();
        deps.add_dependency(1, 2);
        deps.add_dependency(1, 3);
        let dependents: Vec<_> = deps.get_dependents(1).collect();
        assert_eq!(dependents.len(), 2);
    }

    // A cached result is returned by a subsequent lookup.
    #[test]
    fn test_propagation_cache() {
        let mut cache = PropagationCache::new();
        let result = vec![Literal::positive(1)];
        cache.cache_result(1, 0, result.clone());
        assert_eq!(cache.get(1, 0), Some(&result));
    }

    // Invalidation removes the cached entry.
    #[test]
    fn test_cache_invalidation() {
        let mut cache = PropagationCache::new();
        let result = vec![Literal::positive(1)];
        cache.cache_result(1, 0, result);
        cache.invalidate(1, 0);
        assert_eq!(cache.get(1, 0), None);
    }

    // An added clause can be fetched back by its id.
    #[test]
    fn test_lazy_propagator() {
        let mut prop = LazyPropagator::new();
        let clause = vec![Literal::positive(1), Literal::positive(2)];
        let clause_id = prop.add_clause(clause);
        assert_eq!(prop.get_clause(clause_id).map(|c| c.len()), Some(2));
    }

    // Marking a term dirty also marks its dependents dirty.
    #[test]
    fn test_incremental_propagator() {
        let mut prop = IncrementalPropagator::new();
        prop.add_dependency(1, 2);
        prop.mark_dirty(1);
        let dirty: Vec<_> = prop.get_dirty_terms().collect();
        assert!(dirty.contains(&1));
        assert!(dirty.contains(&2));
    }

    // A higher `level` makes a priority compare greater.
    #[test]
    fn test_priority_ordering() {
        let p1 = PropagationPriority {
            level: 1,
            activity: 0,
            decision_level: 0,
        };
        let p2 = PropagationPriority {
            level: 2,
            activity: 0,
            decision_level: 0,
        };
        assert!(p2 > p1);
    }

    // A queued propagation is applied by process_queue and counted.
    #[test]
    fn test_propagate() {
        let mut engine = OptimizedPropagationEngine::new();
        let literal = Literal::positive(1);
        engine
            .propagate(literal, 0, PropagationReason::Decision, 100)
            .expect("Propagation failed");
        engine.process_queue().expect("Process queue failed");
        assert_eq!(engine.stats().propagations, 1);
    }

    // The applied propagation is visible as a term assignment.
    #[test]
    fn test_assignment_tracking() {
        let mut engine = OptimizedPropagationEngine::new();
        let literal = Literal::positive(1);
        engine
            .propagate(literal, 0, PropagationReason::Decision, 100)
            .expect("Propagation failed");
        engine.process_queue().expect("Queue processing failed");
        assert_eq!(engine.get_assignment(1), Some(true));
    }
}