1#![allow(unsafe_code)]
7
8use crate::literal::Lit;
9#[allow(unused_imports)]
10use crate::prelude::*;
11
/// Compact handle to a clause: the byte offset of its header inside the
/// arena buffer. `u32::MAX` is reserved as the null sentinel.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ClauseRef(u32);

impl ClauseRef {
    /// Offset value reserved to mean "no clause".
    const SENTINEL: u32 = u32::MAX;

    /// Returns the null (invalid) clause reference.
    pub const fn null() -> Self {
        Self(Self::SENTINEL)
    }

    /// True if this reference is the null sentinel.
    pub const fn is_null(self) -> bool {
        self.0 == Self::SENTINEL
    }
}
27
/// Fixed-size metadata stored inline immediately ahead of each clause's
/// literal array. `repr(C, align(8))` pins the layout the arena relies on.
#[repr(C, align(8))]
struct ClauseHeader {
    /// Number of literals that follow this header.
    len: u32,
    /// Activity score used by clause-management heuristics.
    activity: f32,
    /// Literal Block Distance; starts out equal to the clause length.
    lbd: u32,
    /// Bit set built from the `FLAG_*` constants below.
    flags: u32,
}

/// Clause has been logically removed and awaits compaction.
const FLAG_DELETED: u32 = 1 << 0;
/// Clause was added as a learned clause (per the `learned` argument).
const FLAG_LEARNED: u32 = 1 << 1;

impl ClauseHeader {
    /// Builds a header for a clause of `len` literals: zero activity,
    /// LBD initialized to the clause length, learned flag as requested.
    fn new(len: u32, learned: bool) -> Self {
        let flags = if learned { FLAG_LEARNED } else { 0 };
        Self { len, activity: 0.0, lbd: len, flags }
    }

    /// True once `mark_deleted` has been called.
    fn is_deleted(&self) -> bool {
        self.flags & FLAG_DELETED != 0
    }

    /// Sets the deleted bit; never cleared except by compaction dropping
    /// the clause entirely.
    fn mark_deleted(&mut self) {
        self.flags |= FLAG_DELETED;
    }

    /// True if the clause was allocated with `learned == true`.
    #[allow(dead_code)]
    fn is_learned(&self) -> bool {
        self.flags & FLAG_LEARNED != 0
    }
}
67
/// Bump-allocated storage for clauses: each clause is a `ClauseHeader`
/// followed inline by its literals, written at an 8-byte-aligned offset
/// and addressed by that byte offset (`ClauseRef`).
pub struct ClauseArena {
    /// Raw backing storage holding headers, literals, and alignment padding.
    buffer: Vec<u8>,
    /// Bump pointer: byte offset one past the last allocated clause.
    pos: usize,
    /// Clauses allocated since the last compaction, including ones that
    /// have since been marked deleted (`delete` does not decrement this).
    num_clauses: usize,
    /// Clauses marked deleted since the last compaction.
    num_deleted: usize,
    /// Bytes (header + literals, excluding padding) held by deleted clauses.
    wasted_bytes: usize,
}
81
82impl ClauseArena {
83 pub fn new(initial_capacity: usize) -> Self {
85 Self {
86 buffer: Vec::with_capacity(initial_capacity),
87 pos: 0,
88 num_clauses: 0,
89 num_deleted: 0,
90 wasted_bytes: 0,
91 }
92 }
93
94 pub fn alloc(&mut self, lits: &[Lit], learned: bool) -> ClauseRef {
96 let header = ClauseHeader::new(lits.len() as u32, learned);
97 let header_size = core::mem::size_of::<ClauseHeader>();
98 let lits_size = core::mem::size_of_val(lits);
99 let total_size = header_size + lits_size;
100
101 let align_offset = (8 - (self.pos % 8)) % 8;
103 let aligned_pos = self.pos + align_offset;
104
105 while aligned_pos + total_size > self.buffer.capacity() {
107 let new_capacity = if self.buffer.capacity() == 0 {
108 4096
109 } else {
110 self.buffer.capacity() * 2
111 };
112 self.buffer.reserve(new_capacity - self.buffer.capacity());
113 }
114
115 let clause_ref = ClauseRef(aligned_pos as u32);
117 unsafe {
118 let header_ptr = self.buffer.as_mut_ptr().add(aligned_pos) as *mut ClauseHeader;
119 core::ptr::write(header_ptr, header);
120
121 let lits_ptr = header_ptr.add(1) as *mut Lit;
123 core::ptr::copy_nonoverlapping(lits.as_ptr(), lits_ptr, lits.len());
124 }
125
126 self.pos = aligned_pos + total_size;
127 if self.pos > self.buffer.len() {
128 unsafe {
129 self.buffer.set_len(self.pos);
130 }
131 }
132 self.num_clauses += 1;
133
134 clause_ref
135 }
136
137 pub fn get(&self, cref: ClauseRef) -> Option<&[Lit]> {
139 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
140 return None;
141 }
142
143 unsafe {
144 let header_ptr = self.buffer.as_ptr().add(cref.0 as usize) as *const ClauseHeader;
145 let header = &*header_ptr;
146
147 if header.is_deleted() {
148 return None;
149 }
150
151 let lits_ptr = header_ptr.add(1) as *const Lit;
152 Some(core::slice::from_raw_parts(lits_ptr, header.len as usize))
153 }
154 }
155
156 pub fn get_activity(&self, cref: ClauseRef) -> Option<f32> {
158 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
159 return None;
160 }
161
162 unsafe {
163 let header_ptr = self.buffer.as_ptr().add(cref.0 as usize) as *const ClauseHeader;
164 Some((*header_ptr).activity)
165 }
166 }
167
168 pub fn set_activity(&mut self, cref: ClauseRef, activity: f32) {
170 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
171 return;
172 }
173
174 unsafe {
175 let header_ptr = self.buffer.as_mut_ptr().add(cref.0 as usize) as *mut ClauseHeader;
176 (*header_ptr).activity = activity;
177 }
178 }
179
180 pub fn get_lbd(&self, cref: ClauseRef) -> Option<u32> {
182 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
183 return None;
184 }
185
186 unsafe {
187 let header_ptr = self.buffer.as_ptr().add(cref.0 as usize) as *const ClauseHeader;
188 Some((*header_ptr).lbd)
189 }
190 }
191
192 pub fn set_lbd(&mut self, cref: ClauseRef, lbd: u32) {
194 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
195 return;
196 }
197
198 unsafe {
199 let header_ptr = self.buffer.as_mut_ptr().add(cref.0 as usize) as *mut ClauseHeader;
200 (*header_ptr).lbd = lbd;
201 }
202 }
203
204 pub fn delete(&mut self, cref: ClauseRef) {
206 if cref.is_null() || cref.0 as usize >= self.buffer.len() {
207 return;
208 }
209
210 unsafe {
211 let header_ptr = self.buffer.as_mut_ptr().add(cref.0 as usize) as *mut ClauseHeader;
212 let header = &mut *header_ptr;
213
214 if !header.is_deleted() {
215 header.mark_deleted();
216 self.num_deleted += 1;
217
218 let header_size = core::mem::size_of::<ClauseHeader>();
219 let lits_size = header.len as usize * core::mem::size_of::<Lit>();
220 self.wasted_bytes += header_size + lits_size;
221 }
222 }
223 }
224
225 pub fn needs_compaction(&self) -> bool {
227 if self.num_clauses == 0 {
228 return false;
229 }
230 let waste_ratio = self.wasted_bytes as f64 / self.buffer.len() as f64;
231 waste_ratio > 0.3 || self.num_deleted > self.num_clauses / 2
232 }
233
234 pub fn compact(&mut self) -> crate::prelude::HashMap<ClauseRef, ClauseRef> {
237 let mut mapping = crate::prelude::HashMap::new();
238 let mut new_buffer: Vec<u8> = Vec::with_capacity(self.buffer.len() - self.wasted_bytes);
239 let mut new_pos = 0;
240
241 let mut offset = 0;
242 while offset < self.pos {
243 let align_offset = (8 - (offset % 8)) % 8;
245 offset += align_offset;
246
247 if offset >= self.pos {
248 break;
249 }
250
251 unsafe {
252 let header_ptr = self.buffer.as_ptr().add(offset) as *const ClauseHeader;
253 let header = &*header_ptr;
254
255 let header_size = core::mem::size_of::<ClauseHeader>();
256 let lits_size = header.len as usize * core::mem::size_of::<Lit>();
257 let clause_size = header_size + lits_size;
258
259 if !header.is_deleted() {
260 let old_ref = ClauseRef(offset as u32);
262 let new_align = (8 - (new_pos % 8)) % 8;
263 new_pos += new_align;
264
265 let new_ref = ClauseRef(new_pos as u32);
266 mapping.insert(old_ref, new_ref);
267
268 while new_pos + clause_size > new_buffer.capacity() {
270 new_buffer.reserve(4096);
271 }
272
273 let src = self.buffer.as_ptr().add(offset);
275 let dst = new_buffer.as_mut_ptr().add(new_pos);
276 core::ptr::copy_nonoverlapping(src, dst, clause_size);
277
278 new_pos += clause_size;
279 }
280
281 offset += clause_size;
282 }
283 }
284
285 unsafe {
286 new_buffer.set_len(new_pos);
287 }
288
289 self.buffer = new_buffer;
290 self.pos = new_pos;
291 self.num_clauses -= self.num_deleted;
292 self.num_deleted = 0;
293 self.wasted_bytes = 0;
294
295 mapping
296 }
297
298 pub fn stats(&self) -> MemoryStats {
300 MemoryStats {
301 total_bytes: self.buffer.capacity(),
302 used_bytes: self.pos,
303 wasted_bytes: self.wasted_bytes,
304 num_clauses: self.num_clauses,
305 num_deleted: self.num_deleted,
306 }
307 }
308}
309
// No per-clause cleanup is needed: the clause bytes live inside `buffer`
// (a `Vec<u8>`), which frees its allocation automatically on drop. The
// empty impl is kept as an explicit marker that this is intentional —
// clauses are byte-copied in `alloc`/`compact`, so presumably `Lit` is
// plain data with no destructor of its own (NOTE(review): re-check this
// impl if `Lit` ever gains owned resources).
impl Drop for ClauseArena {
    fn drop(&mut self) {
    }
}
315
/// Point-in-time memory accounting for a `ClauseArena`, as produced by
/// `ClauseArena::stats`.
#[derive(Debug, Clone)]
pub struct MemoryStats {
    /// Total bytes reserved by the arena buffer (its capacity).
    pub total_bytes: usize,
    /// Bytes handed out so far, including alignment padding.
    pub used_bytes: usize,
    /// Bytes (header + literals) held by deleted, not-yet-compacted clauses.
    pub wasted_bytes: usize,
    /// Clauses allocated since the last compaction (including ones
    /// marked deleted).
    pub num_clauses: usize,
    /// Clauses marked deleted since the last compaction.
    pub num_deleted: usize,
}

impl MemoryStats {
    /// Fraction of reserved memory holding live clause data, in `[0, 1]`.
    /// An empty arena reports full efficiency (1.0).
    pub fn efficiency(&self) -> f64 {
        if self.total_bytes == 0 {
            return 1.0;
        }
        // saturating_sub: the fields are public, so callers can construct
        // stats with wasted_bytes > used_bytes; plain subtraction would
        // panic on usize underflow in debug builds. Saturate to 0 instead.
        self.used_bytes.saturating_sub(self.wasted_bytes) as f64 / self.total_bytes as f64
    }

    /// Fraction of used bytes occupied by deleted clauses; 0.0 when
    /// nothing has been used yet.
    pub fn waste_ratio(&self) -> f64 {
        if self.used_bytes == 0 {
            return 0.0;
        }
        self.wasted_bytes as f64 / self.used_bytes as f64
    }
}
348
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: a freshly allocated clause reads back verbatim.
    #[test]
    fn test_arena_alloc() {
        let mut arena = ClauseArena::new(1024);

        let lits = vec![
            Lit::pos(crate::literal::Var(0)),
            Lit::pos(crate::literal::Var(1)),
        ];
        let cref = arena.alloc(&lits, false);

        assert!(!cref.is_null());
        assert_eq!(arena.get(cref), Some(&lits[..]));
    }

    // Lazy deletion: a deleted clause becomes invisible to `get`.
    #[test]
    fn test_arena_delete() {
        let mut arena = ClauseArena::new(1024);

        let lits = vec![
            Lit::pos(crate::literal::Var(0)),
            Lit::pos(crate::literal::Var(1)),
        ];
        let cref = arena.alloc(&lits, false);

        arena.delete(cref);
        assert_eq!(arena.get(cref), None);
    }

    // Activity written through set_activity is read back exactly.
    #[test]
    fn test_arena_activity() {
        let mut arena = ClauseArena::new(1024);

        let lits = vec![
            Lit::pos(crate::literal::Var(0)),
            Lit::pos(crate::literal::Var(1)),
        ];
        let cref = arena.alloc(&lits, false);

        arena.set_activity(cref, 1.5);
        assert_eq!(arena.get_activity(cref), Some(1.5));
    }

    // Compaction drops the deleted clause, relocates survivors, and
    // returns an old-ref -> new-ref mapping for them.
    #[test]
    fn test_arena_compact() {
        let mut arena = ClauseArena::new(1024);

        let lits1 = vec![Lit::pos(crate::literal::Var(0))];
        let lits2 = vec![Lit::pos(crate::literal::Var(1))];
        let lits3 = vec![Lit::pos(crate::literal::Var(2))];

        let cref1 = arena.alloc(&lits1, false);
        let cref2 = arena.alloc(&lits2, false);
        let cref3 = arena.alloc(&lits3, false);

        arena.delete(cref2);

        let mapping = arena.compact();

        // A survivor missing from the map kept its old offset.
        let new_cref1 = mapping.get(&cref1).copied().unwrap_or(cref1);
        let new_cref3 = mapping.get(&cref3).copied().unwrap_or(cref3);

        assert_eq!(arena.get(new_cref1), Some(&lits1[..]));
        assert_eq!(arena.get(new_cref3), Some(&lits3[..]));
        assert_eq!(arena.num_deleted, 0);
    }

    // A fresh arena reports sane (empty) statistics.
    #[test]
    fn test_memory_stats() {
        let arena = ClauseArena::new(1024);
        let stats = arena.stats();

        assert_eq!(stats.num_clauses, 0);
        assert!(stats.efficiency() >= 0.0);
        assert_eq!(stats.waste_ratio(), 0.0);
    }
}