use std::sync::atomic::{AtomicI64, AtomicU8, AtomicU32, AtomicU64, Ordering};
13
/// Maximum number of distinct assertion slots the table can hold.
pub const MAX_ASSERTION_SLOTS: usize = 128;

/// Size of each slot's fixed message buffer, including the NUL terminator.
const SLOT_MSG_LEN: usize = 64;

/// Total shared-memory footprint of the assertion table: an 8-byte header
/// (the first 4 bytes are the atomic slot counter; the rest is padding)
/// followed by the slot array.
pub const ASSERTION_TABLE_MEM_SIZE: usize =
    8 + MAX_ASSERTION_SLOTS * std::mem::size_of::<AssertionSlot>();
25
/// Discriminates the assertion families recorded in the shared table.
///
/// `#[repr(u8)]` with explicit, dense discriminants (0..=7) so the value
/// can be stored raw in `AssertionSlot::kind` and decoded via `from_u8`.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AssertKind {
    /// Hard assertion: a failing evaluation is logged (first time only).
    Always = 0,
    /// Handled like `Always` in `assertion_bool`; presumably allowed to
    /// never be reached — TODO confirm against the harness semantics.
    AlwaysOrUnreachable = 1,
    /// Must pass at least once; the first pass dispatches a split.
    Sometimes = 2,
    /// Handled like `Sometimes`: reaching it (condition true) splits once.
    Reachable = 3,
    /// Must never execute; being evaluated at all is logged.
    Unreachable = 4,
    /// Numeric comparison reported via `assertion_numeric`; failures logged.
    NumericAlways = 5,
    /// Numeric comparison; split dispatched on each watermark improvement.
    NumericSometimes = 6,
    /// Set of named booleans reported via `assertion_sometimes_all`.
    BooleanSometimesAll = 7,
}
47
48impl AssertKind {
49 pub fn from_u8(v: u8) -> Option<Self> {
51 match v {
52 0 => Some(Self::Always),
53 1 => Some(Self::AlwaysOrUnreachable),
54 2 => Some(Self::Sometimes),
55 3 => Some(Self::Reachable),
56 4 => Some(Self::Unreachable),
57 5 => Some(Self::NumericAlways),
58 6 => Some(Self::NumericSometimes),
59 7 => Some(Self::BooleanSometimesAll),
60 _ => None,
61 }
62 }
63}
64
/// Comparison operator applied as `left <op> right` in
/// `assertion_numeric`.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AssertCmp {
    /// `left > right`
    Gt = 0,
    /// `left >= right`
    Ge = 1,
    /// `left < right`
    Lt = 2,
    /// `left <= right`
    Le = 3,
}
78
/// One record in the shared-memory assertion table.
///
/// `#[repr(C)]` keeps the layout stable across every process mapping the
/// table (the 112-byte size is pinned by `test_slot_size_stable`).
/// Counter fields are mutated in place through atomic views, so the
/// struct must keep its natural alignment.
#[repr(C)]
pub struct AssertionSlot {
    /// FNV-1a hash of `msg`; the key used for slot lookup.
    pub msg_hash: u32,
    /// Raw `AssertKind` discriminant (decode with `AssertKind::from_u8`).
    pub kind: u8,
    /// 1 if the assertion must be hit at least once, else 0.
    pub must_hit: u8,
    /// 1 to maximize the numeric watermark, 0 to minimize.
    pub maximize: u8,
    /// One-shot flag: set when the first passing `Sometimes`/`Reachable`
    /// evaluation has dispatched a split.
    pub split_triggered: u8,
    /// Passing evaluations (for `Unreachable`: times it was reached).
    pub pass_count: u64,
    /// Failing evaluations.
    pub fail_count: u64,
    /// Best observed `left` value (max or min depending on `maximize`).
    pub watermark: i64,
    /// Best value for which a split has been dispatched (`NumericSometimes`).
    pub split_watermark: i64,
    /// Highest simultaneous-true count seen (`BooleanSometimesAll`).
    pub frontier: u8,
    /// Explicit padding so `msg` starts 8-byte aligned and the struct
    /// size stays a multiple of 8.
    pub _pad: [u8; 7],
    /// NUL-terminated, possibly truncated copy of the assertion message.
    pub msg: [u8; SLOT_MSG_LEN],
}
109
110impl AssertionSlot {
111 pub fn msg_str(&self) -> &str {
113 let len = self
114 .msg
115 .iter()
116 .position(|&b| b == 0)
117 .unwrap_or(SLOT_MSG_LEN);
118 std::str::from_utf8(&self.msg[..len]).unwrap_or("???")
119 }
120}
121
/// 32-bit FNV-1a hash of `msg`, used as the slot lookup key.
///
/// Deterministic across runs and processes, so the same assertion
/// message always maps to the same slot.
pub fn msg_hash(msg: &str) -> u32 {
    // FNV-1a: xor each byte into the accumulator, then multiply by the
    // FNV prime (offset basis 0x811c9dc5, prime 0x01000193).
    msg.bytes()
        .fold(0x811c9dc5u32, |acc, b| {
            (acc ^ u32::from(b)).wrapping_mul(0x01000193)
        })
}
131
132unsafe fn find_or_alloc_slot(
141 table_ptr: *mut u8,
142 hash: u32,
143 kind: AssertKind,
144 must_hit: u8,
145 maximize: u8,
146 msg: &str,
147) -> (*mut AssertionSlot, usize) {
148 unsafe {
149 let next_atomic = &*(table_ptr as *const AtomicU32);
150 let count = next_atomic.load(Ordering::Relaxed) as usize;
151 let base = table_ptr.add(8) as *mut AssertionSlot;
152
153 for i in 0..count.min(MAX_ASSERTION_SLOTS) {
155 let slot = base.add(i);
156 if (*slot).msg_hash == hash {
157 return (slot, i);
158 }
159 }
160
161 let new_idx = next_atomic.fetch_add(1, Ordering::Relaxed) as usize;
163 if new_idx >= MAX_ASSERTION_SLOTS {
164 next_atomic.fetch_sub(1, Ordering::Relaxed);
165 return (std::ptr::null_mut(), 0);
166 }
167
168 let slot = base.add(new_idx);
169 let mut msg_buf = [0u8; SLOT_MSG_LEN];
170 let n = msg.len().min(SLOT_MSG_LEN - 1);
171 msg_buf[..n].copy_from_slice(&msg.as_bytes()[..n]);
172
173 std::ptr::write(
174 slot,
175 AssertionSlot {
176 msg_hash: hash,
177 kind: kind as u8,
178 must_hit,
179 maximize,
180 split_triggered: 0,
181 pass_count: 0,
182 fail_count: 0,
183 watermark: if maximize == 1 { i64::MIN } else { i64::MAX },
184 split_watermark: if maximize == 1 { i64::MIN } else { i64::MAX },
185 frontier: 0,
186 _pad: [0; 7],
187 msg: msg_buf,
188 },
189 );
190 (slot, new_idx)
191 }
192}
193
194fn assertion_split(slot_idx: usize, hash: u32) {
199 let bm_ptr = crate::context::COVERAGE_BITMAP_PTR.with(|c| c.get());
201 if !bm_ptr.is_null() {
202 let bm = unsafe { crate::coverage::CoverageBitmap::new(bm_ptr) };
203 bm.set_bit(hash as usize);
204 }
205
206 let vm_ptr = crate::context::EXPLORED_MAP_PTR.with(|c| c.get());
208 if !vm_ptr.is_null() {
209 let vm = unsafe { crate::coverage::ExploredMap::new(vm_ptr) };
210 let bm_ptr2 = crate::context::COVERAGE_BITMAP_PTR.with(|c| c.get());
211 if !bm_ptr2.is_null() {
212 let bm = unsafe { crate::coverage::CoverageBitmap::new(bm_ptr2) };
213 vm.merge_from(&bm);
214 }
215 }
216
217 if crate::context::explorer_is_active() {
219 crate::split_loop::dispatch_split("", slot_idx % MAX_ASSERTION_SLOTS);
220 }
221}
222
223pub fn assertion_bool(kind: AssertKind, must_hit: bool, condition: bool, msg: &str) {
231 let table_ptr = crate::context::get_assertion_table_ptr();
232 if table_ptr.is_null() {
233 return;
234 }
235
236 let hash = msg_hash(msg);
237 let must_hit_u8 = if must_hit { 1 } else { 0 };
238
239 let (slot, slot_idx) =
241 unsafe { find_or_alloc_slot(table_ptr, hash, kind, must_hit_u8, 0, msg) };
242 if slot.is_null() {
243 return;
244 }
245
246 unsafe {
248 match kind {
249 AssertKind::Always | AssertKind::AlwaysOrUnreachable | AssertKind::NumericAlways => {
250 if condition {
251 let pc = &*((&(*slot).pass_count) as *const u64 as *const AtomicI64);
252 pc.fetch_add(1, Ordering::Relaxed);
253 } else {
254 let fc = &*((&(*slot).fail_count) as *const u64 as *const AtomicI64);
255 let prev = fc.fetch_add(1, Ordering::Relaxed);
256 if prev == 0 {
257 eprintln!("[ASSERTION FAILED] {} (kind={:?})", msg, kind);
258 }
259 }
260 }
261 AssertKind::Sometimes | AssertKind::Reachable => {
262 if condition {
263 let pc = &*((&(*slot).pass_count) as *const u64 as *const AtomicI64);
264 pc.fetch_add(1, Ordering::Relaxed);
265
266 let ft = &*((&(*slot).split_triggered) as *const u8 as *const AtomicU8);
268 if ft
269 .compare_exchange(0, 1, Ordering::Relaxed, Ordering::Relaxed)
270 .is_ok()
271 {
272 assertion_split(slot_idx, hash);
273 }
274 } else {
275 let fc = &*((&(*slot).fail_count) as *const u64 as *const AtomicI64);
276 fc.fetch_add(1, Ordering::Relaxed);
277 }
278 }
279 AssertKind::Unreachable => {
280 let pc = &*((&(*slot).pass_count) as *const u64 as *const AtomicI64);
283 let prev = pc.fetch_add(1, Ordering::Relaxed);
284 if prev == 0 {
285 eprintln!("[UNREACHABLE REACHED] {}", msg);
286 }
287 }
288 _ => {}
289 }
290 }
291}
292
293pub fn assertion_numeric(
304 kind: AssertKind,
305 cmp: AssertCmp,
306 maximize: bool,
307 left: i64,
308 right: i64,
309 msg: &str,
310) {
311 let table_ptr = crate::context::get_assertion_table_ptr();
312 if table_ptr.is_null() {
313 return;
314 }
315
316 let hash = msg_hash(msg);
317 let maximize_u8 = if maximize { 1 } else { 0 };
318
319 let (slot, slot_idx) =
321 unsafe { find_or_alloc_slot(table_ptr, hash, kind, 1, maximize_u8, msg) };
322 if slot.is_null() {
323 return;
324 }
325
326 let passes = match cmp {
328 AssertCmp::Gt => left > right,
329 AssertCmp::Ge => left >= right,
330 AssertCmp::Lt => left < right,
331 AssertCmp::Le => left <= right,
332 };
333
334 unsafe {
336 if passes {
337 let pc = &*((&(*slot).pass_count) as *const u64 as *const AtomicI64);
338 pc.fetch_add(1, Ordering::Relaxed);
339 } else {
340 let fc = &*((&(*slot).fail_count) as *const u64 as *const AtomicI64);
341 let prev = fc.fetch_add(1, Ordering::Relaxed);
342 if kind == AssertKind::NumericAlways && prev == 0 {
343 eprintln!(
344 "[NUMERIC ASSERTION FAILED] {} (left={}, right={}, cmp={:?})",
345 msg, left, right, cmp
346 );
347 }
348 }
349
350 let wm = &*((&(*slot).watermark) as *const i64 as *const AtomicI64);
352 let mut current = wm.load(Ordering::Relaxed);
353 loop {
354 let is_better = if maximize {
355 left > current
356 } else {
357 left < current
358 };
359 if !is_better {
360 break;
361 }
362 match wm.compare_exchange_weak(current, left, Ordering::Relaxed, Ordering::Relaxed) {
363 Ok(_) => break,
364 Err(actual) => current = actual,
365 }
366 }
367
368 if kind == AssertKind::NumericSometimes {
370 let fw = &*((&(*slot).split_watermark) as *const i64 as *const AtomicI64);
371 let mut fork_current = fw.load(Ordering::Relaxed);
372 loop {
373 let is_better = if maximize {
374 left > fork_current
375 } else {
376 left < fork_current
377 };
378 if !is_better {
379 break;
380 }
381 match fw.compare_exchange_weak(
382 fork_current,
383 left,
384 Ordering::Relaxed,
385 Ordering::Relaxed,
386 ) {
387 Ok(_) => {
388 assertion_split(slot_idx, hash);
389 break;
390 }
391 Err(actual) => fork_current = actual,
392 }
393 }
394 }
395 }
396}
397
398pub fn assertion_sometimes_all(msg: &str, named_bools: &[(&str, bool)]) {
405 let table_ptr = crate::context::get_assertion_table_ptr();
406 if table_ptr.is_null() {
407 return;
408 }
409
410 let hash = msg_hash(msg);
411
412 let (slot, slot_idx) =
414 unsafe { find_or_alloc_slot(table_ptr, hash, AssertKind::BooleanSometimesAll, 1, 0, msg) };
415 if slot.is_null() {
416 return;
417 }
418
419 let true_count = named_bools.iter().filter(|(_, v)| *v).count() as u8;
421
422 unsafe {
424 let pc = &*((&(*slot).pass_count) as *const u64 as *const AtomicI64);
426 pc.fetch_add(1, Ordering::Relaxed);
427
428 let fr = &*((&(*slot).frontier) as *const u8 as *const AtomicU8);
430 let mut current = fr.load(Ordering::Relaxed);
431 loop {
432 if true_count <= current {
433 break;
434 }
435 match fr.compare_exchange_weak(
436 current,
437 true_count,
438 Ordering::Relaxed,
439 Ordering::Relaxed,
440 ) {
441 Ok(_) => {
442 assertion_split(slot_idx, hash);
443 break;
444 }
445 Err(actual) => current = actual,
446 }
447 }
448 }
449}
450
451pub fn assertion_read_all() -> Vec<AssertionSlotSnapshot> {
455 let table_ptr = crate::context::get_assertion_table_ptr();
456 if table_ptr.is_null() {
457 return Vec::new();
458 }
459
460 unsafe {
461 let count = (*(table_ptr as *const u32)) as usize;
462 let count = count.min(MAX_ASSERTION_SLOTS);
463 let base = table_ptr.add(8) as *const AssertionSlot;
464
465 (0..count)
466 .map(|i| {
467 let slot = &*base.add(i);
468 AssertionSlotSnapshot {
469 msg: slot.msg_str().to_string(),
470 kind: slot.kind,
471 must_hit: slot.must_hit,
472 pass_count: slot.pass_count,
473 fail_count: slot.fail_count,
474 watermark: slot.watermark,
475 frontier: slot.frontier,
476 }
477 })
478 .collect()
479 }
480}
481
/// Owned, plain-data copy of one `AssertionSlot`, safe to pass around
/// outside the shared-memory mapping (see `assertion_read_all`).
#[derive(Debug, Clone)]
pub struct AssertionSlotSnapshot {
    /// The assertion message, decoded from the slot's NUL-terminated buffer.
    pub msg: String,
    /// Raw `AssertKind` discriminant byte.
    pub kind: u8,
    /// 1 if the assertion must be hit at least once, else 0.
    pub must_hit: u8,
    /// Number of passing evaluations.
    pub pass_count: u64,
    /// Number of failing evaluations.
    pub fail_count: u64,
    /// Best observed numeric value (meaningful for numeric kinds).
    pub watermark: i64,
    /// Highest simultaneous-true count (meaningful for sometimes-all).
    pub frontier: u8,
}
500
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_msg_hash_deterministic() {
        // Hashing the same string twice must produce the same value.
        assert_eq!(msg_hash("test_assertion"), msg_hash("test_assertion"));
    }

    #[test]
    fn test_msg_hash_no_collision() {
        let names = ["a", "b", "c", "timeout", "connect", "retry"];
        let hashes: Vec<u32> = names.iter().map(|n| msg_hash(n)).collect();
        // Check every unordered pair for a collision.
        for i in 0..hashes.len() {
            for (other_hash, other_name) in hashes.iter().zip(names.iter()).skip(i + 1) {
                assert_ne!(
                    hashes[i], *other_hash,
                    "{} and {} collide",
                    names[i], other_name
                );
            }
        }
    }

    #[test]
    fn test_slot_size_stable() {
        // The layout is shared across processes; keep the size pinned.
        assert_eq!(std::mem::size_of::<AssertionSlot>(), 112);
    }

    #[test]
    fn test_assertion_bool_noop_when_inactive() {
        // With no table mapped, these must silently return.
        for (kind, condition, msg) in [
            (AssertKind::Sometimes, true, "test"),
            (AssertKind::Always, false, "test2"),
        ] {
            assertion_bool(kind, true, condition, msg);
        }
    }

    #[test]
    fn test_assertion_numeric_noop_when_inactive() {
        assertion_numeric(AssertKind::NumericAlways, AssertCmp::Gt, false, 10, 5, "test");
    }

    #[test]
    fn test_assertion_read_all_when_inactive() {
        assert!(assertion_read_all().is_empty());
    }

    #[test]
    fn test_assert_kind_from_u8() {
        assert_eq!(AssertKind::from_u8(0), Some(AssertKind::Always));
        assert_eq!(AssertKind::from_u8(7), Some(AssertKind::BooleanSometimesAll));
        assert_eq!(AssertKind::from_u8(8), None);
    }
}