// landauer_allocator/lib.rs

//! Pillar: II. PACR field: Λ.
//!
//! **Landauer-on-Drop Global Allocator**: the thermodynamic nervous system.
//!
//! Standalone crate extracted from `aevum-core` so other crates can depend on
//! Landauer accounting without pulling in the full `aevum-core` dependency tree.
//!
//! # Why intercept `dealloc`?
//!
//! In Rust, when a variable's lifetime ends, its `Drop` implementation runs and
//! the heap memory is returned to the allocator. That deallocation marks the
//! moment the program irrevocably gives up the information in those bytes: the
//! exact event that Landauer's principle taxes.
//!
//! By wrapping every `dealloc` call we count bits erased continuously, without
//! any application code having to opt in. TGP is not a function you call;
//! it is a law the system cannot escape.
//!
//! # Safety contract
//!
//! Each unsafe block below wraps exactly one system-allocator call; the atomic
//! counter increment happens in safe code. There is:
//! - No pointer arithmetic
//! - No raw memory reads or writes
//! - No lifetime violations
//! - No data races (`AtomicU64` with `Relaxed` ordering is sound here: we only
//!   need monotonicity, not happens-before synchronisation)
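//!
//! # Usage
//!
//! This crate only defines the allocator type; registering it is the consuming
//! binary's job. A minimal sketch of such a binary (illustrative, not part of
//! this crate):
//!
//! ```no_run
//! use landauer_allocator::{bits_erased, landauer_cost_joules, LandauerAllocator};
//!
//! #[global_allocator]
//! static ALLOC: LandauerAllocator = LandauerAllocator;
//!
//! fn main() {
//!     drop(vec![0u8; 1024]); // at least 8192 bits counted on dealloc
//!     let joules = landauer_cost_joules(bits_erased(), 300.0);
//!     println!("Λ so far: {joules:.3e} J");
//! }
//! ```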

// Crate-level: deny unsafe. The one exception is the inner mod below.
#![deny(unsafe_code)]

// All unsafe code in this crate is confined to the module below.
#[allow(unsafe_code)]
mod inner {
    use std::alloc::{GlobalAlloc, Layout, System};
    use std::sync::atomic::{AtomicU64, Ordering};

    /// Cumulative bits erased since process start.
    ///
    /// Monotonically increasing. Never reset. Never decremented.
    pub static BITS_ERASED: AtomicU64 = AtomicU64::new(0);

    /// The Landauer allocator wraps the system allocator and counts bit
    /// erasures on every `dealloc` call.
    pub struct LandauerAllocator;

    // SAFETY JUSTIFICATION:
    // `GlobalAlloc` requires `unsafe impl` because the trait contract depends
    // on pointer validity guarantees that the compiler cannot verify statically.
    // We delegate entirely to `System` for actual memory management.
    // Our only addition: one `fetch_add` on an `AtomicU64` in `dealloc`.
    // No pointer arithmetic, no aliasing, no lifetime violations.
    #[allow(unsafe_code)]
    unsafe impl GlobalAlloc for LandauerAllocator {
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            // SAFETY: layout validated by caller (Rust stdlib contract).
            unsafe { System.alloc(layout) }
        }

        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            // Count bits BEFORE releasing the memory: this is the erasure event.
            let bits = layout.size() as u64 * 8;
            BITS_ERASED.fetch_add(bits, Ordering::Relaxed);
            // SAFETY: ptr and layout are the exact pair alloc returned.
            unsafe { System.dealloc(ptr, layout) }
        }
    }
}

pub use inner::LandauerAllocator;
use inner::BITS_ERASED;
use std::sync::atomic::Ordering;

// ── Public API ────────────────────────────────────────────────────────────────

/// Read the cumulative bits erased since process start.
///
/// O(1), lock-free.
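///
/// # Example
///
/// An illustrative sketch; the counter only moves once the allocator has been
/// registered with `#[global_allocator]` in the consuming binary:
///
/// ```no_run
/// let before = landauer_allocator::bits_erased();
/// drop(vec![0u8; 512]); // 4096 bits on dealloc, if the allocator is installed
/// assert!(landauer_allocator::bits_erased() >= before);
/// ```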
#[must_use]
pub fn bits_erased() -> u64 {
    BITS_ERASED.load(Ordering::Relaxed)
}

/// Compute the Landauer dissipation cost in joules for `bits` erased at
/// temperature `temperature_k` in kelvin.
///
/// `Λ = bits × k_B × T × ln(2)`
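///
/// # Example
///
/// Erasing one bit at room temperature (300 K) costs about 2.87 × 10⁻²¹ J:
///
/// ```
/// let j = landauer_allocator::landauer_cost_joules(1, 300.0);
/// assert!((j / 2.871e-21 - 1.0).abs() < 1e-3);
/// ```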
#[must_use]
pub fn landauer_cost_joules(bits: u64, temperature_k: f64) -> f64 {
    // Boltzmann constant in J/K (exact SI value).
    const K_B: f64 = 1.380_649e-23;
    bits as f64 * K_B * temperature_k * std::f64::consts::LN_2
}

// ── Tests ─────────────────────────────────────────────────────────────────────

#[cfg(test)]
mod tests {
    use super::*;
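
    // Register the allocator for this test binary so the counting path in
    // `dealloc` is actually exercised. In normal use, registration is the
    // consuming binary's responsibility; this static exists only for the
    // tests below.
    #[global_allocator]
    static TEST_ALLOC: LandauerAllocator = LandauerAllocator;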

    #[test]
    fn bits_erased_monotonically_increases() {
        let before = bits_erased();
        let v: Vec<u8> = (0..1024).map(|i| i as u8).collect();
        drop(v);
        let after = bits_erased();
        assert!(
            after >= before,
            "bits_erased must be monotonically non-decreasing: before={before}, after={after}"
        );
    }

    #[test]
    fn bits_erased_increases_on_heap_allocation() {
        let before = bits_erased();
        let v: Vec<u8> = vec![0u8; 4096];
        drop(v);
        let after = bits_erased();
        assert!(after >= before, "after={after} should be ≥ before={before}");
    }

    #[test]
    fn landauer_cost_at_300k_matches_constant() {
        let cost = landauer_cost_joules(1, 300.0);
        let expected = 2.870_979e-21_f64;
        let ratio = cost / expected;
        assert!(
            (ratio - 1.0).abs() < 1e-4,
            "Λ(1 bit, 300 K) = {cost:.6e}, expected ≈ {expected:.6e}"
        );
    }

    #[test]
    fn landauer_cost_scales_linearly_with_bits() {
        let one = landauer_cost_joules(1, 300.0);
        let thousand = landauer_cost_joules(1_000, 300.0);
        let ratio = thousand / one;
        assert!(
            (ratio - 1_000.0).abs() < 1.0,
            "Λ must scale linearly: ratio={ratio}"
        );
    }

    #[test]
    fn landauer_cost_scales_linearly_with_temperature() {
        let at_300 = landauer_cost_joules(100, 300.0);
        let at_600 = landauer_cost_joules(100, 600.0);
        let ratio = at_600 / at_300;
        assert!(
            (ratio - 2.0).abs() < 1e-10,
            "Λ must be proportional to T: ratio={ratio}"
        );
    }

    #[test]
    fn landauer_cost_zero_bits_is_zero() {
        let cost = landauer_cost_joules(0, 300.0);
        assert_eq!(cost, 0.0, "0 bits erased → 0 joules");
    }
}
159}