1#[allow(unused_imports)]
11use crate::prelude::*;
12
/// Running counters describing allocator behavior.
#[derive(Debug, Default, Clone)]
pub struct MemoryOptStats {
    /// Total bytes handed out over the optimizer's lifetime.
    pub total_allocated: usize,
    /// Total bytes returned over the optimizer's lifetime.
    pub total_freed: usize,
    /// Bytes currently outstanding (never driven below zero).
    pub current_usage: usize,
    /// Highest value `current_usage` has ever reached.
    pub peak_usage: usize,
    /// Number of allocation requests observed.
    pub allocations: u64,
    /// Number of free calls observed.
    pub frees: u64,
    /// Allocations satisfied by recycling a pooled block.
    pub pool_hits: u64,
    /// Allocations that required a fresh heap allocation.
    pub pool_misses: u64,
    /// Heuristic fragmentation estimate in `[0.0, 1.0]`.
    pub fragmentation: f64,
}

impl MemoryOptStats {
    /// Fraction of pool requests served from recycled blocks.
    ///
    /// Returns `0.0` before any request has been recorded.
    #[must_use]
    pub fn hit_rate(&self) -> f64 {
        match self.pool_hits + self.pool_misses {
            0 => 0.0,
            total => self.pool_hits as f64 / total as f64,
        }
    }

    /// Mean bytes per allocation, or `0.0` before the first allocation.
    #[must_use]
    pub fn avg_allocation_size(&self) -> f64 {
        match self.allocations {
            0 => 0.0,
            count => self.total_allocated as f64 / count as f64,
        }
    }
}
58
/// Allocation size buckets keyed by the number of literals in a request.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SizeClass {
    /// Requests of 0–2 literals.
    Binary,
    /// Requests of exactly 3 literals.
    Ternary,
    /// Requests of 4–8 literals.
    Small,
    /// Requests of 9–16 literals.
    Medium,
    /// Requests of 17–32 literals.
    Large,
    /// Requests of 33 or more literals.
    VeryLarge,
}

impl SizeClass {
    /// Maps a literal count onto its size class.
    #[must_use]
    pub fn from_size(size: usize) -> Self {
        if size <= 2 {
            Self::Binary
        } else if size == 3 {
            Self::Ternary
        } else if size <= 8 {
            Self::Small
        } else if size <= 16 {
            Self::Medium
        } else if size <= 32 {
            Self::Large
        } else {
            Self::VeryLarge
        }
    }

    /// Number of bytes reserved for a buffer of this class.
    #[must_use]
    pub fn buffer_size(self) -> usize {
        match self {
            Self::Binary => 16,
            Self::Ternary => 24,
            Self::Small => 64,
            Self::Medium => 128,
            Self::Large => 256,
            Self::VeryLarge => 512,
        }
    }
}
103
/// A fixed-capacity pool of reusable byte buffers for one size class.
#[derive(Debug)]
struct MemoryPool {
    /// Blocks returned by `free` and available for reuse.
    free_blocks: Vec<Vec<u8>>,
    /// Number of blocks currently handed out.
    allocated_blocks: usize,
    /// Upper bound on outstanding blocks and on retained free blocks.
    max_blocks: usize,
}

impl MemoryPool {
    /// Creates an empty pool serving at most `max_blocks` concurrent
    /// allocations.
    fn new(max_blocks: usize) -> Self {
        Self {
            free_blocks: Vec::new(),
            allocated_blocks: 0,
            max_blocks,
        }
    }

    /// Hands out a zeroed buffer of `size` bytes.
    ///
    /// Returns `(Some(buffer), true)` on a pool hit (recycled block),
    /// `(Some(buffer), false)` on a fresh heap allocation, and
    /// `(None, false)` when the pool is at capacity.
    fn allocate(&mut self, size: usize) -> (Option<Vec<u8>>, bool) {
        if let Some(mut block) = self.free_blocks.pop() {
            // BUG FIX: a recycled block used to be returned as-is, carrying
            // stale contents (and a stale length if the caller had resized
            // it before freeing), while the fresh path returns `vec![0; size]`.
            // Re-zero and re-size so a hit is indistinguishable from a miss.
            block.clear();
            block.resize(size, 0);
            self.allocated_blocks += 1;
            (Some(block), true)
        } else if self.allocated_blocks < self.max_blocks {
            self.allocated_blocks += 1;
            (Some(vec![0; size]), false)
        } else {
            (None, false)
        }
    }

    /// Returns a buffer to the pool, retaining it for reuse unless the
    /// free list is already at `max_blocks`.
    fn free(&mut self, block: Vec<u8>) {
        if self.allocated_blocks > 0 {
            self.allocated_blocks -= 1;
        }
        if self.free_blocks.len() < self.max_blocks {
            self.free_blocks.push(block);
        }
    }

    /// Drops every retained block and resets the outstanding count.
    fn clear(&mut self) {
        self.free_blocks.clear();
        self.allocated_blocks = 0;
    }
}
150
151#[derive(Debug)]
153pub struct MemoryOptimizer {
154 pools: HashMap<SizeClass, MemoryPool>,
156 stats: MemoryOptStats,
158 enable_prefetch: bool,
160}
161
162impl MemoryOptimizer {
163 #[must_use]
165 pub fn new() -> Self {
166 let mut pools = HashMap::new();
167
168 pools.insert(SizeClass::Binary, MemoryPool::new(1000));
170 pools.insert(SizeClass::Ternary, MemoryPool::new(800));
171 pools.insert(SizeClass::Small, MemoryPool::new(500));
172 pools.insert(SizeClass::Medium, MemoryPool::new(200));
173 pools.insert(SizeClass::Large, MemoryPool::new(50));
174 pools.insert(SizeClass::VeryLarge, MemoryPool::new(10));
175
176 Self {
177 pools,
178 stats: MemoryOptStats::default(),
179 enable_prefetch: true,
180 }
181 }
182
183 pub fn set_prefetch(&mut self, enable: bool) {
185 self.enable_prefetch = enable;
186 }
187
188 #[must_use]
190 pub fn stats(&self) -> &MemoryOptStats {
191 &self.stats
192 }
193
194 pub fn reset_stats(&mut self) {
196 self.stats = MemoryOptStats::default();
197 }
198
199 pub fn allocate(&mut self, num_literals: usize) -> Vec<u8> {
201 let size_class = SizeClass::from_size(num_literals);
202 let buffer_size = size_class.buffer_size();
203
204 self.stats.allocations += 1;
205 self.stats.total_allocated += buffer_size;
206 self.stats.current_usage += buffer_size;
207
208 if self.stats.current_usage > self.stats.peak_usage {
209 self.stats.peak_usage = self.stats.current_usage;
210 }
211
212 if let Some(pool) = self.pools.get_mut(&size_class) {
213 let (block_opt, is_hit) = pool.allocate(buffer_size);
214 if let Some(block) = block_opt {
215 if is_hit {
216 self.stats.pool_hits += 1;
217 } else {
218 self.stats.pool_misses += 1;
219 }
220 return block;
221 }
222 }
223
224 self.stats.pool_misses += 1;
225 vec![0; buffer_size]
226 }
227
228 pub fn free(&mut self, buffer: Vec<u8>, num_literals: usize) {
230 let size_class = SizeClass::from_size(num_literals);
231 let buffer_size = size_class.buffer_size();
232
233 self.stats.frees += 1;
234 self.stats.total_freed += buffer_size;
235 if self.stats.current_usage >= buffer_size {
236 self.stats.current_usage -= buffer_size;
237 }
238
239 if let Some(pool) = self.pools.get_mut(&size_class) {
240 pool.free(buffer);
241 }
242 }
243
244 #[inline]
246 pub fn prefetch<T>(&self, _ptr: *const T) {
247 #[cfg(target_arch = "x86_64")]
248 {
249 if self.enable_prefetch {
250 }
254 }
255 }
256
257 pub fn compact(&mut self) {
259 for pool in self.pools.values_mut() {
260 let target_size = pool.free_blocks.len() / 2;
262 pool.free_blocks.truncate(target_size);
263 }
264
265 self.update_fragmentation();
267 }
268
269 pub fn clear_pools(&mut self) {
271 for pool in self.pools.values_mut() {
272 pool.clear();
273 }
274 self.stats.current_usage = 0;
275 }
276
277 #[must_use]
279 pub fn usage_by_size_class(&self) -> HashMap<SizeClass, usize> {
280 let mut usage = HashMap::new();
281 for (&size_class, pool) in &self.pools {
282 let class_usage = pool.allocated_blocks * size_class.buffer_size();
283 usage.insert(size_class, class_usage);
284 }
285 usage
286 }
287
288 #[must_use]
290 pub fn is_memory_pressure_high(&self) -> bool {
291 let usage_ratio = self.stats.current_usage as f64 / self.stats.peak_usage.max(1) as f64;
294 usage_ratio > 0.9 || self.stats.fragmentation > 0.5
295 }
296
297 #[must_use]
299 pub fn recommend_action(&self) -> MemoryAction {
300 if self.is_memory_pressure_high() {
301 if self.stats.fragmentation > 0.5 {
302 MemoryAction::Compact
303 } else {
304 MemoryAction::ReduceClauseDatabase
305 }
306 } else if self.stats.hit_rate() < 0.5 {
307 MemoryAction::ExpandPools
308 } else {
309 MemoryAction::None
310 }
311 }
312
313 fn update_fragmentation(&mut self) {
315 let mut total_free = 0;
316 let mut total_capacity = 0;
317
318 for (size_class, pool) in &self.pools {
319 total_free += pool.free_blocks.len() * size_class.buffer_size();
320 total_capacity += pool.max_blocks * size_class.buffer_size();
321 }
322
323 if total_capacity > 0 {
324 let free_ratio = total_free as f64 / total_capacity as f64;
326 let usage_ratio = self.stats.current_usage as f64 / self.stats.peak_usage.max(1) as f64;
327
328 self.stats.fragmentation = (free_ratio * usage_ratio).min(1.0);
330 }
331 }
332}
333
334impl Default for MemoryOptimizer {
335 fn default() -> Self {
336 Self::new()
337 }
338}
339
/// Maintenance action suggested by `MemoryOptimizer::recommend_action`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MemoryAction {
    /// No intervention needed.
    None,
    /// Fragmentation is high: compact the pools.
    Compact,
    /// Usage is near its peak but fragmentation is low: shrink the
    /// clause database instead.
    ReduceClauseDatabase,
    /// Pool hit rate is below 0.5: grow the pools.
    ExpandPools,
}
352
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_size_class_from_size() {
        // One representative literal count per bucket.
        assert_eq!(SizeClass::from_size(2), SizeClass::Binary);
        assert_eq!(SizeClass::from_size(3), SizeClass::Ternary);
        assert_eq!(SizeClass::from_size(5), SizeClass::Small);
        assert_eq!(SizeClass::from_size(10), SizeClass::Medium);
        assert_eq!(SizeClass::from_size(20), SizeClass::Large);
        assert_eq!(SizeClass::from_size(50), SizeClass::VeryLarge);
    }

    #[test]
    fn test_size_class_buffer_size() {
        assert_eq!(SizeClass::Binary.buffer_size(), 16);
        assert_eq!(SizeClass::Ternary.buffer_size(), 24);
        assert_eq!(SizeClass::Small.buffer_size(), 64);
    }

    #[test]
    fn test_memory_optimizer_creation() {
        // A fresh optimizer starts with zeroed statistics.
        let optimizer = MemoryOptimizer::new();
        assert_eq!(optimizer.stats().allocations, 0);
    }

    #[test]
    fn test_allocate_and_free() {
        let mut optimizer = MemoryOptimizer::new();

        let buf = optimizer.allocate(2);
        assert_eq!(buf.len(), 16);
        assert_eq!(optimizer.stats().allocations, 1);
        assert!(optimizer.stats().current_usage > 0);

        optimizer.free(buf, 2);
        assert_eq!(optimizer.stats().frees, 1);
    }

    #[test]
    fn test_pool_hits() {
        let mut optimizer = MemoryOptimizer::new();

        // First allocation has nothing to recycle: a miss.
        let first = optimizer.allocate(2);
        assert_eq!(optimizer.stats().pool_misses, 1);
        assert_eq!(optimizer.stats().pool_hits, 0);

        optimizer.free(first, 2);

        // The freed block is recycled: a hit.
        let _second = optimizer.allocate(2);
        assert_eq!(optimizer.stats().pool_hits, 1);
        assert_eq!(optimizer.stats().pool_misses, 1);
    }

    #[test]
    fn test_hit_rate() {
        let mut optimizer = MemoryOptimizer::new();

        let first = optimizer.allocate(2);
        optimizer.free(first, 2);
        let _second = optimizer.allocate(2);

        let rate = optimizer.stats().hit_rate();
        assert!(rate > 0.0);
        assert!(rate <= 1.0);
    }

    #[test]
    fn test_peak_usage() {
        let mut optimizer = MemoryOptimizer::new();

        let _small = optimizer.allocate(2);
        let peak_after_first = optimizer.stats().peak_usage;

        let _medium = optimizer.allocate(10);
        let peak_after_second = optimizer.stats().peak_usage;

        // Peak usage only grows.
        assert!(peak_after_second >= peak_after_first);
    }

    #[test]
    fn test_compact() {
        let mut optimizer = MemoryOptimizer::new();

        // Build up some free blocks, then compact; must not panic.
        for _ in 0..10 {
            let buf = optimizer.allocate(2);
            optimizer.free(buf, 2);
        }

        optimizer.compact();
    }

    #[test]
    fn test_clear_pools() {
        let mut optimizer = MemoryOptimizer::new();

        let _buf = optimizer.allocate(2);
        assert!(optimizer.stats().current_usage > 0);

        optimizer.clear_pools();
        assert_eq!(optimizer.stats().current_usage, 0);
    }

    #[test]
    fn test_usage_by_size_class() {
        let mut optimizer = MemoryOptimizer::new();

        let _binary = optimizer.allocate(2);
        let _medium = optimizer.allocate(10);

        let usage = optimizer.usage_by_size_class();
        assert!(usage.contains_key(&SizeClass::Binary));
        assert!(usage.contains_key(&SizeClass::Medium));
    }

    #[test]
    fn test_memory_action() {
        // A fresh optimizer is under no pressure; only benign actions apply.
        let optimizer = MemoryOptimizer::new();
        let action = optimizer.recommend_action();
        assert!(matches!(
            action,
            MemoryAction::None | MemoryAction::ExpandPools
        ));
    }

    #[test]
    fn test_prefetch() {
        // Smoke test: prefetching a valid address must not panic.
        let optimizer = MemoryOptimizer::new();
        let value = 42;
        optimizer.prefetch(&value);
    }

    #[test]
    fn test_set_prefetch() {
        let mut optimizer = MemoryOptimizer::new();
        optimizer.set_prefetch(false);
        assert!(!optimizer.enable_prefetch);

        optimizer.set_prefetch(true);
        assert!(optimizer.enable_prefetch);
    }

    #[test]
    fn test_avg_allocation_size() {
        let mut optimizer = MemoryOptimizer::new();

        let _a = optimizer.allocate(2);
        let _b = optimizer.allocate(10);

        assert!(optimizer.stats().avg_allocation_size() > 0.0);
    }

    #[test]
    fn test_reset_stats() {
        let mut optimizer = MemoryOptimizer::new();

        let _buf = optimizer.allocate(2);
        assert_eq!(optimizer.stats().allocations, 1);

        optimizer.reset_stats();
        assert_eq!(optimizer.stats().allocations, 0);
    }
}