use super::functions::*;
use crate::expr_util::lift_loose_bvars;
use std::collections::HashMap;
#[allow(dead_code)]
/// A simple `/`-separated path built from owned string components.
/// Unrelated to `std::path::PathBuf`.
pub struct PathBuf {
    components: Vec<String>,
}
#[allow(dead_code)]
impl PathBuf {
    /// Creates an empty path with no components.
    pub fn new() -> Self {
        PathBuf {
            components: Vec::new(),
        }
    }
    /// Appends one component to the end of the path.
    pub fn push(&mut self, comp: impl Into<String>) {
        let component = comp.into();
        self.components.push(component);
    }
    /// Drops the last component, if any.
    pub fn pop(&mut self) {
        let _ = self.components.pop();
    }
    /// Renders the path as a `/`-joined string.
    pub fn as_str(&self) -> String {
        self.components.join("/")
    }
    /// Number of components currently held.
    pub fn depth(&self) -> usize {
        self.components.len()
    }
    /// Removes every component.
    pub fn clear(&mut self) {
        self.components.clear();
    }
}
#[allow(dead_code)]
/// A type-erased function pointer recorded as a raw address plus metadata
/// (argument count and a display name).
pub struct RawFnPtr {
    ptr: usize,
    arity: usize,
    name: String,
}
#[allow(dead_code)]
impl RawFnPtr {
    /// Wraps a raw address together with its arity and name.
    pub fn new(ptr: usize, arity: usize, name: impl Into<String>) -> Self {
        RawFnPtr {
            ptr,
            arity,
            name: name.into(),
        }
    }
    /// Number of arguments the function expects.
    pub fn arity(&self) -> usize {
        self.arity
    }
    /// Display name of the function.
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
    /// The raw address value.
    pub fn raw(&self) -> usize {
        self.ptr
    }
}
#[allow(dead_code)]
/// Paired before/after measurement summaries for a transformation pass.
pub struct TransformStat {
    before: StatSummary,
    after: StatSummary,
}
#[allow(dead_code)]
impl TransformStat {
    /// Creates empty before/after summaries.
    pub fn new() -> Self {
        TransformStat {
            before: StatSummary::new(),
            after: StatSummary::new(),
        }
    }
    /// Records a measurement taken before the transformation.
    pub fn record_before(&mut self, v: f64) {
        self.before.record(v);
    }
    /// Records a measurement taken after the transformation.
    pub fn record_after(&mut self, v: f64) {
        self.after.record(v);
    }
    /// mean(after) / mean(before). `None` when either side has no samples
    /// or the before-mean is too close to zero to divide by.
    pub fn mean_ratio(&self) -> Option<f64> {
        let before_mean = self.before.mean()?;
        let after_mean = self.after.mean()?;
        if before_mean.abs() < f64::EPSILON {
            None
        } else {
            Some(after_mean / before_mean)
        }
    }
}
#[allow(dead_code)]
/// Token-bucket rate limiter: tokens accrue at `refill_per_ms` per whole
/// elapsed millisecond and are capped at `capacity`.
pub struct TokenBucket {
    capacity: u64,
    tokens: u64,
    refill_per_ms: u64,
    last_refill: std::time::Instant,
}
#[allow(dead_code)]
impl TokenBucket {
    /// Creates a bucket that starts full.
    pub fn new(capacity: u64, refill_per_ms: u64) -> Self {
        TokenBucket {
            capacity,
            tokens: capacity,
            refill_per_ms,
            last_refill: std::time::Instant::now(),
        }
    }
    /// Takes `n` tokens if available after refilling; returns whether the
    /// request was granted.
    pub fn try_consume(&mut self, n: u64) -> bool {
        self.refill();
        if self.tokens < n {
            return false;
        }
        self.tokens -= n;
        true
    }
    /// Credits tokens for whole milliseconds elapsed since the last refill;
    /// sub-millisecond elapses leave the timestamp untouched so no time is lost.
    fn refill(&mut self) {
        let now = std::time::Instant::now();
        let elapsed_ms = now.duration_since(self.last_refill).as_millis() as u64;
        if elapsed_ms == 0 {
            return;
        }
        let earned = elapsed_ms * self.refill_per_ms;
        self.tokens = self.capacity.min(self.tokens + earned);
        self.last_refill = now;
    }
    /// Tokens currently available (as of the last refill).
    pub fn available(&self) -> u64 {
        self.tokens
    }
    /// Maximum token count.
    pub fn capacity(&self) -> u64 {
        self.capacity
    }
}
#[allow(dead_code)]
pub struct SlidingSum {
window: Vec<f64>,
capacity: usize,
pos: usize,
sum: f64,
count: usize,
}
#[allow(dead_code)]
impl SlidingSum {
pub fn new(capacity: usize) -> Self {
Self {
window: vec![0.0; capacity],
capacity,
pos: 0,
sum: 0.0,
count: 0,
}
}
pub fn push(&mut self, val: f64) {
let oldest = self.window[self.pos];
self.sum -= oldest;
self.sum += val;
self.window[self.pos] = val;
self.pos = (self.pos + 1) % self.capacity;
if self.count < self.capacity {
self.count += 1;
}
}
pub fn sum(&self) -> f64 {
self.sum
}
pub fn mean(&self) -> Option<f64> {
if self.count == 0 {
None
} else {
Some(self.sum / self.count as f64)
}
}
pub fn count(&self) -> usize {
self.count
}
}
#[allow(dead_code)]
/// A node in a `.`-separated configuration tree: either a leaf with a value
/// or a section holding children.
pub struct ConfigNode {
    key: String,
    value: Option<String>,
    children: Vec<ConfigNode>,
}
#[allow(dead_code)]
impl ConfigNode {
    /// Creates a terminal node holding a value.
    pub fn leaf(key: impl Into<String>, value: impl Into<String>) -> Self {
        Self {
            key: key.into(),
            value: Some(value.into()),
            children: Vec::new(),
        }
    }
    /// Creates a valueless section node.
    pub fn section(key: impl Into<String>) -> Self {
        Self {
            key: key.into(),
            value: None,
            children: Vec::new(),
        }
    }
    /// Appends a child node.
    pub fn add_child(&mut self, child: ConfigNode) {
        self.children.push(child);
    }
    /// This node's key segment.
    pub fn key(&self) -> &str {
        &self.key
    }
    /// This node's value, if it is a leaf.
    pub fn value(&self) -> Option<&str> {
        self.value.as_deref()
    }
    /// Number of direct children.
    pub fn num_children(&self) -> usize {
        self.children.len()
    }
    /// Resolves a dotted path rooted at this node, e.g. `"app.db.host"`.
    /// The first segment must match this node's key.
    ///
    /// Previously this duplicated `lookup_relative` verbatim; it now
    /// delegates to it (identical behavior, single implementation).
    pub fn lookup(&self, path: &str) -> Option<&str> {
        self.lookup_relative(path)
    }
    /// Recursive worker: matches the head segment against this node's key,
    /// then descends into children with the remaining path. Returns the
    /// first child that resolves the rest of the path.
    fn lookup_relative(&self, path: &str) -> Option<&str> {
        let mut parts = path.splitn(2, '.');
        let head = parts.next()?;
        let tail = parts.next();
        if head != self.key {
            return None;
        }
        match tail {
            None => self.value.as_deref(),
            Some(rest) => self.children.iter().find_map(|c| c.lookup_relative(rest)),
        }
    }
}
#[allow(dead_code)]
/// Directed graph over nodes `0..n` stored as adjacency lists. Cycles are
/// representable; `topological_sort` reports them by returning `None`.
pub struct SimpleDag {
    edges: Vec<Vec<usize>>,
}
#[allow(dead_code)]
impl SimpleDag {
    /// Creates a graph with `n` nodes and no edges.
    pub fn new(n: usize) -> Self {
        SimpleDag {
            edges: vec![Vec::new(); n],
        }
    }
    /// Adds `from -> to`; silently ignored when `from` is out of range.
    pub fn add_edge(&mut self, from: usize, to: usize) {
        if let Some(succs) = self.edges.get_mut(from) {
            succs.push(to);
        }
    }
    /// Successor list of `node` (empty for out-of-range nodes).
    pub fn successors(&self, node: usize) -> &[usize] {
        match self.edges.get(node) {
            Some(v) => v.as_slice(),
            None => &[],
        }
    }
    /// Iterative depth-first reachability test; a node always reaches itself.
    pub fn can_reach(&self, from: usize, to: usize) -> bool {
        if from == to {
            return true;
        }
        let mut seen = vec![false; self.edges.len()];
        let mut stack = vec![from];
        while let Some(node) = stack.pop() {
            if node == to {
                return true;
            }
            if node >= seen.len() || seen[node] {
                continue;
            }
            seen[node] = true;
            stack.extend(self.successors(node).iter().copied());
        }
        false
    }
    /// Kahn's algorithm: returns the nodes in dependency order, or `None`
    /// when the graph contains a cycle.
    pub fn topological_sort(&self) -> Option<Vec<usize>> {
        let n = self.edges.len();
        let mut in_degree = vec![0usize; n];
        for succs in &self.edges {
            for &s in succs {
                if s < n {
                    in_degree[s] += 1;
                }
            }
        }
        // Seed with every node that has no prerequisites.
        let mut ready: std::collections::VecDeque<usize> =
            (0..n).filter(|&i| in_degree[i] == 0).collect();
        let mut order = Vec::with_capacity(n);
        while let Some(node) = ready.pop_front() {
            order.push(node);
            for &s in self.successors(node) {
                if s < n {
                    in_degree[s] -= 1;
                    if in_degree[s] == 0 {
                        ready.push_back(s);
                    }
                }
            }
        }
        // A cycle leaves some node with positive in-degree, so it never emits.
        if order.len() == n {
            Some(order)
        } else {
            None
        }
    }
    /// Node count.
    pub fn num_nodes(&self) -> usize {
        self.edges.len()
    }
}
#[allow(dead_code)]
/// A two-variant sum type, akin to `Either` in functional languages.
pub enum Either2<A, B> {
    First(A),
    Second(B),
}
#[allow(dead_code)]
impl<A, B> Either2<A, B> {
    /// True when this holds the `First` variant.
    pub fn is_first(&self) -> bool {
        matches!(self, Either2::First(_))
    }
    /// True when this holds the `Second` variant.
    pub fn is_second(&self) -> bool {
        !self.is_first()
    }
    /// Consumes self, yielding the `First` payload if present.
    pub fn first(self) -> Option<A> {
        if let Either2::First(a) = self {
            Some(a)
        } else {
            None
        }
    }
    /// Consumes self, yielding the `Second` payload if present.
    pub fn second(self) -> Option<B> {
        if let Either2::Second(b) = self {
            Some(b)
        } else {
            None
        }
    }
    /// Applies `f` to the `First` payload, passing `Second` through unchanged.
    pub fn map_first<C, F: FnOnce(A) -> C>(self, f: F) -> Either2<C, B> {
        match self {
            Either2::First(a) => Either2::First(f(a)),
            Either2::Second(b) => Either2::Second(b),
        }
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
/// Binary decision tree: leaves carry an action string, branches test one
/// context key against an expected value.
pub enum DecisionNode {
    Leaf(String),
    Branch {
        key: String,
        val: String,
        yes_branch: Box<DecisionNode>,
        no_branch: Box<DecisionNode>,
    },
}
#[allow(dead_code)]
impl DecisionNode {
    /// Walks the tree using `ctx` for branch tests (a missing key compares
    /// as the empty string) and returns the action at the reached leaf.
    pub fn evaluate(&self, ctx: &std::collections::HashMap<String, String>) -> &str {
        match self {
            DecisionNode::Leaf(action) => action,
            DecisionNode::Branch {
                key,
                val,
                yes_branch,
                no_branch,
            } => {
                let observed = ctx.get(key).map(String::as_str).unwrap_or("");
                let chosen = if observed == val.as_str() {
                    yes_branch
                } else {
                    no_branch
                };
                chosen.evaluate(ctx)
            }
        }
    }
    /// Longest root-to-leaf edge count; a bare leaf has depth 0.
    pub fn depth(&self) -> usize {
        match self {
            DecisionNode::Leaf(_) => 0,
            DecisionNode::Branch {
                yes_branch,
                no_branch,
                ..
            } => 1 + yes_branch.depth().max(no_branch.depth()),
        }
    }
}
#[allow(dead_code)]
/// Minimal integer RPN calculator over an explicit operand stack.
pub struct StackCalc {
    stack: Vec<i64>,
}
#[allow(dead_code)]
impl StackCalc {
    /// Creates a calculator with an empty stack.
    pub fn new() -> Self {
        StackCalc { stack: Vec::new() }
    }
    /// Pushes an operand.
    pub fn push(&mut self, n: i64) {
        self.stack.push(n);
    }
    /// Pops two operands and pushes their sum.
    /// Panics when fewer than two operands are on the stack.
    pub fn add(&mut self) {
        let rhs = self.stack.pop().expect("stack must have at least two values for add");
        let lhs = self.stack.pop().expect("stack must have at least two values for add");
        self.stack.push(lhs + rhs);
    }
    /// Pops two operands and pushes `lower - upper`.
    /// Panics when fewer than two operands are on the stack.
    pub fn sub(&mut self) {
        let rhs = self.stack.pop().expect("stack must have at least two values for sub");
        let lhs = self.stack.pop().expect("stack must have at least two values for sub");
        self.stack.push(lhs - rhs);
    }
    /// Pops two operands and pushes their product.
    /// Panics when fewer than two operands are on the stack.
    pub fn mul(&mut self) {
        let rhs = self.stack.pop().expect("stack must have at least two values for mul");
        let lhs = self.stack.pop().expect("stack must have at least two values for mul");
        self.stack.push(lhs * rhs);
    }
    /// Top of the stack without removing it.
    pub fn peek(&self) -> Option<i64> {
        self.stack.last().copied()
    }
    /// Current number of operands.
    pub fn depth(&self) -> usize {
        self.stack.len()
    }
}
#[allow(dead_code)]
/// Wall-clock timer that records split times in milliseconds.
pub struct Stopwatch {
    start: std::time::Instant,
    splits: Vec<f64>,
}
#[allow(dead_code)]
impl Stopwatch {
    /// Starts timing immediately.
    pub fn start() -> Self {
        Stopwatch {
            start: std::time::Instant::now(),
            splits: Vec::new(),
        }
    }
    /// Records the current elapsed time as a split.
    pub fn split(&mut self) {
        let ms = self.elapsed_ms();
        self.splits.push(ms);
    }
    /// Milliseconds elapsed since the stopwatch started.
    pub fn elapsed_ms(&self) -> f64 {
        self.start.elapsed().as_secs_f64() * 1000.0
    }
    /// All recorded splits, oldest first.
    pub fn splits(&self) -> &[f64] {
        self.splits.as_slice()
    }
    /// Number of recorded splits.
    pub fn num_splits(&self) -> usize {
        self.splits.len()
    }
}
/// Owns the three reduction/inference caches and exposes bulk maintenance
/// operations (clear, resize, statistics) over them.
pub struct CacheManager {
    pub(crate) whnf: WhnfCache,
    pub(crate) defeq: DefEqCache,
    pub(crate) infer: InferCache,
}
impl CacheManager {
    /// Builds a manager with default capacities (whnf 1024, defeq 512,
    /// infer 256) and WHNF transparency disabled.
    pub fn new() -> Self {
        Self::with_capacities(1024, 512, 256)
    }
    /// Builds a manager with explicit per-cache capacities.
    pub fn with_capacities(whnf_cap: usize, defeq_cap: usize, infer_cap: usize) -> Self {
        CacheManager {
            whnf: WhnfCache::new(whnf_cap, false),
            defeq: DefEqCache::new(defeq_cap),
            infer: InferCache::new(infer_cap),
        }
    }
    /// Mutable access to the WHNF cache.
    pub fn whnf_mut(&mut self) -> &mut WhnfCache {
        &mut self.whnf
    }
    /// Mutable access to the definitional-equality cache.
    pub fn defeq_mut(&mut self) -> &mut DefEqCache {
        &mut self.defeq
    }
    /// Mutable access to the type-inference cache.
    pub fn infer_mut(&mut self) -> &mut InferCache {
        &mut self.infer
    }
    /// Empties all three caches, keeping their capacities.
    pub fn clear_all(&mut self) {
        self.whnf.clear();
        self.defeq.clear();
        self.infer.clear();
    }
    /// Replaces all three caches with fresh ones of the given capacities;
    /// the current WHNF transparency mode is carried over.
    pub fn resize_all(&mut self, whnf_cap: usize, defeq_cap: usize, infer_cap: usize) {
        self.whnf = WhnfCache::new(whnf_cap, self.whnf.is_transparent());
        self.defeq = DefEqCache::new(defeq_cap);
        self.infer = InferCache::new(infer_cap);
    }
    /// Snapshot of hit/miss counters and hit rates across all caches.
    pub fn statistics(&self) -> CacheStatistics {
        let (whnf_hits, whnf_misses) = self.whnf.stats();
        let (defeq_hits, defeq_misses) = self.defeq.stats();
        let (infer_hits, infer_misses) = self.infer.stats();
        CacheStatistics {
            whnf_hits,
            whnf_misses,
            whnf_hit_rate: self.whnf.hit_rate(),
            defeq_hits,
            defeq_misses,
            defeq_hit_rate: self.defeq.hit_rate(),
            infer_hits,
            infer_misses,
            infer_hit_rate: self.infer.hit_rate(),
        }
    }
}
/// Simplified lambda-calculus-style expression used as a structural,
/// hashable cache key (see `SimplifiedExpr::hash`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SimplifiedExpr {
    /// A named variable.
    Var(String),
    /// Function application: function followed by its argument.
    App(Box<SimplifiedExpr>, Box<SimplifiedExpr>),
    /// Lambda abstraction: binder name and body.
    Lambda(String, Box<SimplifiedExpr>),
    /// Pi/function type: binder name, domain type, and body.
    Pi(String, Box<SimplifiedExpr>, Box<SimplifiedExpr>),
}
impl SimplifiedExpr {
    /// Computes a structural FNV-1a hash. Each variant contributes a
    /// distinct tag byte (Var=0, App=1, Lambda=2, Pi=3) followed by its
    /// payload: raw UTF-8 name bytes for variables/binders and the
    /// little-endian hashes of subexpressions.
    pub fn hash(&self) -> u64 {
        let mut buf: Vec<u8> = Vec::new();
        match self {
            SimplifiedExpr::Var(name) => {
                buf.push(0u8);
                buf.extend_from_slice(name.as_bytes());
            }
            SimplifiedExpr::App(f, arg) => {
                buf.push(1u8);
                buf.extend_from_slice(&f.hash().to_le_bytes());
                buf.extend_from_slice(&arg.hash().to_le_bytes());
            }
            SimplifiedExpr::Lambda(name, body) => {
                buf.push(2u8);
                buf.extend_from_slice(name.as_bytes());
                buf.extend_from_slice(&body.hash().to_le_bytes());
            }
            SimplifiedExpr::Pi(name, typ, body) => {
                buf.push(3u8);
                buf.extend_from_slice(name.as_bytes());
                buf.extend_from_slice(&typ.hash().to_le_bytes());
                buf.extend_from_slice(&body.hash().to_le_bytes());
            }
        }
        fnv1a_hash(&buf)
    }
}
#[allow(dead_code)]
/// Cursor over fixed-width windows of a borrowed slice: holds the slice,
/// the current offset, and the window width. Iteration logic lives with
/// the users of these fields (they are `pub(super)`).
pub struct WindowIterator<'a, T> {
    pub(super) data: &'a [T],
    pub(super) pos: usize,
    pub(super) window: usize,
}
#[allow(dead_code)]
impl<'a, T> WindowIterator<'a, T> {
    /// Creates a cursor positioned at the start of `data` with width `window`.
    pub fn new(data: &'a [T], window: usize) -> Self {
        WindowIterator {
            data,
            pos: 0,
            window,
        }
    }
}
#[allow(dead_code)]
/// Ordered collection of rewrite rules, searchable by name and
/// partitionable by conditionality.
pub struct RewriteRuleSet {
    rules: Vec<RewriteRule>,
}
#[allow(dead_code)]
impl RewriteRuleSet {
    /// Creates an empty rule set.
    pub fn new() -> Self {
        RewriteRuleSet { rules: Vec::new() }
    }
    /// Appends a rule, preserving insertion order.
    pub fn add(&mut self, rule: RewriteRule) {
        self.rules.push(rule);
    }
    /// Total number of rules.
    pub fn len(&self) -> usize {
        self.rules.len()
    }
    /// True when no rules have been added.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Rules guarded by a side condition, in insertion order.
    pub fn conditional_rules(&self) -> Vec<&RewriteRule> {
        self.select(true)
    }
    /// Rules that always apply, in insertion order.
    pub fn unconditional_rules(&self) -> Vec<&RewriteRule> {
        self.select(false)
    }
    /// Selects rules whose `conditional` flag matches.
    fn select(&self, conditional: bool) -> Vec<&RewriteRule> {
        self.rules
            .iter()
            .filter(|r| r.conditional == conditional)
            .collect()
    }
    /// First rule with the given name, if any.
    pub fn get(&self, name: &str) -> Option<&RewriteRule> {
        self.rules.iter().find(|r| r.name == name)
    }
}
pub struct LruCache<K: Clone + Eq + std::hash::Hash, V: Clone> {
capacity: usize,
map: HashMap<K, usize>,
nodes: Vec<Node<K, V>>,
head: Option<usize>,
tail: Option<usize>,
hits: u64,
misses: u64,
}
impl<K: Clone + Eq + std::hash::Hash, V: Clone> LruCache<K, V> {
pub fn new(capacity: usize) -> Self {
assert!(capacity > 0, "LRU cache capacity must be > 0");
LruCache {
capacity,
map: HashMap::new(),
nodes: Vec::new(),
head: None,
tail: None,
hits: 0,
misses: 0,
}
}
pub fn get(&mut self, key: &K) -> Option<V> {
if let Some(&idx) = self.map.get(key) {
self.hits += 1;
self.move_to_head(idx);
Some(self.nodes[idx].value.clone())
} else {
self.misses += 1;
None
}
}
pub fn insert(&mut self, key: K, value: V) {
if let Some(&idx) = self.map.get(&key) {
self.nodes[idx].value = value;
self.move_to_head(idx);
} else {
if self.nodes.len() >= self.capacity {
self.evict_lru();
}
let new_idx = self.nodes.len();
let node = Node {
key: key.clone(),
value,
prev: None,
next: self.head,
};
self.nodes.push(node);
self.map.insert(key, new_idx);
if let Some(old_head) = self.head {
self.nodes[old_head].prev = Some(new_idx);
}
self.head = Some(new_idx);
if self.tail.is_none() {
self.tail = Some(new_idx);
}
}
}
pub fn remove(&mut self, key: &K) -> Option<V> {
if let Some(&idx) = self.map.get(key) {
let node = &self.nodes[idx];
let prev = node.prev;
let next = node.next;
if let Some(p) = prev {
self.nodes[p].next = next;
} else {
self.tail = next;
}
if let Some(n) = next {
self.nodes[n].prev = prev;
} else {
self.head = prev;
}
self.map.remove(key);
Some(self.nodes[idx].value.clone())
} else {
None
}
}
pub fn contains_key(&self, key: &K) -> bool {
self.map.contains_key(key)
}
pub fn len(&self) -> usize {
self.map.len()
}
pub fn is_empty(&self) -> bool {
self.map.is_empty()
}
pub fn capacity(&self) -> usize {
self.capacity
}
pub fn clear(&mut self) {
self.map.clear();
self.nodes.clear();
self.head = None;
self.tail = None;
self.hits = 0;
self.misses = 0;
}
pub fn stats(&self) -> (u64, u64) {
(self.hits, self.misses)
}
pub fn hit_rate(&self) -> f64 {
let total = self.hits + self.misses;
if total == 0 {
0.0
} else {
(self.hits as f64 / total as f64) * 100.0
}
}
fn move_to_head(&mut self, idx: usize) {
if self.head == Some(idx) {
return;
}
let prev = self.nodes[idx].prev;
let next = self.nodes[idx].next;
if let Some(p) = prev {
self.nodes[p].next = next;
}
if let Some(n) = next {
self.nodes[n].prev = prev;
} else {
self.tail = prev;
}
self.nodes[idx].prev = None;
self.nodes[idx].next = self.head;
if let Some(old_head) = self.head {
self.nodes[old_head].prev = Some(idx);
}
self.head = Some(idx);
}
fn evict_lru(&mut self) {
if let Some(tail_idx) = self.tail {
let key = self.nodes[tail_idx].key.clone();
let prev = self.nodes[tail_idx].prev;
if let Some(p) = prev {
self.nodes[p].next = None;
self.head = Some(p);
} else {
self.head = None;
}
self.tail = prev;
self.map.remove(&key);
self.nodes.remove(tail_idx);
self.nodes.iter().enumerate().for_each(|(i, node)| {
*self
.map
.get_mut(&node.key)
.expect("node key must exist in map") = i;
});
}
}
}
/// Approximate Bloom filter with two hash probes per key (k = 2).
/// `might_contain` can report false positives but never false negatives.
pub struct BloomFilterApprox {
    bits: Vec<bool>,
    size: usize,
}
impl BloomFilterApprox {
    /// Creates a filter with `size` bits.
    ///
    /// # Panics
    /// Panics if `size` is zero — both probe functions reduce modulo `size`,
    /// which would otherwise panic with an opaque division-by-zero on the
    /// first `insert`.
    pub fn new(size: usize) -> Self {
        assert!(size > 0, "Bloom filter size must be > 0");
        Self {
            bits: vec![false; size],
            size,
        }
    }
    /// First probe: raw FNV-1a hash reduced modulo the bit count.
    fn hash1(data: &[u8], size: usize) -> usize {
        fnv1a_hash(data) as usize % size
    }
    /// Second probe: FNV-1a hash scrambled by a 64-bit odd multiplier to
    /// decorrelate it from `hash1`.
    fn hash2(data: &[u8], size: usize) -> usize {
        let h = fnv1a_hash(data).wrapping_mul(0x9e3779b9_7f4a7c15);
        h as usize % size
    }
    /// Sets both probe bits for `key`.
    pub fn insert<T: AsRef<[u8]>>(&mut self, key: T) {
        let bytes = key.as_ref();
        self.bits[Self::hash1(bytes, self.size)] = true;
        self.bits[Self::hash2(bytes, self.size)] = true;
    }
    /// Returns `false` when `key` is definitely absent; `true` may be a
    /// false positive.
    pub fn might_contain<T: AsRef<[u8]>>(&self, key: T) -> bool {
        let bytes = key.as_ref();
        self.bits[Self::hash1(bytes, self.size)] && self.bits[Self::hash2(bytes, self.size)]
    }
    /// Clears every bit.
    pub fn clear(&mut self) {
        self.bits.iter_mut().for_each(|b| *b = false);
    }
    /// Number of bits currently set (occupancy).
    pub fn set_bit_count(&self) -> usize {
        self.bits.iter().filter(|&&b| b).count()
    }
    /// Total number of bits.
    pub fn size(&self) -> usize {
        self.size
    }
}
/// Two-tier cache: a small L1 in front of a larger L2. Hits in L2 are
/// promoted into L1.
pub struct MultiLevelCache<K: Clone + Eq + std::hash::Hash, V: Clone> {
    l1: LruCache<K, V>,
    l2: LruCache<K, V>,
    l1_hits: u64,
    l2_hits: u64,
    misses: u64,
}
impl<K: Clone + Eq + std::hash::Hash, V: Clone> MultiLevelCache<K, V> {
    /// Creates both tiers with the given capacities.
    pub fn new(l1_cap: usize, l2_cap: usize) -> Self {
        MultiLevelCache {
            l1: LruCache::new(l1_cap),
            l2: LruCache::new(l2_cap),
            l1_hits: 0,
            l2_hits: 0,
            misses: 0,
        }
    }
    /// Checks L1 first, then L2; an L2 hit is copied into L1.
    pub fn get(&mut self, key: &K) -> Option<V> {
        if let Some(hit) = self.l1.get(key) {
            self.l1_hits += 1;
            return Some(hit);
        }
        match self.l2.get(key) {
            Some(hit) => {
                self.l2_hits += 1;
                self.l1.insert(key.clone(), hit.clone());
                Some(hit)
            }
            None => {
                self.misses += 1;
                None
            }
        }
    }
    /// Writes through to both tiers.
    pub fn insert(&mut self, key: K, value: V) {
        self.l1.insert(key.clone(), value.clone());
        self.l2.insert(key, value);
    }
    /// Writes only to L2 (entry is promoted to L1 on first hit).
    pub fn insert_l2_only(&mut self, key: K, value: V) {
        self.l2.insert(key, value);
    }
    /// Empties L1 only, keeping L2 and all counters.
    pub fn clear_l1(&mut self) {
        self.l1.clear();
    }
    /// Empties both tiers and resets all counters.
    pub fn clear_all(&mut self) {
        self.l1.clear();
        self.l2.clear();
        self.l1_hits = 0;
        self.l2_hits = 0;
        self.misses = 0;
    }
    /// Lookups served by L1.
    pub fn l1_hits(&self) -> u64 {
        self.l1_hits
    }
    /// Lookups served by L2.
    pub fn l2_hits(&self) -> u64 {
        self.l2_hits
    }
    /// Lookups that missed both tiers.
    pub fn misses(&self) -> u64 {
        self.misses
    }
    /// Total lookups seen.
    pub fn total_requests(&self) -> u64 {
        self.l1_hits + self.l2_hits + self.misses
    }
    /// Combined hit percentage across both tiers (0.0 before any lookups).
    pub fn hit_rate(&self) -> f64 {
        let total = self.total_requests();
        if total == 0 {
            0.0
        } else {
            ((self.l1_hits + self.l2_hits) as f64 / total as f64) * 100.0
        }
    }
}
/// Cache of definitional-equality results keyed by expression hash pairs.
/// Results are stored under both orientations so lookups are symmetric.
pub struct DefEqCache {
    pub(crate) cache: LruCache<(u64, u64), bool>,
}
impl DefEqCache {
    /// Creates a cache bounded to `capacity` hash pairs.
    pub fn new(capacity: usize) -> Self {
        DefEqCache {
            cache: LruCache::new(capacity),
        }
    }
    /// Returns a cached result for the pair, trying both orientations.
    pub fn check_cache(&mut self, expr1: &SimplifiedExpr, expr2: &SimplifiedExpr) -> Option<bool> {
        let (h1, h2) = (expr1.hash(), expr2.hash());
        if let Some(result) = self.cache.get(&(h1, h2)) {
            return Some(result);
        }
        self.cache.get(&(h2, h1))
    }
    /// Records `result` under both orientations so later queries match
    /// regardless of argument order.
    pub fn store_result(&mut self, expr1: &SimplifiedExpr, expr2: &SimplifiedExpr, result: bool) {
        let (h1, h2) = (expr1.hash(), expr2.hash());
        self.cache.insert((h1, h2), result);
        self.cache.insert((h2, h1), result);
    }
    /// Drops all entries.
    pub fn clear(&mut self) {
        self.cache.clear();
    }
    /// (hits, misses) counters of the underlying LRU.
    pub fn stats(&self) -> (u64, u64) {
        self.cache.stats()
    }
    /// Hit percentage of the underlying LRU.
    pub fn hit_rate(&self) -> f64 {
        self.cache.hit_rate()
    }
}
#[allow(dead_code)]
/// LIFO stack of focus targets; the top item is the currently-focused one.
pub struct FocusStack<T> {
    items: Vec<T>,
}
#[allow(dead_code)]
impl<T> FocusStack<T> {
    /// Creates an empty focus stack.
    pub fn new() -> Self {
        FocusStack { items: Vec::new() }
    }
    /// Pushes a new focus target on top.
    pub fn focus(&mut self, item: T) {
        self.items.push(item);
    }
    /// Pops and returns the current focus, if any.
    pub fn blur(&mut self) -> Option<T> {
        self.items.pop()
    }
    /// The currently-focused item, if any.
    pub fn current(&self) -> Option<&T> {
        self.items.last()
    }
    /// Number of stacked focus targets.
    pub fn depth(&self) -> usize {
        self.items.len()
    }
    /// True when nothing is focused.
    pub fn is_empty(&self) -> bool {
        self.depth() == 0
    }
}
#[allow(dead_code)]
/// Streaming summary (count/sum/min/max) of recorded samples.
pub struct StatSummary {
    count: u64,
    sum: f64,
    min: f64,
    max: f64,
}
#[allow(dead_code)]
impl StatSummary {
    /// Creates an empty summary; min/max start at the infinities so the
    /// first sample replaces them.
    pub fn new() -> Self {
        StatSummary {
            count: 0,
            sum: 0.0,
            min: f64::INFINITY,
            max: f64::NEG_INFINITY,
        }
    }
    /// Folds one sample into the summary.
    pub fn record(&mut self, val: f64) {
        self.count += 1;
        self.sum += val;
        // Plain comparisons (not f64::min/max) keep the original NaN behavior.
        if val < self.min {
            self.min = val;
        }
        if val > self.max {
            self.max = val;
        }
    }
    /// Arithmetic mean; `None` when nothing was recorded.
    pub fn mean(&self) -> Option<f64> {
        match self.count {
            0 => None,
            n => Some(self.sum / n as f64),
        }
    }
    /// Smallest recorded sample; `None` when nothing was recorded.
    pub fn min(&self) -> Option<f64> {
        match self.count {
            0 => None,
            _ => Some(self.min),
        }
    }
    /// Largest recorded sample; `None` when nothing was recorded.
    pub fn max(&self) -> Option<f64> {
        match self.count {
            0 => None,
            _ => Some(self.max),
        }
    }
    /// Number of recorded samples.
    pub fn count(&self) -> u64 {
        self.count
    }
}
/// A cached value paired with the logical tick at which it expires.
/// An entry is live while `step < expires_at` (see `TtlCache::get`).
#[derive(Clone, Debug)]
struct TtlEntry<V> {
    // The stored value, cloned out by `TtlCache::get` while unexpired.
    value: V,
    // Expiry deadline in `TtlCache::step` units (exclusive).
    expires_at: u64,
}
#[allow(dead_code)]
/// Map-backed vector that materializes only entries differing from
/// `T::default()`.
pub struct SparseVec<T: Default + Clone + PartialEq> {
    entries: std::collections::HashMap<usize, T>,
    default_: T,
    logical_len: usize,
}
#[allow(dead_code)]
impl<T: Default + Clone + PartialEq> SparseVec<T> {
    /// Creates a sparse vector of logical length `len`, all defaults.
    pub fn new(len: usize) -> Self {
        SparseVec {
            entries: std::collections::HashMap::new(),
            default_: T::default(),
            logical_len: len,
        }
    }
    /// Writes `val` at `idx`; storing the default value releases the slot.
    pub fn set(&mut self, idx: usize, val: T) {
        if val == self.default_ {
            self.entries.remove(&idx);
            return;
        }
        self.entries.insert(idx, val);
    }
    /// Reads the value at `idx`, falling back to the shared default.
    pub fn get(&self, idx: usize) -> &T {
        match self.entries.get(&idx) {
            Some(v) => v,
            None => &self.default_,
        }
    }
    /// Logical length, independent of how many slots are materialized.
    pub fn len(&self) -> usize {
        self.logical_len
    }
    /// True when the logical length is zero.
    pub fn is_empty(&self) -> bool {
        self.logical_len == 0
    }
    /// Number of explicitly stored (non-default) entries.
    pub fn nnz(&self) -> usize {
        self.entries.len()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
/// A named lhs-to-rhs rewrite, optionally guarded by a side condition.
pub struct RewriteRule {
    pub name: String,
    pub lhs: String,
    pub rhs: String,
    pub conditional: bool,
}
#[allow(dead_code)]
impl RewriteRule {
    /// Builds a rule that always applies.
    pub fn unconditional(
        name: impl Into<String>,
        lhs: impl Into<String>,
        rhs: impl Into<String>,
    ) -> Self {
        Self::build(name, lhs, rhs, false)
    }
    /// Builds a rule guarded by a side condition.
    pub fn conditional(
        name: impl Into<String>,
        lhs: impl Into<String>,
        rhs: impl Into<String>,
    ) -> Self {
        Self::build(name, lhs, rhs, true)
    }
    /// Shared constructor backing both public builders.
    fn build(
        name: impl Into<String>,
        lhs: impl Into<String>,
        rhs: impl Into<String>,
        conditional: bool,
    ) -> Self {
        RewriteRule {
            name: name.into(),
            lhs: lhs.into(),
            rhs: rhs.into(),
            conditional,
        }
    }
    /// Human-readable one-line rendering of the rule.
    pub fn display(&self) -> String {
        format!("{}: {} → {}", self.name, self.lhs, self.rhs)
    }
}
/// Point-in-time hit/miss counters and hit rates for the three caches.
#[derive(Clone, Debug)]
pub struct CacheStatistics {
    pub whnf_hits: u64,
    pub whnf_misses: u64,
    pub whnf_hit_rate: f64,
    pub defeq_hits: u64,
    pub defeq_misses: u64,
    pub defeq_hit_rate: f64,
    pub infer_hits: u64,
    pub infer_misses: u64,
    pub infer_hit_rate: f64,
}
impl CacheStatistics {
    /// Sum of hits across all three caches.
    pub fn total_hits(&self) -> u64 {
        self.whnf_hits + self.defeq_hits + self.infer_hits
    }
    /// Sum of misses across all three caches.
    pub fn total_misses(&self) -> u64 {
        self.whnf_misses + self.defeq_misses + self.infer_misses
    }
    /// Combined hit percentage (0.0 when nothing was requested).
    pub fn overall_hit_rate(&self) -> f64 {
        let total = self.total_hits() + self.total_misses();
        if total == 0 {
            return 0.0;
        }
        (self.total_hits() as f64 / total as f64) * 100.0
    }
}
#[allow(dead_code)]
/// Ordered list of string replacements applied sequentially, so later
/// pairs see the output of earlier ones.
pub struct FlatSubstitution {
    pairs: Vec<(String, String)>,
}
#[allow(dead_code)]
impl FlatSubstitution {
    /// Creates an empty substitution.
    pub fn new() -> Self {
        FlatSubstitution { pairs: Vec::new() }
    }
    /// Appends a `from -> to` replacement pair.
    pub fn add(&mut self, from: impl Into<String>, to: impl Into<String>) {
        self.pairs.push((from.into(), to.into()));
    }
    /// Applies every pair in insertion order to `s`, returning the result.
    pub fn apply(&self, s: &str) -> String {
        self.pairs
            .iter()
            .fold(s.to_string(), |acc, (from, to)| {
                acc.replace(from.as_str(), to.as_str())
            })
    }
    /// Number of replacement pairs.
    pub fn len(&self) -> usize {
        self.pairs.len()
    }
    /// True when no pairs have been added.
    pub fn is_empty(&self) -> bool {
        self.pairs.is_empty()
    }
}
#[allow(dead_code)]
/// A vector guaranteed by construction to hold at least one element.
pub struct NonEmptyVec<T> {
    head: T,
    tail: Vec<T>,
}
#[allow(dead_code)]
impl<T> NonEmptyVec<T> {
    /// Creates a vector containing exactly `val`.
    pub fn singleton(val: T) -> Self {
        NonEmptyVec {
            head: val,
            tail: Vec::new(),
        }
    }
    /// Appends an element after the existing ones.
    pub fn push(&mut self, val: T) {
        self.tail.push(val);
    }
    /// The first element (always present).
    pub fn first(&self) -> &T {
        &self.head
    }
    /// The most recently pushed element, or the head if none were pushed.
    pub fn last(&self) -> &T {
        match self.tail.last() {
            Some(t) => t,
            None => &self.head,
        }
    }
    /// Total element count (always at least 1).
    pub fn len(&self) -> usize {
        self.tail.len() + 1
    }
    /// Always false: at least one element exists by construction.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Borrowed snapshot of all elements in order.
    pub fn to_vec(&self) -> Vec<&T> {
        std::iter::once(&self.head).chain(self.tail.iter()).collect()
    }
}
/// Caches inferred types keyed by the structural hash of the expression.
pub struct InferCache {
    pub(crate) cache: LruCache<u64, SimplifiedExpr>,
}
impl InferCache {
    /// Creates a cache bounded to `capacity` entries.
    pub fn new(capacity: usize) -> Self {
        InferCache {
            cache: LruCache::new(capacity),
        }
    }
    /// Returns the cached type for `expr`, if any.
    pub fn lookup(&mut self, expr: &SimplifiedExpr) -> Option<SimplifiedExpr> {
        self.cache.get(&expr.hash())
    }
    /// Caches `inferred_type` as the type of `expr`.
    pub fn store(&mut self, expr: &SimplifiedExpr, inferred_type: SimplifiedExpr) {
        self.cache.insert(expr.hash(), inferred_type);
    }
    /// Drops all entries.
    pub fn clear(&mut self) {
        self.cache.clear();
    }
    /// (hits, misses) counters of the underlying LRU.
    pub fn stats(&self) -> (u64, u64) {
        self.cache.stats()
    }
    /// Hit percentage of the underlying LRU.
    pub fn hit_rate(&self) -> f64 {
        self.cache.hit_rate()
    }
    /// Number of cached entries.
    pub fn len(&self) -> usize {
        self.cache.len()
    }
    /// True when nothing is cached.
    pub fn is_empty(&self) -> bool {
        self.cache.is_empty()
    }
}
#[allow(dead_code)]
/// Recycles `String` allocations: returned buffers are cleared and pooled
/// so `take` can hand back one with its capacity intact.
pub struct StringPool {
    free: Vec<String>,
}
#[allow(dead_code)]
impl StringPool {
    /// Creates a pool with no pooled buffers.
    pub fn new() -> Self {
        StringPool { free: Vec::new() }
    }
    /// Hands out a pooled buffer, or a fresh empty `String` when none is free.
    pub fn take(&mut self) -> String {
        match self.free.pop() {
            Some(s) => s,
            None => String::new(),
        }
    }
    /// Returns a buffer to the pool; its contents are cleared but its
    /// allocation is retained.
    pub fn give(&mut self, mut s: String) {
        s.clear();
        self.free.push(s);
    }
    /// Number of buffers currently available for reuse.
    pub fn free_count(&self) -> usize {
        self.free.len()
    }
}
/// Caches WHNF results keyed by expression hash. In transparency mode
/// caching is disabled entirely: lookups always miss and stores are
/// ignored.
pub struct WhnfCache {
    pub(crate) cache: LruCache<u64, SimplifiedExpr>,
    transparency_mode: bool,
}
impl WhnfCache {
    /// Creates a cache bounded to `capacity`, optionally in transparency mode.
    pub fn new(capacity: usize, transparency_mode: bool) -> Self {
        WhnfCache {
            cache: LruCache::new(capacity),
            transparency_mode,
        }
    }
    /// Returns the cached result for `expr`; always `None` in transparency mode.
    pub fn lookup(&mut self, expr: &SimplifiedExpr) -> Option<SimplifiedExpr> {
        if self.transparency_mode {
            return None;
        }
        self.cache.get(&expr.hash())
    }
    /// Caches `whnf` for `expr`; dropped in transparency mode.
    pub fn store(&mut self, expr: &SimplifiedExpr, whnf: SimplifiedExpr) {
        if self.transparency_mode {
            return;
        }
        self.cache.insert(expr.hash(), whnf);
    }
    /// Drops all entries.
    pub fn clear(&mut self) {
        self.cache.clear();
    }
    /// (hits, misses) counters of the underlying LRU.
    pub fn stats(&self) -> (u64, u64) {
        self.cache.stats()
    }
    /// Hit percentage of the underlying LRU.
    pub fn hit_rate(&self) -> f64 {
        self.cache.hit_rate()
    }
    /// Switches transparency mode; entering it invalidates all entries.
    pub fn set_transparency(&mut self, mode: bool) {
        self.transparency_mode = mode;
        if mode {
            self.cache.clear();
        }
    }
    /// Whether transparency mode is active.
    pub fn is_transparent(&self) -> bool {
        self.transparency_mode
    }
}
#[allow(dead_code)]
/// Append-only history of values; version numbers index into the history,
/// with version 0 being the initial value.
pub struct VersionedRecord<T: Clone> {
    history: Vec<T>,
}
#[allow(dead_code)]
impl<T: Clone> VersionedRecord<T> {
    /// Creates a record whose version 0 is `initial`.
    pub fn new(initial: T) -> Self {
        VersionedRecord {
            history: vec![initial],
        }
    }
    /// Appends `val` as the newest version.
    pub fn update(&mut self, val: T) {
        self.history.push(val);
    }
    /// The newest value.
    pub fn current(&self) -> &T {
        self.history
            .last()
            .expect("VersionedRecord history is always non-empty after construction")
    }
    /// The value at version `n`, if that version exists.
    pub fn at_version(&self, n: usize) -> Option<&T> {
        self.history.get(n)
    }
    /// The newest version number (0 for a fresh record).
    pub fn version(&self) -> usize {
        self.history.len() - 1
    }
    /// True once at least one update has been applied.
    pub fn has_history(&self) -> bool {
        self.history.len() > 1
    }
}
/// Cache with per-entry time-to-live measured in logical ticks: the caller
/// advances time via `tick`/`tick_n`, and entries expire once the current
/// step reaches their deadline.
pub struct TtlCache<K: Clone + Eq + std::hash::Hash, V: Clone> {
    entries: HashMap<K, TtlEntry<V>>,
    step: u64,
    default_ttl: u64,
}
impl<K: Clone + Eq + std::hash::Hash, V: Clone> TtlCache<K, V> {
    /// Creates an empty cache where plain `insert` uses `default_ttl` ticks.
    pub fn new(default_ttl: u64) -> Self {
        Self {
            entries: HashMap::new(),
            step: 0,
            default_ttl,
        }
    }
    /// Advances logical time by one tick.
    pub fn tick(&mut self) {
        self.step += 1;
    }
    /// Advances logical time by `n` ticks.
    pub fn tick_n(&mut self, n: u64) {
        self.step += n;
    }
    /// Inserts with the default time-to-live.
    pub fn insert(&mut self, key: K, value: V) {
        self.insert_with_ttl(key, value, self.default_ttl);
    }
    /// Inserts with an explicit time-to-live of `ttl` ticks from now.
    pub fn insert_with_ttl(&mut self, key: K, value: V, ttl: u64) {
        let entry = TtlEntry {
            value,
            expires_at: self.step + ttl,
        };
        self.entries.insert(key, entry);
    }
    /// Returns a clone of the value if the entry has not expired.
    pub fn get(&self, key: &K) -> Option<V> {
        let entry = self.entries.get(key)?;
        if self.step < entry.expires_at {
            Some(entry.value.clone())
        } else {
            None
        }
    }
    /// Physically removes every expired entry.
    pub fn purge_expired(&mut self) {
        let step = self.step;
        self.entries.retain(|_, e| step < e.expires_at);
    }
    /// Number of stored entries, expired ones included until purged.
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// True when no entries are stored.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// The current logical tick.
    pub fn current_step(&self) -> u64 {
        self.step
    }
    /// Drops every entry without touching the clock.
    pub fn clear(&mut self) {
        self.entries.clear();
    }
}
#[allow(dead_code)]
/// Single-assignment cell for `Copy` values: the first `write` wins and
/// every later one is rejected.
pub struct WriteOnce<T> {
    value: std::cell::Cell<Option<T>>,
}
#[allow(dead_code)]
impl<T: Copy> WriteOnce<T> {
    /// Creates an unwritten cell.
    pub fn new() -> Self {
        WriteOnce {
            value: std::cell::Cell::new(None),
        }
    }
    /// Stores `val` if the cell is still empty; returns whether it was stored.
    pub fn write(&self, val: T) -> bool {
        match self.value.get() {
            Some(_) => false,
            None => {
                self.value.set(Some(val));
                true
            }
        }
    }
    /// The stored value, if one was written.
    pub fn read(&self) -> Option<T> {
        self.value.get()
    }
    /// True once a value has been written.
    pub fn is_written(&self) -> bool {
        self.read().is_some()
    }
}
/// Intrusive doubly-linked-list node stored in `LruCache::nodes`.
/// `prev`/`next` are indices into that vector (`None` at the list ends);
/// per `LruCache::insert`, `prev` points toward the most-recently-used
/// head and `next` toward the least-recently-used tail.
#[derive(Clone, Debug)]
struct Node<K, V> {
    // Cached key, kept here so eviction can remove the map entry.
    key: K,
    // Cached value, cloned out on lookup.
    value: V,
    prev: Option<usize>,
    next: Option<usize>,
}
#[allow(dead_code)]
/// Insertion-ordered set of unique string labels.
pub struct LabelSet {
    labels: Vec<String>,
}
#[allow(dead_code)]
impl LabelSet {
    /// Creates an empty label set.
    pub fn new() -> Self {
        LabelSet { labels: Vec::new() }
    }
    /// Adds `label` unless an equal one is already present.
    pub fn add(&mut self, label: impl Into<String>) {
        let candidate = label.into();
        let already_present = self.labels.iter().any(|l| *l == candidate);
        if !already_present {
            self.labels.push(candidate);
        }
    }
    /// True when `label` is in the set.
    pub fn has(&self, label: &str) -> bool {
        self.labels.iter().any(|l| l.as_str() == label)
    }
    /// Number of distinct labels.
    pub fn count(&self) -> usize {
        self.labels.len()
    }
    /// All labels in insertion order.
    pub fn all(&self) -> &[String] {
        self.labels.as_slice()
    }
}
#[allow(dead_code)]
/// Adjacency-list digraph answering reachability queries via BFS.
pub struct TransitiveClosure {
    adj: Vec<Vec<usize>>,
    n: usize,
}
#[allow(dead_code)]
impl TransitiveClosure {
    /// Creates a graph with `n` nodes and no edges.
    pub fn new(n: usize) -> Self {
        Self {
            adj: vec![Vec::new(); n],
            n,
        }
    }
    /// Adds `from -> to`; silently ignored when `from` is out of range.
    pub fn add_edge(&mut self, from: usize, to: usize) {
        if from < self.n {
            self.adj[from].push(to);
        }
    }
    /// All nodes reachable from `start` (including `start` itself when it is
    /// in range), in ascending order.
    pub fn reachable_from(&self, start: usize) -> Vec<usize> {
        let mut seen = vec![false; self.n];
        let mut frontier = std::collections::VecDeque::new();
        frontier.push_back(start);
        while let Some(node) = frontier.pop_front() {
            if node >= self.n || seen[node] {
                continue;
            }
            seen[node] = true;
            frontier.extend(self.adj[node].iter().copied());
        }
        seen.iter()
            .enumerate()
            .filter(|&(_, &reached)| reached)
            .map(|(i, _)| i)
            .collect()
    }
    /// True when `to` is reachable from `from`.
    pub fn can_reach(&self, from: usize, to: usize) -> bool {
        self.reachable_from(from).contains(&to)
    }
}
#[allow(dead_code)]
/// Sorted-vector map; binary search keeps lookups O(log n) while staying
/// cache-friendly for small entry counts.
pub struct SmallMap<K: Ord + Clone, V: Clone> {
    entries: Vec<(K, V)>,
}
#[allow(dead_code)]
impl<K: Ord + Clone, V: Clone> SmallMap<K, V> {
    /// Creates an empty map.
    pub fn new() -> Self {
        SmallMap {
            entries: Vec::new(),
        }
    }
    /// Inserts `key -> val`, overwriting any existing value for `key`.
    pub fn insert(&mut self, key: K, val: V) {
        match self.entries.binary_search_by(|(k, _)| k.cmp(&key)) {
            Ok(i) => self.entries[i].1 = val,
            Err(i) => self.entries.insert(i, (key, val)),
        }
    }
    /// Looks up the value stored under `key`.
    pub fn get(&self, key: &K) -> Option<&V> {
        let i = self.entries.binary_search_by(|(k, _)| k.cmp(key)).ok()?;
        Some(&self.entries[i].1)
    }
    /// Number of stored entries.
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// True when the map is empty.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// Borrowed keys in ascending order.
    pub fn keys(&self) -> Vec<&K> {
        self.entries.iter().map(|(k, _)| k).collect()
    }
    /// Borrowed values in key order.
    pub fn values(&self) -> Vec<&V> {
        self.entries.iter().map(|(_, v)| v).collect()
    }
}