mod preload;
pub use preload::*;
#[cfg(not(feature = "std"))]
use alloc::{string::ToString, vec::Vec};
use crate::error::{ContextError, Result};
use crate::protocol::RawData;
use xxhash_rust::xxh64::xxh64;
// `Map` abstracts over the backing map: HashMap when `std` is available,
// BTreeMap (alloc) otherwise. The two iterate in different orders, which is
// why serialization paths below sort keys explicitly.
#[cfg(feature = "std")]
type Map<K, V> = std::collections::HashMap<K, V>;
#[cfg(not(feature = "std"))]
type Map<K, V> = alloc::collections::BTreeMap<K, V>;
/// Visit `map`'s entries in ascending key order, regardless of the backend.
///
/// The previous std-only path emulated a sort with a repeated
/// minimum-key scan over the HashMap — O(n^2) comparisons. Collecting the
/// keys once and sorting them is O(n log n), and it behaves identically for
/// the BTreeMap backend (already sorted), so the cfg split is gone entirely.
#[inline]
fn for_each_sorted_u32<V, F: FnMut(u32, &V)>(map: &Map<u32, V>, mut f: F) {
    let mut keys: Vec<u32> = map.keys().copied().collect();
    keys.sort_unstable();
    for k in keys {
        if let Some(v) = map.get(&k) {
            f(k, v);
        }
    }
}
/// Serialize one per-source stats record into `out` at cursor `*w`,
/// advancing the cursor. Little-endian layout, in order: source id, count,
/// last_value, ema, ema_alpha, sum_sq_diff, mean, max_history (u32),
/// history length (u32), then each history value.
fn write_source_stats_into(out: &mut [u8], w: &mut usize, sid: u32, s: &SourceStats) {
    // Small cursor helper: copy bytes and advance the write position.
    let mut put = |bytes: &[u8]| {
        out[*w..*w + bytes.len()].copy_from_slice(bytes);
        *w += bytes.len();
    };
    put(&sid.to_le_bytes());
    put(&s.count.to_le_bytes());
    put(&s.last_value.to_le_bytes());
    put(&s.ema.to_le_bytes());
    put(&s.ema_alpha.to_le_bytes());
    put(&s.sum_sq_diff.to_le_bytes());
    put(&s.mean.to_le_bytes());
    put(&(s.max_history as u32).to_le_bytes());
    put(&(s.history.len() as u32).to_le_bytes());
    for v in &s.history {
        put(&v.to_le_bytes());
    }
}
/// Serialize one dictionary pattern into `out` at cursor `*w`, advancing the
/// cursor. Little-endian layout: code, data length (u16, capped), pattern
/// bytes, frequency, last_used, created_at.
fn write_pattern_into(out: &mut [u8], w: &mut usize, code: u32, p: &Pattern) {
    // Small cursor helper: copy bytes and advance the write position.
    let mut put = |bytes: &[u8]| {
        out[*w..*w + bytes.len()].copy_from_slice(bytes);
        *w += bytes.len();
    };
    // The length field is a u16, so longer payloads are truncated to fit.
    let data_len = p.data.len().min(u16::MAX as usize);
    put(&code.to_le_bytes());
    put(&(data_len as u16).to_le_bytes());
    put(&p.data[..data_len]);
    put(&p.frequency.to_le_bytes());
    put(&p.last_used.to_le_bytes());
    put(&p.created_at.to_le_bytes());
}
/// Hard cap on dictionary size (codes fit a u16 count in `export_full`).
pub const MAX_PATTERNS: usize = 65535;
/// Maximum byte length of a single pattern (`export_full` stores the length
/// in one byte).
pub const MAX_PATTERN_SIZE: usize = 255;
/// Default soft memory budget for a context, in bytes.
pub const DEFAULT_MEMORY_LIMIT: usize = 64 * 1024;
/// A predicted next value for one source, with a heuristic confidence.
#[derive(Debug, Clone, PartialEq)]
pub struct Prediction {
    // The predicted value.
    pub value: f64,
    // Heuristic confidence derived from observed variance (0.50..=0.95).
    pub confidence: f32,
    // Which model produced the prediction.
    pub model_type: PredictionModel,
}
/// Prediction strategies a context can report.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum PredictionModel {
    // Repeat the most recent observation (default; used for short histories).
    #[default]
    LastValue,
    // Exponential moving average of observations.
    MovingAverage,
    // Declared but not currently produced by `SourceStats::predict`.
    LinearRegression,
    // Declared but not currently produced by `SourceStats::predict`.
    Periodic,
}
/// Running statistics for a single source: Welford-style online
/// mean/variance, an EMA, and a bounded sliding window of recent values.
#[derive(Debug, Clone)]
struct SourceStats {
    // Most recently observed value.
    last_value: f64,
    // Exponential moving average of observations.
    ema: f64,
    // EMA smoothing factor; larger values track new samples faster.
    ema_alpha: f64,
    // Number of observations folded in so far.
    count: u64,
    // Welford M2 accumulator; sample variance = sum_sq_diff / (count - 1).
    sum_sq_diff: f64,
    // Running arithmetic mean.
    mean: f64,
    // Sliding window of recent values, oldest first.
    history: Vec<f64>,
    // Capacity cap for `history`.
    max_history: usize,
}
impl SourceStats {
    /// Fresh, empty statistics with the given history cap and EMA alpha.
    fn new(max_history: usize, ema_alpha: f64) -> Self {
        Self {
            last_value: 0.0,
            ema: 0.0,
            ema_alpha,
            count: 0,
            sum_sq_diff: 0.0,
            mean: 0.0,
            history: Vec::with_capacity(max_history),
            max_history,
        }
    }
    /// Fold one observation into the running statistics.
    fn observe(&mut self, value: f64) {
        self.count += 1;
        self.last_value = value;
        if self.count == 1 {
            // Seed the EMA with the first sample rather than decaying from 0.
            self.ema = value;
        } else {
            self.ema = self.ema_alpha * value + (1.0 - self.ema_alpha) * self.ema;
        }
        // Welford's online update: `delta` uses the pre-update mean,
        // `delta2` the post-update mean; their product accumulates into M2.
        let delta = value - self.mean;
        self.mean += delta / self.count as f64;
        let delta2 = value - self.mean;
        self.sum_sq_diff += delta * delta2;
        // Bounded history window. Guard against max_history == 0: the old
        // code reached `history.remove(0)` on an empty Vec in that
        // configuration, which panics. With a zero cap the window is simply
        // disabled.
        if self.max_history > 0 {
            if self.history.len() >= self.max_history {
                self.history.remove(0);
            }
            self.history.push(value);
        }
    }
    /// Heuristic prediction of the next value; `None` before any data.
    ///
    /// Confidence is banded from the Bessel-corrected sample variance; the
    /// model is last-value until 3 samples exist, then the EMA.
    fn predict(&self) -> Option<Prediction> {
        if self.count == 0 {
            return None;
        }
        // Sample variance; defined as 0 until two samples exist.
        let variance = if self.count > 1 {
            self.sum_sq_diff / (self.count - 1) as f64
        } else {
            0.0
        };
        let confidence = if variance < 0.001 {
            0.95
        } else if variance < 0.01 {
            0.85
        } else if variance < 0.1 {
            0.70
        } else {
            0.50
        };
        let (predicted_value, model_type) = if self.count < 3 {
            (self.last_value, PredictionModel::LastValue)
        } else {
            (self.ema, PredictionModel::MovingAverage)
        };
        Some(Prediction {
            value: predicted_value,
            confidence: confidence as f32,
            model_type,
        })
    }
    /// Mean of the most recent `window` history values (clamped to what is
    /// available). Returns `None` when the history is empty or `window` is
    /// zero — the old code divided by zero for `window == 0` and returned
    /// `Some(NaN)`.
    fn moving_average(&self, window: usize) -> Option<f64> {
        if window == 0 || self.history.is_empty() {
            return None;
        }
        let window = window.min(self.history.len());
        let start = self.history.len() - window;
        let sum: f64 = self.history[start..].iter().sum();
        Some(sum / window as f64)
    }
}
/// A learned byte pattern with usage bookkeeping for scoring and pruning.
#[derive(Debug, Clone, PartialEq)]
pub struct Pattern {
    // Raw pattern bytes (content identity; hashed with xxh64 for indexing).
    pub data: Vec<u8>,
    // Original numeric value when built via `numeric*`; `None` otherwise.
    pub value: Option<f64>,
    // How many times this pattern has been seen/registered.
    pub frequency: u64,
    // Logical timestamp of the last use (observation count, not wall time).
    pub last_used: u64,
    // Logical timestamp of creation.
    pub created_at: u64,
}
impl Pattern {
    /// New pattern with frequency 1 and zeroed timestamps.
    pub fn new(data: Vec<u8>) -> Self {
        Self::with_timestamp(data, 0)
    }
    /// New pattern stamped with a creation/last-used logical time.
    pub fn with_timestamp(data: Vec<u8>, timestamp: u64) -> Self {
        Self {
            data,
            value: None,
            frequency: 1,
            last_used: timestamp,
            created_at: timestamp,
        }
    }
    /// Numeric pattern: stores the big-endian bytes of `value` and remembers
    /// the value itself.
    pub fn numeric(value: f64) -> Self {
        Self::numeric_with_timestamp(value, 0)
    }
    /// Numeric pattern stamped with a creation/last-used logical time.
    pub fn numeric_with_timestamp(value: f64, timestamp: u64) -> Self {
        Self {
            data: value.to_be_bytes().to_vec(),
            value: Some(value),
            frequency: 1,
            last_used: timestamp,
            created_at: timestamp,
        }
    }
    /// Record one use of this pattern at `timestamp`.
    pub fn touch(&mut self, timestamp: u64) {
        self.frequency = self.frequency.saturating_add(1);
        self.last_used = timestamp;
    }
    /// Usefulness score: log-scaled frequency damped by recency.
    pub fn score(&self, current_time: u64) -> f64 {
        let age = current_time.saturating_sub(self.last_used) as f64;
        let recency = 1.0 / (1.0 + age / 1000.0);
        let shifted = self.frequency as f64 + 1.0;
        #[cfg(feature = "std")]
        let freq_score = shifted.ln();
        #[cfg(not(feature = "std"))]
        let freq_score = {
            // f64::ln is unavailable without std; approximate ln(x) as
            // floor(log2(x)) * ln(2).
            let bits = (63 - (shifted as u64).leading_zeros()) as f64;
            bits * core::f64::consts::LN_2
        };
        freq_score * recency
    }
}
/// Tunables for periodic dictionary evolution (pruning and renumbering).
#[derive(Debug, Clone)]
pub struct EvolutionConfig {
    // Patterns seen fewer times than this are pruned.
    pub min_frequency: u64,
    // Patterns unused for more than this many observations are pruned.
    pub max_age: u64,
    // Run evolution every N observations; 0 disables the automatic trigger.
    pub evolution_interval: u64,
    // Not read anywhere in this module; presumably for future promotion
    // logic — confirm before relying on it.
    pub promotion_threshold: u64,
    // Master switch for automatic evolution during `observe`.
    pub enabled: bool,
}
impl Default for EvolutionConfig {
    fn default() -> Self {
        Self {
            min_frequency: 2,
            max_age: 10000,
            evolution_interval: 100,
            promotion_threshold: 10,
            enabled: true,
        }
    }
}
/// Top-level configuration for a [`Context`].
#[derive(Debug, Clone)]
pub struct ContextConfig {
    // Upper bound on dictionary size, enforced by `register_pattern`.
    pub max_patterns: usize,
    // Memory budget in bytes; advisory only — not enforced in this file.
    pub max_memory: usize,
    // Per-source history window length.
    pub history_size: usize,
    // EMA smoothing factor for per-source statistics.
    pub ema_alpha: f64,
    // Evolution tunables.
    pub evolution: EvolutionConfig,
}
impl Default for ContextConfig {
    fn default() -> Self {
        Self {
            max_patterns: MAX_PATTERNS,
            max_memory: DEFAULT_MEMORY_LIMIT,
            history_size: 100,
            ema_alpha: 0.3,
            evolution: EvolutionConfig::default(),
        }
    }
}
/// Adaptive shared state: a code -> pattern dictionary with a content-hash
/// index, plus per-source running statistics.
#[derive(Debug, Clone)]
pub struct Context {
    // Version counter, bumped by observe/register/evolve/import.
    version: u32,
    // Total observations; doubles as the logical clock for pattern ages.
    observation_count: u64,
    // code -> pattern.
    dictionary: Map<u32, Pattern>,
    // xxh64(pattern bytes) -> code, for content-based dedup lookups.
    pattern_index: Map<u64, u32>,
    // Next code assigned to a freshly registered pattern.
    next_code: u32,
    // Per-source running statistics, keyed by source id.
    source_stats: Map<u32, SourceStats>,
    config: ContextConfig,
    // Fixed-point scale factor carried through serialization.
    scale_factor: u32,
}
impl Context {
/// Create a context with the default configuration.
pub fn new() -> Self {
    Self::with_config(ContextConfig::default())
}
/// Create a context with an explicit configuration.
pub fn with_config(config: ContextConfig) -> Self {
    Self {
        version: 0,
        observation_count: 0,
        dictionary: Map::new(),
        pattern_index: Map::new(),
        next_code: 0,
        source_stats: Map::new(),
        config,
        scale_factor: crate::DEFAULT_SCALE_FACTOR,
    }
}
/// Create a context with default configuration but custom evolution
/// settings.
pub fn with_evolution(evolution_config: EvolutionConfig) -> Self {
    Self::with_config(ContextConfig {
        evolution: evolution_config,
        ..Default::default()
    })
}
/// Total number of observations ever fed into this context.
pub fn observation_count(&self) -> u64 {
    self.observation_count
}
/// Current version counter (bumped by observe/register/evolve).
pub fn version(&self) -> u32 {
    self.version
}
/// Fixed-point scale factor carried by this context.
pub fn scale_factor(&self) -> u32 {
    self.scale_factor
}
/// Content hash of the dictionary: codes visited in ascending order, each
/// contributing its code, pattern length (u16), and pattern bytes, all
/// big-endian, folded through xxh64.
pub fn hash(&self) -> u64 {
    let mut buf = Vec::new();
    // Sort codes so the hash is stable under HashMap iteration order.
    let mut codes: Vec<u32> = self.dictionary.keys().copied().collect();
    codes.sort_unstable();
    for code in codes {
        if let Some(pattern) = self.dictionary.get(&code) {
            buf.extend_from_slice(&code.to_be_bytes());
            buf.extend_from_slice(&(pattern.data.len() as u16).to_be_bytes());
            buf.extend_from_slice(&pattern.data);
        }
    }
    xxh64(&buf, 0)
}
/// Number of registered dictionary patterns.
pub fn pattern_count(&self) -> usize {
    self.dictionary.len()
}
/// Number of sources with recorded statistics.
pub fn source_count(&self) -> usize {
    self.source_stats.len()
}
/// Rough estimate of heap memory used by this context, in bytes.
pub fn memory_usage(&self) -> usize {
    // Per pattern: payload plus ~32 bytes of bookkeeping overhead.
    let dict_size: usize = self
        .dictionary
        .values()
        .map(|p| p.data.len() + 32)
        .sum();
    // Per-source stats approximated at a flat 200 bytes each.
    let stats_size = self.source_stats.len() * 200;
    // Plus 256 bytes for the context's own fixed overhead.
    dict_size + stats_size + 256
}
/// Alias for [`Self::memory_usage`], kept for API compatibility.
pub fn estimated_memory(&self) -> usize {
    self.memory_usage()
}
/// Run one evolution pass: prune weak/stale patterns, renumber survivors
/// by score, and bump the context version.
pub fn evolve(&mut self) {
    // The observation count doubles as the logical clock for pattern ages.
    let current_time = self.observation_count;
    self.prune_patterns(current_time);
    self.reorder_patterns(current_time);
    self.version += 1;
}
/// Drop patterns that are too rare or too stale, removing their hash-index
/// entries as well.
fn prune_patterns(&mut self, current_time: u64) {
    let min_freq = self.config.evolution.min_frequency;
    let max_age = self.config.evolution.max_age;
    // Collect doomed codes first; we cannot mutate while iterating.
    let stale: Vec<u32> = self
        .dictionary
        .iter()
        .filter_map(|(&code, pattern)| {
            let age = current_time.saturating_sub(pattern.last_used);
            if pattern.frequency < min_freq || age > max_age {
                Some(code)
            } else {
                None
            }
        })
        .collect();
    for code in stale {
        if let Some(pattern) = self.dictionary.remove(&code) {
            self.pattern_index.remove(&xxh64(&pattern.data, 0));
        }
    }
}
/// Renumber all patterns densely from 0 in descending score order and
/// rebuild the content-hash index to match the new codes.
fn reorder_patterns(&mut self, current_time: u64) {
    if self.dictionary.is_empty() {
        return;
    }
    // Drain the dictionary into an owned (old_code, pattern) list.
    let keys: Vec<_> = self.dictionary.keys().copied().collect();
    let mut entries: Vec<_> = keys
        .into_iter()
        .filter_map(|k| self.dictionary.remove(&k).map(|v| (k, v)))
        .collect();
    // Highest score first; incomparable scores (NaN) compare as equal.
    entries.sort_by(|a, b| {
        b.1.score(current_time)
            .partial_cmp(&a.1.score(current_time))
            .unwrap_or(core::cmp::Ordering::Equal)
    });
    // Reassign codes 0..n in score order and reindex by content hash.
    self.pattern_index.clear();
    self.next_code = 0;
    for (_, pattern) in entries {
        let new_id = self.next_code;
        let hash = xxh64(&pattern.data, 0);
        self.pattern_index.insert(hash, new_id);
        self.dictionary.insert(new_id, pattern);
        self.next_code += 1;
    }
}
/// Make sure a stats entry exists for `source_id` without recording a
/// value.
pub fn ensure_source_stats(&mut self, source_id: u32) {
    // Copy config values out first so the closure below does not borrow
    // `self.config` while `self.source_stats` is mutably borrowed.
    let alpha = self.config.ema_alpha;
    let window = self.config.history_size;
    self.source_stats
        .entry(source_id)
        .or_insert_with(|| SourceStats::new(window, alpha));
}
/// Record one observation and run periodic evolution when due.
pub fn observe(&mut self, data: &RawData) {
    self.observation_count += 1;
    let alpha = self.config.ema_alpha;
    let window = self.config.history_size;
    self.source_stats
        .entry(data.source_id)
        .or_insert_with(|| SourceStats::new(window, alpha))
        .observe(data.value);
    self.version += 1;
    // Evolution fires every `evolution_interval` observations, if enabled.
    let enabled = self.config.evolution.enabled;
    let interval = self.config.evolution.evolution_interval;
    if enabled && interval > 0 && self.observation_count % interval == 0 {
        self.evolve();
    }
}
/// Predict the next value for a source; `None` if it has no observations.
pub fn predict(&self, source_id: u32) -> Option<Prediction> {
    self.source_stats.get(&source_id).and_then(|s| s.predict())
}
/// Most recent value observed for a source.
pub fn last_value(&self, source_id: u32) -> Option<f64> {
    Some(self.source_stats.get(&source_id)?.last_value)
}
/// Moving average over the last `window` history entries of a source.
pub fn moving_average(&self, source_id: u32, window: usize) -> Option<f64> {
    self.source_stats
        .get(&source_id)
        .and_then(|s| s.moving_average(window))
}
/// Register a pattern, deduplicating by content hash, and return its code.
///
/// Re-registering identical bytes returns the existing code and bumps that
/// pattern's frequency. Errors: `PatternTooLarge` for oversized data, and
/// `DictionaryFull` only when a genuinely *new* pattern would exceed
/// `max_patterns` — the old ordering checked capacity first, so
/// re-registering a known pattern on a full dictionary wrongly failed.
pub fn register_pattern(&mut self, pattern: Pattern) -> Result<u32> {
    if pattern.data.len() > MAX_PATTERN_SIZE {
        return Err(ContextError::PatternTooLarge {
            size: pattern.data.len(),
            max: MAX_PATTERN_SIZE,
        }
        .into());
    }
    // Dedup lookup before the capacity check (see doc comment above).
    let pattern_hash = xxh64(&pattern.data, 0);
    if let Some(&existing_code) = self.pattern_index.get(&pattern_hash) {
        if let Some(p) = self.dictionary.get_mut(&existing_code) {
            // saturating_add for consistency with `Pattern::touch`.
            p.frequency = p.frequency.saturating_add(1);
        }
        return Ok(existing_code);
    }
    if self.dictionary.len() >= self.config.max_patterns {
        return Err(ContextError::DictionaryFull {
            max: self.config.max_patterns,
        }
        .into());
    }
    let code = self.next_code;
    self.next_code += 1;
    self.pattern_index.insert(pattern_hash, code);
    self.dictionary.insert(code, pattern);
    self.version += 1;
    Ok(code)
}
/// Look up a pattern by its code.
pub fn get_pattern(&self, code: u32) -> Option<&Pattern> {
    self.dictionary.get(&code)
}
/// Find the code of a pattern with exactly these bytes, if registered.
pub fn find_pattern(&self, data: &[u8]) -> Option<u32> {
    self.pattern_index.get(&xxh64(data, 0)).copied()
}
/// Remove a pattern and its content-hash index entry.
pub fn remove_pattern(&mut self, id: u32) {
    if let Some(pattern) = self.dictionary.remove(&id) {
        self.pattern_index.remove(&xxh64(&pattern.data, 0));
    }
}
/// Insert or replace the pattern stored at `id`, keeping the content-hash
/// index and `next_code` consistent.
pub fn set_pattern(&mut self, id: u32, pattern: Pattern) {
    // If `id` previously held different bytes, drop that stale hash entry —
    // the old code left it behind, so `find_pattern` could still resolve the
    // replaced bytes to this id. Only remove the entry when it actually
    // points at `id`, in case another code owns the same hash.
    if let Some(old) = self.dictionary.get(&id) {
        let old_hash = xxh64(&old.data, 0);
        if self.pattern_index.get(&old_hash) == Some(&id) {
            self.pattern_index.remove(&old_hash);
        }
    }
    let hash = xxh64(&pattern.data, 0);
    self.pattern_index.insert(hash, id);
    self.dictionary.insert(id, pattern);
    if id >= self.next_code {
        self.next_code = id + 1;
    }
}
/// True when a pattern is stored under `id`.
pub fn has_pattern(&self, id: u32) -> bool {
    self.dictionary.contains_key(&id)
}
/// Iterate (code, pattern) pairs in the backing map's iteration order.
pub fn patterns_iter(&self) -> impl Iterator<Item = (&u32, &Pattern)> {
    self.dictionary.iter()
}
/// Iterate all assigned pattern codes.
pub fn pattern_ids(&self) -> impl Iterator<Item = u32> + '_ {
    self.dictionary.keys().copied()
}
/// Overwrite the context version counter.
pub fn set_version(&mut self, version: u32) {
    self.version = version;
}
/// Iterate the xxh64 content hashes of all registered patterns.
pub fn pattern_hashes(&self) -> impl Iterator<Item = u64> + '_ {
    self.pattern_index.keys().copied()
}
/// Serialize the full dictionary: version, content hash, pattern count
/// (u16), then (code, length byte, bytes) per pattern — all big-endian,
/// codes in ascending order.
///
/// Codes are sorted so exports are byte-for-byte deterministic; with the
/// std HashMap backend, plain iteration order varies between runs.
/// `import_full` does not depend on entry order, so this is compatible.
/// NOTE(review): the length is written as one byte — holds for patterns
/// from `register_pattern` (capped at MAX_PATTERN_SIZE), but `set_pattern`
/// does not enforce that cap; confirm callers never exceed it.
pub fn export_full(&self) -> Vec<u8> {
    let mut data = Vec::new();
    data.extend_from_slice(&self.version.to_be_bytes());
    data.extend_from_slice(&self.hash().to_be_bytes());
    data.extend_from_slice(&(self.dictionary.len() as u16).to_be_bytes());
    let mut codes: Vec<u32> = self.dictionary.keys().copied().collect();
    codes.sort_unstable();
    for code in codes {
        if let Some(pattern) = self.dictionary.get(&code) {
            data.extend_from_slice(&code.to_be_bytes());
            data.push(pattern.data.len() as u8);
            data.extend_from_slice(&pattern.data);
        }
    }
    data
}
/// Diff export is not implemented; falls back to a full export.
pub fn export_diff(&self, _from_version: u32) -> Vec<u8> {
    self.export_full()
}
pub fn import_full(&mut self, data: &[u8]) -> Result<()> {
if data.len() < 14 {
return Err(ContextError::SyncFailed {
reason: "Data too short".to_string(),
}
.into());
}
let version = u32::from_be_bytes([data[0], data[1], data[2], data[3]]);
let hash = u64::from_be_bytes([
data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11],
]);
let count = u16::from_be_bytes([data[12], data[13]]) as usize;
self.dictionary.clear();
self.pattern_index.clear();
self.next_code = 0;
let mut offset = 14;
for _ in 0..count {
if offset + 5 > data.len() {
return Err(ContextError::SyncFailed {
reason: "Truncated data".to_string(),
}
.into());
}
let code = u32::from_be_bytes([
data[offset],
data[offset + 1],
data[offset + 2],
data[offset + 3],
]);
let len = data[offset + 4] as usize;
offset += 5;
if offset + len > data.len() {
return Err(ContextError::SyncFailed {
reason: "Truncated pattern data".to_string(),
}
.into());
}
let pattern_data = data[offset..offset + len].to_vec();
offset += len;
let pattern_hash = xxh64(&pattern_data, 0);
self.dictionary.insert(code, Pattern::new(pattern_data));
self.pattern_index.insert(pattern_hash, code);
if code >= self.next_code {
self.next_code = code + 1;
}
}
self.version = version;
let computed_hash = self.hash();
if computed_hash != hash {
return Err(ContextError::HashMismatch {
expected: hash,
actual: computed_hash,
}
.into());
}
Ok(())
}
/// Clear everything: patterns, index, stats, counters, and version.
pub fn reset(&mut self) {
    self.dictionary.clear();
    self.pattern_index.clear();
    self.source_stats.clear();
    self.next_code = 0;
    self.version = 0;
    self.observation_count = 0;
}
/// Forget per-source statistics while keeping the pattern dictionary,
/// version, and observation count.
pub fn reset_to_baseline(&mut self) {
    self.source_stats.clear();
}
/// True when the dictionary's content hash equals `expected_hash`.
pub fn verify(&self, expected_hash: u64) -> bool {
    self.hash() == expected_hash
}
/// The prediction model this context reports for itself.
///
/// Both arms of the previous `if self.source_stats.is_empty()` returned
/// `LastValue`, so the branch was dead code; collapsed to a single return.
/// Kept as a method so a real model-selection heuristic can slot in later.
pub fn model_type(&self) -> PredictionModel {
    PredictionModel::LastValue
}
#[cfg(feature = "std")]
/// Persist this context to disk via the preload file format, tagged with
/// `sensor_type`.
pub fn save_to_file(&self, path: &std::path::Path, sensor_type: &str) -> Result<()> {
    let preload = PreloadFile::from_context(self, sensor_type);
    preload.save_to_file(path)
}
#[cfg(feature = "std")]
/// Load a context previously written by [`Self::save_to_file`].
pub fn load_from_file(path: &std::path::Path) -> Result<Self> {
    let preload = PreloadFile::load_from_file(path)?;
    Self::from_preload(&preload)
}
#[cfg(feature = "std")]
/// Rebuild a context from a parsed preload file.
///
/// Only the dictionary and version are restored here; pattern timestamps
/// are zeroed and per-source statistics are not rebuilt.
fn from_preload(preload: &PreloadFile) -> Result<Self> {
    let mut ctx = Self::new();
    ctx.version = preload.context_version;
    for entry in &preload.dictionary {
        let pattern = Pattern {
            data: entry.pattern.clone(),
            value: None,
            // NOTE(review): the `as u64` / `as u32` casts suggest PreloadFile
            // stores different integer widths — confirm no truncation occurs.
            frequency: entry.frequency as u64,
            last_used: 0,
            created_at: 0,
        };
        let code = entry.code as u32;
        let hash = xxh64(&pattern.data, 0);
        ctx.pattern_index.insert(hash, code);
        ctx.dictionary.insert(code, pattern);
        if code >= ctx.next_code {
            ctx.next_code = code + 1;
        }
    }
    Ok(ctx)
}
/// Current context version (same value as [`Self::version`]).
pub fn context_version(&self) -> u32 {
    self.version
}
/// Compare a message's context version against ours.
pub fn check_version(&self, message_version: u32) -> VersionCheckResult {
    if self.version == message_version {
        VersionCheckResult::Match
    } else {
        VersionCheckResult::Mismatch {
            expected: self.version,
            actual: message_version,
        }
    }
}
pub fn to_preload_bytes(&self, sensor_type: &str) -> Result<Vec<u8>> {
let needed = self.preload_bytes_len(sensor_type)?;
#[cfg(feature = "std")]
let mut out: Vec<u8> = vec![0u8; needed];
#[cfg(not(feature = "std"))]
let mut out: Vec<u8> = alloc::vec![0u8; needed];
let written = self.write_preload_bytes(sensor_type, &mut out)?;
debug_assert_eq!(written, needed);
out.truncate(written);
Ok(out)
}
/// Exact number of bytes `write_preload_bytes` will emit for this context.
pub fn preload_bytes_len(&self, sensor_type: &str) -> Result<usize> {
    // The sensor-type length is stored in a single byte.
    let sens_len = sensor_type.len();
    if sens_len > 255 {
        return Err(crate::error::ContextError::PatternTooLarge {
            size: sens_len,
            max: 255,
        }
        .into());
    }
    // Header: magic(4) + format version(4) + context version(4) +
    // scale factor(4) + observation count(8) + next code(4) +
    // sensor-type length(1) + sensor type bytes + source count(4).
    let mut total = 4 + 4 + 4 + 4 + 8 + 4 + 1 + sens_len + 4;
    // Per source: 56 fixed bytes + history length(4) + 8 per history value.
    for s in self.source_stats.values() {
        total += 60 + s.history.len() * 8;
    }
    // Dictionary entry count.
    total += 4;
    // Per pattern: code(4) + length(2) + payload + 3 u64 fields(24).
    for p in self.dictionary.values() {
        let data_len = p.data.len().min(u16::MAX as usize);
        total += 30 + data_len;
    }
    // Trailing CRC32.
    total += 4;
    Ok(total)
}
/// Serialize this context into `out` in ALCS preload format and return the
/// number of bytes written. `out` must be at least `preload_bytes_len`
/// bytes long; the payload ends with a CRC32 (ISO-HDLC) of everything
/// before it.
pub fn write_preload_bytes(&self, sensor_type: &str, out: &mut [u8]) -> Result<usize> {
    let sens_bytes = sensor_type.as_bytes();
    // The sensor-type length is stored in a single byte below.
    if sens_bytes.len() > 255 {
        return Err(crate::error::ContextError::PatternTooLarge {
            size: sens_bytes.len(),
            max: 255,
        }
        .into());
    }
    let needed = self.preload_bytes_len(sensor_type)?;
    if out.len() < needed {
        return Err(crate::error::EncodeError::BufferTooSmall {
            needed,
            available: out.len(),
        }
        .into());
    }
    let mut w = 0usize;
    // Fixed header: magic, format version, context version, scale factor,
    // observation count, next pattern code.
    out[w..w + 4].copy_from_slice(ALCS_MAGIC);
    w += 4;
    out[w..w + 4].copy_from_slice(&ALCS_FORMAT_VERSION.to_le_bytes());
    w += 4;
    out[w..w + 4].copy_from_slice(&self.version.to_le_bytes());
    w += 4;
    out[w..w + 4].copy_from_slice(&self.scale_factor.to_le_bytes());
    w += 4;
    out[w..w + 8].copy_from_slice(&self.observation_count.to_le_bytes());
    w += 8;
    out[w..w + 4].copy_from_slice(&self.next_code.to_le_bytes());
    w += 4;
    // Length-prefixed sensor-type string.
    out[w] = sens_bytes.len() as u8;
    w += 1;
    out[w..w + sens_bytes.len()].copy_from_slice(sens_bytes);
    w += sens_bytes.len();
    // Per-source stats, written in ascending source-id order so the output
    // is deterministic even with the HashMap backend.
    out[w..w + 4].copy_from_slice(&(self.source_stats.len() as u32).to_le_bytes());
    w += 4;
    for_each_sorted_u32(&self.source_stats, |sid, s| {
        write_source_stats_into(out, &mut w, sid, s);
    });
    // Dictionary patterns, likewise in ascending code order.
    out[w..w + 4].copy_from_slice(&(self.dictionary.len() as u32).to_le_bytes());
    w += 4;
    for_each_sorted_u32(&self.dictionary, |code, p| {
        write_pattern_into(out, &mut w, code, p);
    });
    // Trailing CRC32 over everything written so far.
    use crc::{Crc, CRC_32_ISO_HDLC};
    const CRC32: Crc<u32> = Crc::<u32>::new(&CRC_32_ISO_HDLC);
    let crc = CRC32.checksum(&out[..w]);
    out[w..w + 4].copy_from_slice(&crc.to_le_bytes());
    w += 4;
    debug_assert_eq!(w, needed);
    Ok(w)
}
/// Cheap validity probe for a preload buffer: checks the magic bytes, the
/// format version, and the trailing CRC32 without decoding the payload.
pub fn validate_preload_header(data: &[u8]) -> bool {
    use crc::{Crc, CRC_32_ISO_HDLC};
    const CRC32: Crc<u32> = Crc::<u32>::new(&CRC_32_ISO_HDLC);
    // 41 bytes is the minimum well-formed payload (empty context).
    if data.len() < 41 || &data[..4] != ALCS_MAGIC {
        return false;
    }
    let format_version = u32::from_le_bytes([data[4], data[5], data[6], data[7]]);
    if format_version != ALCS_FORMAT_VERSION {
        return false;
    }
    // The CRC covers everything before its own trailing 4 bytes.
    let crc_offset = data.len() - 4;
    let stored_crc = u32::from_le_bytes([
        data[crc_offset],
        data[crc_offset + 1],
        data[crc_offset + 2],
        data[crc_offset + 3],
    ]);
    CRC32.checksum(&data[..crc_offset]) == stored_crc
}
/// Deserialize a context from ALCS preload bytes (layout as written by
/// `write_preload_bytes`). Validates magic, CRC32 trailer, and format
/// version, bounds-checks every read against the CRC offset, and rejects
/// trailing bytes between the payload and the CRC.
pub fn from_preload_bytes(data: &[u8]) -> Result<Self> {
    use crc::{Crc, CRC_32_ISO_HDLC};
    const CRC32: Crc<u32> = Crc::<u32>::new(&CRC_32_ISO_HDLC);
    // 41 = fixed header (29) + source count (4) + dict count (4) + CRC (4).
    if data.len() < 41 {
        return Err(crate::error::DecodeError::BufferTooShort {
            needed: 41,
            available: data.len(),
        }
        .into());
    }
    if &data[..4] != ALCS_MAGIC {
        return Err(crate::error::DecodeError::InvalidHeader.into());
    }
    // Verify the trailing CRC32 before trusting any field.
    let crc_offset = data.len() - 4;
    let stored_crc = u32::from_le_bytes(data[crc_offset..].try_into().unwrap());
    let computed_crc = CRC32.checksum(&data[..crc_offset]);
    if stored_crc != computed_crc {
        return Err(crate::error::DecodeError::InvalidChecksum {
            expected: stored_crc,
            actual: computed_crc,
        }
        .into());
    }
    let format_version = u32::from_le_bytes(data[4..8].try_into().unwrap());
    if format_version != ALCS_FORMAT_VERSION {
        return Err(crate::error::DecodeError::MalformedMessage {
            offset: 4,
            reason: {
                #[cfg(feature = "std")]
                {
                    format!(
                        "unsupported ALCS format version {} (expected {})",
                        format_version, ALCS_FORMAT_VERSION
                    )
                }
                #[cfg(not(feature = "std"))]
                {
                    "unsupported ALCS format version".to_string()
                }
            },
        }
        .into());
    }
    // Fixed header fields.
    let version = u32::from_le_bytes(data[8..12].try_into().unwrap());
    let scale_factor = u32::from_le_bytes(data[12..16].try_into().unwrap());
    let observation_count = u64::from_le_bytes(data[16..24].try_into().unwrap());
    let next_code = u32::from_le_bytes(data[24..28].try_into().unwrap());
    // Length-prefixed sensor-type string; the content is skipped (a Context
    // does not store it).
    let sens_len = data[28] as usize;
    let mut offset: usize = 29;
    if offset + sens_len > crc_offset {
        return Err(crate::error::DecodeError::BufferTooShort {
            needed: offset + sens_len + 4,
            available: data.len(),
        }
        .into());
    }
    offset += sens_len;
    if offset + 4 > crc_offset {
        return Err(crate::error::DecodeError::BufferTooShort {
            needed: offset + 4,
            available: data.len(),
        }
        .into());
    }
    // Per-source statistics records.
    let src_count = u32::from_le_bytes(data[offset..offset + 4].try_into().unwrap()) as usize;
    offset += 4;
    let mut source_stats: Map<u32, SourceStats> = Map::new();
    for _ in 0..src_count {
        // Fixed part: id(4) + count(8) + five f64 fields(40) + max_history(4).
        if offset + 56 > crc_offset {
            return Err(crate::error::DecodeError::BufferTooShort {
                needed: offset + 56,
                available: data.len(),
            }
            .into());
        }
        let source_id = u32::from_le_bytes(data[offset..offset + 4].try_into().unwrap());
        let count = u64::from_le_bytes(data[offset + 4..offset + 12].try_into().unwrap());
        let last_value = f64::from_le_bytes(data[offset + 12..offset + 20].try_into().unwrap());
        let ema = f64::from_le_bytes(data[offset + 20..offset + 28].try_into().unwrap());
        let ema_alpha = f64::from_le_bytes(data[offset + 28..offset + 36].try_into().unwrap());
        let sum_sq_diff =
            f64::from_le_bytes(data[offset + 36..offset + 44].try_into().unwrap());
        let mean = f64::from_le_bytes(data[offset + 44..offset + 52].try_into().unwrap());
        let max_history =
            u32::from_le_bytes(data[offset + 52..offset + 56].try_into().unwrap()) as usize;
        offset += 56;
        if offset + 4 > crc_offset {
            return Err(crate::error::DecodeError::BufferTooShort {
                needed: offset + 4,
                available: data.len(),
            }
            .into());
        }
        let hist_len =
            u32::from_le_bytes(data[offset..offset + 4].try_into().unwrap()) as usize;
        offset += 4;
        // saturating_mul keeps a bogus length from overflowing usize before
        // the bounds check below rejects it.
        let hist_bytes = hist_len.saturating_mul(8);
        if offset + hist_bytes > crc_offset {
            return Err(crate::error::DecodeError::BufferTooShort {
                needed: offset + hist_bytes,
                available: data.len(),
            }
            .into());
        }
        let mut history: Vec<f64> = Vec::with_capacity(hist_len);
        for i in 0..hist_len {
            let hv = f64::from_le_bytes(
                data[offset + i * 8..offset + i * 8 + 8].try_into().unwrap(),
            );
            history.push(hv);
        }
        offset += hist_bytes;
        source_stats.insert(
            source_id,
            SourceStats {
                last_value,
                ema,
                ema_alpha,
                count,
                sum_sq_diff,
                mean,
                history,
                max_history,
            },
        );
    }
    if offset + 4 > crc_offset {
        return Err(crate::error::DecodeError::BufferTooShort {
            needed: offset + 4,
            available: data.len(),
        }
        .into());
    }
    // Dictionary pattern records.
    let dict_count = u32::from_le_bytes(data[offset..offset + 4].try_into().unwrap()) as usize;
    offset += 4;
    let mut dictionary: Map<u32, Pattern> = Map::new();
    let mut pattern_index: Map<u64, u32> = Map::new();
    for _ in 0..dict_count {
        // code(4) + payload length(2).
        if offset + 6 > crc_offset {
            return Err(crate::error::DecodeError::BufferTooShort {
                needed: offset + 6,
                available: data.len(),
            }
            .into());
        }
        let code = u32::from_le_bytes(data[offset..offset + 4].try_into().unwrap());
        let data_len =
            u16::from_le_bytes(data[offset + 4..offset + 6].try_into().unwrap()) as usize;
        offset += 6;
        // payload + frequency/last_used/created_at (24 bytes total).
        if offset + data_len + 24 > crc_offset {
            return Err(crate::error::DecodeError::BufferTooShort {
                needed: offset + data_len + 24,
                available: data.len(),
            }
            .into());
        }
        let pattern_bytes = data[offset..offset + data_len].to_vec();
        offset += data_len;
        let frequency = u64::from_le_bytes(data[offset..offset + 8].try_into().unwrap());
        let last_used = u64::from_le_bytes(data[offset + 8..offset + 16].try_into().unwrap());
        let created_at = u64::from_le_bytes(data[offset + 16..offset + 24].try_into().unwrap());
        offset += 24;
        let hash = xxh64(&pattern_bytes, 0);
        pattern_index.insert(hash, code);
        dictionary.insert(
            code,
            Pattern {
                data: pattern_bytes,
                value: None,
                frequency,
                last_used,
                created_at,
            },
        );
    }
    // Strict framing: the payload must end exactly where the CRC begins.
    if offset != crc_offset {
        return Err(crate::error::DecodeError::MalformedMessage {
            offset,
            reason: {
                #[cfg(feature = "std")]
                {
                    format!(
                        "trailing {} byte(s) between content and CRC",
                        crc_offset - offset
                    )
                }
                #[cfg(not(feature = "std"))]
                {
                    "trailing bytes between content and CRC".to_string()
                }
            },
        }
        .into());
    }
    Ok(Self {
        version,
        observation_count,
        dictionary,
        pattern_index,
        next_code,
        source_stats,
        // The config is not serialized; a restored context gets defaults.
        config: ContextConfig::default(),
        scale_factor,
    })
}
}
/// Magic bytes identifying the ALCS preload format.
pub const ALCS_MAGIC: &[u8; 4] = b"ALCS";
/// Current ALCS preload format version.
pub const ALCS_FORMAT_VERSION: u32 = 1;
impl Default for Context {
    /// Equivalent to [`Context::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(feature = "std")]
impl crate::health::HealthCheckable for Context {
    /// Report health from estimated memory use and pattern count:
    /// > 100 MB is Unhealthy; > 10 MB or > 50k patterns is Degraded;
    /// otherwise Healthy.
    fn health_check(&self) -> crate::health::HealthCheck {
        use crate::health::{HealthCheck, HealthStatus};
        use std::time::Instant;
        let start = Instant::now();
        let memory = self.estimated_memory();
        let pattern_count = self.pattern_count();
        let status = if memory > 100_000_000 {
            HealthStatus::Unhealthy
        } else if memory > 10_000_000 || pattern_count > 50_000 {
            HealthStatus::Degraded
        } else {
            HealthStatus::Healthy
        };
        HealthCheck {
            component: "Context".to_string(),
            status,
            last_check: Instant::now(),
            message: format!(
                "Memory: {} bytes, Patterns: {}, Sources: {}",
                memory,
                pattern_count,
                self.source_count()
            ),
            // Latency of producing this health check itself.
            latency: start.elapsed(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A fresh context starts at version 0 with an empty dictionary.
    #[test]
    fn test_context_new() {
        let ctx = Context::new();
        assert_eq!(ctx.version(), 0);
        assert_eq!(ctx.pattern_count(), 0);
    }
    // reset_to_baseline wipes per-source stats/predictions but preserves
    // the pattern dictionary, version, and the ability to observe again.
    #[test]
    fn test_reset_to_baseline_wipes_predictions_preserves_patterns() {
        let mut ctx = Context::new();
        for _ in 0..5 {
            ctx.observe(&RawData::with_source(1, 22.5, 0));
            ctx.observe(&RawData::with_source(2, 1013.25, 0));
        }
        assert!(ctx.predict(1).is_some());
        assert!(ctx.predict(2).is_some());
        assert_eq!(ctx.last_value(1), Some(22.5));
        assert_eq!(ctx.last_value(2), Some(1013.25));
        let code = ctx
            .register_pattern(Pattern::new(vec![0xDE, 0xAD, 0xBE, 0xEF]))
            .unwrap();
        let pre_pattern_count = ctx.pattern_count();
        let pre_version = ctx.version();
        assert!(pre_pattern_count >= 1);
        ctx.reset_to_baseline();
        assert!(ctx.predict(1).is_none());
        assert!(ctx.predict(2).is_none());
        assert_eq!(ctx.last_value(1), None);
        assert_eq!(ctx.last_value(2), None);
        assert_eq!(ctx.pattern_count(), pre_pattern_count);
        assert!(ctx.get_pattern(code).is_some());
        assert_eq!(ctx.version(), pre_version);
        ctx.observe(&RawData::with_source(1, 99.0, 0));
        assert_eq!(ctx.last_value(1), Some(99.0));
    }
    // Predictions appear after observations and stay near the input range.
    #[test]
    fn test_observe_and_predict() {
        let mut ctx = Context::new();
        assert!(ctx.predict(0).is_none());
        for i in 0..10 {
            ctx.observe(&RawData::new(20.0 + i as f64 * 0.1, i as u64));
        }
        let pred = ctx.predict(0).unwrap();
        assert!(pred.value > 20.0 && pred.value < 21.0);
        assert!(pred.confidence > 0.0);
    }
    // Registering identical bytes twice dedups to the same code.
    #[test]
    fn test_register_pattern() {
        let mut ctx = Context::new();
        let pattern = Pattern::new(vec![1, 2, 3, 4]);
        let code = ctx.register_pattern(pattern.clone()).unwrap();
        assert_eq!(ctx.pattern_count(), 1);
        assert!(ctx.get_pattern(code).is_some());
        let code2 = ctx.register_pattern(pattern).unwrap();
        assert_eq!(code, code2);
        assert_eq!(ctx.pattern_count(), 1);
    }
    // The content hash depends only on dictionary contents.
    #[test]
    fn test_context_hash() {
        let mut ctx1 = Context::new();
        let mut ctx2 = Context::new();
        ctx1.register_pattern(Pattern::new(vec![1, 2, 3])).unwrap();
        ctx2.register_pattern(Pattern::new(vec![1, 2, 3])).unwrap();
        assert_eq!(ctx1.hash(), ctx2.hash());
        ctx1.register_pattern(Pattern::new(vec![4, 5, 6])).unwrap();
        assert_ne!(ctx1.hash(), ctx2.hash());
    }
    // export_full / import_full round-trip the dictionary and its hash.
    #[test]
    fn test_export_import() {
        let mut ctx1 = Context::new();
        ctx1.register_pattern(Pattern::new(vec![1, 2, 3])).unwrap();
        ctx1.register_pattern(Pattern::new(vec![4, 5, 6])).unwrap();
        let exported = ctx1.export_full();
        let mut ctx2 = Context::new();
        ctx2.import_full(&exported).unwrap();
        assert_eq!(ctx1.hash(), ctx2.hash());
        assert_eq!(ctx1.pattern_count(), ctx2.pattern_count());
    }
    // reset clears patterns, stats, and the version counter.
    #[test]
    fn test_context_reset() {
        let mut ctx = Context::new();
        ctx.observe(&RawData::new(42.0, 0));
        ctx.register_pattern(Pattern::new(vec![1, 2, 3])).unwrap();
        assert!(ctx.version() > 0);
        assert!(ctx.pattern_count() > 0);
        ctx.reset();
        assert_eq!(ctx.version(), 0);
        assert_eq!(ctx.pattern_count(), 0);
        assert!(ctx.predict(0).is_none());
    }
    // last_value tracks the most recent observation per source.
    #[test]
    fn test_last_value() {
        let mut ctx = Context::new();
        assert!(ctx.last_value(0).is_none());
        ctx.observe(&RawData::new(42.5, 0));
        assert_eq!(ctx.last_value(0), Some(42.5));
        ctx.observe(&RawData::new(43.0, 1));
        assert_eq!(ctx.last_value(0), Some(43.0));
    }
    // A rare, stale pattern is removed by an explicit evolve() pass.
    #[test]
    fn test_pattern_pruning() {
        let config = ContextConfig {
            evolution: EvolutionConfig {
                min_frequency: 3,
                max_age: 50,
                evolution_interval: 10,
                promotion_threshold: 5,
                // Automatic evolution off; evolve() is called manually below.
                enabled: false,
            },
            ..ContextConfig::default()
        };
        let mut ctx = Context::with_config(config);
        let mut pattern = Pattern::new(vec![42]);
        pattern.frequency = 1;
        pattern.last_used = 0;
        ctx.register_pattern(pattern).unwrap();
        assert_eq!(ctx.pattern_count(), 1);
        ctx.observation_count = 100;
        ctx.evolve();
        assert_eq!(ctx.pattern_count(), 0);
    }
    // A frequent, recently-used pattern survives evolution.
    #[test]
    fn test_pattern_kept_if_frequent() {
        let config = ContextConfig {
            evolution: EvolutionConfig {
                min_frequency: 2,
                max_age: 1000,
                evolution_interval: 10,
                promotion_threshold: 5,
                enabled: false,
            },
            ..ContextConfig::default()
        };
        let mut ctx = Context::with_config(config);
        let mut pattern = Pattern::new(vec![42]);
        pattern.frequency = 10;
        pattern.last_used = 50;
        ctx.register_pattern(pattern).unwrap();
        if let Some(p) = ctx.dictionary.get_mut(&0) {
            p.frequency = 10;
            p.last_used = 50;
        }
        ctx.observation_count = 100;
        ctx.evolve();
        assert_eq!(ctx.pattern_count(), 1);
    }
    // Evolution renumbers so the highest-scoring pattern gets code 0.
    #[test]
    fn test_pattern_reordering() {
        let mut config = ContextConfig::default();
        config.evolution.enabled = false;
        let mut ctx = Context::with_config(config);
        ctx.register_pattern(Pattern::with_timestamp(vec![1], 0))
            .unwrap();
        ctx.register_pattern(Pattern::with_timestamp(vec![2], 0))
            .unwrap();
        if let Some(p) = ctx.dictionary.get_mut(&1) {
            p.frequency = 100;
            p.last_used = 10;
        }
        if let Some(p) = ctx.dictionary.get_mut(&0) {
            p.frequency = 1;
            p.last_used = 0;
        }
        ctx.observation_count = 10;
        ctx.evolve();
        let pattern_0 = ctx.get_pattern(0).unwrap();
        assert_eq!(pattern_0.frequency, 100);
    }
    // With enough samples, predictions switch to the EMA model.
    #[test]
    fn test_ema_prediction() {
        let mut ctx = Context::new();
        for i in 0..20 {
            ctx.observe(&RawData::new(20.0 + i as f64, i as u64));
        }
        let prediction = ctx.predict(0).unwrap();
        assert!(prediction.value > 30.0 && prediction.value < 40.0);
        assert_eq!(prediction.model_type, PredictionModel::MovingAverage);
    }
    // observe() triggers evolution every evolution_interval observations.
    #[test]
    fn test_evolution_triggered_automatically() {
        let config = ContextConfig {
            evolution: EvolutionConfig {
                min_frequency: 1,
                max_age: 10000,
                evolution_interval: 5,
                promotion_threshold: 5,
                enabled: true,
            },
            ..ContextConfig::default()
        };
        let mut ctx = Context::with_config(config);
        ctx.register_pattern(Pattern::new(vec![1, 2, 3])).unwrap();
        let initial_version = ctx.version();
        for i in 0..5 {
            ctx.observe(&RawData::new(20.0, i as u64));
        }
        assert!(ctx.version() > initial_version);
        assert_eq!(ctx.observation_count(), 5);
    }
    // A pattern's score decays as it ages.
    #[test]
    fn test_pattern_score() {
        let mut pattern = Pattern::new(vec![1, 2, 3]);
        pattern.frequency = 100;
        pattern.last_used = 900;
        let score_recent = pattern.score(1000);
        let score_old = pattern.score(2000);
        assert!(score_recent > score_old);
    }
    // Shared fixture: a context with stats on two sources and one pattern.
    fn trained_context() -> Context {
        let mut ctx = Context::new();
        for i in 0..30 {
            ctx.observe(&RawData::with_source(1, 22.5 + (i as f64) * 0.01, 0));
        }
        for _ in 0..15 {
            ctx.observe(&RawData::with_source(2, 1013.25, 0));
        }
        let _ = ctx
            .register_pattern(Pattern::new(vec![0xBE, 0xEF]))
            .unwrap();
        ctx
    }
    // Preload bytes round-trip every field bit-for-bit (floats compared
    // via to_bits) and stay compact.
    #[test]
    fn test_to_preload_bytes_from_preload_bytes_roundtrip() {
        let ctx = trained_context();
        let bytes = ctx.to_preload_bytes("em500-co2").expect("serialize");
        assert!(
            bytes.len() < 3072,
            "serialized context should be well under 3 KB, got {}",
            bytes.len()
        );
        let restored = Context::from_preload_bytes(&bytes).expect("deserialize");
        assert_eq!(restored.version(), ctx.version());
        assert_eq!(restored.scale_factor(), ctx.scale_factor());
        assert_eq!(restored.observation_count(), ctx.observation_count());
        assert_eq!(restored.source_count(), ctx.source_count());
        assert_eq!(restored.pattern_count(), ctx.pattern_count());
        for sid in [1u32, 2] {
            let a = ctx.source_stats.get(&sid).unwrap();
            let b = restored.source_stats.get(&sid).unwrap();
            assert_eq!(a.count, b.count, "sid {} count", sid);
            assert!(a.last_value.to_bits() == b.last_value.to_bits());
            assert!(a.ema.to_bits() == b.ema.to_bits());
            assert!(a.ema_alpha.to_bits() == b.ema_alpha.to_bits());
            assert!(a.sum_sq_diff.to_bits() == b.sum_sq_diff.to_bits());
            assert!(a.mean.to_bits() == b.mean.to_bits());
            assert_eq!(a.max_history, b.max_history);
            assert_eq!(a.history.len(), b.history.len());
            for (x, y) in a.history.iter().zip(b.history.iter()) {
                assert_eq!(x.to_bits(), y.to_bits(), "sid {} history", sid);
            }
        }
    }
    // Corrupted magic bytes are rejected.
    #[test]
    fn test_from_preload_bytes_rejects_bad_magic() {
        let mut bytes = trained_context().to_preload_bytes("x").unwrap();
        bytes[0] = b'X';
        let r = Context::from_preload_bytes(&bytes);
        assert!(r.is_err());
    }
    // A flipped payload byte surfaces specifically as InvalidChecksum.
    #[test]
    fn test_from_preload_bytes_rejects_bad_crc() {
        let mut bytes = trained_context().to_preload_bytes("x").unwrap();
        let mid = bytes.len() / 2;
        bytes[mid] ^= 0xFF;
        let r = Context::from_preload_bytes(&bytes);
        match r {
            Err(crate::error::AlecError::Decode(crate::error::DecodeError::InvalidChecksum {
                ..
            })) => {}
            other => panic!("expected InvalidChecksum, got {:?}", other),
        }
    }
    // Sensor-type strings longer than 255 bytes cannot be serialized.
    #[test]
    fn test_to_preload_bytes_rejects_oversize_sensor_type() {
        let ctx = Context::new();
        let long: String = "a".repeat(300);
        let r = ctx.to_preload_bytes(&long);
        assert!(r.is_err());
    }
}