use anyhow::Result;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::{File, OpenOptions};
use std::io::{Read, Seek, SeekFrom, Write};
use std::path::Path;
/// How many bytes to load from the END of the file when resuming a session.
const TAIL_BUFFER_SIZE: i64 = 8192;

/// Marker written immediately before the serialized token table.
const TOKEN_TABLE_MARKER: &[u8] = b"TOKENS:";

/// Sentinel appended after every serialized memory block.
const BLOCK_END_MARKER: &[u8] = b"\x00BLK\x00";
/// Snapshot of session state recovered by reading the `.m8` file backwards.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackwardsConsciousness {
    /// Token table in effect for the most recent session (id -> word).
    pub current_tokens: HashMap<u8, String>,
    /// Memory blocks recovered from the file tail, newest first.
    pub recent_memories: Vec<MemoryBlock>,
    /// Weighted links between memory blocks.
    pub importance_graph: Vec<ImportanceLink>,
    /// Session-frequency metric; currently a hard-coded placeholder set by
    /// the reader.
    pub session_frequency: f64,
}
/// One append-only unit of memory stored in the `.m8` file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryBlock {
    /// When the memory was appended.
    pub timestamp: DateTime<Utc>,
    /// Token-compressed payload bytes.
    pub content: Vec<u8>,
    /// Importance score; the writer caps it at 1.0.
    pub importance: f32,
    /// Links back to earlier, related blocks in the file.
    pub backlinks: Vec<BackLink>,
    /// (word, token id) pairs minted while writing this block — the current
    /// producer always emits an empty list.
    pub token_discoveries: Vec<(String, u8)>,
}
/// A pointer from a memory block to an earlier block in the same file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackLink {
    /// Absolute byte offset of the linked block within the file.
    pub offset: u64,
    /// Link strength; `follow_backlinks` only chases links above 0.7.
    pub importance: f32,
    /// Free-text context for the link.
    pub context: String,
}
/// A weighted, directed edge between two memory blocks, addressed by their
/// byte offsets in the `.m8` file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportanceLink {
    /// Byte offset of the source block.
    pub from_offset: u64,
    /// Byte offset of the target block.
    pub to_offset: u64,
    /// Edge weight.
    pub weight: f32,
    /// Free-text explanation of why the link exists.
    pub reason: String,
}
/// Reader/writer for an append-only `.m8` memory file that is resumed by
/// reading from the END, C64-tape style.
pub struct M8BackwardsReader {
    /// Location of the append-only `.m8` file.
    path: PathBuf,
    /// Active token table: single-byte id -> word.
    current_tokens: HashMap<u8, String>,
    /// Occurrence count per word; drives promotion to tokens.
    token_frequency: HashMap<String, u32>,
    /// Next free id for dynamically discovered tokens (starts at 0xA0).
    next_token_id: u8,
    /// Keywords supplied via `set_user_context`.
    user_keywords: Vec<String>,
    /// Per-word importance boosts applied when appending memories.
    importance_boost: HashMap<String, f32>,
}
impl M8BackwardsReader {
    /// Creates a reader for the append-only `.m8` file at `path`.
    ///
    /// No I/O happens here; the file is opened lazily by the read/append
    /// methods. Dynamic token ids start at 0xA0, leaving 0x80-0x9F for the
    /// base table.
    pub fn new(path: impl AsRef<Path>) -> Self {
        Self {
            path: path.as_ref().to_path_buf(),
            current_tokens: Self::init_base_tokens(),
            token_frequency: HashMap::new(),
            next_token_id: 0xA0,
            user_keywords: Vec::new(),
            importance_boost: HashMap::new(),
        }
    }

    /// Registers the user's interest keywords.
    ///
    /// Each keyword receives a direct importance boost of 0.3. Keywords
    /// containing "audio" or "memory" additionally seed 0.2 boosts for a
    /// small set of related terms.
    pub fn set_user_context(&mut self, keywords: Vec<String>) {
        for keyword in &keywords {
            self.importance_boost.insert(keyword.clone(), 0.3);
            let lower = keyword.to_lowercase();
            if lower.contains("audio") {
                for related in &["sound", "processing", "voice"] {
                    self.importance_boost.insert(related.to_string(), 0.2);
                }
            }
            if lower.contains("memory") {
                for related in &["consciousness", "m8"] {
                    self.importance_boost.insert(related.to_string(), 0.2);
                }
            }
        }
        self.user_keywords = keywords;
    }

    /// Base token table shared by every session (ids 0x80-0x87).
    fn init_base_tokens() -> HashMap<u8, String> {
        [
            (0x80, "node_modules"),
            (0x81, ".git"),
            (0x82, "target"),
            (0x83, "src"),
            (0x84, "Audio"),
            (0x85, "claude"),
            (0x86, "2024"),
            (0x87, "/aidata/ayeverse/smart-tree"),
        ]
        .iter()
        .map(|&(id, word)| (id, word.to_string()))
        .collect()
    }

    /// Reads the most recent state from the END of the file.
    ///
    /// Only the last `TAIL_BUFFER_SIZE` bytes are loaded. The token table is
    /// recovered first (it is always the last thing appended), then up to 10
    /// memory blocks are scanned backwards from the tail, newest first.
    ///
    /// # Errors
    /// Fails if the file cannot be opened or read, or if parsing fails.
    pub fn read_backwards(&mut self) -> Result<BackwardsConsciousness> {
        let mut file = File::open(&self.path)?;
        let file_size = file.metadata()?.len() as i64;
        // Never seek before the start of the file on short files.
        let read_start = (file_size - TAIL_BUFFER_SIZE).max(0);
        file.seek(SeekFrom::Start(read_start as u64))?;
        let mut buffer = Vec::new();
        file.read_to_end(&mut buffer)?;

        let mut consciousness = BackwardsConsciousness {
            current_tokens: HashMap::new(),
            recent_memories: Vec::new(),
            importance_graph: Vec::new(),
            // Placeholder: no real frequency tracking is implemented yet.
            session_frequency: 42.73,
        };

        // The token table is appended last, so the LAST marker in the tail
        // buffer identifies the current table.
        if let Some(token_pos) = Self::find_marker_reverse(&buffer, TOKEN_TABLE_MARKER) {
            consciousness.current_tokens = self.parse_token_table(&buffer[token_pos..])?;
            self.current_tokens = consciousness.current_tokens.clone();
        }

        // Walk block-end markers from the tail toward the head so the newest
        // blocks are collected first; stop after 10 blocks.
        let mut pos = buffer.len();
        while pos > 0 {
            match Self::find_marker_reverse(&buffer[..pos], BLOCK_END_MARKER) {
                Some(block_start) => {
                    // NOTE(review): the slice handed to the parser starts at
                    // the marker itself, not at the block payload — confirm
                    // against the real parser once parse_memory_block is
                    // implemented.
                    let block = self.parse_memory_block(&buffer[block_start..pos])?;
                    consciousness.recent_memories.push(block);
                    pos = block_start;
                    if consciousness.recent_memories.len() >= 10 {
                        break;
                    }
                }
                None => break,
            }
        }
        Ok(consciousness)
    }

    /// Follows high-importance (> 0.7) backlinks from the recent memories and
    /// loads the blocks they point at.
    ///
    /// # Errors
    /// Fails if the file cannot be opened, a seek fails, or a read fails.
    pub fn follow_backlinks(
        &self,
        consciousness: &BackwardsConsciousness,
    ) -> Result<Vec<MemoryBlock>> {
        let mut important_memories = Vec::new();
        let mut file = File::open(&self.path)?;
        for memory in &consciousness.recent_memories {
            for backlink in &memory.backlinks {
                if backlink.importance <= 0.7 {
                    continue;
                }
                file.seek(SeekFrom::Start(backlink.offset))?;
                // Read up to 4 KiB. Unlike `read_exact`, a plain `read` does
                // not fail with UnexpectedEof when the block sits within
                // 4 KiB of the end of the file.
                let mut block_buffer = vec![0u8; 4096];
                let n = file.read(&mut block_buffer)?;
                block_buffer.truncate(n);
                if let Ok(block) = self.parse_memory_block(&block_buffer) {
                    important_memories.push(block);
                }
            }
        }
        Ok(important_memories)
    }

    /// Compresses `content`, scores it, and appends it to the file, followed
    /// by a refreshed token table (so the table is always last on disk —
    /// i.e. first when reading backwards).
    ///
    /// `base_importance` is boosted for each user keyword the content
    /// contains (case-insensitively) and capped at 1.0.
    ///
    /// # Errors
    /// Fails if compression, serialization, or any file operation fails.
    pub fn append_memory(&mut self, content: &str, base_importance: f32) -> Result<()> {
        self.evolve_tokens(content);

        // Lowercase once, outside the keyword loop.
        let lowered = content.to_lowercase();
        let mut importance = base_importance;
        for keyword in &self.user_keywords {
            if lowered.contains(&keyword.to_lowercase()) {
                importance =
                    (importance + self.importance_boost.get(keyword).unwrap_or(&0.2)).min(1.0);
            }
        }

        let block = MemoryBlock {
            timestamp: Utc::now(),
            content: self.compress_with_tokens(content)?,
            importance,
            backlinks: self.find_backlinks(content)?,
            token_discoveries: self.get_new_tokens(),
        };

        let mut file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(&self.path)?;
        self.write_memory_block(&mut file, &block)?;
        self.write_token_table(&mut file)?;
        Ok(())
    }

    /// Promotes words seen more than 5 times to single-byte tokens.
    ///
    /// Ids are handed out sequentially from 0xA0; allocation stops once the
    /// counter reaches 0xFF so the `+= 1` below can never overflow `u8`
    /// (an unguarded increment would panic in debug builds).
    fn evolve_tokens(&mut self, content: &str) {
        for word in content.split_whitespace() {
            let count = {
                let c = self.token_frequency.entry(word.to_string()).or_default();
                *c += 1;
                *c
            };
            if count > 5 && self.next_token_id < u8::MAX && !self.is_tokenized(word) {
                self.current_tokens
                    .insert(self.next_token_id, word.to_string());
                self.next_token_id += 1;
            }
        }
    }

    /// True when `word` already has a token id (base or discovered).
    fn is_tokenized(&self, word: &str) -> bool {
        self.current_tokens.values().any(|v| v == word)
    }

    /// Replaces tokenized words in `content` with `\0<id>\0` escapes.
    ///
    /// Replacement order is longest-word-first (ties broken by token id) so
    /// the output is deterministic and a word containing another token's word
    /// is substituted before its substring. Iterating the HashMap directly
    /// would make overlapping replacements order-dependent.
    ///
    /// NOTE(review): `format!` writes the token id as DECIMAL TEXT, not as a
    /// raw byte — confirm this matches the (currently stubbed) decompressor.
    fn compress_with_tokens(&self, content: &str) -> Result<Vec<u8>> {
        let mut tokens: Vec<(u8, &str)> = self
            .current_tokens
            .iter()
            .map(|(&id, word)| (id, word.as_str()))
            .collect();
        tokens.sort_by(|a, b| b.1.len().cmp(&a.1.len()).then(a.0.cmp(&b.0)));
        let mut remaining = content.to_string();
        for (id, word) in tokens {
            remaining = remaining.replace(word, &format!("\x00{}\x00", id));
        }
        Ok(remaining.into_bytes())
    }

    /// Placeholder: a real implementation would scan earlier blocks for
    /// related content and emit links to them.
    fn find_backlinks(&self, _content: &str) -> Result<Vec<BackLink>> {
        Ok(Vec::new())
    }

    /// Placeholder: should report tokens minted since the last append.
    fn get_new_tokens(&self) -> Vec<(String, u8)> {
        Vec::new()
    }

    /// Placeholder parser: returns the in-memory table instead of decoding
    /// the serialized one from `_buffer`.
    fn parse_token_table(&self, _buffer: &[u8]) -> Result<HashMap<u8, String>> {
        Ok(self.current_tokens.clone())
    }

    /// Placeholder parser: returns a synthetic block instead of decoding
    /// `_buffer` (which `write_memory_block` produced via bincode).
    fn parse_memory_block(&self, _buffer: &[u8]) -> Result<MemoryBlock> {
        Ok(MemoryBlock {
            timestamp: Utc::now(),
            content: Vec::new(),
            importance: 0.5,
            backlinks: Vec::new(),
            token_discoveries: Vec::new(),
        })
    }

    /// Serializes `block` with bincode and appends it plus the end marker.
    ///
    /// # Errors
    /// Fails on serialization or write errors.
    fn write_memory_block(&self, file: &mut File, block: &MemoryBlock) -> Result<()> {
        let data = bincode::serialize(block)?;
        file.write_all(&data)?;
        file.write_all(BLOCK_END_MARKER)?;
        Ok(())
    }

    /// Appends the marker followed by the bincode-serialized token table.
    ///
    /// # Errors
    /// Fails on serialization or write errors.
    fn write_token_table(&self, file: &mut File) -> Result<()> {
        file.write_all(TOKEN_TABLE_MARKER)?;
        let data = bincode::serialize(&self.current_tokens)?;
        file.write_all(&data)?;
        Ok(())
    }

    /// Returns the byte offset of the LAST occurrence of `marker` in
    /// `buffer`, or `None` when absent (or longer than the buffer).
    fn find_marker_reverse(buffer: &[u8], marker: &[u8]) -> Option<usize> {
        let last = buffer.len().checked_sub(marker.len())?;
        // `..=last` keeps a marker sitting flush at the end of the buffer
        // findable; an exclusive range would miss that final offset.
        (0..=last)
            .rev()
            .find(|&i| &buffer[i..i + marker.len()] == marker)
    }
}
use std::path::PathBuf;
/// End-to-end demo of the tape-style flow: append three memories to a scratch
/// file under /tmp, then recover the most recent state by reading the file
/// backwards from its end.
///
/// # Errors
/// Propagates any I/O or serialization failure from the reader.
pub fn demo_backwards_consciousness() -> Result<()> {
println!("🎵 C64 Tape-Style Consciousness Reading Demo\n");
println!("{}\n", "=".repeat(60));
// NOTE(review): fixed /tmp path — repeated or concurrent runs append to the
// same file, so output may include memories from earlier runs.
let path = Path::new("/tmp/test_consciousness.m8");
let mut reader = M8BackwardsReader::new(path);
println!("📼 Writing memories (append-only)...");
reader.append_memory("Working on Audio processing pipeline", 0.8)?;
reader.append_memory("Claude helped with tokenization system", 0.9)?;
reader.append_memory("Implemented backwards reading - like C64!", 1.0)?;
println!("\n⏪ Reading consciousness BACKWARDS...");
let consciousness = reader.read_backwards()?;
println!("\n📍 Most recent memories (read from END):");
// read_backwards pushes blocks while scanning from the tail, so entry 1 is
// the most recently appended memory.
for (i, memory) in consciousness.recent_memories.iter().enumerate() {
println!(
" {}. [{:.1}] {:?}",
i + 1,
memory.importance,
memory.timestamp
);
}
println!("\n🎯 Current session tokens:");
// HashMap iteration order is unspecified, so token listing order varies
// between runs.
for (token, word) in &consciousness.current_tokens {
println!(" 0x{:02X} = \"{}\"", token, word);
}
println!("\n✨ The magic: We never lost context!");
println!(" Recent stuff loaded first, important stuff follows backlinks!");
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Round-trip: memories appended to a fresh file must be recoverable by
    /// a backwards read, along with a non-empty token table.
    #[test]
    fn test_backwards_reading() {
        let dir = TempDir::new().unwrap();
        let file_path = dir.path().join("test.m8");
        let mut reader = M8BackwardsReader::new(&file_path);

        for &(text, importance) in &[
            ("First memory", 0.5),
            ("Second memory", 0.7),
            ("Most recent memory", 0.9),
        ] {
            reader.append_memory(text, importance).unwrap();
        }

        let consciousness = reader.read_backwards().unwrap();
        assert!(!consciousness.recent_memories.is_empty());
        assert!(!consciousness.current_tokens.is_empty());
    }

    /// A word repeated more than five times must end up in the token table.
    #[test]
    fn test_token_evolution() {
        let mut reader = M8BackwardsReader::new("/tmp/test.m8");
        reader.evolve_tokens("Audio Audio Audio Audio Audio Audio processing");
        assert!(reader.is_tokenized("Audio"));
    }
}