1use anyhow::Result;
6use chrono::{DateTime, Utc};
7use serde::{Deserialize, Serialize};
8use std::collections::HashMap;
9use std::fs::{File, OpenOptions};
10use std::io::{Read, Seek, SeekFrom, Write};
11use std::path::Path;
12
/// Number of bytes scanned from the end of the file when reading tail-first.
const TAIL_BUFFER_SIZE: i64 = 8192;
/// Marker written immediately before each serialized token table.
const TOKEN_TABLE_MARKER: &[u8] = b"TOKENS:";
/// Marker written immediately after each serialized memory block.
const BLOCK_END_MARKER: &[u8] = b"\x00BLK\x00";

/// Snapshot of state recovered by reading a `.m8` file backwards
/// (newest data first), as produced by `M8BackwardsReader::read_backwards`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackwardsConsciousness {
    // Most recent token table found in the file tail (token id -> word).
    pub current_tokens: HashMap<u8, String>,
    // Newest memory blocks, pushed in the order found (most recent first).
    pub recent_memories: Vec<MemoryBlock>,
    // Weighted links between block offsets (currently never populated
    // by `read_backwards` — left empty).
    pub importance_graph: Vec<ImportanceLink>,
    // NOTE(review): currently a hard-coded placeholder value set in
    // `read_backwards`; units/meaning to be confirmed.
    pub session_frequency: f64,
}
25
/// One append-only memory record stored in the `.m8` file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryBlock {
    // Wall-clock time the block was appended.
    pub timestamp: DateTime<Utc>,
    // Token-compressed content bytes (see `compress_with_tokens`).
    pub content: Vec<u8>,
    // Relevance score; keyword-boosted and clamped to 1.0 at append time.
    pub importance: f32,
    // Links back to earlier related blocks in the same file.
    pub backlinks: Vec<BackLink>,
    // Tokens first discovered while writing this block (word, token id).
    pub token_discoveries: Vec<(String, u8)>,
}
34
/// Pointer from a memory block back to an earlier block in the same file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackLink {
    // Absolute byte offset of the target block within the file.
    pub offset: u64,
    // Link strength; `follow_backlinks` only chases links > 0.7.
    pub importance: f32,
    // Human-readable note describing why the link exists.
    pub context: String,
}
41
/// Weighted, directed edge between two block offsets in the importance graph.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportanceLink {
    // Byte offset of the source block.
    pub from_offset: u64,
    // Byte offset of the destination block.
    pub to_offset: u64,
    // Edge weight.
    pub weight: f32,
    // Human-readable justification for the link.
    pub reason: String,
}
49
/// Append-only writer and tail-first ("C64 tape style") reader for `.m8`
/// consciousness files.
pub struct M8BackwardsReader {
    // Location of the backing `.m8` file; opened lazily per operation.
    path: PathBuf,
    // Live token table (token id -> word), seeded with the base tokens.
    current_tokens: HashMap<u8, String>,
    // Word occurrence counts used to promote frequent words to tokens.
    token_frequency: HashMap<String, u32>,
    // Next free dynamic token id; base tokens occupy 0x80..=0x87, dynamic
    // ids start at 0xA0.
    next_token_id: u8,
    // Keywords the user cares about, set via `set_user_context`.
    user_keywords: Vec<String>,
    // Per-word importance boosts derived from the user keywords.
    importance_boost: HashMap<String, f32>,
}
58
59impl M8BackwardsReader {
60 pub fn new(path: impl AsRef<Path>) -> Self {
61 Self {
62 path: path.as_ref().to_path_buf(),
63 current_tokens: Self::init_base_tokens(),
64 token_frequency: HashMap::new(),
65 next_token_id: 0xA0, user_keywords: Vec::new(),
67 importance_boost: HashMap::new(),
68 }
69 }
70
71 pub fn set_user_context(&mut self, keywords: Vec<String>) {
73 self.user_keywords = keywords.clone();
74
75 for keyword in keywords {
77 self.importance_boost.insert(keyword.clone(), 0.3); if keyword.to_lowercase().contains("audio") {
81 self.importance_boost.insert("sound".to_string(), 0.2);
82 self.importance_boost.insert("processing".to_string(), 0.2);
83 self.importance_boost.insert("voice".to_string(), 0.2);
84 }
85 if keyword.to_lowercase().contains("memory") {
86 self.importance_boost
87 .insert("consciousness".to_string(), 0.2);
88 self.importance_boost.insert("m8".to_string(), 0.2);
89 }
90 }
91 }
92
93 fn init_base_tokens() -> HashMap<u8, String> {
95 let mut tokens = HashMap::new();
96 tokens.insert(0x80, "node_modules".to_string());
98 tokens.insert(0x81, ".git".to_string());
99 tokens.insert(0x82, "target".to_string());
100 tokens.insert(0x83, "src".to_string());
101 tokens.insert(0x84, "Audio".to_string()); tokens.insert(0x85, "claude".to_string());
103 tokens.insert(0x86, "2024".to_string());
104 tokens.insert(0x87, "/aidata/ayeverse/smart-tree".to_string());
105 tokens
106 }
107
108 pub fn read_backwards(&mut self) -> Result<BackwardsConsciousness> {
110 let mut file = File::open(&self.path)?;
111 let file_size = file.metadata()?.len() as i64;
112
113 let read_start = (file_size - TAIL_BUFFER_SIZE).max(0);
115 file.seek(SeekFrom::Start(read_start as u64))?;
116
117 let mut buffer = Vec::new();
118 file.read_to_end(&mut buffer)?;
119
120 let mut consciousness = BackwardsConsciousness {
122 current_tokens: HashMap::new(),
123 recent_memories: Vec::new(),
124 importance_graph: Vec::new(),
125 session_frequency: 42.73, };
127
128 if let Some(token_pos) = Self::find_marker_reverse(&buffer, TOKEN_TABLE_MARKER) {
130 consciousness.current_tokens = self.parse_token_table(&buffer[token_pos..])?;
131 self.current_tokens = consciousness.current_tokens.clone();
132 }
133
134 let mut pos = buffer.len();
136 while pos > 0 {
137 if let Some(block_start) = Self::find_marker_reverse(&buffer[..pos], BLOCK_END_MARKER) {
138 let block = self.parse_memory_block(&buffer[block_start..pos])?;
139 consciousness.recent_memories.push(block);
140 pos = block_start;
141
142 if consciousness.recent_memories.len() >= 10 {
144 break;
145 }
146 } else {
147 break;
148 }
149 }
150
151 Ok(consciousness)
152 }
153
154 pub fn follow_backlinks(
156 &self,
157 consciousness: &BackwardsConsciousness,
158 ) -> Result<Vec<MemoryBlock>> {
159 let mut important_memories = Vec::new();
160 let mut file = File::open(&self.path)?;
161
162 for memory in &consciousness.recent_memories {
163 for backlink in &memory.backlinks {
164 if backlink.importance > 0.7 {
165 file.seek(SeekFrom::Start(backlink.offset))?;
167
168 let mut block_buffer = vec![0u8; 4096];
169 file.read_exact(&mut block_buffer)?;
170
171 if let Ok(block) = self.parse_memory_block(&block_buffer) {
172 important_memories.push(block);
173 }
174 }
175 }
176 }
177
178 Ok(important_memories)
179 }
180
181 pub fn append_memory(&mut self, content: &str, base_importance: f32) -> Result<()> {
183 self.evolve_tokens(content);
185
186 let mut importance = base_importance;
188 for keyword in &self.user_keywords {
189 if content.to_lowercase().contains(&keyword.to_lowercase()) {
190 importance += self.importance_boost.get(keyword).unwrap_or(&0.2);
191 importance = importance.min(1.0); }
193 }
194
195 let compressed = self.compress_with_tokens(content)?;
197
198 let backlinks = self.find_backlinks(content)?;
200
201 let block = MemoryBlock {
202 timestamp: Utc::now(),
203 content: compressed,
204 importance,
205 backlinks,
206 token_discoveries: self.get_new_tokens(),
207 };
208
209 let mut file = OpenOptions::new()
211 .create(true)
212 .append(true)
213 .open(&self.path)?;
214
215 self.write_memory_block(&mut file, &block)?;
217
218 self.write_token_table(&mut file)?;
220
221 Ok(())
222 }
223
224 fn evolve_tokens(&mut self, content: &str) {
226 for word in content.split_whitespace() {
227 *self.token_frequency.entry(word.to_string()).or_default() += 1;
228
229 if self.token_frequency[word] > 5 && !self.is_tokenized(word) {
231 self.current_tokens
232 .insert(self.next_token_id, word.to_string());
233 self.next_token_id += 1;
234 }
235 }
236 }
237
238 fn is_tokenized(&self, word: &str) -> bool {
240 self.current_tokens.values().any(|v| v == word)
241 }
242
243 fn compress_with_tokens(&self, content: &str) -> Result<Vec<u8>> {
245 let mut compressed = Vec::new();
246 let mut remaining = content.to_string();
247
248 for (token, word) in &self.current_tokens {
250 remaining = remaining.replace(word, &format!("\x00{}\x00", token));
251 }
252
253 compressed.extend_from_slice(remaining.as_bytes());
254 Ok(compressed)
255 }
256
257 fn find_backlinks(&self, _content: &str) -> Result<Vec<BackLink>> {
259 Ok(Vec::new())
261 }
262
263 fn get_new_tokens(&self) -> Vec<(String, u8)> {
265 Vec::new() }
268
269 fn parse_token_table(&self, _buffer: &[u8]) -> Result<HashMap<u8, String>> {
271 Ok(self.current_tokens.clone())
273 }
274
275 fn parse_memory_block(&self, _buffer: &[u8]) -> Result<MemoryBlock> {
277 Ok(MemoryBlock {
279 timestamp: Utc::now(),
280 content: Vec::new(),
281 importance: 0.5,
282 backlinks: Vec::new(),
283 token_discoveries: Vec::new(),
284 })
285 }
286
287 fn write_memory_block(&self, file: &mut File, block: &MemoryBlock) -> Result<()> {
289 let data = bincode::serialize(block)?;
291 file.write_all(&data)?;
292 file.write_all(BLOCK_END_MARKER)?;
293 Ok(())
294 }
295
296 fn write_token_table(&self, file: &mut File) -> Result<()> {
298 file.write_all(TOKEN_TABLE_MARKER)?;
299 let data = bincode::serialize(&self.current_tokens)?;
300 file.write_all(&data)?;
301 Ok(())
302 }
303
304 fn find_marker_reverse(buffer: &[u8], marker: &[u8]) -> Option<usize> {
306 (0..buffer.len().saturating_sub(marker.len()))
307 .rev()
308 .find(|&i| &buffer[i..i + marker.len()] == marker)
309 }
310}
311
312use std::path::PathBuf;
313
/// End-to-end demo: append three memories to a scratch `.m8` file, then
/// read them back tail-first and print what was recovered.
///
/// # Errors
/// Propagates any I/O or serialization failure from the reader.
pub fn demo_backwards_consciousness() -> Result<()> {
    println!("🎵 C64 Tape-Style Consciousness Reading Demo\n");
    println!("{}\n", "=".repeat(60));

    // NOTE(review): fixed /tmp path — repeated runs append to the same
    // file, so memory counts accumulate across invocations; confirm that
    // is intended for the demo.
    let path = Path::new("/tmp/test_consciousness.m8");
    let mut reader = M8BackwardsReader::new(path);

    // Append-only writes: each call adds a block plus a fresh token table.
    println!("📼 Writing memories (append-only)...");
    reader.append_memory("Working on Audio processing pipeline", 0.8)?;
    reader.append_memory("Claude helped with tokenization system", 0.9)?;
    reader.append_memory("Implemented backwards reading - like C64!", 1.0)?;

    // Tail-first read: newest blocks and the latest token table.
    println!("\n⏪ Reading consciousness BACKWARDS...");
    let consciousness = reader.read_backwards()?;

    println!("\n📍 Most recent memories (read from END):");
    for (i, memory) in consciousness.recent_memories.iter().enumerate() {
        println!(
            " {}. [{:.1}] {:?}",
            i + 1,
            memory.importance,
            memory.timestamp
        );
    }

    println!("\n🎯 Current session tokens:");
    for (token, word) in &consciousness.current_tokens {
        println!(" 0x{:02X} = \"{}\"", token, word);
    }

    println!("\n✨ The magic: We never lost context!");
    println!(" Recent stuff loaded first, important stuff follows backlinks!");

    Ok(())
}
352
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // Appending memories and reading the file back tail-first should
    // surface both recent blocks and a non-empty token table.
    #[test]
    fn test_backwards_reading() {
        let scratch = TempDir::new().unwrap();
        let m8_path = scratch.path().join("test.m8");

        let mut tape = M8BackwardsReader::new(&m8_path);

        tape.append_memory("First memory", 0.5).unwrap();
        tape.append_memory("Second memory", 0.7).unwrap();
        tape.append_memory("Most recent memory", 0.9).unwrap();

        let state = tape.read_backwards().unwrap();

        assert!(!state.recent_memories.is_empty());
        assert!(!state.current_tokens.is_empty());
    }

    // A word seen more than five times must be promoted to a token.
    #[test]
    fn test_token_evolution() {
        let mut tape = M8BackwardsReader::new("/tmp/test.m8");

        // "Audio" appears six times, crossing the >5 promotion threshold.
        tape.evolve_tokens("Audio Audio Audio Audio Audio Audio processing");

        assert!(tape.is_tokenized("Audio"));
    }
}