// st/formatters/quantum_semantic.rs
use super::Formatter;
use crate::dynamic_tokenizer::DynamicTokenizer;
use crate::scanner::{FileNode, TreeStats};
use crate::tree_sitter_quantum::SemanticQuantumCompressor;
use anyhow::Result;
use std::io::Write;
use std::path::Path;

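/// Formatter for the quantum-semantic output mode: writes a
/// `QUANTUM_SEMANTIC_V2` header, a dynamic token table, tree statistics,
/// and a tokenizer-compressed entry per recognized source file.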
pub struct QuantumSemanticFormatter {
    /// Semantic compressor (not yet used by `format`).
    #[allow(dead_code)]
    compressor: SemanticQuantumCompressor,
    /// Cap on semantic nodes emitted per file (not yet used by `format`).
    #[allow(dead_code)]
    max_nodes_per_file: usize,
}

impl Default for QuantumSemanticFormatter {
    fn default() -> Self {
        Self::new()
    }
}

impl QuantumSemanticFormatter {
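    /// Builds a formatter with a fresh compressor and a default cap of 10
    /// semantic nodes per file.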
    pub fn new() -> Self {
        Self {
            compressor: SemanticQuantumCompressor::new(),
            max_nodes_per_file: 10,
        }
    }
}

impl Formatter for QuantumSemanticFormatter {
    fn format(
        &self,
        writer: &mut dyn Write,
        nodes: &[FileNode],
        stats: &TreeStats,
        root_path: &Path,
    ) -> Result<()> {
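        // Build the dynamic token table from the file tree before emitting anything.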
        let mut tokenizer = DynamicTokenizer::new();
        tokenizer.analyze(nodes);

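        // Header: format version tag plus the tokenizer's token-definition block.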
        writeln!(writer, "QUANTUM_SEMANTIC_V2:")?;
        write!(writer, "{}", tokenizer.get_token_header())?;

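        // Fixed semantic tokens; F6-F8 carry bracketed importance scores.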
        writeln!(writer, " F0=fn")?;
        writeln!(writer, " F1=struct")?;
        writeln!(writer, " F2=trait")?;
        writeln!(writer, " F3=impl")?;
        writeln!(writer, " F4=class")?;
        writeln!(writer, " F5=def")?;
        writeln!(writer, " F6=main[1.0]")?;
        writeln!(writer, " F7=pub[0.9]")?;
        writeln!(writer, " F8=test[0.3]")?;
        writeln!(writer)?;

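        // Root path and aggregate stats (file/dir/size counts, hex-encoded).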
        writeln!(writer, "ROOT:{}", root_path.display())?;
        writeln!(
            writer,
            "STATS:F{:x}D{:x}S{:x}",
            stats.total_files, stats.total_dirs, stats.total_size
        )?;

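        // Tokenizer summary: patterns found, tokens generated, estimated bytes saved.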
        let token_stats = tokenizer.get_stats();
        writeln!(
            writer,
            "TOKENIZER:patterns={},tokens={},saved={}B",
            token_stats.patterns_found, token_stats.tokens_generated, token_stats.estimated_savings
        )?;
        writeln!(writer)?;
        writeln!(writer, "DATA:")?;

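        // One line per recognized source file: language marker followed by the
        // tokenizer-compressed relative path.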
        for node in nodes {
            if !node.is_dir {
                let relative = node.path.strip_prefix(root_path).unwrap_or(&node.path);
                let path_str = relative.to_string_lossy().to_string();

                let compressed_path = tokenizer.compress_path(&path_str);

                if let Some(ext) = node.path.extension().and_then(|e| e.to_str()) {
                    let lang_marker = match ext {
                        "rs" => "@",
                        "py" => "#",
                        "js" => "$",
                        "ts" => "%",
                        _ => "",
                    };

                    if !lang_marker.is_empty() {
                        write!(writer, "{}", lang_marker)?;
                        write!(writer, "{}", compressed_path)?;

                        writeln!(writer)?;
                    }
                }
            }
        }

        writeln!(writer)?;
        writeln!(writer, "END_QS")?;

        Ok(())
    }
}