1use std::collections::{HashMap, HashSet};
10use std::fs;
11use std::path::{Path, PathBuf};
12use std::process::Command;
13
14use super::types_chunked::*;
15
/// Options controlling an incremental blueprint update.
#[derive(Debug, Clone, Default)]
pub struct UpdateOptions {
    /// Rebuild every chunk from all source files instead of only changed ones.
    pub full_rebuild: bool,
    /// Explicit list of files to treat as changed (bypasses git detection).
    pub files: Option<Vec<String>>,
    /// Restrict the update to files under this directory (relative to the root).
    pub target_dir: Option<String>,
    /// Emit progress messages while updating.
    pub verbose: bool,
    /// Optional callback receiving progress messages; when absent, messages go to stdout.
    pub on_progress: Option<fn(&str)>,
}
30
/// Summary of one incremental update run.
#[derive(Debug, Clone)]
pub struct UpdateResult {
    /// Human-readable outcome message (also used for early-exit explanations).
    pub message: String,
    /// Number of chunk files that were rewritten.
    pub chunks_updated: usize,
    /// The changed files that drove this update.
    pub files: Vec<String>,
    /// Directories whose chunks were considered affected ("" = project root).
    pub affected_dirs: Vec<String>,
}
43
/// File lists parsed from `git diff --name-status` output.
#[derive(Debug, Clone, Default)]
struct GitDiffResult {
    /// Paths with status 'M'.
    modified_files: Vec<String>,
    /// Paths with status 'A' (renames contribute their new path here).
    added_files: Vec<String>,
    /// Paths with status 'D' (renames contribute their old path here).
    deleted_files: Vec<String>,
}
54
/// Performs incremental updates of the chunked blueprint stored under
/// `<root>/.claude/map`.
pub struct IncrementalBlueprintUpdater {
    /// Project root directory.
    root_path: PathBuf,
    /// `<root>/.claude/map` — blueprint storage directory.
    map_dir: PathBuf,
    /// `<map_dir>/chunks` — per-directory chunk JSON files.
    chunks_dir: PathBuf,
    /// `<map_dir>/index.json` — the chunked index file.
    index_path: PathBuf,
    /// Loaded index; `None` until `load_index` succeeds.
    index: Option<ChunkedIndex>,
}
63
64impl IncrementalBlueprintUpdater {
65 pub fn new(root_path: impl AsRef<Path>) -> Self {
67 let root = root_path.as_ref().to_path_buf();
68 let map_dir = root.join(".claude").join("map");
69 let chunks_dir = map_dir.join("chunks");
70 let index_path = map_dir.join("index.json");
71
72 Self {
73 root_path: root,
74 map_dir,
75 chunks_dir,
76 index_path,
77 index: None,
78 }
79 }
80
81 pub fn update(&mut self, options: &UpdateOptions) -> UpdateResult {
83 self.log(options, "开始增量更新...");
84
85 if !self.index_path.exists() {
87 return UpdateResult {
88 message: "蓝图不存在,请先运行 /map generate".to_string(),
89 chunks_updated: 0,
90 files: vec![],
91 affected_dirs: vec![],
92 };
93 }
94
95 if let Err(e) = self.load_index() {
97 return UpdateResult {
98 message: format!("加载索引失败: {}", e),
99 chunks_updated: 0,
100 files: vec![],
101 affected_dirs: vec![],
102 };
103 }
104
105 let changed_files = self.detect_changed_files(options);
107
108 if changed_files.is_empty() {
109 return UpdateResult {
110 message: "没有检测到变更".to_string(),
111 chunks_updated: 0,
112 files: vec![],
113 affected_dirs: vec![],
114 };
115 }
116
117 self.log(
118 options,
119 &format!("检测到 {} 个变更文件", changed_files.len()),
120 );
121
122 let affected_dirs = self.analyze_impact(&changed_files, options);
124 self.log(
125 options,
126 &format!("影响范围:{} 个目录", affected_dirs.len()),
127 );
128
129 let updated_chunks = self.regenerate_chunks(&affected_dirs, options);
131 self.log(
132 options,
133 &format!("已更新 {} 个 chunk", updated_chunks.len()),
134 );
135
136 self.update_index(&updated_chunks, &changed_files, options);
138
139 UpdateResult {
140 message: format!("✓ 已更新 {} 个 chunk", updated_chunks.len()),
141 chunks_updated: updated_chunks.len(),
142 files: changed_files,
143 affected_dirs: affected_dirs.into_iter().collect(),
144 }
145 }
146
147 fn load_index(&mut self) -> Result<(), String> {
149 let content =
150 fs::read_to_string(&self.index_path).map_err(|e| format!("读取索引失败: {}", e))?;
151 self.index =
152 Some(serde_json::from_str(&content).map_err(|e| format!("解析索引失败: {}", e))?);
153 Ok(())
154 }
155
156 fn detect_changed_files(&self, options: &UpdateOptions) -> Vec<String> {
158 if options.full_rebuild {
160 return self.get_all_source_files();
161 }
162
163 if let Some(ref files) = options.files {
165 return files
166 .iter()
167 .filter(|f| self.is_source_file(f))
168 .cloned()
169 .collect();
170 }
171
172 if let Some(ref target_dir) = options.target_dir {
174 return self.get_files_in_directory(target_dir);
175 }
176
177 match self.get_git_diff() {
179 Ok(git_diff) => {
180 let mut all_changed = Vec::new();
181 all_changed.extend(git_diff.modified_files);
182 all_changed.extend(git_diff.added_files);
183 all_changed.extend(git_diff.deleted_files);
184 all_changed
185 .into_iter()
186 .filter(|f| self.is_source_file(f))
187 .collect()
188 }
189 Err(e) => {
190 self.log(options, &format!("Git diff 失败: {}", e));
191 vec![]
192 }
193 }
194 }
195
196 fn get_git_diff(&self) -> Result<GitDiffResult, String> {
198 let mut result = GitDiffResult::default();
199
200 let unstaged = Command::new("git")
202 .args(["diff", "--name-status"])
203 .current_dir(&self.root_path)
204 .output()
205 .map_err(|e| format!("执行 git diff 失败: {}", e))?;
206
207 let staged = Command::new("git")
209 .args(["diff", "--cached", "--name-status"])
210 .current_dir(&self.root_path)
211 .output()
212 .map_err(|e| format!("执行 git diff --cached 失败: {}", e))?;
213
214 self.parse_git_output(&String::from_utf8_lossy(&unstaged.stdout), &mut result);
216 self.parse_git_output(&String::from_utf8_lossy(&staged.stdout), &mut result);
217
218 Ok(result)
219 }
220
221 fn parse_git_output(&self, output: &str, result: &mut GitDiffResult) {
223 for line in output.lines().filter(|l| !l.is_empty()) {
224 let parts: Vec<&str> = line.split('\t').collect();
225 if parts.len() < 2 {
226 continue;
227 }
228
229 let status = parts[0].chars().next().unwrap_or(' ');
230 let file = parts[1..].join("\t");
231
232 match status {
233 'M' => {
234 if !result.modified_files.contains(&file) {
235 result.modified_files.push(file);
236 }
237 }
238 'A' => {
239 if !result.added_files.contains(&file) {
240 result.added_files.push(file);
241 }
242 }
243 'D' => {
244 if !result.deleted_files.contains(&file) {
245 result.deleted_files.push(file);
246 }
247 }
248 'R' => {
249 if parts.len() >= 3 {
251 result.deleted_files.push(parts[1].to_string());
252 result.added_files.push(parts[2].to_string());
253 }
254 }
255 _ => {}
256 }
257 }
258 }
259
260 fn get_all_source_files(&self) -> Vec<String> {
262 let mut files = Vec::new();
263 let src_dir = self.root_path.join("src");
264
265 if src_dir.exists() {
266 self.collect_source_files(&src_dir, &mut files);
267 }
268
269 files
270 }
271
272 fn collect_source_files(&self, dir: &Path, files: &mut Vec<String>) {
274 if let Ok(entries) = fs::read_dir(dir) {
275 for entry in entries.flatten() {
276 let path = entry.path();
277 if path.is_dir() {
278 let name = path.file_name().unwrap_or_default().to_string_lossy();
280 if name != "node_modules" && name != "dist" && name != "target" {
281 self.collect_source_files(&path, files);
282 }
283 } else if self.is_source_file(&path.to_string_lossy()) {
284 if let Ok(rel_path) = path.strip_prefix(&self.root_path) {
285 files.push(rel_path.to_string_lossy().to_string());
286 }
287 }
288 }
289 }
290 }
291
292 fn get_files_in_directory(&self, dir: &str) -> Vec<String> {
294 let mut files = Vec::new();
295 let target_dir = self.root_path.join(dir);
296
297 if target_dir.exists() {
298 self.collect_source_files(&target_dir, &mut files);
299 }
300
301 files
302 }
303
304 fn is_source_file(&self, file_path: &str) -> bool {
306 let source_exts = [".ts", ".tsx", ".js", ".jsx", ".rs", ".py", ".go"];
307 let path = Path::new(file_path);
308
309 if let Some(ext) = path.extension() {
310 let ext_str = format!(".{}", ext.to_string_lossy());
311 source_exts.contains(&ext_str.as_str())
312 && !file_path.ends_with(".d.ts")
313 && !file_path.contains("node_modules")
314 && !file_path.contains("dist/")
315 && !file_path.contains("target/")
316 } else {
317 false
318 }
319 }
320
321 fn analyze_impact(
323 &self,
324 changed_files: &[String],
325 _options: &UpdateOptions,
326 ) -> HashSet<String> {
327 let mut affected_dirs = HashSet::new();
328
329 if let Some(ref index) = self.index {
330 for file in changed_files {
331 if let Some(parent) = Path::new(file).parent() {
333 let dir_path = parent.to_string_lossy().to_string();
334 affected_dirs.insert(if dir_path == "." {
335 String::new()
336 } else {
337 dir_path
338 });
339 }
340
341 let dependents = self.find_dependents(file, index);
343 for dep in dependents {
344 if let Some(parent) = Path::new(&dep).parent() {
345 let dir_path = parent.to_string_lossy().to_string();
346 affected_dirs.insert(if dir_path == "." {
347 String::new()
348 } else {
349 dir_path
350 });
351 }
352 }
353 }
354 }
355
356 affected_dirs
357 }
358
359 fn find_dependents(&self, module_id: &str, index: &ChunkedIndex) -> Vec<String> {
361 let mut dependents = Vec::new();
362
363 if let Some(ref graph) = index.global_dependency_graph {
364 if let Some(node) = graph.get(module_id) {
365 if node.exports_symbols {
367 dependents.extend(node.imported_by.clone());
368 }
369 }
370 }
371
372 dependents
373 }
374
375 fn regenerate_chunks(
377 &self,
378 affected_dirs: &HashSet<String>,
379 options: &UpdateOptions,
380 ) -> Vec<String> {
381 let mut updated_chunks = Vec::new();
382
383 for dir_path in affected_dirs {
384 self.log(
385 options,
386 &format!(
387 "正在更新 chunk: {}",
388 if dir_path.is_empty() {
389 "root"
390 } else {
391 dir_path
392 }
393 ),
394 );
395
396 let files = if dir_path.is_empty() {
398 self.get_files_in_directory("src")
399 } else {
400 self.get_files_in_directory(dir_path)
401 };
402
403 if files.is_empty() {
404 let chunk_file_name = self.get_chunk_file_name(dir_path);
406 let chunk_path = self.chunks_dir.join(&chunk_file_name);
407 if chunk_path.exists() {
408 if let Err(e) = fs::remove_file(&chunk_path) {
409 self.log(options, &format!("删除空 chunk 失败: {}", e));
410 } else {
411 self.log(options, &format!("已删除空 chunk: {}", chunk_file_name));
412 }
413 }
414 continue;
415 }
416
417 if let Ok(chunk_data) = self.build_chunk_data(dir_path, &files) {
419 let chunk_file_name = self.get_chunk_file_name(dir_path);
421 let chunk_path = self.chunks_dir.join(&chunk_file_name);
422
423 if let Some(parent) = chunk_path.parent() {
425 let _ = fs::create_dir_all(parent);
426 }
427
428 match serde_json::to_string_pretty(&chunk_data) {
429 Ok(json) => {
430 if let Err(e) = fs::write(&chunk_path, json) {
431 self.log(options, &format!("写入 chunk 失败 ({}): {}", dir_path, e));
432 } else {
433 updated_chunks.push(dir_path.clone());
434 }
435 }
436 Err(e) => {
437 self.log(options, &format!("序列化 chunk 失败 ({}): {}", dir_path, e));
438 }
439 }
440 }
441 }
442
443 updated_chunks
444 }
445
446 fn build_chunk_data(&self, dir_path: &str, _files: &[String]) -> Result<ChunkData, String> {
448 let chunk_file_name = self.get_chunk_file_name(dir_path);
450 let existing_chunk_path = self.chunks_dir.join(&chunk_file_name);
451 let existing_chunk: Option<ChunkData> = if existing_chunk_path.exists() {
452 fs::read_to_string(&existing_chunk_path)
453 .ok()
454 .and_then(|s| serde_json::from_str(&s).ok())
455 } else {
456 None
457 };
458
459 let mut chunk_data = ChunkData {
461 path: dir_path.to_string(),
462 modules: HashMap::new(),
463 symbols: HashMap::new(),
464 references: ChunkReferences {
465 module_deps: vec![],
466 symbol_calls: vec![],
467 type_refs: vec![],
468 },
469 metadata: None,
470 planned_modules: None,
471 refactoring_tasks: None,
472 module_design_meta: None,
473 };
474
475 if let Some(existing) = existing_chunk {
477 chunk_data.planned_modules = existing.planned_modules;
478 chunk_data.refactoring_tasks = existing.refactoring_tasks;
479 chunk_data.module_design_meta = existing.module_design_meta;
480 }
481
482 Ok(chunk_data)
483 }
484
485 fn get_chunk_file_name(&self, dir_path: &str) -> String {
487 if dir_path.is_empty() || dir_path == "." {
488 "root.json".to_string()
489 } else {
490 format!("{}.json", dir_path.replace(['/', '\\'], "_"))
491 }
492 }
493
    /// Refresh `index.json` after chunks have been rewritten.
    ///
    /// Updates the timestamp, recomputes aggregate statistics from the chunk
    /// files on disk, re-points the chunk index at rewritten chunks (removing
    /// entries whose chunk file no longer exists), refreshes the global
    /// dependency graph for the changed files, and writes the index back.
    /// No-op when no index has been loaded.
    fn update_index(
        &mut self,
        updated_chunks: &[String],
        changed_files: &[String],
        options: &UpdateOptions,
    ) {
        // Precompute (dir, chunk file name, exists) BEFORE taking the mutable
        // borrow of self.index below; the static helper avoids borrowing self.
        let chunk_updates: Vec<_> = updated_chunks
            .iter()
            .map(|dir_path| {
                let chunk_file_name = Self::get_chunk_file_name_static(dir_path);
                let chunk_path = self.chunks_dir.join(&chunk_file_name);
                (dir_path.clone(), chunk_file_name, chunk_path.exists())
            })
            .collect();

        if let Some(ref mut index) = self.index {
            index.meta.updated_at = Some(chrono::Utc::now().to_rfc3339());

            Self::recalculate_statistics_static(&self.chunks_dir, index);

            // Rewritten chunks get a fresh "chunks/<name>" entry; chunks whose
            // file vanished (directory emptied) are dropped from the index.
            for (dir_path, chunk_file_name, exists) in chunk_updates {
                if exists {
                    index
                        .chunk_index
                        .insert(dir_path, format!("chunks/{}", chunk_file_name));
                } else {
                    index.chunk_index.remove(&dir_path);
                }
            }

            Self::update_global_dependency_graph_static(&self.chunks_dir, changed_files, index);

            // `index` is last used here, ending its borrow so the `self.log`
            // and `self.index_path` uses in the arms below are valid (NLL).
            match serde_json::to_string_pretty(&index) {
                Ok(json) => {
                    if let Err(e) = fs::write(&self.index_path, json) {
                        self.log(options, &format!("写入 index.json 失败: {}", e));
                    } else {
                        self.log(options, "已更新 index.json");
                    }
                }
                Err(e) => {
                    self.log(options, &format!("序列化 index.json 失败: {}", e));
                }
            }
        }
    }
547
548 fn get_chunk_file_name_static(dir_path: &str) -> String {
550 if dir_path.is_empty() || dir_path == "." {
551 "root.json".to_string()
552 } else {
553 format!("{}.json", dir_path.replace(['/', '\\'], "_"))
554 }
555 }
556
557 fn recalculate_statistics_static(chunks_dir: &Path, index: &mut ChunkedIndex) {
559 let mut total_modules = 0;
560 let mut total_symbols = 0;
561 let mut total_lines = 0;
562 let mut total_module_deps = 0;
563 let mut total_symbol_calls = 0;
564 let mut total_type_refs = 0;
565
566 if let Ok(entries) = fs::read_dir(chunks_dir) {
568 for entry in entries.flatten() {
569 let path = entry.path();
570 if path.extension().is_some_and(|e| e == "json") {
571 if let Ok(content) = fs::read_to_string(&path) {
572 if let Ok(chunk) = serde_json::from_str::<ChunkData>(&content) {
573 total_modules += chunk.modules.len();
574 total_symbols += chunk.symbols.len();
575
576 for module in chunk.modules.values() {
577 total_lines += module.lines;
578 }
579
580 total_module_deps += chunk.references.module_deps.len();
581 total_symbol_calls += chunk.references.symbol_calls.len();
582 total_type_refs += chunk.references.type_refs.len();
583 }
584 }
585 }
586 }
587 }
588
589 index.statistics.total_modules = total_modules;
591 index.statistics.total_symbols = total_symbols;
592 index.statistics.total_lines = total_lines;
593 index.statistics.reference_stats = super::types_enhanced::ReferenceStats {
594 total_module_deps,
595 total_symbol_calls,
596 total_type_refs,
597 };
598 }
599
    /// Instance-method convenience wrapper around
    /// `recalculate_statistics_static`, using this updater's chunks directory.
    #[allow(dead_code)]
    fn recalculate_statistics(&self, index: &mut ChunkedIndex) {
        Self::recalculate_statistics_static(&self.chunks_dir, index);
    }
605
606 fn update_global_dependency_graph_static(
608 chunks_dir: &Path,
609 changed_files: &[String],
610 index: &mut ChunkedIndex,
611 ) {
612 if index.global_dependency_graph.is_none() {
613 return;
614 }
615
616 let graph = index.global_dependency_graph.as_mut().unwrap();
617
618 for file in changed_files {
619 let dir_path = Path::new(file)
620 .parent()
621 .map(|p| p.to_string_lossy().to_string())
622 .unwrap_or_default();
623 let dir_path = if dir_path == "." {
624 String::new()
625 } else {
626 dir_path
627 };
628
629 let chunk_file_name = Self::get_chunk_file_name_static(&dir_path);
630 let chunk_path = chunks_dir.join(&chunk_file_name);
631
632 if !chunk_path.exists() {
633 continue;
634 }
635
636 if let Ok(content) = fs::read_to_string(&chunk_path) {
637 if let Ok(chunk) = serde_json::from_str::<ChunkData>(&content) {
638 if let Some(module_info) = chunk.modules.get(file) {
639 let import_sources: Vec<String> = module_info
641 .imports
642 .iter()
643 .map(|imp| imp.source.clone())
644 .collect();
645
646 let existing_imported_by = graph
647 .get(file)
648 .map(|n| n.imported_by.clone())
649 .unwrap_or_default();
650
651 graph.insert(
652 file.clone(),
653 GlobalDependencyNode {
654 imports: import_sources,
655 imported_by: existing_imported_by,
656 exports_symbols: !module_info.exports.is_empty(),
657 },
658 );
659
660 for dep in &chunk.references.module_deps {
662 if dep.source == *file {
663 if let Some(target_node) = graph.get_mut(&dep.target) {
664 if !target_node.imported_by.contains(file) {
665 target_node.imported_by.push(file.clone());
666 }
667 }
668 }
669 }
670 }
671 }
672 }
673 }
674 }
675
    /// Instance-method convenience wrapper around
    /// `update_global_dependency_graph_static`, using this updater's chunks
    /// directory.
    #[allow(dead_code)]
    fn update_global_dependency_graph(&self, changed_files: &[String], index: &mut ChunkedIndex) {
        Self::update_global_dependency_graph_static(&self.chunks_dir, changed_files, index);
    }
681
682 fn log(&self, options: &UpdateOptions, message: &str) {
684 if options.verbose {
685 if let Some(callback) = options.on_progress {
686 callback(message);
687 } else {
688 println!("{}", message);
689 }
690 }
691 }
692}
693
694pub fn update_blueprint(root_path: impl AsRef<Path>, options: &UpdateOptions) -> UpdateResult {
700 let mut updater = IncrementalBlueprintUpdater::new(root_path);
701 updater.update(options)
702}