1use crate::config::FoundryConfig;
2use crate::config::ProjectIndexCacheMode;
3use crate::goto::{CachedBuild, NodeInfo};
4use crate::types::{AbsPath, NodeId, RelPath};
5use serde::{Deserialize, Serialize};
6use serde_json::Value;
7use std::collections::{BTreeMap, HashMap};
8use std::fs;
9use std::io::Write;
10use std::path::{Path, PathBuf};
11use std::time::Instant;
12use tiny_keccak::{Hasher, Keccak};
13
// Bump whenever the on-disk layout changes; mismatched caches are rejected on load.
const CACHE_SCHEMA_VERSION_V2: u32 = 3;
// Directory under the project root holding all persisted LSP state.
const CACHE_DIR: &str = ".solidity-language-server";
// Metadata file for the v2 reference cache (hashes, shard map, external refs).
const CACHE_FILE_V2: &str = "solidity-lsp-schema-v2.json";
// Sub-directory containing one JSON shard per indexed source file.
const CACHE_SHARDS_DIR_V2: &str = "reference-index-v2";
// Sanitised copy of the last solc standard-json input, kept for debugging.
const CACHE_SOLC_INPUT_FILE: &str = "last-solc-input.json";
// A `.gitignore` containing "*" is seeded into the cache dir so it is never committed.
const CACHE_GITIGNORE_FILE: &str = ".gitignore";
const CACHE_GITIGNORE_CONTENTS: &str = "*\n";
21
/// One persisted AST node: its raw solc id plus the metadata kept for
/// navigation (go-to-definition / references).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedNodeEntry {
    // Raw solc AST node id; wrapped as `NodeId` when loaded back into memory.
    id: i64,
    info: NodeInfo,
}
27
/// A cross-file reference: a source location that points at a declaration
/// node (`decl_id`) living in some other file's shard.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedExternalRef {
    src: crate::types::SrcLocation,
    // Raw id of the referenced declaration; wrapped as `NodeId` on load.
    decl_id: i64,
}
33
/// On-disk shard: all persisted nodes belonging to a single source file.
/// Stored as one JSON file per source file under the shards directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedFileShardV2 {
    // Absolute path of the source file the entries belong to.
    abs_path: String,
    entries: Vec<PersistedNodeEntry>,
}
39
/// Top-level metadata for the on-disk v2 reference cache. Node data itself
/// lives in per-file shard JSONs referenced by `node_shards`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedReferenceCacheV2 {
    // Must equal CACHE_SCHEMA_VERSION_V2 or the cache is rejected on load.
    schema_version: u32,
    // Absolute project root this cache was built for.
    project_root: String,
    // Hash of LSP version + compiler-relevant config; any change invalidates.
    config_fingerprint: String,
    // keccak hash of each source file, keyed by project-relative path.
    file_hashes: BTreeMap<String, String>,
    // Rolling history of recent hashes per file; defaults empty so caches
    // written before this field existed still decode.
    #[serde(default)]
    file_hash_history: BTreeMap<String, Vec<String>>,
    // Relative source path -> absolute path (rebuilt into RelPath/AbsPath on load).
    path_to_abs: HashMap<String, String>,
    // solc file id -> source path.
    id_to_path_map: HashMap<crate::types::SolcFileId, String>,
    // Cross-file references into declaration nodes stored in the shards.
    external_refs: Vec<PersistedExternalRef>,
    // Project-relative path -> shard file name under the shards directory.
    node_shards: BTreeMap<String, String>,
}
54
/// Outcome of attempting to load the reference cache.
#[derive(Debug, Clone)]
pub struct CacheLoadReport {
    // Reconstructed index, when at least one shard could be reused.
    pub build: Option<CachedBuild>,
    // True when `build` is populated (the hit may still be partial).
    pub hit: bool,
    // Why the load missed, or why a hit is only partial; None on a full hit.
    pub miss_reason: Option<String>,
    // Number of source files re-hashed during validation.
    pub file_count_hashed: usize,
    // Number of files whose cached shard was loaded unchanged.
    pub file_count_reused: usize,
    // True when every hashed file was reused and the hash maps match exactly.
    pub complete: bool,
    pub duration_ms: u128,
}
65
/// Outcome of persisting the reference cache.
#[derive(Debug, Clone)]
pub struct CacheSaveReport {
    // Files hashed on a full save, or shards rewritten on an incremental upsert.
    pub file_count_hashed: usize,
    pub duration_ms: u128,
}
71
/// Root of the on-disk cache directory: `<root>/.solidity-language-server`.
pub fn cache_dir(root: &Path) -> PathBuf {
    root.join(CACHE_DIR)
}
77
/// Path of the v2 cache metadata file inside the cache directory.
fn cache_file_path_v2(root: &Path) -> PathBuf {
    root.join(CACHE_DIR).join(CACHE_FILE_V2)
}
81
/// Path of the directory holding the per-file node shards.
fn cache_shards_dir_v2(root: &Path) -> PathBuf {
    root.join(CACHE_DIR).join(CACHE_SHARDS_DIR_V2)
}
85
/// Path of the sanitised last-solc-input debug file.
fn cache_solc_input_path(root: &Path) -> PathBuf {
    root.join(CACHE_DIR).join(CACHE_SOLC_INPUT_FILE)
}
89
90pub fn save_last_solc_input(root: &Path, input: &Value) -> Result<(), String> {
97 let cache_root = root.join(CACHE_DIR);
98 fs::create_dir_all(&cache_root)
99 .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
100 let path = cache_solc_input_path(root);
101
102 let mut sanitised = input.clone();
104 if let Some(sources) = sanitised.get_mut("sources").and_then(|s| s.as_object_mut()) {
105 for (rel_path, entry) in sources.iter_mut() {
106 if entry.get("content").is_some() {
107 *entry = serde_json::json!({ "urls": [rel_path.clone()] });
108 }
109 }
110 }
111
112 let bytes = serde_json::to_vec_pretty(&sanitised)
113 .map_err(|e| format!("failed to serialize solc input: {e}"))?;
114 let mut file =
115 fs::File::create(&path).map_err(|e| format!("failed to create {}: {e}", path.display()))?;
116 file.write_all(&bytes)
117 .map_err(|e| format!("failed to write {}: {e}", path.display()))?;
118 Ok(())
119}
120
121fn ensure_cache_dir_layout(root: &Path) -> Result<(PathBuf, PathBuf), String> {
122 let cache_root = root.join(CACHE_DIR);
123 fs::create_dir_all(&cache_root)
124 .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
125
126 let gitignore_path = cache_root.join(CACHE_GITIGNORE_FILE);
128 if !gitignore_path.exists() {
129 fs::write(&gitignore_path, CACHE_GITIGNORE_CONTENTS).map_err(|e| {
130 format!(
131 "failed to write cache gitignore {}: {e}",
132 gitignore_path.display()
133 )
134 })?;
135 }
136
137 let shards_dir = cache_shards_dir_v2(root);
138 fs::create_dir_all(&shards_dir)
139 .map_err(|e| format!("failed to create shards dir {}: {e}", shards_dir.display()))?;
140
141 Ok((cache_root, shards_dir))
142}
143
144fn shard_file_name_for_rel_path(rel_path: &str) -> String {
145 format!("{}.json", keccak_hex(rel_path.as_bytes()))
146}
147
/// Write `payload` to `path` atomically: write a sibling `*.tmp` file,
/// flush + fsync it, then rename over the destination so readers never
/// observe a partially written file.
fn write_atomic_json(path: &Path, payload: &[u8]) -> Result<(), String> {
    // "out.json" -> "out.json.tmp"; a path without an extension gets ".tmp".
    let ext = path
        .extension()
        .and_then(|s| s.to_str())
        .unwrap_or_default();
    let tmp_path = path.with_extension(format!("{ext}.tmp"));

    let mut file = fs::File::create(&tmp_path)
        .map_err(|e| format!("create tmp {}: {e}", tmp_path.display()))?;
    file.write_all(payload)
        .map_err(|e| format!("write tmp {}: {e}", tmp_path.display()))?;
    file.flush()
        .map_err(|e| format!("flush tmp {}: {e}", tmp_path.display()))?;
    file.sync_all()
        .map_err(|e| format!("sync tmp {}: {e}", tmp_path.display()))?;
    // Close the handle before renaming over the destination.
    drop(file);

    fs::rename(&tmp_path, path).map_err(|e| {
        format!(
            "rename tmp {} -> {}: {e}",
            tmp_path.display(),
            path.display()
        )
    })
}
173
174fn keccak_hex(bytes: &[u8]) -> String {
175 let mut out = [0u8; 32];
176 let mut hasher = Keccak::v256();
177 hasher.update(bytes);
178 hasher.finalize(&mut out);
179 hex::encode(out)
180}
181
182fn file_hash(path: &Path) -> Option<String> {
183 let bytes = fs::read(path).ok()?;
184 Some(keccak_hex(&bytes))
185}
186
/// Render `file` relative to `root`, with forward slashes on every platform;
/// falls back to the full path when `file` is not under `root`.
fn relative_to_root(root: &Path, file: &Path) -> String {
    let rel = file.strip_prefix(root).unwrap_or(file);
    rel.to_string_lossy().replace('\\', "/")
}
193
194fn current_file_hashes(
195 config: &FoundryConfig,
196 include_libs: bool,
197) -> Result<BTreeMap<String, String>, String> {
198 let source_files = if include_libs {
199 crate::solc::discover_source_files_with_libs(config)
200 } else {
201 crate::solc::discover_source_files(config)
202 };
203 hash_file_list(config, &source_files)
204}
205
206fn hash_file_list(
208 config: &FoundryConfig,
209 source_files: &[PathBuf],
210) -> Result<BTreeMap<String, String>, String> {
211 if source_files.is_empty() {
212 return Ok(BTreeMap::new());
213 }
214 let mut hashes = BTreeMap::new();
215 for path in source_files {
216 let rel = relative_to_root(&config.root, path);
217 let hash = file_hash(path)
218 .ok_or_else(|| format!("failed to hash source file {}", path.display()))?;
219 hashes.insert(rel, hash);
220 }
221 Ok(hashes)
222}
223
224fn config_fingerprint(config: &FoundryConfig) -> String {
225 let payload = serde_json::json!({
226 "lsp_version": env!("CARGO_PKG_VERSION"),
229 "solc_version": config.solc_version,
230 "remappings": config.remappings,
231 "evm_version": config.evm_version,
232 "sources_dir": config.sources_dir,
233 "libs": config.libs,
234 "via_ir": config.via_ir,
237 });
238 keccak_hex(payload.to_string().as_bytes())
239}
240
241fn push_hash_history(meta: &mut PersistedReferenceCacheV2, rel: &str, hash: &str) {
242 const MAX_HISTORY: usize = 8;
243 let history = meta.file_hash_history.entry(rel.to_string()).or_default();
244 if history.last().is_some_and(|h| h == hash) {
245 return;
246 }
247 history.push(hash.to_string());
248 if history.len() > MAX_HISTORY {
249 let drop_count = history.len() - MAX_HISTORY;
250 history.drain(0..drop_count);
251 }
252}
253
254pub fn save_reference_cache(config: &FoundryConfig, build: &CachedBuild) -> Result<(), String> {
255 save_reference_cache_with_report(config, build, None).map(|_| ())
256}
257
258pub fn upsert_reference_cache_v2_with_report(
271 config: &FoundryConfig,
272 build: &CachedBuild,
273 changed_abs_paths: &[String],
274) -> Result<CacheSaveReport, String> {
275 let started = Instant::now();
276 if !config.root.is_dir() {
277 return Err(format!("invalid project root: {}", config.root.display()));
278 }
279
280 let (_cache_root, shards_dir) = ensure_cache_dir_layout(&config.root)?;
281
282 let meta_path = cache_file_path_v2(&config.root);
285 let mut meta = if let Ok(bytes) = fs::read(&meta_path) {
286 serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes).unwrap_or(
287 PersistedReferenceCacheV2 {
288 schema_version: CACHE_SCHEMA_VERSION_V2,
289 project_root: config.root.to_string_lossy().to_string(),
290 config_fingerprint: config_fingerprint(config),
291 file_hashes: BTreeMap::new(),
292 file_hash_history: BTreeMap::new(),
293 path_to_abs: HashMap::new(),
294 id_to_path_map: HashMap::new(),
295 external_refs: Vec::new(),
296 node_shards: BTreeMap::new(),
297 },
298 )
299 } else {
300 PersistedReferenceCacheV2 {
301 schema_version: CACHE_SCHEMA_VERSION_V2,
302 project_root: config.root.to_string_lossy().to_string(),
303 config_fingerprint: config_fingerprint(config),
304 file_hashes: BTreeMap::new(),
305 file_hash_history: BTreeMap::new(),
306 path_to_abs: HashMap::new(),
307 id_to_path_map: HashMap::new(),
308 external_refs: Vec::new(),
309 node_shards: BTreeMap::new(),
310 }
311 };
312
313 if meta.project_root != config.root.to_string_lossy()
315 || meta.config_fingerprint != config_fingerprint(config)
316 {
317 meta = PersistedReferenceCacheV2 {
318 schema_version: CACHE_SCHEMA_VERSION_V2,
319 project_root: config.root.to_string_lossy().to_string(),
320 config_fingerprint: config_fingerprint(config),
321 file_hashes: BTreeMap::new(),
322 file_hash_history: BTreeMap::new(),
323 path_to_abs: HashMap::new(),
324 id_to_path_map: HashMap::new(),
325 external_refs: Vec::new(),
326 node_shards: BTreeMap::new(),
327 };
328 }
329
330 let changed_set: std::collections::HashSet<&str> =
332 changed_abs_paths.iter().map(|s| s.as_str()).collect();
333 let mut touched = 0usize;
334 for (abs_path, file_nodes) in &build.nodes {
335 if !changed_set.contains(abs_path.as_str()) {
336 continue;
337 }
338 let abs = Path::new(abs_path.as_str());
339 let rel = relative_to_root(&config.root, abs);
340 let shard_name = shard_file_name_for_rel_path(&rel);
341 let shard_path = shards_dir.join(&shard_name);
342
343 let mut entries = Vec::with_capacity(file_nodes.len());
344 for (id, info) in file_nodes {
345 entries.push(PersistedNodeEntry {
346 id: id.0,
347 info: info.clone(),
348 });
349 }
350 let shard = PersistedFileShardV2 {
351 abs_path: abs_path.to_string(),
352 entries,
353 };
354 let shard_payload =
355 serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
356 write_atomic_json(&shard_path, &shard_payload)?;
357
358 if let Some(hash) = file_hash(abs) {
359 push_hash_history(&mut meta, &rel, &hash);
360 meta.file_hashes.insert(rel.clone(), hash);
361 meta.node_shards.insert(rel, shard_name);
362 touched += 1;
363 }
364 }
365
366 meta.path_to_abs = build
370 .path_to_abs
371 .iter()
372 .map(|(k, v)| (k.to_string(), v.to_string()))
373 .collect();
374 meta.id_to_path_map = build.id_to_path_map.clone();
375 meta.external_refs = build
376 .external_refs
377 .iter()
378 .map(|(src, id)| PersistedExternalRef {
379 src: src.clone(),
380 decl_id: id.0,
381 })
382 .collect();
383
384 let payload_v2 = serde_json::to_vec(&meta).map_err(|e| format!("serialize v2 cache: {e}"))?;
385 write_atomic_json(&meta_path, &payload_v2)?;
386
387 Ok(CacheSaveReport {
388 file_count_hashed: touched,
389 duration_ms: started.elapsed().as_millis(),
390 })
391}
392
/// Fully persist the reference index: write one shard per file in `build`,
/// delete stale shards, and atomically replace the v2 metadata file.
///
/// `source_files`, when given, is the authoritative file list to hash;
/// otherwise the files present in `build.nodes` are hashed, falling back to
/// full discovery (including libs) when the build has no nodes.
///
/// NOTE(review): a full save resets `file_hash_history` to a single entry
/// per file, discarding any rolling history accumulated by the incremental
/// upsert — presumably intentional for a from-scratch rebuild; confirm.
pub fn save_reference_cache_with_report(
    config: &FoundryConfig,
    build: &CachedBuild,
    source_files: Option<&[PathBuf]>,
) -> Result<CacheSaveReport, String> {
    let started = Instant::now();
    if !config.root.is_dir() {
        return Err(format!("invalid project root: {}", config.root.display()));
    }

    // Pick the set of files whose hashes anchor cache validity.
    let file_hashes = if let Some(files) = source_files {
        hash_file_list(config, files)?
    } else {
        let build_paths: Vec<PathBuf> = build
            .nodes
            .keys()
            .map(|p| PathBuf::from(p.as_str()))
            .collect();
        if build_paths.is_empty() {
            current_file_hashes(config, true)?
        } else {
            hash_file_list(config, &build_paths)?
        }
    };
    let file_count_hashed = file_hashes.len();
    let external_refs = build
        .external_refs
        .iter()
        .map(|(src, id)| PersistedExternalRef {
            src: src.clone(),
            decl_id: id.0,
        })
        .collect::<Vec<_>>();

    let (_cache_root, shards_dir) = ensure_cache_dir_layout(&config.root)?;

    // Write one shard per file and remember which shard names are current.
    let mut node_shards: BTreeMap<String, String> = BTreeMap::new();
    let mut live_shards = std::collections::HashSet::new();
    for (abs_path, file_nodes) in &build.nodes {
        let abs = Path::new(abs_path.as_str());
        let rel = relative_to_root(&config.root, abs);
        let shard_name = shard_file_name_for_rel_path(&rel);
        let shard_path = shards_dir.join(&shard_name);

        let mut entries = Vec::with_capacity(file_nodes.len());
        for (id, info) in file_nodes {
            entries.push(PersistedNodeEntry {
                id: id.0,
                info: info.clone(),
            });
        }
        let shard = PersistedFileShardV2 {
            abs_path: abs_path.to_string(),
            entries,
        };
        let shard_payload =
            serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
        write_atomic_json(&shard_path, &shard_payload)?;
        node_shards.insert(rel, shard_name.clone());
        live_shards.insert(shard_name);
    }

    // Best-effort cleanup: remove any shard (or leftover tmp file) that no
    // longer corresponds to a file in this build.
    if let Ok(dir) = fs::read_dir(&shards_dir) {
        for entry in dir.flatten() {
            let file_name = entry.file_name().to_string_lossy().to_string();
            if !live_shards.contains(&file_name) {
                let _ = fs::remove_file(entry.path());
            }
        }
    }

    let persisted_v2 = PersistedReferenceCacheV2 {
        schema_version: CACHE_SCHEMA_VERSION_V2,
        project_root: config.root.to_string_lossy().to_string(),
        config_fingerprint: config_fingerprint(config),
        file_hashes: file_hashes.clone(),
        // History restarts at the current hash for every file (see NOTE above).
        file_hash_history: {
            let mut h = BTreeMap::new();
            for (rel, hash) in &file_hashes {
                h.insert(rel.clone(), vec![hash.clone()]);
            }
            h
        },
        path_to_abs: build
            .path_to_abs
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect(),
        external_refs: external_refs.clone(),
        id_to_path_map: build.id_to_path_map.clone(),
        node_shards,
    };
    let payload_v2 =
        serde_json::to_vec(&persisted_v2).map_err(|e| format!("serialize v2 cache: {e}"))?;
    // Metadata is written last so readers never see it pointing at shards
    // that have not been written yet.
    write_atomic_json(&cache_file_path_v2(&config.root), &payload_v2)?;

    Ok(CacheSaveReport {
        file_count_hashed,
        duration_ms: started.elapsed().as_millis(),
    })
}
498
499pub fn load_reference_cache(config: &FoundryConfig) -> Option<CachedBuild> {
500 load_reference_cache_with_report(config, ProjectIndexCacheMode::Auto, false).build
501}
502
/// Foundry sub-projects found under the configured lib directories, split by
/// whether they already have a persisted v2 cache on disk.
pub struct DiscoveredLibs {
    // Sub-project roots that contain a v2 cache file.
    pub cached: Vec<PathBuf>,
    // Sub-project roots with a foundry.toml but no cache yet.
    pub uncached: Vec<PathBuf>,
}
512
513pub fn discover_lib_sub_projects(config: &FoundryConfig) -> DiscoveredLibs {
517 let mut cached = Vec::new();
518 let mut uncached = Vec::new();
519 for lib_dir_name in &config.libs {
520 let lib_dir = config.root.join(lib_dir_name);
521 if !lib_dir.is_dir() {
522 continue;
523 }
524 discover_lib_sub_projects_recursive(&lib_dir, &mut cached, &mut uncached);
525 }
526 DiscoveredLibs { cached, uncached }
527}
528
529pub fn discover_lib_caches(config: &FoundryConfig) -> Vec<PathBuf> {
532 discover_lib_sub_projects(config).cached
533}
534
535fn discover_lib_sub_projects_recursive(
536 dir: &Path,
537 cached: &mut Vec<PathBuf>,
538 uncached: &mut Vec<PathBuf>,
539) {
540 let entries = match fs::read_dir(dir) {
541 Ok(e) => e,
542 Err(_) => return,
543 };
544 for entry in entries.flatten() {
545 let path = entry.path();
546 if !path.is_dir() {
547 continue;
548 }
549 let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
550 continue;
551 };
552 if name.starts_with('.')
554 || matches!(name, "out" | "cache" | "artifacts" | "target" | "broadcast")
555 {
556 continue;
557 }
558 let has_config = path.join("foundry.toml").is_file();
559 if has_config {
560 let has_cache = path.join(CACHE_DIR).join(CACHE_FILE_V2).is_file();
561 if has_cache {
562 cached.push(path.clone());
563 } else {
564 uncached.push(path.clone());
565 }
566 }
567 discover_lib_sub_projects_recursive(&path, cached, uncached);
570 }
571}
572
/// Load a lib sub-project's persisted v2 cache without hash validation —
/// the sub-project's cache is trusted as written. Unreadable or undecodable
/// shards are skipped. Returns `None` when the metadata file is missing or
/// undecodable, the schema version differs, or no shard produced any nodes.
pub fn load_lib_cache(sub_root: &Path) -> Option<CachedBuild> {
    let cache_path = sub_root.join(CACHE_DIR).join(CACHE_FILE_V2);
    let bytes = fs::read(&cache_path).ok()?;
    let persisted: PersistedReferenceCacheV2 = serde_json::from_slice(&bytes).ok()?;

    if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
        return None;
    }

    let shards_dir = sub_root.join(CACHE_DIR).join(CACHE_SHARDS_DIR_V2);
    let mut nodes: HashMap<AbsPath, HashMap<NodeId, NodeInfo>> = HashMap::new();
    let mut reused_decl_ids = std::collections::HashSet::new();

    // Load every shard listed in the metadata; broken shards are dropped.
    for (_rel_path, shard_name) in &persisted.node_shards {
        let shard_path = shards_dir.join(shard_name);
        let shard_bytes = match fs::read(&shard_path) {
            Ok(v) => v,
            Err(_) => continue,
        };
        let shard: PersistedFileShardV2 = match serde_json::from_slice(&shard_bytes) {
            Ok(v) => v,
            Err(_) => continue,
        };
        let mut file_nodes = HashMap::with_capacity(shard.entries.len());
        for entry in shard.entries {
            reused_decl_ids.insert(entry.id);
            file_nodes.insert(NodeId(entry.id), entry.info);
        }
        nodes.insert(AbsPath::new(shard.abs_path), file_nodes);
    }

    // An empty index is useless; let the caller fall back to re-indexing.
    if nodes.is_empty() {
        return None;
    }

    // Keep only external refs whose target declaration actually loaded.
    let mut external_refs = HashMap::new();
    for item in persisted.external_refs {
        if reused_decl_ids.contains(&item.decl_id) {
            external_refs.insert(item.src, NodeId(item.decl_id));
        }
    }

    Some(CachedBuild::from_reference_index(
        nodes,
        persisted
            .path_to_abs
            .into_iter()
            .map(|(k, v)| (RelPath::new(k), AbsPath::new(v)))
            .collect(),
        external_refs,
        persisted.id_to_path_map,
        0,
        None,
    ))
}
634
635pub fn changed_files_since_v2_cache(
638 config: &FoundryConfig,
639 include_libs: bool,
640) -> Result<Vec<PathBuf>, String> {
641 if !config.root.is_dir() {
642 return Err(format!("invalid project root: {}", config.root.display()));
643 }
644
645 let cache_path_v2 = cache_file_path_v2(&config.root);
646 let bytes = fs::read(&cache_path_v2).map_err(|e| format!("cache file read failed: {e}"))?;
647 let persisted: PersistedReferenceCacheV2 =
648 serde_json::from_slice(&bytes).map_err(|e| format!("cache decode failed: {e}"))?;
649
650 if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
651 return Err(format!(
652 "schema mismatch: cache={}, expected={}",
653 persisted.schema_version, CACHE_SCHEMA_VERSION_V2
654 ));
655 }
656 if persisted.project_root != config.root.to_string_lossy() {
657 return Err("project root mismatch".to_string());
658 }
659 if persisted.config_fingerprint != config_fingerprint(config) {
660 return Err("config fingerprint mismatch".to_string());
661 }
662
663 let saved_paths: Vec<PathBuf> = persisted
665 .file_hashes
666 .keys()
667 .map(|rel| config.root.join(rel))
668 .collect();
669 let current_hashes = hash_file_list(config, &saved_paths)?;
670 let mut changed = Vec::new();
671 for (rel, current_hash) in ¤t_hashes {
672 match persisted.file_hashes.get(rel) {
673 Some(prev) if prev == current_hash => {}
674 _ => changed.push(config.root.join(rel)),
675 }
676 }
677
678 let saved_rels: std::collections::HashSet<&String> = persisted.file_hashes.keys().collect();
684 let discovered = if include_libs {
685 crate::solc::discover_source_files_with_libs(config)
686 } else {
687 crate::solc::discover_source_files(config)
688 };
689 for path in &discovered {
690 let rel = relative_to_root(&config.root, path);
691 if !saved_rels.contains(&rel) {
692 changed.push(path.clone());
693 }
694 }
695
696 Ok(changed)
697}
698
/// Attempt to rebuild a [`CachedBuild`] from the on-disk v2 cache, returning
/// a detailed hit/miss report with timing and reuse statistics.
///
/// Validation order: schema version, project root, config fingerprint, then
/// per-file keccak hashes. Only files whose current hash matches the cached
/// one have their shard loaded, so a hit may be partial (`complete == false`).
pub fn load_reference_cache_with_report(
    config: &FoundryConfig,
    cache_mode: ProjectIndexCacheMode,
    _include_libs: bool,
) -> CacheLoadReport {
    let started = Instant::now();
    // Shared shape for every miss-style early return, so each one carries
    // the elapsed time and how many files were hashed before bailing out.
    let miss = |reason: String, file_count_hashed: usize, duration_ms: u128| CacheLoadReport {
        build: None,
        hit: false,
        miss_reason: Some(reason),
        file_count_hashed,
        file_count_reused: 0,
        complete: false,
        duration_ms,
    };

    if !config.root.is_dir() {
        return miss(
            format!("invalid project root: {}", config.root.display()),
            0,
            started.elapsed().as_millis(),
        );
    }

    let should_try_v2 = matches!(
        cache_mode,
        ProjectIndexCacheMode::Auto | ProjectIndexCacheMode::V2
    );

    let cache_path_v2 = cache_file_path_v2(&config.root);
    // Let-chain: only enter when the metadata file both exists and decodes.
    if should_try_v2
        && let Ok(bytes) = fs::read(&cache_path_v2)
        && let Ok(persisted) = serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes)
    {
        if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
            return miss(
                format!(
                    "schema mismatch: cache={}, expected={}",
                    persisted.schema_version, CACHE_SCHEMA_VERSION_V2
                ),
                0,
                started.elapsed().as_millis(),
            );
        }
        if persisted.project_root != config.root.to_string_lossy() {
            return miss(
                "project root mismatch".to_string(),
                0,
                started.elapsed().as_millis(),
            );
        }
        if persisted.config_fingerprint != config_fingerprint(config) {
            return miss(
                "config fingerprint mismatch".to_string(),
                0,
                started.elapsed().as_millis(),
            );
        }

        // Re-hash every file the cache knows about to decide what is reusable.
        // NOTE(review): a cached file deleted from disk makes `hash_file_list`
        // fail and turns the whole load into a miss — confirm that is intended.
        let saved_paths: Vec<PathBuf> = persisted
            .file_hashes
            .keys()
            .map(|rel| config.root.join(rel))
            .collect();
        let current_hashes = match hash_file_list(config, &saved_paths) {
            Ok(h) => h,
            Err(e) => return miss(e, 0, started.elapsed().as_millis()),
        };
        let file_count_hashed = current_hashes.len();

        let shards_dir = cache_shards_dir_v2(&config.root);
        let mut nodes: HashMap<AbsPath, HashMap<NodeId, NodeInfo>> = HashMap::new();
        let mut file_count_reused = 0usize;
        let mut reused_decl_ids = std::collections::HashSet::new();

        // Load a shard only when the file's hash is unchanged; unreadable or
        // undecodable shards are silently skipped (treated as not reusable).
        for (rel_path, current_hash) in &current_hashes {
            let Some(cached_hash) = persisted.file_hashes.get(rel_path) else {
                continue;
            };
            if cached_hash != current_hash {
                continue;
            }
            let Some(shard_name) = persisted.node_shards.get(rel_path) else {
                continue;
            };
            let shard_path = shards_dir.join(shard_name);
            let shard_bytes = match fs::read(&shard_path) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let shard: PersistedFileShardV2 = match serde_json::from_slice(&shard_bytes) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let mut file_nodes = HashMap::with_capacity(shard.entries.len());
            for entry in shard.entries {
                reused_decl_ids.insert(entry.id);
                file_nodes.insert(NodeId(entry.id), entry.info);
            }
            nodes.insert(AbsPath::new(shard.abs_path), file_nodes);
            file_count_reused += 1;
        }

        if file_count_reused == 0 {
            return miss(
                "v2 cache: no reusable files".to_string(),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }

        // Keep only external refs whose target declaration was reloaded;
        // refs into dropped shards would point at nodes that do not exist.
        let mut external_refs = HashMap::new();
        for item in persisted.external_refs {
            if reused_decl_ids.contains(&item.decl_id) {
                external_refs.insert(item.src, NodeId(item.decl_id));
            }
        }

        // Complete means every hashed file was reused AND the hash maps are
        // identical (no entries missing on either side).
        let complete =
            file_count_reused == file_count_hashed && current_hashes == persisted.file_hashes;

        return CacheLoadReport {
            build: Some(CachedBuild::from_reference_index(
                nodes,
                persisted
                    .path_to_abs
                    .into_iter()
                    .map(|(k, v)| (RelPath::new(k), AbsPath::new(v)))
                    .collect(),
                external_refs,
                persisted.id_to_path_map,
                0,
                None,
            )),
            hit: true,
            miss_reason: if complete {
                None
            } else {
                Some("v2 cache partial reuse".to_string())
            },
            file_count_hashed,
            file_count_reused,
            complete,
            duration_ms: started.elapsed().as_millis(),
        };
    }

    miss(
        "cache mode v2: no usable v2 cache".to_string(),
        0,
        started.elapsed().as_millis(),
    )
}
856
#[cfg(test)]
mod tests {
    use super::*;

    // The fixture's `sources` entries already use the `urls` form, so
    // sanitisation is a no-op and the round-tripped JSON must be identical.
    #[test]
    fn test_save_last_solc_input_writes_cache_file() {
        let dir = tempfile::tempdir().unwrap();
        let input = serde_json::json!({
            "language": "Solidity",
            "sources": {
                "src/Foo.sol": { "urls": ["src/Foo.sol"] }
            },
            "settings": { "outputSelection": { "*": { "": ["ast"] } } }
        });

        save_last_solc_input(dir.path(), &input).unwrap();

        let saved_path = cache_solc_input_path(dir.path());
        assert!(saved_path.is_file());

        let bytes = std::fs::read(saved_path).unwrap();
        let parsed: Value = serde_json::from_slice(&bytes).unwrap();
        assert_eq!(parsed, input);
    }
}