//! On-disk project reference cache (v2 schema) for the Solidity language
//! server: per-file JSON shards plus a metadata file carrying content hashes,
//! path maps and cross-file references, enabling warm starts across restarts.
1use crate::config::FoundryConfig;
2use crate::config::ProjectIndexCacheMode;
3use crate::goto::{CachedBuild, NodeInfo};
4use crate::types::{AbsPath, NodeId, RelPath};
5use serde::{Deserialize, Serialize};
6use serde_json::Value;
7use std::collections::{BTreeMap, HashMap};
8use std::fs;
9use std::io::Write;
10use std::path::{Path, PathBuf};
11use std::time::Instant;
12use tiny_keccak::{Hasher, Keccak};
13
// Version stamp for the v2 on-disk schema; bump to invalidate old caches.
const CACHE_SCHEMA_VERSION_V2: u32 = 3;
// Directory (under the project root) holding all cache artifacts.
const CACHE_DIR: &str = ".solidity-language-server";
// Metadata file of the v2 reference cache (hashes, path maps, shard index).
const CACHE_FILE_V2: &str = "solidity-lsp-schema-v2.json";
// Sub-directory containing one JSON shard per indexed source file.
const CACHE_SHARDS_DIR_V2: &str = "reference-index-v2";
// Debug copy of the last solc standard-JSON input.
const CACHE_SOLC_INPUT_FILE: &str = "last-solc-input.json";
// `.gitignore` written into the cache dir so consumers don't commit it.
const CACHE_GITIGNORE_FILE: &str = ".gitignore";
// Ignore everything inside the cache directory.
const CACHE_GITIGNORE_CONTENTS: &str = "*\n";
21
/// On-disk form of one indexed AST node: its solc node id plus the payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedNodeEntry {
    // Raw solc AST node id (the inner value of `NodeId`).
    id: i64,
    info: NodeInfo,
}
27
/// On-disk form of a cross-file reference: a source location resolving to a
/// declaration node id.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedExternalRef {
    src: crate::types::SrcLocation,
    // Target declaration's node id (the inner value of `NodeId`).
    decl_id: i64,
}
33
/// One shard file: all indexed nodes of a single source file.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedFileShardV2 {
    // Absolute path of the source file this shard was built from.
    abs_path: String,
    entries: Vec<PersistedNodeEntry>,
}
39
/// Top-level v2 cache metadata; the per-file node payloads live in shards.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedReferenceCacheV2 {
    // Must equal CACHE_SCHEMA_VERSION_V2 for the cache to be usable.
    schema_version: u32,
    // Project root the cache was built for; mismatches invalidate it.
    project_root: String,
    // Output of `config_fingerprint`; mismatches invalidate the cache.
    config_fingerprint: String,
    // relative-path -> keccak hex of file contents at save time.
    file_hashes: BTreeMap<String, String>,
    // relative-path -> recent content hashes (bounded; see push_hash_history).
    // `default` keeps the field optional for caches written before it existed.
    #[serde(default)]
    file_hash_history: BTreeMap<String, Vec<String>>,
    // import/relative path -> absolute path.
    path_to_abs: HashMap<String, String>,
    // solc source-file id -> path.
    id_to_path_map: HashMap<crate::types::SolcFileId, String>,
    external_refs: Vec<PersistedExternalRef>,
    // relative-path -> shard file name
    node_shards: BTreeMap<String, String>,
}
54
/// Outcome of attempting to warm-start from the on-disk cache.
#[derive(Debug, Clone)]
pub struct CacheLoadReport {
    /// Reconstructed build, if any shards could be reused.
    pub build: Option<CachedBuild>,
    /// True when the cache was usable at all (possibly only partially).
    pub hit: bool,
    /// Why the load missed or was partial; `None` on a complete hit.
    pub miss_reason: Option<String>,
    /// Number of current files hashed during validation.
    pub file_count_hashed: usize,
    /// Number of files whose shard was reused unchanged.
    pub file_count_reused: usize,
    /// True when every saved file was reused with a matching hash.
    pub complete: bool,
    /// Wall-clock time spent loading, in milliseconds.
    pub duration_ms: u128,
}
65
/// Outcome of a cache save (full or incremental upsert).
#[derive(Debug, Clone)]
pub struct CacheSaveReport {
    /// Number of files hashed (full save) or shards touched (upsert).
    pub file_count_hashed: usize,
    /// Wall-clock time spent saving, in milliseconds.
    pub duration_ms: u128,
}
71
72/// Public helper — returns the root of the on-disk cache directory for
73/// `project_root`. Used by the `solidity.clearCache` command handler.
74pub fn cache_dir(root: &Path) -> PathBuf {
75    root.join(CACHE_DIR)
76}
77
78fn cache_file_path_v2(root: &Path) -> PathBuf {
79    root.join(CACHE_DIR).join(CACHE_FILE_V2)
80}
81
82fn cache_shards_dir_v2(root: &Path) -> PathBuf {
83    root.join(CACHE_DIR).join(CACHE_SHARDS_DIR_V2)
84}
85
86fn cache_solc_input_path(root: &Path) -> PathBuf {
87    root.join(CACHE_DIR).join(CACHE_SOLC_INPUT_FILE)
88}
89
90/// Persist the last solc standard-JSON input to the cache directory so it can
91/// be inspected for debugging.
92///
93/// Source entries that use `"content"` (in-memory editor buffers with unsaved
94/// changes) are rewritten to `"urls"` in the debug file to keep it small.
95/// The actual solc invocation still receives the real content.
96pub fn save_last_solc_input(root: &Path, input: &Value) -> Result<(), String> {
97    let cache_root = root.join(CACHE_DIR);
98    fs::create_dir_all(&cache_root)
99        .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
100    let path = cache_solc_input_path(root);
101
102    // Replace "content" entries with "urls" so the debug file stays small.
103    let mut sanitised = input.clone();
104    if let Some(sources) = sanitised.get_mut("sources").and_then(|s| s.as_object_mut()) {
105        for (rel_path, entry) in sources.iter_mut() {
106            if entry.get("content").is_some() {
107                *entry = serde_json::json!({ "urls": [rel_path.clone()] });
108            }
109        }
110    }
111
112    let bytes = serde_json::to_vec_pretty(&sanitised)
113        .map_err(|e| format!("failed to serialize solc input: {e}"))?;
114    let mut file =
115        fs::File::create(&path).map_err(|e| format!("failed to create {}: {e}", path.display()))?;
116    file.write_all(&bytes)
117        .map_err(|e| format!("failed to write {}: {e}", path.display()))?;
118    Ok(())
119}
120
121fn ensure_cache_dir_layout(root: &Path) -> Result<(PathBuf, PathBuf), String> {
122    let cache_root = root.join(CACHE_DIR);
123    fs::create_dir_all(&cache_root)
124        .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
125
126    // Ensure cache artifacts are ignored by Git in consumer projects.
127    let gitignore_path = cache_root.join(CACHE_GITIGNORE_FILE);
128    if !gitignore_path.exists() {
129        fs::write(&gitignore_path, CACHE_GITIGNORE_CONTENTS).map_err(|e| {
130            format!(
131                "failed to write cache gitignore {}: {e}",
132                gitignore_path.display()
133            )
134        })?;
135    }
136
137    let shards_dir = cache_shards_dir_v2(root);
138    fs::create_dir_all(&shards_dir)
139        .map_err(|e| format!("failed to create shards dir {}: {e}", shards_dir.display()))?;
140
141    Ok((cache_root, shards_dir))
142}
143
144fn shard_file_name_for_rel_path(rel_path: &str) -> String {
145    format!("{}.json", keccak_hex(rel_path.as_bytes()))
146}
147
/// Write `payload` to `path` atomically: write a sibling `*.tmp` file, flush
/// and fsync it, then rename it over the destination so readers never observe
/// a partially-written file.
fn write_atomic_json(path: &Path, payload: &[u8]) -> Result<(), String> {
    // e.g. "foo.json" -> "foo.json.tmp"; extension-less paths get ".tmp".
    let tmp_path = path.with_extension(format!(
        "{}.tmp",
        path.extension()
            .and_then(|s| s.to_str())
            .unwrap_or_default()
    ));
    {
        let mut file = fs::File::create(&tmp_path)
            .map_err(|e| format!("create tmp {}: {e}", tmp_path.display()))?;
        file.write_all(payload)
            .map_err(|e| format!("write tmp {}: {e}", tmp_path.display()))?;
        file.flush()
            .map_err(|e| format!("flush tmp {}: {e}", tmp_path.display()))?;
        file.sync_all()
            .map_err(|e| format!("sync tmp {}: {e}", tmp_path.display()))?;
    }
    fs::rename(&tmp_path, path).map_err(|e| {
        // Fix: don't leak the tmp file when the rename fails (best-effort).
        let _ = fs::remove_file(&tmp_path);
        format!(
            "rename tmp {} -> {}: {e}",
            tmp_path.display(),
            path.display()
        )
    })
}
173
174fn keccak_hex(bytes: &[u8]) -> String {
175    let mut out = [0u8; 32];
176    let mut hasher = Keccak::v256();
177    hasher.update(bytes);
178    hasher.finalize(&mut out);
179    hex::encode(out)
180}
181
182fn file_hash(path: &Path) -> Option<String> {
183    let bytes = fs::read(path).ok()?;
184    Some(keccak_hex(&bytes))
185}
186
/// Render `file` relative to `root` with forward slashes; paths outside the
/// root are returned as-is (still slash-normalised).
fn relative_to_root(root: &Path, file: &Path) -> String {
    let rel = match file.strip_prefix(root) {
        Ok(stripped) => stripped,
        Err(_) => file,
    };
    rel.to_string_lossy().replace('\\', "/")
}
193
194fn current_file_hashes(
195    config: &FoundryConfig,
196    include_libs: bool,
197) -> Result<BTreeMap<String, String>, String> {
198    let source_files = if include_libs {
199        crate::solc::discover_source_files_with_libs(config)
200    } else {
201        crate::solc::discover_source_files(config)
202    };
203    hash_file_list(config, &source_files)
204}
205
206/// Hash an explicit list of absolute paths (relative to config.root).
207fn hash_file_list(
208    config: &FoundryConfig,
209    source_files: &[PathBuf],
210) -> Result<BTreeMap<String, String>, String> {
211    if source_files.is_empty() {
212        return Ok(BTreeMap::new());
213    }
214    let mut hashes = BTreeMap::new();
215    for path in source_files {
216        let rel = relative_to_root(&config.root, path);
217        let hash = file_hash(path)
218            .ok_or_else(|| format!("failed to hash source file {}", path.display()))?;
219        hashes.insert(rel, hash);
220    }
221    Ok(hashes)
222}
223
/// Stable fingerprint over every config knob that can change compilation
/// output, computed as the keccak-256 of a JSON rendering of those knobs.
/// Any mismatch against the stored fingerprint invalidates the cache, so the
/// serialized form must remain byte-stable across code changes.
fn config_fingerprint(config: &FoundryConfig) -> String {
    let payload = serde_json::json!({
        // Including the server version means any binary upgrade automatically
        // invalidates the on-disk cache and triggers a clean rebuild.
        "lsp_version": env!("CARGO_PKG_VERSION"),
        "solc_version": config.solc_version,
        "remappings": config.remappings,
        "evm_version": config.evm_version,
        "sources_dir": config.sources_dir,
        "libs": config.libs,
        // via_ir changes the Yul IR pipeline which can produce different AST
        // node IDs — toggling it must invalidate the cache.
        "via_ir": config.via_ir,
    });
    keccak_hex(payload.to_string().as_bytes())
}
240
241fn push_hash_history(meta: &mut PersistedReferenceCacheV2, rel: &str, hash: &str) {
242    const MAX_HISTORY: usize = 8;
243    let history = meta.file_hash_history.entry(rel.to_string()).or_default();
244    if history.last().is_some_and(|h| h == hash) {
245        return;
246    }
247    history.push(hash.to_string());
248    if history.len() > MAX_HISTORY {
249        let drop_count = history.len() - MAX_HISTORY;
250        history.drain(0..drop_count);
251    }
252}
253
254pub fn save_reference_cache(config: &FoundryConfig, build: &CachedBuild) -> Result<(), String> {
255    save_reference_cache_with_report(config, build, None).map(|_| ())
256}
257
258/// Incrementally upsert v2 cache shards for changed files, serializing
259/// global metadata (`path_to_abs`, `id_to_path_map`, `external_refs`) from
260/// the **merged in-memory `CachedBuild`** (root-key entry in `ast_cache`).
261///
262/// This ensures the disk cache always mirrors the authoritative in-memory
263/// state, which has correct globally-remapped file IDs from
264/// `merge_scoped_cached_build`.  Only file shards for `changed_abs_paths`
265/// are rewritten (the incremental fast-path); all other shards are preserved.
266///
267/// The full-project reconcile (`save_reference_cache_with_report`) is still
268/// the canonical full save; this function bridges the gap between saves so
269/// that a restart can warm-start from a reasonably up-to-date cache.
270pub fn upsert_reference_cache_v2_with_report(
271    config: &FoundryConfig,
272    build: &CachedBuild,
273    changed_abs_paths: &[String],
274) -> Result<CacheSaveReport, String> {
275    let started = Instant::now();
276    if !config.root.is_dir() {
277        return Err(format!("invalid project root: {}", config.root.display()));
278    }
279
280    let (_cache_root, shards_dir) = ensure_cache_dir_layout(&config.root)?;
281
282    // Load existing metadata (for file_hashes and node_shards of unchanged
283    // files) or start fresh.
284    let meta_path = cache_file_path_v2(&config.root);
285    let mut meta = if let Ok(bytes) = fs::read(&meta_path) {
286        serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes).unwrap_or(
287            PersistedReferenceCacheV2 {
288                schema_version: CACHE_SCHEMA_VERSION_V2,
289                project_root: config.root.to_string_lossy().to_string(),
290                config_fingerprint: config_fingerprint(config),
291                file_hashes: BTreeMap::new(),
292                file_hash_history: BTreeMap::new(),
293                path_to_abs: HashMap::new(),
294                id_to_path_map: HashMap::new(),
295                external_refs: Vec::new(),
296                node_shards: BTreeMap::new(),
297            },
298        )
299    } else {
300        PersistedReferenceCacheV2 {
301            schema_version: CACHE_SCHEMA_VERSION_V2,
302            project_root: config.root.to_string_lossy().to_string(),
303            config_fingerprint: config_fingerprint(config),
304            file_hashes: BTreeMap::new(),
305            file_hash_history: BTreeMap::new(),
306            path_to_abs: HashMap::new(),
307            id_to_path_map: HashMap::new(),
308            external_refs: Vec::new(),
309            node_shards: BTreeMap::new(),
310        }
311    };
312
313    // Reset metadata when root/fingerprint changed.
314    if meta.project_root != config.root.to_string_lossy()
315        || meta.config_fingerprint != config_fingerprint(config)
316    {
317        meta = PersistedReferenceCacheV2 {
318            schema_version: CACHE_SCHEMA_VERSION_V2,
319            project_root: config.root.to_string_lossy().to_string(),
320            config_fingerprint: config_fingerprint(config),
321            file_hashes: BTreeMap::new(),
322            file_hash_history: BTreeMap::new(),
323            path_to_abs: HashMap::new(),
324            id_to_path_map: HashMap::new(),
325            external_refs: Vec::new(),
326            node_shards: BTreeMap::new(),
327        };
328    }
329
330    // Write shards only for the changed files.
331    let changed_set: std::collections::HashSet<&str> =
332        changed_abs_paths.iter().map(|s| s.as_str()).collect();
333    let mut touched = 0usize;
334    for (abs_path, file_nodes) in &build.nodes {
335        if !changed_set.contains(abs_path.as_str()) {
336            continue;
337        }
338        let abs = Path::new(abs_path.as_str());
339        let rel = relative_to_root(&config.root, abs);
340        let shard_name = shard_file_name_for_rel_path(&rel);
341        let shard_path = shards_dir.join(&shard_name);
342
343        let mut entries = Vec::with_capacity(file_nodes.len());
344        for (id, info) in file_nodes {
345            entries.push(PersistedNodeEntry {
346                id: id.0,
347                info: info.clone(),
348            });
349        }
350        let shard = PersistedFileShardV2 {
351            abs_path: abs_path.to_string(),
352            entries,
353        };
354        let shard_payload =
355            serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
356        write_atomic_json(&shard_path, &shard_payload)?;
357
358        if let Some(hash) = file_hash(abs) {
359            push_hash_history(&mut meta, &rel, &hash);
360            meta.file_hashes.insert(rel.clone(), hash);
361            meta.node_shards.insert(rel, shard_name);
362            touched += 1;
363        }
364    }
365
366    // Serialize global metadata from the authoritative merged build.
367    // This replaces the buggy per-file merge that previously wrote
368    // un-remapped file IDs and identity path_to_abs entries.
369    meta.path_to_abs = build
370        .path_to_abs
371        .iter()
372        .map(|(k, v)| (k.to_string(), v.to_string()))
373        .collect();
374    meta.id_to_path_map = build.id_to_path_map.clone();
375    meta.external_refs = build
376        .external_refs
377        .iter()
378        .map(|(src, id)| PersistedExternalRef {
379            src: src.clone(),
380            decl_id: id.0,
381        })
382        .collect();
383
384    let payload_v2 = serde_json::to_vec(&meta).map_err(|e| format!("serialize v2 cache: {e}"))?;
385    write_atomic_json(&meta_path, &payload_v2)?;
386
387    Ok(CacheSaveReport {
388        file_count_hashed: touched,
389        duration_ms: started.elapsed().as_millis(),
390    })
391}
392
/// Canonical full-project save: write one shard per file in `build.nodes`,
/// remove stale shards, then atomically rewrite the v2 metadata file.
///
/// When `source_files` is `Some`, only those paths are hashed; otherwise the
/// hashed set is derived from the build's node keys, falling back to a full
/// discovery (including libs) when the build has no nodes.
pub fn save_reference_cache_with_report(
    config: &FoundryConfig,
    build: &CachedBuild,
    source_files: Option<&[PathBuf]>,
) -> Result<CacheSaveReport, String> {
    let started = Instant::now();
    if !config.root.is_dir() {
        return Err(format!("invalid project root: {}", config.root.display()));
    }

    // When an explicit file list is given, hash only those.
    // Otherwise derive the list from the build's node keys (the files that
    // were actually compiled) — this avoids walking unrelated lib files.
    let file_hashes = if let Some(files) = source_files {
        hash_file_list(config, files)?
    } else {
        let build_paths: Vec<PathBuf> = build
            .nodes
            .keys()
            .map(|p| PathBuf::from(p.as_str()))
            .collect();
        if build_paths.is_empty() {
            current_file_hashes(config, true)?
        } else {
            hash_file_list(config, &build_paths)?
        }
    };
    let file_count_hashed = file_hashes.len();
    // A full save persists every external reference, unfiltered.
    let external_refs = build
        .external_refs
        .iter()
        .map(|(src, id)| PersistedExternalRef {
            src: src.clone(),
            decl_id: id.0,
        })
        .collect::<Vec<_>>();

    let (_cache_root, shards_dir) = ensure_cache_dir_layout(&config.root)?;

    // Write one shard per file, recording which shard names are still live
    // so stale ones can be removed afterwards.
    let mut node_shards: BTreeMap<String, String> = BTreeMap::new();
    let mut live_shards = std::collections::HashSet::new();
    for (abs_path, file_nodes) in &build.nodes {
        let abs = Path::new(abs_path.as_str());
        let rel = relative_to_root(&config.root, abs);
        let shard_name = shard_file_name_for_rel_path(&rel);
        let shard_path = shards_dir.join(&shard_name);

        let mut entries = Vec::with_capacity(file_nodes.len());
        for (id, info) in file_nodes {
            entries.push(PersistedNodeEntry {
                id: id.0,
                info: info.clone(),
            });
        }
        let shard = PersistedFileShardV2 {
            abs_path: abs_path.to_string(),
            entries,
        };
        let shard_payload =
            serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
        write_atomic_json(&shard_path, &shard_payload)?;
        node_shards.insert(rel, shard_name.clone());
        live_shards.insert(shard_name);
    }

    // Best-effort cleanup of stale shard files.
    if let Ok(dir) = fs::read_dir(&shards_dir) {
        for entry in dir.flatten() {
            let file_name = entry.file_name().to_string_lossy().to_string();
            if !live_shards.contains(&file_name) {
                let _ = fs::remove_file(entry.path());
            }
        }
    }

    // Metadata is written last, after all shards and the cleanup pass.
    let persisted_v2 = PersistedReferenceCacheV2 {
        schema_version: CACHE_SCHEMA_VERSION_V2,
        project_root: config.root.to_string_lossy().to_string(),
        config_fingerprint: config_fingerprint(config),
        file_hashes: file_hashes.clone(),
        // A full save restarts every file's hash history at its current hash.
        file_hash_history: {
            let mut h = BTreeMap::new();
            for (rel, hash) in &file_hashes {
                h.insert(rel.clone(), vec![hash.clone()]);
            }
            h
        },
        path_to_abs: build
            .path_to_abs
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect(),
        external_refs: external_refs.clone(),
        id_to_path_map: build.id_to_path_map.clone(),
        node_shards,
    };
    let payload_v2 =
        serde_json::to_vec(&persisted_v2).map_err(|e| format!("serialize v2 cache: {e}"))?;
    write_atomic_json(&cache_file_path_v2(&config.root), &payload_v2)?;

    Ok(CacheSaveReport {
        file_count_hashed,
        duration_ms: started.elapsed().as_millis(),
    })
}
498
499pub fn load_reference_cache(config: &FoundryConfig) -> Option<CachedBuild> {
500    load_reference_cache_with_report(config, ProjectIndexCacheMode::Auto, false).build
501}
502
/// Result of discovering lib sub-projects under the configured `libs`
/// directories, partitioned by whether an LSP cache already exists on disk.
pub struct DiscoveredLibs {
    /// Sub-projects that already have a valid `.solidity-language-server/` cache.
    pub cached: Vec<PathBuf>,
    /// Sub-projects with `foundry.toml` but no existing cache.
    pub uncached: Vec<PathBuf>,
}
512
513/// Walks the configured `libs` directories looking for `foundry.toml` files.
514/// Returns sub-project roots partitioned into those with existing caches
515/// and those without.
516pub fn discover_lib_sub_projects(config: &FoundryConfig) -> DiscoveredLibs {
517    let mut cached = Vec::new();
518    let mut uncached = Vec::new();
519    for lib_dir_name in &config.libs {
520        let lib_dir = config.root.join(lib_dir_name);
521        if !lib_dir.is_dir() {
522            continue;
523        }
524        discover_lib_sub_projects_recursive(&lib_dir, &mut cached, &mut uncached);
525    }
526    DiscoveredLibs { cached, uncached }
527}
528
529/// Backwards-compatible wrapper: returns only sub-projects that have an
530/// existing cache on disk.
531pub fn discover_lib_caches(config: &FoundryConfig) -> Vec<PathBuf> {
532    discover_lib_sub_projects(config).cached
533}
534
535fn discover_lib_sub_projects_recursive(
536    dir: &Path,
537    cached: &mut Vec<PathBuf>,
538    uncached: &mut Vec<PathBuf>,
539) {
540    let entries = match fs::read_dir(dir) {
541        Ok(e) => e,
542        Err(_) => return,
543    };
544    for entry in entries.flatten() {
545        let path = entry.path();
546        if !path.is_dir() {
547            continue;
548        }
549        let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
550            continue;
551        };
552        // Skip hidden dirs and build artifacts
553        if name.starts_with('.')
554            || matches!(name, "out" | "cache" | "artifacts" | "target" | "broadcast")
555        {
556            continue;
557        }
558        let has_config = path.join("foundry.toml").is_file();
559        if has_config {
560            let has_cache = path.join(CACHE_DIR).join(CACHE_FILE_V2).is_file();
561            if has_cache {
562                cached.push(path.clone());
563            } else {
564                uncached.push(path.clone());
565            }
566        }
567        // Always recurse deeper — nested libs (e.g. lib/v4-periphery/lib/v4-core/)
568        // can have their own caches too.
569        discover_lib_sub_projects_recursive(&path, cached, uncached);
570    }
571}
572
/// Load a sub-project's cache as a [`CachedBuild`].
///
/// This is a simplified version of [`load_reference_cache_with_report`] that
/// does not validate config fingerprints or file hashes — we just load
/// whatever shards are on disk.  Sub-caches are used read-only for cross-file
/// reference lookup; staleness is acceptable.
pub fn load_lib_cache(sub_root: &Path) -> Option<CachedBuild> {
    let cache_path = sub_root.join(CACHE_DIR).join(CACHE_FILE_V2);
    let bytes = fs::read(&cache_path).ok()?;
    let persisted: PersistedReferenceCacheV2 = serde_json::from_slice(&bytes).ok()?;

    // Schema must match exactly; other layouts are silently skipped.
    if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
        return None;
    }

    let shards_dir = sub_root.join(CACHE_DIR).join(CACHE_SHARDS_DIR_V2);
    let mut nodes: HashMap<AbsPath, HashMap<NodeId, NodeInfo>> = HashMap::new();
    // Declaration ids seen in any loaded shard; used below to drop external
    // refs whose target declaration failed to load.
    let mut reused_decl_ids = std::collections::HashSet::new();

    // Best-effort: unreadable or corrupt shards are skipped, not fatal.
    for (_rel_path, shard_name) in &persisted.node_shards {
        let shard_path = shards_dir.join(shard_name);
        let shard_bytes = match fs::read(&shard_path) {
            Ok(v) => v,
            Err(_) => continue,
        };
        let shard: PersistedFileShardV2 = match serde_json::from_slice(&shard_bytes) {
            Ok(v) => v,
            Err(_) => continue,
        };
        let mut file_nodes = HashMap::with_capacity(shard.entries.len());
        for entry in shard.entries {
            reused_decl_ids.insert(entry.id);
            file_nodes.insert(NodeId(entry.id), entry.info);
        }
        nodes.insert(AbsPath::new(shard.abs_path), file_nodes);
    }

    if nodes.is_empty() {
        return None;
    }

    // Keep only external refs pointing at declarations that were loaded.
    let mut external_refs = HashMap::new();
    for item in persisted.external_refs {
        if reused_decl_ids.contains(&item.decl_id) {
            external_refs.insert(item.src, NodeId(item.decl_id));
        }
    }

    // NOTE(review): the trailing `0, None` arguments mirror the call in
    // `load_reference_cache_with_report`; confirm their semantics against
    // `CachedBuild::from_reference_index`.
    Some(CachedBuild::from_reference_index(
        nodes,
        persisted
            .path_to_abs
            .into_iter()
            .map(|(k, v)| (RelPath::new(k), AbsPath::new(v)))
            .collect(),
        external_refs,
        persisted.id_to_path_map,
        0,
        None,
    ))
}
634
/// Return absolute paths of source files whose current hash differs from v2
/// cache metadata (including newly-added files missing from metadata).
///
/// Errors when the cache is missing/corrupt or its schema, root, or config
/// fingerprint no longer match — i.e. whenever an incremental diff against
/// the cache is meaningless.
///
/// NOTE(review): if a previously-cached file has been deleted from disk,
/// `hash_file_list` fails and this returns `Err` rather than reporting the
/// file as changed — presumably callers treat `Err` as "do a full rebuild";
/// confirm.
pub fn changed_files_since_v2_cache(
    config: &FoundryConfig,
    include_libs: bool,
) -> Result<Vec<PathBuf>, String> {
    if !config.root.is_dir() {
        return Err(format!("invalid project root: {}", config.root.display()));
    }

    let cache_path_v2 = cache_file_path_v2(&config.root);
    let bytes = fs::read(&cache_path_v2).map_err(|e| format!("cache file read failed: {e}"))?;
    let persisted: PersistedReferenceCacheV2 =
        serde_json::from_slice(&bytes).map_err(|e| format!("cache decode failed: {e}"))?;

    if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
        return Err(format!(
            "schema mismatch: cache={}, expected={}",
            persisted.schema_version, CACHE_SCHEMA_VERSION_V2
        ));
    }
    if persisted.project_root != config.root.to_string_lossy() {
        return Err("project root mismatch".to_string());
    }
    if persisted.config_fingerprint != config_fingerprint(config) {
        return Err("config fingerprint mismatch".to_string());
    }

    // Hash cached files and compare to saved hashes.
    let saved_paths: Vec<PathBuf> = persisted
        .file_hashes
        .keys()
        .map(|rel| config.root.join(rel))
        .collect();
    let current_hashes = hash_file_list(config, &saved_paths)?;
    let mut changed = Vec::new();
    for (rel, current_hash) in &current_hashes {
        match persisted.file_hashes.get(rel) {
            Some(prev) if prev == current_hash => {}
            _ => changed.push(config.root.join(rel)),
        }
    }

    // Detect new files: walk the source dir to find .sol files that are not
    // in the cached file list.  This ensures newly-created files trigger a
    // scoped reindex instead of silently remaining invisible until a full
    // rebuild.  When include_libs is true (fullProjectScan), we also scan
    // library directories so that newly-added lib files are picked up.
    let saved_rels: std::collections::HashSet<&String> = persisted.file_hashes.keys().collect();
    let discovered = if include_libs {
        crate::solc::discover_source_files_with_libs(config)
    } else {
        crate::solc::discover_source_files(config)
    };
    for path in &discovered {
        let rel = relative_to_root(&config.root, path);
        if !saved_rels.contains(&rel) {
            changed.push(path.clone());
        }
    }

    Ok(changed)
}
698
/// Attempt a warm start from the on-disk v2 cache.
///
/// Validation order: project root exists → cache mode allows v2 → metadata
/// readable → schema/root/fingerprint match → per-file hash comparison.
/// Files whose hash still matches have their shards reloaded; everything
/// else is left for recompilation.  The report distinguishes a complete hit
/// (every saved file reused) from a partial one.
pub fn load_reference_cache_with_report(
    config: &FoundryConfig,
    cache_mode: ProjectIndexCacheMode,
    _include_libs: bool,
) -> CacheLoadReport {
    let started = Instant::now();
    // Shared constructor for all miss outcomes.
    let miss = |reason: String, file_count_hashed: usize, duration_ms: u128| CacheLoadReport {
        build: None,
        hit: false,
        miss_reason: Some(reason),
        file_count_hashed,
        file_count_reused: 0,
        complete: false,
        duration_ms,
    };

    if !config.root.is_dir() {
        return miss(
            format!("invalid project root: {}", config.root.display()),
            0,
            started.elapsed().as_millis(),
        );
    }

    let should_try_v2 = matches!(
        cache_mode,
        ProjectIndexCacheMode::Auto | ProjectIndexCacheMode::V2
    );

    // Try v2 first (partial warm-start capable).
    let cache_path_v2 = cache_file_path_v2(&config.root);
    if should_try_v2
        && let Ok(bytes) = fs::read(&cache_path_v2)
        && let Ok(persisted) = serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes)
    {
        if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
            return miss(
                format!(
                    "schema mismatch: cache={}, expected={}",
                    persisted.schema_version, CACHE_SCHEMA_VERSION_V2
                ),
                0,
                started.elapsed().as_millis(),
            );
        }
        if persisted.project_root != config.root.to_string_lossy() {
            return miss(
                "project root mismatch".to_string(),
                0,
                started.elapsed().as_millis(),
            );
        }
        if persisted.config_fingerprint != config_fingerprint(config) {
            return miss(
                "config fingerprint mismatch".to_string(),
                0,
                started.elapsed().as_millis(),
            );
        }

        // Hash only the files that were saved — no rediscovery needed.
        // This means we compare exactly the closure that was compiled last time.
        let saved_paths: Vec<PathBuf> = persisted
            .file_hashes
            .keys()
            .map(|rel| config.root.join(rel))
            .collect();
        let current_hashes = match hash_file_list(config, &saved_paths) {
            Ok(h) => h,
            // NOTE: a deleted cached file makes hashing fail and the whole
            // load miss, per this error path.
            Err(e) => return miss(e, 0, started.elapsed().as_millis()),
        };
        let file_count_hashed = current_hashes.len();

        let shards_dir = cache_shards_dir_v2(&config.root);
        let mut nodes: HashMap<AbsPath, HashMap<NodeId, NodeInfo>> = HashMap::new();
        let mut file_count_reused = 0usize;
        // Declaration ids present in reused shards; external refs targeting
        // anything else are dropped below.
        let mut reused_decl_ids = std::collections::HashSet::new();

        // Reuse a shard only when the file's hash is unchanged AND the shard
        // exists and parses; any failure just skips that file.
        for (rel_path, current_hash) in &current_hashes {
            let Some(cached_hash) = persisted.file_hashes.get(rel_path) else {
                continue;
            };
            if cached_hash != current_hash {
                continue;
            }
            let Some(shard_name) = persisted.node_shards.get(rel_path) else {
                continue;
            };
            let shard_path = shards_dir.join(shard_name);
            let shard_bytes = match fs::read(&shard_path) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let shard: PersistedFileShardV2 = match serde_json::from_slice(&shard_bytes) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let mut file_nodes = HashMap::with_capacity(shard.entries.len());
            for entry in shard.entries {
                reused_decl_ids.insert(entry.id);
                file_nodes.insert(NodeId(entry.id), entry.info);
            }
            nodes.insert(AbsPath::new(shard.abs_path), file_nodes);
            file_count_reused += 1;
        }

        if file_count_reused == 0 {
            return miss(
                "v2 cache: no reusable files".to_string(),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }

        let mut external_refs = HashMap::new();
        for item in persisted.external_refs {
            if reused_decl_ids.contains(&item.decl_id) {
                external_refs.insert(item.src, NodeId(item.decl_id));
            }
        }

        // Complete = every saved file was reused with a matching hash.
        let complete =
            file_count_reused == file_count_hashed && current_hashes == persisted.file_hashes;

        return CacheLoadReport {
            build: Some(CachedBuild::from_reference_index(
                nodes,
                persisted
                    .path_to_abs
                    .into_iter()
                    .map(|(k, v)| (RelPath::new(k), AbsPath::new(v)))
                    .collect(),
                external_refs,
                persisted.id_to_path_map,
                0,
                None,
            )),
            hit: true,
            miss_reason: if complete {
                None
            } else {
                Some("v2 cache partial reuse".to_string())
            },
            file_count_hashed,
            file_count_reused,
            complete,
            duration_ms: started.elapsed().as_millis(),
        };
    }

    miss(
        "cache mode v2: no usable v2 cache".to_string(),
        0,
        started.elapsed().as_millis(),
    )
}
856
#[cfg(test)]
mod tests {
    use super::*;

    // An input whose sources use only "urls" must round-trip unchanged:
    // save_last_solc_input rewrites only "content" entries.
    #[test]
    fn test_save_last_solc_input_writes_cache_file() {
        let dir = tempfile::tempdir().unwrap();
        let input = serde_json::json!({
            "language": "Solidity",
            "sources": {
                "src/Foo.sol": { "urls": ["src/Foo.sol"] }
            },
            "settings": { "outputSelection": { "*": { "": ["ast"] } } }
        });

        save_last_solc_input(dir.path(), &input).unwrap();

        let saved_path = cache_solc_input_path(dir.path());
        assert!(saved_path.is_file());

        // Saved bytes must parse back to the exact same JSON value.
        let bytes = std::fs::read(saved_path).unwrap();
        let parsed: Value = serde_json::from_slice(&bytes).unwrap();
        assert_eq!(parsed, input);
    }
}