//! project_cache.rs — persistent on-disk reference-index cache for the
//! Solidity language server (v1 monolithic and v2 sharded formats).
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::time::Instant;

use serde::{Deserialize, Serialize};
use tiny_keccak::{Hasher, Keccak};

use crate::config::FoundryConfig;
use crate::config::ProjectIndexCacheMode;
use crate::goto::{CachedBuild, NodeInfo};
use crate::types::NodeId;
12
// On-disk schema versions; a persisted cache whose version doesn't match is rejected.
const CACHE_SCHEMA_VERSION_V1: u32 = 1;
const CACHE_SCHEMA_VERSION_V2: u32 = 2;
// All cache artifacts live under this directory at the project root.
const CACHE_DIR: &str = ".solidity-language-server";
// Metadata file names for the v1 (monolithic) and v2 (sharded) cache formats.
const CACHE_FILE_V1: &str = "solidity-lsp-schema-v1.json";
const CACHE_FILE_V2: &str = "solidity-lsp-schema-v2.json";
// Subdirectory holding one JSON shard per source file (v2 format only).
const CACHE_SHARDS_DIR_V2: &str = "reference-index-v2";
19
/// One serialized AST node: its numeric id and the associated node metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedNodeEntry {
    // Raw inner value of `NodeId` (unwrapped for serialization).
    id: u64,
    info: NodeInfo,
}
25
/// Serialized external reference: a source key mapped to the declaration's node id.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedExternalRef {
    src: String,
    // Raw inner value of the declaration's `NodeId`.
    decl_id: u64,
}
31
/// v1 on-disk cache: one monolithic JSON document holding the entire
/// reference index. Loaded all-or-nothing (every file hash must match);
/// superseded by the sharded v2 format but still readable as a fallback.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedReferenceCache {
    // Must equal CACHE_SCHEMA_VERSION_V1 to be accepted on load.
    schema_version: u32,
    // Absolute project root the cache was written for.
    project_root: String,
    // Keccak fingerprint of compiler-relevant config; see config_fingerprint().
    config_fingerprint: String,
    // relative path -> keccak hex of file contents (BTreeMap for stable ordering).
    file_hashes: BTreeMap<String, String>,
    // absolute path -> per-file node entries.
    nodes: HashMap<String, Vec<PersistedNodeEntry>>,
    path_to_abs: HashMap<String, String>,
    external_refs: Vec<PersistedExternalRef>,
    id_to_path_map: HashMap<String, String>,
}
43
/// One v2 shard file: all node entries for a single source file, keyed by the
/// file's absolute path so the loader can rebuild the per-file node map.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedFileShardV2 {
    abs_path: String,
    entries: Vec<PersistedNodeEntry>,
}
49
/// v2 on-disk cache metadata: global maps plus a directory of per-file shard
/// names. Unlike v1, individual unchanged files can be reused even when other
/// files have changed (partial warm-start).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PersistedReferenceCacheV2 {
    // Must equal CACHE_SCHEMA_VERSION_V2 to be accepted on load.
    schema_version: u32,
    project_root: String,
    config_fingerprint: String,
    // relative path -> keccak hex of file contents at write time.
    file_hashes: BTreeMap<String, String>,
    path_to_abs: HashMap<String, String>,
    id_to_path_map: HashMap<String, String>,
    external_refs: Vec<PersistedExternalRef>,
    // relative-path -> shard file name
    node_shards: BTreeMap<String, String>,
}
62
/// Outcome of a cache load attempt, including diagnostics for logging.
#[derive(Debug, Clone)]
pub struct CacheLoadReport {
    /// Reconstructed build when any cached data was usable; `None` on a miss.
    pub build: Option<CachedBuild>,
    /// True when at least some cached data was reused.
    pub hit: bool,
    /// Human-readable reason on a miss, or on a partial (incomplete) hit.
    pub miss_reason: Option<String>,
    /// Number of current source files hashed during validation.
    pub file_count_hashed: usize,
    /// Number of files whose cached index was reused.
    pub file_count_reused: usize,
    /// True when every current file was reused (full warm-start).
    pub complete: bool,
    pub duration_ms: u128,
}
73
/// Outcome of a cache save: how many files were hashed/touched and how long it took.
#[derive(Debug, Clone)]
pub struct CacheSaveReport {
    pub file_count_hashed: usize,
    pub duration_ms: u128,
}
79
80fn cache_file_path_v1(root: &Path) -> PathBuf {
81    root.join(CACHE_DIR).join(CACHE_FILE_V1)
82}
83
84fn cache_file_path_v2(root: &Path) -> PathBuf {
85    root.join(CACHE_DIR).join(CACHE_FILE_V2)
86}
87
88fn cache_shards_dir_v2(root: &Path) -> PathBuf {
89    root.join(CACHE_DIR).join(CACHE_SHARDS_DIR_V2)
90}
91
92fn shard_file_name_for_rel_path(rel_path: &str) -> String {
93    format!("{}.json", keccak_hex(rel_path.as_bytes()))
94}
95
/// Write `payload` to `path` atomically: write a sibling `*.tmp` file, fsync
/// it, then rename it over the destination so readers never observe a
/// partially-written file.
///
/// # Errors
/// Returns a descriptive `String` on any create/write/flush/sync/rename failure.
fn write_atomic_json(path: &Path, payload: &[u8]) -> Result<(), String> {
    // "foo.json" -> "foo.json.tmp". Keeping the tmp file in the same directory
    // means the final rename stays on one filesystem.
    let tmp_path = path.with_extension(format!(
        "{}.tmp",
        path.extension()
            .and_then(|s| s.to_str())
            .unwrap_or_default()
    ));
    {
        let mut file = fs::File::create(&tmp_path)
            .map_err(|e| format!("create tmp {}: {e}", tmp_path.display()))?;
        file.write_all(payload)
            .map_err(|e| format!("write tmp {}: {e}", tmp_path.display()))?;
        file.flush()
            .map_err(|e| format!("flush tmp {}: {e}", tmp_path.display()))?;
        // Durability: ensure the bytes reach disk before the rename publishes them.
        file.sync_all()
            .map_err(|e| format!("sync tmp {}: {e}", tmp_path.display()))?;
    }
    fs::rename(&tmp_path, path).map_err(|e| {
        // Fix: don't leave the orphaned tmp file behind when the rename fails.
        let _ = fs::remove_file(&tmp_path);
        format!(
            "rename tmp {} -> {}: {e}",
            tmp_path.display(),
            path.display()
        )
    })
}
121
122fn keccak_hex(bytes: &[u8]) -> String {
123    let mut out = [0u8; 32];
124    let mut hasher = Keccak::v256();
125    hasher.update(bytes);
126    hasher.finalize(&mut out);
127    hex::encode(out)
128}
129
130fn file_hash(path: &Path) -> Option<String> {
131    let bytes = fs::read(path).ok()?;
132    Some(keccak_hex(&bytes))
133}
134
/// Render `file` relative to `root` using forward slashes (so cache keys are
/// stable across platforms). If `file` is not under `root`, the path is used
/// as given.
fn relative_to_root(root: &Path, file: &Path) -> String {
    let trimmed = match file.strip_prefix(root) {
        Ok(rest) => rest,
        Err(_) => file,
    };
    trimmed.to_string_lossy().replace('\\', "/")
}
141
142fn current_file_hashes(config: &FoundryConfig) -> Result<BTreeMap<String, String>, String> {
143    let source_files = crate::solc::discover_source_files(config);
144    if source_files.is_empty() {
145        return Ok(BTreeMap::new());
146    }
147
148    let mut hashes = BTreeMap::new();
149    for path in source_files {
150        let rel = relative_to_root(&config.root, &path);
151        let hash = file_hash(&path)
152            .ok_or_else(|| format!("failed to hash source file {}", path.display()))?;
153        hashes.insert(rel, hash);
154    }
155    Ok(hashes)
156}
157
/// Keccak fingerprint of the compiler-relevant configuration. Any change to
/// one of these fields invalidates all persisted caches for the project.
///
/// NOTE(review): the fingerprint hashes the JSON serialization of this value,
/// so the set of keys (and their serialized form) is part of the cache
/// contract — adding, removing, or renaming a key invalidates existing
/// on-disk caches.
fn config_fingerprint(config: &FoundryConfig) -> String {
    let payload = serde_json::json!({
        "solc_version": config.solc_version,
        "remappings": config.remappings,
        "via_ir": config.via_ir,
        "optimizer": config.optimizer,
        "optimizer_runs": config.optimizer_runs,
        "evm_version": config.evm_version,
        "sources_dir": config.sources_dir,
        "libs": config.libs,
    });
    keccak_hex(payload.to_string().as_bytes())
}
171
172pub fn save_reference_cache(config: &FoundryConfig, build: &CachedBuild) -> Result<(), String> {
173    save_reference_cache_with_report(config, build).map(|_| ())
174}
175
176/// Incrementally upsert v2 cache shards from a partial build (typically a
177/// saved file compile). This is a fast-path: it updates per-file shards and
178/// file hashes for touched files, while preserving existing global metadata.
179///
180/// The authoritative full-project cache is still produced by full reconcile.
181pub fn upsert_reference_cache_v2_with_report(
182    config: &FoundryConfig,
183    build: &CachedBuild,
184) -> Result<CacheSaveReport, String> {
185    let started = Instant::now();
186    if !config.root.is_dir() {
187        return Err(format!("invalid project root: {}", config.root.display()));
188    }
189
190    let cache_root = config.root.join(CACHE_DIR);
191    fs::create_dir_all(&cache_root)
192        .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
193    let shards_dir = cache_shards_dir_v2(&config.root);
194    fs::create_dir_all(&shards_dir)
195        .map_err(|e| format!("failed to create shards dir {}: {e}", shards_dir.display()))?;
196
197    let meta_path = cache_file_path_v2(&config.root);
198    let mut meta = if let Ok(bytes) = fs::read(&meta_path) {
199        serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes).unwrap_or(PersistedReferenceCacheV2 {
200            schema_version: CACHE_SCHEMA_VERSION_V2,
201            project_root: config.root.to_string_lossy().to_string(),
202            config_fingerprint: config_fingerprint(config),
203            file_hashes: BTreeMap::new(),
204            path_to_abs: HashMap::new(),
205            id_to_path_map: HashMap::new(),
206            external_refs: Vec::new(),
207            node_shards: BTreeMap::new(),
208        })
209    } else {
210        PersistedReferenceCacheV2 {
211            schema_version: CACHE_SCHEMA_VERSION_V2,
212            project_root: config.root.to_string_lossy().to_string(),
213            config_fingerprint: config_fingerprint(config),
214            file_hashes: BTreeMap::new(),
215            path_to_abs: HashMap::new(),
216            id_to_path_map: HashMap::new(),
217            external_refs: Vec::new(),
218            node_shards: BTreeMap::new(),
219        }
220    };
221
222    // Reset metadata when root/fingerprint changed.
223    if meta.project_root != config.root.to_string_lossy()
224        || meta.config_fingerprint != config_fingerprint(config)
225    {
226        meta = PersistedReferenceCacheV2 {
227            schema_version: CACHE_SCHEMA_VERSION_V2,
228            project_root: config.root.to_string_lossy().to_string(),
229            config_fingerprint: config_fingerprint(config),
230            file_hashes: BTreeMap::new(),
231            path_to_abs: HashMap::new(),
232            id_to_path_map: HashMap::new(),
233            external_refs: Vec::new(),
234            node_shards: BTreeMap::new(),
235        };
236    }
237
238    let mut touched = 0usize;
239    for (abs_path, file_nodes) in &build.nodes {
240        let abs = Path::new(abs_path);
241        let rel = relative_to_root(&config.root, abs);
242        let shard_name = shard_file_name_for_rel_path(&rel);
243        let shard_path = shards_dir.join(&shard_name);
244
245        let mut entries = Vec::with_capacity(file_nodes.len());
246        for (id, info) in file_nodes {
247            entries.push(PersistedNodeEntry {
248                id: id.0,
249                info: info.clone(),
250            });
251        }
252        let shard = PersistedFileShardV2 {
253            abs_path: abs_path.clone(),
254            entries,
255        };
256        let shard_payload =
257            serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
258        write_atomic_json(&shard_path, &shard_payload)?;
259
260        if let Some(hash) = file_hash(abs) {
261            meta.file_hashes.insert(rel.clone(), hash);
262            meta.node_shards.insert(rel, shard_name);
263            touched += 1;
264        }
265
266        meta.path_to_abs.insert(abs_path.clone(), abs_path.clone());
267    }
268
269    for (k, v) in &build.id_to_path_map {
270        meta.id_to_path_map.insert(k.clone(), v.clone());
271    }
272
273    let payload_v2 = serde_json::to_vec(&meta).map_err(|e| format!("serialize v2 cache: {e}"))?;
274    write_atomic_json(&meta_path, &payload_v2)?;
275
276    Ok(CacheSaveReport {
277        file_count_hashed: touched,
278        duration_ms: started.elapsed().as_millis(),
279    })
280}
281
282pub fn save_reference_cache_with_report(
283    config: &FoundryConfig,
284    build: &CachedBuild,
285) -> Result<CacheSaveReport, String> {
286    let started = Instant::now();
287    if !config.root.is_dir() {
288        return Err(format!("invalid project root: {}", config.root.display()));
289    }
290
291    let file_hashes = current_file_hashes(config)?;
292    let file_count_hashed = file_hashes.len();
293    let external_refs = build
294        .external_refs
295        .iter()
296        .map(|(src, id)| PersistedExternalRef {
297            src: src.clone(),
298            decl_id: id.0,
299        })
300        .collect::<Vec<_>>();
301
302    let cache_root = config.root.join(CACHE_DIR);
303    fs::create_dir_all(&cache_root)
304        .map_err(|e| format!("failed to create cache dir {}: {e}", cache_root.display()))?;
305    let shards_dir = cache_shards_dir_v2(&config.root);
306    fs::create_dir_all(&shards_dir)
307        .map_err(|e| format!("failed to create shards dir {}: {e}", shards_dir.display()))?;
308
309    let mut node_shards: BTreeMap<String, String> = BTreeMap::new();
310    let mut live_shards = std::collections::HashSet::new();
311    for (abs_path, file_nodes) in &build.nodes {
312        let abs = Path::new(abs_path);
313        let rel = relative_to_root(&config.root, abs);
314        let shard_name = shard_file_name_for_rel_path(&rel);
315        let shard_path = shards_dir.join(&shard_name);
316
317        let mut entries = Vec::with_capacity(file_nodes.len());
318        for (id, info) in file_nodes {
319            entries.push(PersistedNodeEntry {
320                id: id.0,
321                info: info.clone(),
322            });
323        }
324        let shard = PersistedFileShardV2 {
325            abs_path: abs_path.clone(),
326            entries,
327        };
328        let shard_payload =
329            serde_json::to_vec(&shard).map_err(|e| format!("serialize shard {}: {e}", rel))?;
330        write_atomic_json(&shard_path, &shard_payload)?;
331        node_shards.insert(rel, shard_name.clone());
332        live_shards.insert(shard_name);
333    }
334
335    // Best-effort cleanup of stale shard files.
336    if let Ok(dir) = fs::read_dir(&shards_dir) {
337        for entry in dir.flatten() {
338            let file_name = entry.file_name().to_string_lossy().to_string();
339            if !live_shards.contains(&file_name) {
340                let _ = fs::remove_file(entry.path());
341            }
342        }
343    }
344
345    let persisted_v2 = PersistedReferenceCacheV2 {
346        schema_version: CACHE_SCHEMA_VERSION_V2,
347        project_root: config.root.to_string_lossy().to_string(),
348        config_fingerprint: config_fingerprint(config),
349        file_hashes: file_hashes.clone(),
350        path_to_abs: build.path_to_abs.clone(),
351        external_refs: external_refs.clone(),
352        id_to_path_map: build.id_to_path_map.clone(),
353        node_shards,
354    };
355    let payload_v2 =
356        serde_json::to_vec(&persisted_v2).map_err(|e| format!("serialize v2 cache: {e}"))?;
357    write_atomic_json(&cache_file_path_v2(&config.root), &payload_v2)?;
358
359    // Intentionally bypass v1 writes: v2 is now the only persisted write path.
360    // v1 read fallback remains available for older on-disk caches.
361
362    Ok(CacheSaveReport {
363        file_count_hashed,
364        duration_ms: started.elapsed().as_millis(),
365    })
366}
367
368pub fn load_reference_cache(config: &FoundryConfig) -> Option<CachedBuild> {
369    load_reference_cache_with_report(config, ProjectIndexCacheMode::Auto).build
370}
371
/// Attempt to warm-start from the on-disk reference cache.
///
/// Depending on `cache_mode`, tries the v2 sharded cache first (supports
/// partial reuse of individual unchanged files), then falls back to the v1
/// monolithic cache (all-or-nothing). Returns a report describing hit/miss,
/// how many files were hashed vs. reused, and elapsed time.
pub fn load_reference_cache_with_report(
    config: &FoundryConfig,
    cache_mode: ProjectIndexCacheMode,
) -> CacheLoadReport {
    let started = Instant::now();
    // Shared constructor for every miss path so timing/reason handling stays uniform.
    let miss = |reason: String, file_count_hashed: usize, duration_ms: u128| CacheLoadReport {
        build: None,
        hit: false,
        miss_reason: Some(reason),
        file_count_hashed,
        file_count_reused: 0,
        complete: false,
        duration_ms,
    };

    if !config.root.is_dir() {
        return miss(
            format!("invalid project root: {}", config.root.display()),
            0,
            started.elapsed().as_millis(),
        );
    }
    // Hash all current source files up front; both cache formats validate
    // reuse against these hashes.
    let current_hashes = match current_file_hashes(config) {
        Ok(h) => h,
        Err(e) => {
            return miss(e, 0, started.elapsed().as_millis());
        }
    };
    let file_count_hashed = current_hashes.len();

    let should_try_v2 = matches!(
        cache_mode,
        ProjectIndexCacheMode::Auto | ProjectIndexCacheMode::V2
    );
    let should_try_v1 = matches!(
        cache_mode,
        ProjectIndexCacheMode::Auto | ProjectIndexCacheMode::V1
    );

    // Try v2 first (partial warm-start capable). Read/decode failures fall
    // through (toward v1) rather than returning a miss here.
    let cache_path_v2 = cache_file_path_v2(&config.root);
    if should_try_v2
        && let Ok(bytes) = fs::read(&cache_path_v2)
        && let Ok(persisted) = serde_json::from_slice::<PersistedReferenceCacheV2>(&bytes)
    {
        // Identity checks: schema, project root, and config fingerprint must
        // all match before any shard is trusted.
        if persisted.schema_version != CACHE_SCHEMA_VERSION_V2 {
            return miss(
                format!(
                    "schema mismatch: cache={}, expected={}",
                    persisted.schema_version, CACHE_SCHEMA_VERSION_V2
                ),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }
        if persisted.project_root != config.root.to_string_lossy() {
            return miss(
                "project root mismatch".to_string(),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }
        if persisted.config_fingerprint != config_fingerprint(config) {
            return miss(
                "config fingerprint mismatch".to_string(),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }

        let shards_dir = cache_shards_dir_v2(&config.root);
        let mut nodes: HashMap<String, HashMap<NodeId, NodeInfo>> = HashMap::new();
        let mut file_count_reused = 0usize;
        // Declaration ids seen in reused shards; used below to keep only
        // external refs whose declaration was actually loaded.
        let mut reused_decl_ids = std::collections::HashSet::new();

        // Reuse a shard only when the file's current hash matches the cached
        // one AND the shard file is present and decodable; any other condition
        // just skips the file (partial reuse, not a hard miss).
        for (rel_path, current_hash) in &current_hashes {
            let Some(cached_hash) = persisted.file_hashes.get(rel_path) else {
                continue;
            };
            if cached_hash != current_hash {
                continue;
            }
            let Some(shard_name) = persisted.node_shards.get(rel_path) else {
                continue;
            };
            let shard_path = shards_dir.join(shard_name);
            let shard_bytes = match fs::read(&shard_path) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let shard: PersistedFileShardV2 = match serde_json::from_slice(&shard_bytes) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let mut file_nodes = HashMap::with_capacity(shard.entries.len());
            for entry in shard.entries {
                reused_decl_ids.insert(entry.id);
                file_nodes.insert(NodeId(entry.id), entry.info);
            }
            nodes.insert(shard.abs_path, file_nodes);
            file_count_reused += 1;
        }

        if file_count_reused == 0 {
            return miss(
                "v2 cache: no reusable files".to_string(),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }

        // Keep only external refs whose target declaration survived reuse.
        let mut external_refs = HashMap::new();
        for item in persisted.external_refs {
            if reused_decl_ids.contains(&item.decl_id) {
                external_refs.insert(item.src, NodeId(item.decl_id));
            }
        }

        // Complete == every current file reused and the hash sets are
        // identical (no additions, removals, or content changes).
        let complete = file_count_reused == file_count_hashed
            && file_count_hashed == persisted.file_hashes.len()
            && current_hashes == persisted.file_hashes;

        return CacheLoadReport {
            // NOTE(review): the trailing `0` argument's meaning is defined by
            // from_reference_index (not visible in this file) — confirm there.
            build: Some(CachedBuild::from_reference_index(
                nodes,
                persisted.path_to_abs,
                external_refs,
                persisted.id_to_path_map,
                0,
            )),
            hit: true,
            miss_reason: if complete {
                None
            } else {
                Some("v2 cache partial reuse".to_string())
            },
            file_count_hashed,
            file_count_reused,
            complete,
            duration_ms: started.elapsed().as_millis(),
        };
    }

    if !should_try_v1 {
        return miss(
            "cache mode v2: no usable v2 cache".to_string(),
            file_count_hashed,
            started.elapsed().as_millis(),
        );
    }

    // Fallback to v1 (all-or-nothing).
    let cache_path_v1 = cache_file_path_v1(&config.root);
    let bytes = match fs::read(&cache_path_v1) {
        Ok(b) => b,
        Err(e) => {
            return miss(
                format!("cache file read failed: {e}"),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }
    };
    let persisted: PersistedReferenceCache = match serde_json::from_slice(&bytes) {
        Ok(v) => v,
        Err(e) => {
            return miss(
                format!("cache decode failed: {e}"),
                file_count_hashed,
                started.elapsed().as_millis(),
            );
        }
    };
    if persisted.schema_version != CACHE_SCHEMA_VERSION_V1 {
        return miss(
            format!(
                "schema mismatch: cache={}, expected={}",
                persisted.schema_version, CACHE_SCHEMA_VERSION_V1
            ),
            file_count_hashed,
            started.elapsed().as_millis(),
        );
    }
    if persisted.project_root != config.root.to_string_lossy() {
        return miss(
            "project root mismatch".to_string(),
            file_count_hashed,
            started.elapsed().as_millis(),
        );
    }
    if persisted.config_fingerprint != config_fingerprint(config) {
        return miss(
            "config fingerprint mismatch".to_string(),
            file_count_hashed,
            started.elapsed().as_millis(),
        );
    }
    // v1 is strict: every current file hash must match the cached set exactly.
    if current_hashes != persisted.file_hashes {
        return miss(
            "file hash mismatch".to_string(),
            file_count_hashed,
            started.elapsed().as_millis(),
        );
    }

    // Rebuild the in-memory node index from the monolithic document.
    let mut nodes: HashMap<String, HashMap<NodeId, NodeInfo>> =
        HashMap::with_capacity(persisted.nodes.len());
    for (abs_path, entries) in persisted.nodes {
        let mut file_nodes = HashMap::with_capacity(entries.len());
        for entry in entries {
            file_nodes.insert(NodeId(entry.id), entry.info);
        }
        nodes.insert(abs_path, file_nodes);
    }
    let mut external_refs = HashMap::new();
    for item in persisted.external_refs {
        external_refs.insert(item.src, NodeId(item.decl_id));
    }

    CacheLoadReport {
        build: Some(CachedBuild::from_reference_index(
            nodes,
            persisted.path_to_abs,
            external_refs,
            persisted.id_to_path_map,
            0,
        )),
        hit: true,
        miss_reason: None,
        file_count_hashed,
        // v1 is all-or-nothing, so reuse count equals the hashed count.
        file_count_reused: file_count_hashed,
        complete: true,
        duration_ms: started.elapsed().as_millis(),
    }
}