//! File operations for the Solidity language server
//! (`solidity_language_server/file_operations.rs`): import-path maintenance
//! for file renames/moves, import-statement removal for deletions, and
//! scaffold generation for new files.
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

use tower_lsp::lsp_types::{Position, Range, TextEdit, Url};

use crate::links;
use crate::utils;
6
7// ---------------------------------------------------------------------------
8// Types
9// ---------------------------------------------------------------------------
10
/// A single file rename: old absolute path → new absolute path.
#[derive(Debug, Clone)]
pub struct FileRename {
    /// Absolute path of the file before the rename.
    pub old_path: PathBuf,
    /// Absolute path of the file after the rename.
    pub new_path: PathBuf,
}
17
/// Diagnostic counters returned alongside edits so the caller can log them.
///
/// All counters start at zero via the derived `Default`.
#[derive(Debug, Default)]
pub struct RenameStats {
    /// Source files whose bytes could not be read.
    pub read_failures: usize,
    /// Source files with no parseable parent directory.
    pub no_parent: usize,
    /// Imports skipped because pathdiff returned None.
    pub pathdiff_failures: usize,
    /// Imports whose new path equals the old path (no-op, correctly skipped).
    pub no_op_skips: usize,
    /// Edits suppressed because Case 1 already covered the range.
    pub dedup_skips: usize,
    /// Duplicate old_path entries detected in the rename list (last wins).
    pub duplicate_renames: usize,
}
34
/// Result of a rename_imports call: edits + diagnostic stats.
pub struct RenameResult {
    /// Import-path text edits, keyed by the URI of the file to patch.
    pub edits: HashMap<Url, Vec<TextEdit>>,
    /// Counters describing skipped or failed work, for logging.
    pub stats: RenameStats,
}
40
/// Diagnostic counters returned alongside delete edits.
///
/// All counters start at zero via the derived `Default`.
#[derive(Debug, Default)]
pub struct DeleteStats {
    /// Source files whose bytes could not be read.
    pub read_failures: usize,
    /// Source files with no parseable parent directory.
    pub no_parent: usize,
    /// Imports where we could not determine a full import-statement span.
    pub statement_range_failures: usize,
    /// Duplicate delete targets detected in the delete list.
    pub duplicate_deletes: usize,
    /// Duplicate edits skipped for the same statement range.
    pub dedup_skips: usize,
}
55
/// Result of delete_imports call: edits + diagnostic stats.
pub struct DeleteResult {
    /// Import-statement removal edits, keyed by the URI of the file to patch.
    pub edits: HashMap<Url, Vec<TextEdit>>,
    /// Counters describing skipped or failed work, for logging.
    pub stats: DeleteStats,
}
61
62// ---------------------------------------------------------------------------
63// Folder expansion
64// ---------------------------------------------------------------------------
65
66/// Expand rename entries that target folders into per-file renames.
67///
68/// For each entry, if `old_path` is a directory (or has no `.sol` extension),
69/// every source file under it is expanded to a concrete file rename.
70/// Uses `Path::strip_prefix` for component-aware matching (won't match
71/// `/src2` given `/src`).
72pub fn expand_folder_renames(
73    params: &[(PathBuf, PathBuf)],
74    source_files: &[String],
75) -> Vec<FileRename> {
76    // Deduplicate by old path; last entry wins.
77    let mut dedup: HashMap<PathBuf, PathBuf> = HashMap::new();
78    for (old_path, new_path) in params {
79        if old_path.is_dir() || !old_path.extension().map_or(false, |e| e == "sol") {
80            for sf in source_files {
81                let sf_path = Path::new(sf);
82                if let Ok(suffix) = sf_path.strip_prefix(old_path) {
83                    dedup.insert(sf_path.to_path_buf(), new_path.join(suffix));
84                }
85            }
86        } else {
87            dedup.insert(old_path.clone(), new_path.clone());
88        }
89    }
90    dedup
91        .into_iter()
92        .map(|(old_path, new_path)| FileRename { old_path, new_path })
93        .collect()
94}
95
/// Expand delete entries that target folders into per-file paths.
///
/// For each entry, if it is a directory (or has no `.sol` extension),
/// every source file under it is expanded to a concrete file path.
pub fn expand_folder_deletes(params: &[PathBuf], source_files: &[String]) -> Vec<PathBuf> {
    // A set is all we need here (the old `HashMap<PathBuf, ()>` carried
    // zero-sized values): only the distinct expanded paths matter.
    let mut dedup: HashSet<PathBuf> = HashSet::new();
    for old_path in params {
        // Treat anything without a `.sol` extension as a folder: the path may
        // already be gone from disk, so `is_dir()` alone is not reliable.
        if old_path.is_dir() || !old_path.extension().is_some_and(|e| e == "sol") {
            for sf in source_files {
                let sf_path = Path::new(sf);
                // strip_prefix is component-aware: `/src` won't match `/src2`.
                if sf_path.strip_prefix(old_path).is_ok() {
                    dedup.insert(sf_path.to_path_buf());
                }
            }
        } else {
            dedup.insert(old_path.clone());
        }
    }
    dedup.into_iter().collect()
}
116
117/// Expand folder renames using candidate filesystem paths.
118///
119/// `candidate_paths` should be the union of discovered project files and files
120/// currently present in `text_cache` so folder renames don't miss entries.
121pub fn expand_folder_renames_from_paths(
122    params: &[(Url, Url)],
123    candidate_paths: &[PathBuf],
124) -> Vec<(String, String)> {
125    // Deduplicate by old URI; last entry wins.
126    let mut dedup: HashMap<String, String> = HashMap::new();
127    for (old_uri, new_uri) in params {
128        let old_path = match old_uri.to_file_path() {
129            Ok(p) => p,
130            Err(_) => continue,
131        };
132        let new_path = match new_uri.to_file_path() {
133            Ok(p) => p,
134            Err(_) => continue,
135        };
136
137        if old_path.extension().map_or(false, |e| e == "sol") && !old_path.is_dir() {
138            dedup.insert(old_uri.to_string(), new_uri.to_string());
139        } else {
140            for existing_path in candidate_paths {
141                if let Ok(suffix) = existing_path.strip_prefix(&old_path) {
142                    let new_file_path = new_path.join(suffix);
143                    let Ok(existing_uri) = Url::from_file_path(existing_path) else {
144                        continue;
145                    };
146                    let Ok(new_file_uri) = Url::from_file_path(&new_file_path) else {
147                        continue;
148                    };
149                    dedup.insert(existing_uri.to_string(), new_file_uri.to_string());
150                }
151            }
152        }
153    }
154    dedup.into_iter().collect()
155}
156
157/// Expand delete entries using candidate filesystem paths.
158///
159/// `candidate_paths` should be the union of discovered project files and files
160/// currently present in `text_cache` so folder deletes don't miss entries.
161pub fn expand_folder_deletes_from_paths(
162    params: &[Url],
163    candidate_paths: &[PathBuf],
164) -> Vec<PathBuf> {
165    let mut dedup: HashMap<PathBuf, ()> = HashMap::new();
166    for uri in params {
167        let old_path = match uri.to_file_path() {
168            Ok(p) => p,
169            Err(_) => continue,
170        };
171
172        if old_path.extension().map_or(false, |e| e == "sol") && !old_path.is_dir() {
173            dedup.insert(old_path, ());
174        } else {
175            for existing_path in candidate_paths {
176                if existing_path.strip_prefix(&old_path).is_ok() {
177                    dedup.insert(existing_path.clone(), ());
178                }
179            }
180        }
181    }
182    dedup.into_keys().collect()
183}
184
185// ---------------------------------------------------------------------------
186// Core rename logic
187// ---------------------------------------------------------------------------
188
/// Compute import-path edits needed when one or more files are renamed/moved.
///
/// Handles batch renames correctly: when both an importer and its import target
/// are being moved in the same request (e.g. folder rename), the relative path
/// between them is computed using their **new** locations, so imports that are
/// still valid after the move are left unchanged.
///
/// Uses tree-sitter to find import strings in each source file, making this
/// robust against stale or unavailable solc AST data.
///
/// Handles two cases per renamed file:
/// 1. **Other files import the renamed file** — their import path string must
///    change to reflect the new location of the target.
/// 2. **The renamed file's own relative imports** — if the file moved to a
///    different directory, its relative imports to other files need updating.
pub fn rename_imports(
    source_files: &[String],
    renames: &[FileRename],
    project_root: &Path,
    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
) -> RenameResult {
    let mut edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
    let mut stats = RenameStats::default();

    if renames.is_empty() {
        return RenameResult { edits, stats };
    }

    // Build lookup: old_path → new_path.
    // Detect duplicates (same old_path appears more than once).
    // NOTE(review): keys are inserted as-is, while import targets below are
    // normalized via `normalize_path` before lookup — this assumes rename
    // paths arrive without `.`/`..` components; confirm against callers.
    let mut rename_map: HashMap<PathBuf, PathBuf> = HashMap::with_capacity(renames.len());
    for r in renames {
        if rename_map
            .insert(r.old_path.clone(), r.new_path.clone())
            .is_some()
        {
            // Last entry wins; count the anomaly so the caller can log it.
            stats.duplicate_renames += 1;
        }
    }

    // ── Case 1: files that import a renamed file ───────────────────────
    for source_fs_str in source_files {
        let source_path = Path::new(source_fs_str);

        // If this source file is itself being renamed, use its NEW directory
        // for computing replacement import paths.
        let source_new_path = rename_map.get(source_path);

        let effective_source_dir = match source_new_path {
            Some(new_p) => match new_p.parent() {
                Some(d) => d.to_path_buf(),
                None => {
                    stats.no_parent += 1;
                    continue;
                }
            },
            None => match source_path.parent() {
                Some(d) => d.to_path_buf(),
                None => {
                    stats.no_parent += 1;
                    continue;
                }
            },
        };

        // Existing import strings are still relative to the CURRENT directory,
        // so resolution uses it (only replacements use the effective dir).
        let current_source_dir = match source_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };

        let bytes = match get_source_bytes(source_fs_str) {
            Some(b) => b,
            None => {
                stats.read_failures += 1;
                continue;
            }
        };

        let imports = links::ts_find_imports(&bytes);

        for imp in &imports {
            // Resolve the import string to an absolute path: relative imports
            // against the source dir; non-relative imports additionally get a
            // project-root fallback.
            let resolved = normalize_path(&current_source_dir.join(&imp.path));

            let resolved_target = if rename_map.contains_key(&resolved) {
                Some(resolved)
            } else if !imp.path.starts_with('.') {
                let via_root = normalize_path(&project_root.join(&imp.path));
                if rename_map.contains_key(&via_root) {
                    Some(via_root)
                } else {
                    None
                }
            } else {
                None
            };

            let old_target = match resolved_target {
                Some(t) => t,
                None => continue,
            };

            // Indexing is safe: contains_key was checked just above.
            let new_target = &rename_map[&old_target];

            // Relative imports get a new relative path (from the importer's
            // effective directory); root-style imports stay root-relative.
            let new_import_path = if imp.path.starts_with('.') {
                match pathdiff::diff_paths(new_target, &effective_source_dir) {
                    Some(p) => ensure_dot_prefix(&p),
                    None => {
                        stats.pathdiff_failures += 1;
                        continue;
                    }
                }
            } else {
                match pathdiff::diff_paths(new_target, project_root) {
                    Some(p) => normalize_slashes(&p.to_string_lossy()),
                    None => {
                        stats.pathdiff_failures += 1;
                        continue;
                    }
                }
            };

            // Import is still valid after the move → leave it untouched.
            if new_import_path == imp.path {
                stats.no_op_skips += 1;
                continue;
            }

            let source_uri = match Url::from_file_path(source_fs_str) {
                Ok(u) => u,
                Err(_) => continue,
            };

            // Replace the quoted string (quotes included) with the new path.
            edits.entry(source_uri).or_default().push(TextEdit {
                range: range_with_quotes(imp.inner_range),
                new_text: format!("\"{}\"", new_import_path),
            });
        }
    }

    // ── Case 2: renamed files' own relative imports ─────────────────────
    for rename in renames {
        let old_dir = match rename.old_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };
        let new_dir = match rename.new_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };

        // Same directory → relative imports are unaffected by the rename.
        if old_dir == new_dir {
            continue;
        }

        let old_fs_str = match rename.old_path.to_str() {
            Some(s) => s,
            None => continue,
        };

        let bytes = match get_source_bytes(old_fs_str) {
            Some(b) => b,
            None => {
                stats.read_failures += 1;
                continue;
            }
        };

        let imports = links::ts_find_imports(&bytes);

        // Edits target the OLD URI. NOTE(review): assumes a
        // willRenameFiles-style flow where edits apply before the file
        // actually moves — confirm against the caller.
        let old_uri = match Url::from_file_path(&rename.old_path) {
            Ok(u) => u,
            Err(_) => continue,
        };

        for imp in &imports {
            // Only relative imports break when the file changes directory.
            if !imp.path.starts_with('.') {
                continue;
            }

            let target_fs = normalize_path(&old_dir.join(&imp.path));
            // If the target is itself being renamed, point at its NEW path.
            let effective_target = rename_map.get(&target_fs).unwrap_or(&target_fs);

            let new_rel = match pathdiff::diff_paths(effective_target, new_dir) {
                Some(p) => p,
                None => {
                    stats.pathdiff_failures += 1;
                    continue;
                }
            };

            let new_import_str = ensure_dot_prefix(&new_rel);

            if new_import_str == imp.path {
                stats.no_op_skips += 1;
                continue;
            }

            // Case 1 may already have produced an edit for this exact range
            // (this file both moved AND imports another renamed file);
            // suppress the duplicate.
            let already_edited = edits.get(&old_uri).map_or(false, |file_edits| {
                let qr = range_with_quotes(imp.inner_range);
                file_edits.iter().any(|e| e.range == qr)
            });
            if already_edited {
                stats.dedup_skips += 1;
                continue;
            }

            edits.entry(old_uri.clone()).or_default().push(TextEdit {
                range: range_with_quotes(imp.inner_range),
                new_text: format!("\"{}\"", new_import_str),
            });
        }
    }

    RenameResult { edits, stats }
}
412
413/// Compute import-statement removal edits needed when one or more files are
414/// deleted.
415///
416/// For each Solidity source file, this scans all import directives and removes
417/// the full import statement (`import ...;`) if it resolves to a deleted file.
418///
419/// This is intended for `workspace/willDeleteFiles` preview edits.
420pub fn delete_imports(
421    source_files: &[String],
422    deletes: &[PathBuf],
423    project_root: &Path,
424    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
425) -> DeleteResult {
426    let mut edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
427    let mut stats = DeleteStats::default();
428
429    if deletes.is_empty() {
430        return DeleteResult { edits, stats };
431    }
432
433    let mut delete_set: HashMap<PathBuf, ()> = HashMap::with_capacity(deletes.len());
434    for p in deletes {
435        if delete_set.insert(normalize_path(p), ()).is_some() {
436            stats.duplicate_deletes += 1;
437        }
438    }
439
440    for source_fs_str in source_files {
441        let source_path = Path::new(source_fs_str);
442        let source_dir = match source_path.parent() {
443            Some(d) => d,
444            None => {
445                stats.no_parent += 1;
446                continue;
447            }
448        };
449
450        let bytes = match get_source_bytes(source_fs_str) {
451            Some(b) => b,
452            None => {
453                stats.read_failures += 1;
454                continue;
455            }
456        };
457
458        let source_str = match std::str::from_utf8(&bytes) {
459            Ok(s) => s,
460            Err(_) => {
461                stats.read_failures += 1;
462                continue;
463            }
464        };
465
466        let imports = links::ts_find_imports(&bytes);
467        let source_uri = match Url::from_file_path(source_fs_str) {
468            Ok(u) => u,
469            Err(_) => continue,
470        };
471
472        for imp in &imports {
473            let resolved = normalize_path(&source_dir.join(&imp.path));
474
475            let is_deleted = if delete_set.contains_key(&resolved) {
476                true
477            } else if !imp.path.starts_with('.') {
478                let via_root = normalize_path(&project_root.join(&imp.path));
479                delete_set.contains_key(&via_root)
480            } else {
481                false
482            };
483
484            if !is_deleted {
485                continue;
486            }
487
488            let Some(statement_range) = import_statement_range(source_str, imp.inner_range) else {
489                stats.statement_range_failures += 1;
490                continue;
491            };
492
493            let duplicate = edits.get(&source_uri).map_or(false, |file_edits| {
494                file_edits.iter().any(|e| e.range == statement_range)
495            });
496            if duplicate {
497                stats.dedup_skips += 1;
498                continue;
499            }
500
501            edits.entry(source_uri.clone()).or_default().push(TextEdit {
502                range: statement_range,
503                new_text: String::new(),
504            });
505        }
506    }
507
508    DeleteResult { edits, stats }
509}
510
511/// Backward-compatible wrapper: single rename, used by existing tests.
512pub fn rename_imports_single(
513    source_files: &[String],
514    old_uri: &Url,
515    new_uri: &Url,
516    project_root: &Path,
517    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
518) -> HashMap<Url, Vec<TextEdit>> {
519    let old_path = match old_uri.to_file_path() {
520        Ok(p) => p,
521        Err(_) => return HashMap::new(),
522    };
523    let new_path = match new_uri.to_file_path() {
524        Ok(p) => p,
525        Err(_) => return HashMap::new(),
526    };
527    rename_imports(
528        source_files,
529        &[FileRename { old_path, new_path }],
530        project_root,
531        get_source_bytes,
532    )
533    .edits
534}
535
536// ---------------------------------------------------------------------------
537// Cache patching
538// ---------------------------------------------------------------------------
539
540/// Apply computed edits to in-memory file content.
541///
542/// Returns the number of files patched (for logging).
543pub fn apply_edits_to_cache(
544    edits: &HashMap<Url, Vec<TextEdit>>,
545    cache: &mut HashMap<String, (i32, String)>,
546) -> usize {
547    let mut patched = 0;
548    for (uri, text_edits) in edits {
549        let uri_str = uri.to_string();
550        if let Some((version, content)) = cache.get(&uri_str).cloned() {
551            let new_content = apply_text_edits(&content, text_edits);
552            cache.insert(uri_str, (version, new_content));
553            patched += 1;
554        }
555    }
556    patched
557}
558
559// ---------------------------------------------------------------------------
560// Internal helpers
561// ---------------------------------------------------------------------------
562
563/// Expand a range to include the surrounding quote characters.
564fn range_with_quotes(inner: Range) -> Range {
565    Range {
566        start: Position {
567            line: inner.start.line,
568            character: inner.start.character.saturating_sub(1),
569        },
570        end: Position {
571            line: inner.end.line,
572            character: inner.end.character + 1,
573        },
574    }
575}
576
577/// Ensure a relative path starts with `./` or `../` for Solidity import convention.
578/// Always uses forward slashes regardless of platform.
579fn ensure_dot_prefix(rel: &Path) -> String {
580    let s = normalize_slashes(&rel.to_string_lossy());
581    if s.starts_with("..") || s.starts_with('.') {
582        s
583    } else {
584        format!("./{s}")
585    }
586}
587
/// Replace backslashes with forward slashes for Solidity import paths.
/// Solidity uses forward slashes in import strings regardless of platform.
fn normalize_slashes(s: &str) -> String {
    s.chars()
        .map(|c| if c == '\\' { '/' } else { c })
        .collect()
}
593
/// Determine a range that covers the full import statement containing `inner`.
///
/// The returned range starts at the `import` keyword and ends at the
/// terminating `;`, plus one trailing newline when present.
///
/// Returns `None` when `inner` maps to an out-of-order/out-of-bounds byte
/// span, when no `import` keyword precedes the string within the current
/// statement, or when no terminating `;` follows it.
fn import_statement_range(source: &str, inner: Range) -> Option<Range> {
    let start = utils::position_to_byte_offset(source, inner.start);
    let end = utils::position_to_byte_offset(source, inner.end);
    // Guard against inverted or out-of-bounds offsets before byte indexing.
    if start > end || end > source.len() {
        return None;
    }

    let bytes = source.as_bytes();
    // Scan backwards from the import string for the `import` keyword,
    // giving up once a `;` is crossed (i.e. we left the current statement).
    // NOTE(review): this matches the raw bytes `import`, so it would also
    // match the tail of a longer identifier — presumably acceptable because
    // the scan starts inside an import directive's string; confirm.
    let mut import_start = None;
    let mut i = start;
    while i > 0 {
        if i >= 6 && &bytes[i - 6..i] == b"import" {
            import_start = Some(i - 6);
            break;
        }
        if bytes[i - 1] == b';' {
            break;
        }
        i -= 1;
    }
    let import_start = import_start?;

    // Scan forwards for the terminating semicolon.
    let mut semi = end;
    while semi < bytes.len() && bytes[semi] != b';' {
        semi += 1;
    }
    if semi >= bytes.len() || bytes[semi] != b';' {
        return None;
    }

    // Swallow one trailing newline (CRLF or LF) so deleting the statement
    // doesn't leave a blank line behind.
    let mut import_end = semi + 1;
    if import_end + 1 < bytes.len() && bytes[import_end] == b'\r' && bytes[import_end + 1] == b'\n'
    {
        import_end += 2;
    } else if import_end < bytes.len() && bytes[import_end] == b'\n' {
        import_end += 1;
    }

    Some(Range {
        start: utils::byte_offset_to_position(source, import_start),
        end: utils::byte_offset_to_position(source, import_end),
    })
}
641
/// Apply a set of `TextEdit`s to a source string and return the new content.
///
/// Edits are sorted in reverse document order so that earlier byte offsets
/// remain valid as we splice in replacements from the end.
///
/// Positions are interpreted according to the negotiated encoding
/// (UTF-8 or UTF-16), matching the positions produced by `ts_find_imports`.
pub fn apply_text_edits(source: &str, edits: &[TextEdit]) -> String {
    // Convert to byte ranges on the original source.
    // Inverted ranges (start > end) are silently dropped.
    let mut resolved: Vec<(usize, usize, &str)> = edits
        .iter()
        .filter_map(|e| {
            let start = utils::position_to_byte_offset(source, e.range.start);
            let end = utils::position_to_byte_offset(source, e.range.end);
            if start > end {
                None
            } else {
                Some((start, end, e.new_text.as_str()))
            }
        })
        .collect();

    // Keep non-overlapping edits in forward order. For overlapping spans,
    // prefer the earliest-starting edit (wider edit when starts are equal).
    // Sort key: ascending start, then DESCENDING end, so the widest edit at
    // a given start comes first and subsequent overlaps are dropped.
    resolved.sort_by(|a, b| a.0.cmp(&b.0).then(b.1.cmp(&a.1)));
    let mut filtered: Vec<(usize, usize, &str)> = Vec::with_capacity(resolved.len());
    for (start, end, new_text) in resolved {
        // An edit starting inside the previously kept span overlaps it; skip.
        if let Some((_, last_end, _)) = filtered.last()
            && start < *last_end
        {
            continue;
        }
        filtered.push((start, end, new_text));
    }

    // Apply from back to front so earlier offsets remain valid.
    let mut result = source.to_string();
    for (start, end, new_text) in filtered.into_iter().rev() {
        result.replace_range(start..end, new_text);
    }
    result
}
684
685// ---------------------------------------------------------------------------
686// File scaffold generation
687// ---------------------------------------------------------------------------
688
689/// Generate scaffold content for a new `.sol` file.
690///
691/// Returns SPDX license identifier, pragma, and a stub contract/library/interface
692/// named after the file. The `solc_version` from `foundry.toml` is used for
693/// the pragma if available, otherwise defaults to `^0.8.0`.
694///
695/// `uri` is the file:// URI of the new file (used to derive the contract name).
696pub fn generate_scaffold(uri: &Url, solc_version: Option<&str>) -> Option<String> {
697    let path = uri.to_file_path().ok()?;
698    let stem = path.file_stem()?.to_str()?;
699
700    // Only scaffold .sol files.
701    let ext = path.extension()?;
702    if ext != "sol" {
703        return None;
704    }
705
706    let base_name = sanitize_identifier(stem);
707    if base_name.is_empty() {
708        return None;
709    }
710
711    // Derive pragma from solc_version.
712    // "0.8.26" → "^0.8.26", already-prefixed values pass through.
713    let pragma = match solc_version {
714        Some(v) if !v.is_empty() => {
715            let v = v.trim();
716            if v.starts_with('^')
717                || v.starts_with('>')
718                || v.starts_with('<')
719                || v.starts_with('=')
720                || v.starts_with('~')
721            {
722                v.to_string()
723            } else {
724                format!("^{v}")
725            }
726        }
727        _ => "^0.8.0".to_string(),
728    };
729
730    // Detect file kind from naming conventions.
731    let is_test = stem.ends_with(".t");
732    let is_script = stem.ends_with(".s");
733
734    let kind = if is_test || is_script {
735        // Foundry test/script files must always be contracts because they
736        // inherit from Test/Script.
737        "contract"
738    } else if stem.starts_with('I')
739        && stem.len() > 1
740        && stem.chars().nth(1).map_or(false, |c| c.is_uppercase())
741    {
742        "interface"
743    } else if stem.starts_with("Lib") || stem.starts_with("lib") {
744        "library"
745    } else {
746        "contract"
747    };
748
749    let contract_name = if is_test {
750        format!("{base_name}Test")
751    } else if is_script {
752        format!("{base_name}Script")
753    } else {
754        base_name
755    };
756
757    if is_test {
758        Some(format!(
759            "// SPDX-License-Identifier: MIT\n\
760             pragma solidity {pragma};\n\
761             \n\
762             import {{Test}} from \"forge-std/Test.sol\";\n\
763             \n\
764             {kind} {contract_name} is Test {{\n\
765             \n\
766             }}\n"
767        ))
768    } else if is_script {
769        Some(format!(
770            "// SPDX-License-Identifier: MIT\n\
771             pragma solidity {pragma};\n\
772             \n\
773             import {{Script}} from \"forge-std/Script.sol\";\n\
774             \n\
775             {kind} {contract_name} is Script {{\n\
776             \n\
777             }}\n"
778        ))
779    } else {
780        Some(format!(
781            "// SPDX-License-Identifier: MIT\n\
782             pragma solidity {pragma};\n\
783             \n\
784             {kind} {contract_name} {{\n\
785             \n\
786             }}\n"
787        ))
788    }
789}
790
791/// Convert a filename stem to a valid Solidity identifier.
792///
793/// Strips `.t` and `.s` suffixes (Foundry test/script convention), removes
794/// non-alphanumeric/underscore characters, and ensures the result doesn't
795/// start with a digit.
796fn sanitize_identifier(stem: &str) -> String {
797    // Strip common Foundry suffixes: "Foo.t" → "Foo", "Bar.s" → "Bar"
798    let stem = stem
799        .strip_suffix(".t")
800        .or_else(|| stem.strip_suffix(".s"))
801        .unwrap_or(stem);
802
803    let mut result = String::with_capacity(stem.len());
804    for ch in stem.chars() {
805        if ch.is_ascii_alphanumeric() || ch == '_' {
806            result.push(ch);
807        }
808    }
809    // Identifiers can't start with a digit.
810    if result.starts_with(|c: char| c.is_ascii_digit()) {
811        result.insert(0, '_');
812    }
813    // Avoid Solidity keywords as identifiers.
814    if !result.is_empty() && !utils::is_valid_solidity_identifier(&result) {
815        result.insert(0, '_');
816    }
817    result
818}
819
/// Normalize a path by resolving `.` and `..` components without requiring
/// the file to exist on disk (unlike `std::fs::canonicalize`).
///
/// `..` pops a preceding `Normal` component, never `RootDir` or `Prefix`
/// (so `/..` stays at the root). Leading `..` components of a relative path
/// are preserved (`../x` stays `../x`) instead of being silently dropped,
/// which previously corrupted relative resolutions.
///
/// This is purely lexical: it does not follow symlinks, so `a/../b` may
/// differ from the filesystem's view when `a` is a symlink.
pub fn normalize_path(path: &Path) -> PathBuf {
    use std::path::Component;

    let mut components: Vec<Component> = Vec::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => match components.last() {
                // `a/..` cancels out.
                Some(Component::Normal(_)) => {
                    components.pop();
                }
                // `/..` can't go above the root; drop the `..`.
                Some(Component::RootDir) | Some(Component::Prefix(_)) => {}
                // Relative path with nothing left to pop: keep the `..`.
                _ => components.push(component),
            },
            other => components.push(other),
        }
    }
    components.iter().collect()
}