//! `file_operations.rs` — import-path maintenance for Solidity file
//! operations (rename / delete previews) and scaffold generation for
//! newly created `.sol` files.
use crate::links;
use crate::utils;
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use tower_lsp::lsp_types::{Position, Range, TextEdit, Url};
6
7// ---------------------------------------------------------------------------
8// Types
9// ---------------------------------------------------------------------------
10
/// A single file rename: old absolute path → new absolute path.
#[derive(Debug, Clone)]
pub struct FileRename {
    /// Absolute path of the file before the rename/move.
    pub old_path: PathBuf,
    /// Absolute path of the file after the rename/move.
    pub new_path: PathBuf,
}
17
/// Diagnostic counters returned alongside edits so the caller can log them.
///
/// Within this module the counters are write-only: they never influence
/// which edits are produced.
#[derive(Debug, Default)]
pub struct RenameStats {
    /// Source files whose bytes could not be read.
    pub read_failures: usize,
    /// Source files with no parseable parent directory.
    pub no_parent: usize,
    /// Imports skipped because pathdiff returned None.
    pub pathdiff_failures: usize,
    /// Imports whose new path equals the old path (no-op, correctly skipped).
    pub no_op_skips: usize,
    /// Edits suppressed because Case 1 already covered the range.
    pub dedup_skips: usize,
    /// Duplicate old_path entries detected in the rename list (last wins).
    pub duplicate_renames: usize,
}
34
35/// Result of a rename_imports call: edits + diagnostic stats.
36pub struct RenameResult {
37    pub edits: HashMap<Url, Vec<TextEdit>>,
38    pub stats: RenameStats,
39}
40
/// Diagnostic counters returned alongside delete edits.
///
/// Within this module the counters are write-only: they never influence
/// which edits are produced.
#[derive(Debug, Default)]
pub struct DeleteStats {
    /// Source files whose bytes could not be read.
    pub read_failures: usize,
    /// Source files with no parseable parent directory.
    pub no_parent: usize,
    /// Imports where we could not determine a full import-statement span.
    pub statement_range_failures: usize,
    /// Duplicate delete targets detected in the delete list.
    pub duplicate_deletes: usize,
    /// Duplicate edits skipped for the same statement range.
    pub dedup_skips: usize,
}
55
56/// Result of delete_imports call: edits + diagnostic stats.
57pub struct DeleteResult {
58    pub edits: HashMap<Url, Vec<TextEdit>>,
59    pub stats: DeleteStats,
60}
61
62// ---------------------------------------------------------------------------
63// Folder expansion
64// ---------------------------------------------------------------------------
65
66/// Expand rename entries that target folders into per-file renames.
67///
68/// For each entry, if `old_path` is a directory (or has no `.sol` extension),
69/// every source file under it is expanded to a concrete file rename.
70/// Uses `Path::strip_prefix` for component-aware matching (won't match
71/// `/src2` given `/src`).
72pub fn expand_folder_renames(
73    params: &[(PathBuf, PathBuf)],
74    source_files: &[String],
75) -> Vec<FileRename> {
76    // Deduplicate by old path; last entry wins.
77    let mut dedup: HashMap<PathBuf, PathBuf> = HashMap::new();
78    for (old_path, new_path) in params {
79        if old_path.is_dir() || !old_path.extension().map_or(false, |e| e == "sol") {
80            for sf in source_files {
81                let sf_path = Path::new(sf);
82                if let Ok(suffix) = sf_path.strip_prefix(old_path) {
83                    dedup.insert(sf_path.to_path_buf(), new_path.join(suffix));
84                }
85            }
86        } else {
87            dedup.insert(old_path.clone(), new_path.clone());
88        }
89    }
90    dedup
91        .into_iter()
92        .map(|(old_path, new_path)| FileRename { old_path, new_path })
93        .collect()
94}
95
/// Expand delete entries that target folders into per-file paths.
///
/// For each entry, if it is a directory (or has no `.sol` extension),
/// every source file under it is expanded to a concrete file path.
/// Matching is component-aware via `Path::strip_prefix`, so `/src`
/// never matches `/src2`. Results are deduplicated.
pub fn expand_folder_deletes(params: &[PathBuf], source_files: &[String]) -> Vec<PathBuf> {
    // A HashSet (rather than a HashMap<_, ()>) states the dedup intent directly.
    let mut dedup: HashSet<PathBuf> = HashSet::new();
    for old_path in params {
        if old_path.is_dir() || !old_path.extension().map_or(false, |e| e == "sol") {
            // Folder (or extension-less) entry: fan out to every file below it.
            for sf in source_files {
                let sf_path = Path::new(sf);
                if sf_path.strip_prefix(old_path).is_ok() {
                    dedup.insert(sf_path.to_path_buf());
                }
            }
        } else {
            dedup.insert(old_path.clone());
        }
    }
    dedup.into_iter().collect()
}
116
117/// Expand folder renames using candidate filesystem paths.
118///
119/// `candidate_paths` should be the union of discovered project files and files
120/// currently present in `text_cache` so folder renames don't miss entries.
121pub fn expand_folder_renames_from_paths(
122    params: &[(Url, Url)],
123    candidate_paths: &[PathBuf],
124) -> Vec<(String, String)> {
125    // Deduplicate by old URI; last entry wins.
126    let mut dedup: HashMap<String, String> = HashMap::new();
127    for (old_uri, new_uri) in params {
128        let old_path = match old_uri.to_file_path() {
129            Ok(p) => p,
130            Err(_) => continue,
131        };
132        let new_path = match new_uri.to_file_path() {
133            Ok(p) => p,
134            Err(_) => continue,
135        };
136
137        if old_path.extension().map_or(false, |e| e == "sol") && !old_path.is_dir() {
138            dedup.insert(old_uri.to_string(), new_uri.to_string());
139        } else {
140            for existing_path in candidate_paths {
141                if let Ok(suffix) = existing_path.strip_prefix(&old_path) {
142                    let new_file_path = new_path.join(suffix);
143                    let Ok(existing_uri) = Url::from_file_path(existing_path) else {
144                        continue;
145                    };
146                    let Ok(new_file_uri) = Url::from_file_path(&new_file_path) else {
147                        continue;
148                    };
149                    dedup.insert(existing_uri.to_string(), new_file_uri.to_string());
150                }
151            }
152        }
153    }
154    dedup.into_iter().collect()
155}
156
157/// Expand delete entries using candidate filesystem paths.
158///
159/// `candidate_paths` should be the union of discovered project files and files
160/// currently present in `text_cache` so folder deletes don't miss entries.
161pub fn expand_folder_deletes_from_paths(
162    params: &[Url],
163    candidate_paths: &[PathBuf],
164) -> Vec<PathBuf> {
165    let mut dedup: HashMap<PathBuf, ()> = HashMap::new();
166    for uri in params {
167        let old_path = match uri.to_file_path() {
168            Ok(p) => p,
169            Err(_) => continue,
170        };
171
172        if old_path.extension().map_or(false, |e| e == "sol") && !old_path.is_dir() {
173            dedup.insert(old_path, ());
174        } else {
175            for existing_path in candidate_paths {
176                if existing_path.strip_prefix(&old_path).is_ok() {
177                    dedup.insert(existing_path.clone(), ());
178                }
179            }
180        }
181    }
182    dedup.into_keys().collect()
183}
184
185// ---------------------------------------------------------------------------
186// Core rename logic
187// ---------------------------------------------------------------------------
188
/// Compute import-path edits needed when one or more files are renamed/moved.
///
/// Handles batch renames correctly: when both an importer and its import target
/// are being moved in the same request (e.g. folder rename), the relative path
/// between them is computed using their **new** locations, so imports that are
/// still valid after the move are left unchanged.
///
/// Uses tree-sitter to find import strings in each source file, making this
/// robust against stale or unavailable solc AST data.
///
/// Handles two cases per renamed file:
/// 1. **Other files import the renamed file** — their import path string must
///    change to reflect the new location of the target.
/// 2. **The renamed file's own relative imports** — if the file moved to a
///    different directory, its relative imports to other files need updating.
pub fn rename_imports(
    source_files: &[String],
    renames: &[FileRename],
    project_root: &Path,
    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
) -> RenameResult {
    let mut edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
    let mut stats = RenameStats::default();

    if renames.is_empty() {
        return RenameResult { edits, stats };
    }

    // Build lookup: old_path → new_path.
    // Detect duplicates (same old_path appears more than once; last wins).
    let mut rename_map: HashMap<PathBuf, PathBuf> = HashMap::with_capacity(renames.len());
    for r in renames {
        if rename_map
            .insert(r.old_path.clone(), r.new_path.clone())
            .is_some()
        {
            stats.duplicate_renames += 1;
        }
    }

    // ── Case 1: files that import a renamed file ───────────────────────
    for source_fs_str in source_files {
        let source_path = resolve_source_path(source_fs_str, project_root);

        // If this source file is itself being renamed, use its NEW directory
        // for computing replacement import paths.
        let source_new_path = rename_map.get(&source_path);

        let effective_source_dir = match source_new_path {
            Some(new_p) => match new_p.parent() {
                Some(d) => d.to_path_buf(),
                None => {
                    stats.no_parent += 1;
                    continue;
                }
            },
            None => match source_path.parent() {
                Some(d) => d.to_path_buf(),
                None => {
                    stats.no_parent += 1;
                    continue;
                }
            },
        };

        // Existing import strings are resolved against the file's CURRENT
        // directory; only replacement paths use the effective directory above.
        let current_source_dir = match source_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };

        // Try the resolved absolute path first, then the raw indexer string.
        let bytes = match source_path
            .to_str()
            .and_then(get_source_bytes)
            .or_else(|| get_source_bytes(source_fs_str))
        {
            Some(b) => b,
            None => {
                stats.read_failures += 1;
                continue;
            }
        };

        let imports = links::ts_find_imports(&bytes);

        for imp in &imports {
            let resolved = normalize_path(&current_source_dir.join(&imp.path));

            // Relative imports match by resolved path only; non-relative
            // imports may also resolve from the project root.
            let resolved_target = if rename_map.contains_key(&resolved) {
                Some(resolved)
            } else if !imp.path.starts_with('.') {
                let via_root = normalize_path(&project_root.join(&imp.path));
                if rename_map.contains_key(&via_root) {
                    Some(via_root)
                } else {
                    None
                }
            } else {
                None
            };

            let old_target = match resolved_target {
                Some(t) => t,
                None => continue,
            };

            let new_target = &rename_map[&old_target];

            // Preserve the import style: relative stays relative (diffed from
            // the importer's effective dir), root-based stays root-based.
            let new_import_path = if imp.path.starts_with('.') {
                match pathdiff::diff_paths(new_target, &effective_source_dir) {
                    Some(p) => ensure_dot_prefix(&p),
                    None => {
                        stats.pathdiff_failures += 1;
                        continue;
                    }
                }
            } else {
                match pathdiff::diff_paths(new_target, project_root) {
                    Some(p) => normalize_slashes(&p.to_string_lossy()),
                    None => {
                        stats.pathdiff_failures += 1;
                        continue;
                    }
                }
            };

            // The import is still correct after the move — leave it alone.
            if new_import_path == imp.path {
                stats.no_op_skips += 1;
                continue;
            }

            let source_uri = match Url::from_file_path(&source_path) {
                Ok(u) => u,
                Err(_) => continue,
            };

            edits.entry(source_uri).or_default().push(TextEdit {
                range: range_with_quotes(imp.inner_range),
                new_text: format!("\"{}\"", new_import_path),
            });
        }
    }

    // ── Case 2: renamed files' own relative imports ─────────────────────
    for rename in renames {
        let old_dir = match rename.old_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };
        let new_dir = match rename.new_path.parent() {
            Some(d) => d,
            None => {
                stats.no_parent += 1;
                continue;
            }
        };

        // Same directory → the file's relative imports are unaffected.
        if old_dir == new_dir {
            continue;
        }

        let old_fs_str = match rename.old_path.to_str() {
            Some(s) => s,
            None => continue,
        };

        let bytes = match get_source_bytes(old_fs_str) {
            Some(b) => b,
            None => {
                stats.read_failures += 1;
                continue;
            }
        };

        let imports = links::ts_find_imports(&bytes);

        // Edits are addressed to the OLD URI: the rename has not been applied
        // yet when this preview edit is computed.
        let old_uri = match Url::from_file_path(&rename.old_path) {
            Ok(u) => u,
            Err(_) => continue,
        };

        for imp in &imports {
            // Only relative imports break when the importer itself moves.
            if !imp.path.starts_with('.') {
                continue;
            }

            let target_fs = normalize_path(&old_dir.join(&imp.path));
            // If the target is also being renamed, aim at its new location.
            let effective_target = rename_map.get(&target_fs).unwrap_or(&target_fs);

            let new_rel = match pathdiff::diff_paths(effective_target, new_dir) {
                Some(p) => p,
                None => {
                    stats.pathdiff_failures += 1;
                    continue;
                }
            };

            let new_import_str = ensure_dot_prefix(&new_rel);

            if new_import_str == imp.path {
                stats.no_op_skips += 1;
                continue;
            }

            // Case 1 may already have produced an edit for this exact range
            // (the file is both an importer and a rename target).
            let already_edited = edits.get(&old_uri).map_or(false, |file_edits| {
                let qr = range_with_quotes(imp.inner_range);
                file_edits.iter().any(|e| e.range == qr)
            });
            if already_edited {
                stats.dedup_skips += 1;
                continue;
            }

            edits.entry(old_uri.clone()).or_default().push(TextEdit {
                range: range_with_quotes(imp.inner_range),
                new_text: format!("\"{}\"", new_import_str),
            });
        }
    }

    RenameResult { edits, stats }
}
416
417/// Compute import-statement removal edits needed when one or more files are
418/// deleted.
419///
420/// For each Solidity source file, this scans all import directives and removes
421/// the full import statement (`import ...;`) if it resolves to a deleted file.
422///
423/// This is intended for `workspace/willDeleteFiles` preview edits.
424pub fn delete_imports(
425    source_files: &[String],
426    deletes: &[PathBuf],
427    project_root: &Path,
428    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
429) -> DeleteResult {
430    let mut edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
431    let mut stats = DeleteStats::default();
432
433    if deletes.is_empty() {
434        return DeleteResult { edits, stats };
435    }
436
437    let mut delete_set: HashMap<PathBuf, ()> = HashMap::with_capacity(deletes.len());
438    for p in deletes {
439        if delete_set.insert(normalize_path(p), ()).is_some() {
440            stats.duplicate_deletes += 1;
441        }
442    }
443
444    for source_fs_str in source_files {
445        let source_path = resolve_source_path(source_fs_str, project_root);
446        let source_dir = match source_path.parent() {
447            Some(d) => d,
448            None => {
449                stats.no_parent += 1;
450                continue;
451            }
452        };
453
454        let bytes = match source_path
455            .to_str()
456            .and_then(get_source_bytes)
457            .or_else(|| get_source_bytes(source_fs_str))
458        {
459            Some(b) => b,
460            None => {
461                stats.read_failures += 1;
462                continue;
463            }
464        };
465
466        let source_str = match std::str::from_utf8(&bytes) {
467            Ok(s) => s,
468            Err(_) => {
469                stats.read_failures += 1;
470                continue;
471            }
472        };
473
474        let imports = links::ts_find_imports(&bytes);
475        let source_uri = match Url::from_file_path(&source_path) {
476            Ok(u) => u,
477            Err(_) => continue,
478        };
479
480        for imp in &imports {
481            let resolved = normalize_path(&source_dir.join(&imp.path));
482
483            let is_deleted = if delete_set.contains_key(&resolved) {
484                true
485            } else if !imp.path.starts_with('.') {
486                let via_root = normalize_path(&project_root.join(&imp.path));
487                delete_set.contains_key(&via_root)
488            } else {
489                false
490            };
491
492            if !is_deleted {
493                continue;
494            }
495
496            let Some(statement_range) = import_statement_range(source_str, imp.inner_range) else {
497                stats.statement_range_failures += 1;
498                continue;
499            };
500
501            let duplicate = edits.get(&source_uri).map_or(false, |file_edits| {
502                file_edits.iter().any(|e| e.range == statement_range)
503            });
504            if duplicate {
505                stats.dedup_skips += 1;
506                continue;
507            }
508
509            edits.entry(source_uri.clone()).or_default().push(TextEdit {
510                range: statement_range,
511                new_text: String::new(),
512            });
513        }
514    }
515
516    DeleteResult { edits, stats }
517}
518
519/// Backward-compatible wrapper: single rename, used by existing tests.
520pub fn rename_imports_single(
521    source_files: &[String],
522    old_uri: &Url,
523    new_uri: &Url,
524    project_root: &Path,
525    get_source_bytes: &dyn Fn(&str) -> Option<Vec<u8>>,
526) -> HashMap<Url, Vec<TextEdit>> {
527    let old_path = match old_uri.to_file_path() {
528        Ok(p) => p,
529        Err(_) => return HashMap::new(),
530    };
531    let new_path = match new_uri.to_file_path() {
532        Ok(p) => p,
533        Err(_) => return HashMap::new(),
534    };
535    rename_imports(
536        source_files,
537        &[FileRename { old_path, new_path }],
538        project_root,
539        get_source_bytes,
540    )
541    .edits
542}
543
544// ---------------------------------------------------------------------------
545// Cache patching
546// ---------------------------------------------------------------------------
547
548/// Apply computed edits to in-memory file content.
549///
550/// Returns the number of files patched (for logging).
551pub fn apply_edits_to_cache(
552    edits: &HashMap<Url, Vec<TextEdit>>,
553    cache: &mut HashMap<crate::types::DocumentUri, (i32, String)>,
554) -> usize {
555    let mut patched = 0;
556    for (uri, text_edits) in edits {
557        let uri_str = uri.to_string();
558        if let Some((version, content)) = cache.get(uri_str.as_str()).cloned() {
559            let new_content = apply_text_edits(&content, text_edits);
560            cache.insert(uri_str.into(), (version, new_content));
561            patched += 1;
562        }
563    }
564    patched
565}
566
567// ---------------------------------------------------------------------------
568// Internal helpers
569// ---------------------------------------------------------------------------
570
571/// Expand a range to include the surrounding quote characters.
572fn range_with_quotes(inner: Range) -> Range {
573    Range {
574        start: Position {
575            line: inner.start.line,
576            character: inner.start.character.saturating_sub(1),
577        },
578        end: Position {
579            line: inner.end.line,
580            character: inner.end.character + 1,
581        },
582    }
583}
584
/// Ensure a relative path starts with `./` or `../` for Solidity import convention.
/// Always uses forward slashes regardless of platform.
fn ensure_dot_prefix(rel: &Path) -> String {
    // Solidity import strings always use forward slashes.
    let slashed = rel.to_string_lossy().replace('\\', "/");
    if slashed.starts_with('.') {
        // Already `./…` or `../…` (".." also starts with '.').
        slashed
    } else {
        format!("./{slashed}")
    }
}
595
596/// Resolve a source-file string to an absolute filesystem path.
597///
598/// Handles mixed forms produced by different indexers:
599/// - absolute (`/repo/example/A.sol`)
600/// - project-root relative (`A.sol`)
601/// - workspace-root relative (`example/A.sol`)
602fn resolve_source_path(source_fs_str: &str, project_root: &Path) -> PathBuf {
603    let raw = Path::new(source_fs_str);
604    if raw.is_absolute() {
605        return normalize_path(raw);
606    }
607
608    let joined_project = normalize_path(&project_root.join(raw));
609    if joined_project.exists() {
610        return joined_project;
611    }
612
613    if let Ok(cwd) = std::env::current_dir() {
614        let joined_cwd = normalize_path(&cwd.join(raw));
615        if joined_cwd.exists() {
616            return joined_cwd;
617        }
618    }
619
620    joined_project
621}
622
/// Replace backslashes with forward slashes for Solidity import paths.
/// Solidity uses forward slashes in import strings regardless of platform.
fn normalize_slashes(s: &str) -> String {
    s.chars().map(|c| if c == '\\' { '/' } else { c }).collect()
}
628
/// Determine a range that covers the full import statement containing `inner`.
///
/// The returned range starts at the `import` keyword and ends at the
/// terminating `;`, plus one trailing newline when present.
fn import_statement_range(source: &str, inner: Range) -> Option<Range> {
    let start = utils::position_to_byte_offset(source, inner.start);
    let end = utils::position_to_byte_offset(source, inner.end);
    // Reject malformed or out-of-bounds spans up front.
    if start > end || end > source.len() {
        return None;
    }

    let bytes = source.as_bytes();

    // Scan backwards from the import string for the `import` keyword,
    // aborting if we cross a `;` first (i.e. we left the statement).
    let mut import_start = None;
    let mut i = start;
    while i > 0 {
        if i >= 6 && &bytes[i - 6..i] == b"import" {
            import_start = Some(i - 6);
            break;
        }
        if bytes[i - 1] == b';' {
            break;
        }
        i -= 1;
    }
    let import_start = import_start?;

    // Scan forwards for the terminating semicolon.
    let mut semi = end;
    while semi < bytes.len() && bytes[semi] != b';' {
        semi += 1;
    }
    if semi >= bytes.len() || bytes[semi] != b';' {
        return None;
    }

    // Include one trailing newline (CRLF or LF) so removing the statement
    // does not leave a blank line behind.
    let mut import_end = semi + 1;
    if import_end + 1 < bytes.len() && bytes[import_end] == b'\r' && bytes[import_end + 1] == b'\n'
    {
        import_end += 2;
    } else if import_end < bytes.len() && bytes[import_end] == b'\n' {
        import_end += 1;
    }

    Some(Range {
        start: utils::byte_offset_to_position(source, import_start),
        end: utils::byte_offset_to_position(source, import_end),
    })
}
676
/// Apply a set of `TextEdit`s to a source string and return the new content.
///
/// Edits are sorted in reverse document order so that earlier byte offsets
/// remain valid as we splice in replacements from the end.
///
/// Positions are interpreted according to the negotiated encoding
/// (UTF-8 or UTF-16), matching the positions produced by `ts_find_imports`.
pub fn apply_text_edits(source: &str, edits: &[TextEdit]) -> String {
    // Convert to byte ranges on the original source. Edits whose start lies
    // after their end are malformed and silently dropped.
    let mut resolved: Vec<(usize, usize, &str)> = edits
        .iter()
        .filter_map(|e| {
            let start = utils::position_to_byte_offset(source, e.range.start);
            let end = utils::position_to_byte_offset(source, e.range.end);
            if start > end {
                None
            } else {
                Some((start, end, e.new_text.as_str()))
            }
        })
        .collect();

    // Keep non-overlapping edits in forward order. For overlapping spans,
    // prefer the earliest-starting edit (wider edit when starts are equal).
    resolved.sort_by(|a, b| a.0.cmp(&b.0).then(b.1.cmp(&a.1)));
    let mut filtered: Vec<(usize, usize, &str)> = Vec::with_capacity(resolved.len());
    for (start, end, new_text) in resolved {
        // Overlaps the previously kept edit — drop this one.
        if let Some((_, last_end, _)) = filtered.last()
            && start < *last_end
        {
            continue;
        }
        filtered.push((start, end, new_text));
    }

    // Apply from back to front so earlier offsets remain valid.
    let mut result = source.to_string();
    for (start, end, new_text) in filtered.into_iter().rev() {
        result.replace_range(start..end, new_text);
    }
    result
}
719
720// ---------------------------------------------------------------------------
721// File scaffold generation
722// ---------------------------------------------------------------------------
723
724/// Generate scaffold content for a new `.sol` file.
725///
726/// Returns SPDX license identifier, pragma, and a stub contract/library/interface
727/// named after the file. The `solc_version` from `foundry.toml` is used for
728/// the pragma if available, otherwise defaults to `^0.8.0`.
729///
730/// `uri` is the file:// URI of the new file (used to derive the contract name).
731pub fn generate_scaffold(uri: &Url, solc_version: Option<&str>) -> Option<String> {
732    let path = uri.to_file_path().ok()?;
733    let stem = path.file_stem()?.to_str()?;
734
735    // Only scaffold .sol files.
736    let ext = path.extension()?;
737    if ext != "sol" {
738        return None;
739    }
740
741    let base_name = sanitize_identifier(stem);
742    if base_name.is_empty() {
743        return None;
744    }
745
746    // Derive pragma from solc_version.
747    // "0.8.26" → "^0.8.26", already-prefixed values pass through.
748    let pragma = match solc_version {
749        Some(v) if !v.is_empty() => {
750            let v = v.trim();
751            if v.starts_with('^')
752                || v.starts_with('>')
753                || v.starts_with('<')
754                || v.starts_with('=')
755                || v.starts_with('~')
756            {
757                v.to_string()
758            } else {
759                format!("^{v}")
760            }
761        }
762        _ => "^0.8.0".to_string(),
763    };
764
765    // Detect file kind from naming conventions.
766    let is_test = stem.ends_with(".t");
767    let is_script = stem.ends_with(".s");
768
769    let kind = if is_test || is_script {
770        // Foundry test/script files must always be contracts because they
771        // inherit from Test/Script.
772        "contract"
773    } else if stem.starts_with('I')
774        && stem.len() > 1
775        && stem.chars().nth(1).map_or(false, |c| c.is_uppercase())
776    {
777        "interface"
778    } else if stem.starts_with("Lib") || stem.starts_with("lib") {
779        "library"
780    } else {
781        "contract"
782    };
783
784    let contract_name = if is_test {
785        format!("{base_name}Test")
786    } else if is_script {
787        format!("{base_name}Script")
788    } else {
789        base_name
790    };
791
792    if is_test {
793        Some(format!(
794            "// SPDX-License-Identifier: MIT\n\
795             pragma solidity {pragma};\n\
796             \n\
797             import {{Test}} from \"forge-std/Test.sol\";\n\
798             \n\
799             {kind} {contract_name} is Test {{\n\
800             \n\
801             }}\n"
802        ))
803    } else if is_script {
804        Some(format!(
805            "// SPDX-License-Identifier: MIT\n\
806             pragma solidity {pragma};\n\
807             \n\
808             import {{Script}} from \"forge-std/Script.sol\";\n\
809             \n\
810             {kind} {contract_name} is Script {{\n\
811             \n\
812             }}\n"
813        ))
814    } else {
815        Some(format!(
816            "// SPDX-License-Identifier: MIT\n\
817             pragma solidity {pragma};\n\
818             \n\
819             {kind} {contract_name} {{\n\
820             \n\
821             }}\n"
822        ))
823    }
824}
825
826/// Convert a filename stem to a valid Solidity identifier.
827///
828/// Strips `.t` and `.s` suffixes (Foundry test/script convention), removes
829/// non-alphanumeric/underscore characters, and ensures the result doesn't
830/// start with a digit.
831fn sanitize_identifier(stem: &str) -> String {
832    // Strip common Foundry suffixes: "Foo.t" → "Foo", "Bar.s" → "Bar"
833    let stem = stem
834        .strip_suffix(".t")
835        .or_else(|| stem.strip_suffix(".s"))
836        .unwrap_or(stem);
837
838    let mut result = String::with_capacity(stem.len());
839    for ch in stem.chars() {
840        if ch.is_ascii_alphanumeric() || ch == '_' {
841            result.push(ch);
842        }
843    }
844    // Identifiers can't start with a digit.
845    if result.starts_with(|c: char| c.is_ascii_digit()) {
846        result.insert(0, '_');
847    }
848    // Avoid Solidity keywords as identifiers.
849    if !result.is_empty() && !utils::is_valid_solidity_identifier(&result) {
850        result.insert(0, '_');
851    }
852    result
853}
854
/// Normalize a path by resolving `.` and `..` components without requiring
/// the file to exist on disk (unlike `std::fs::canonicalize`).
///
/// Guards against excessive `..` that would pop past the root by only
/// popping `Normal` components (never `RootDir` or `Prefix`).
pub fn normalize_path(path: &Path) -> PathBuf {
    use std::path::Component;

    let mut stack: Vec<Component> = Vec::new();
    for comp in path.components() {
        match comp {
            // `.` contributes nothing.
            Component::CurDir => {}
            // `..` cancels the previous normal component, if any; a `..`
            // with nothing to cancel is dropped (never pops RootDir/Prefix).
            Component::ParentDir => {
                if matches!(stack.last(), Some(Component::Normal(_))) {
                    stack.pop();
                }
            }
            other => stack.push(other),
        }
    }
    stack.into_iter().collect()
}