use std::path::{Path, PathBuf};
use crate::context::AppContext;
use crate::edit;
use crate::error::AftError;
use crate::imports;
use crate::lsp_hints;
use crate::parser::{detect_language, LangId};
use crate::protocol::{RawRequest, Response};
use crate::symbols::SymbolKind;
/// Handle a `move_symbol` request: relocate one top-level symbol from `file`
/// to `destination`, re-export it there, rewrite imports in consumer files,
/// and roll back via a checkpoint if any write fails.
///
/// Supports `dry_run` (returns diffs without writing). On success returns
/// `files_modified`, `consumers_updated`, the checkpoint name, and per-file
/// results.
pub fn handle_move_symbol(req: &RawRequest, ctx: &AppContext) -> Response {
    // --- Required request parameters ---
    let file = match req.params.get("file").and_then(|v| v.as_str()) {
        Some(f) => f,
        None => {
            return Response::error(
                &req.id,
                "invalid_request",
                "move_symbol: missing required param 'file'",
            );
        }
    };
    let symbol_name = match req.params.get("symbol").and_then(|v| v.as_str()) {
        Some(s) => s,
        None => {
            return Response::error(
                &req.id,
                "invalid_request",
                "move_symbol: missing required param 'symbol'",
            );
        }
    };
    let destination = match req.params.get("destination").and_then(|v| v.as_str()) {
        Some(d) => d,
        None => {
            return Response::error(
                &req.id,
                "invalid_request",
                "move_symbol: missing required param 'destination'",
            );
        }
    };
    // Optional scope filter used to disambiguate same-named symbols.
    let scope = req.params.get("scope").and_then(|v| v.as_str());
    let dry_run = edit::is_dry_run(&req.params);
    // --- Path validation and canonicalization ---
    let source_path_raw = match ctx.validate_path(&req.id, Path::new(file)) {
        Ok(path) => path,
        Err(resp) => return resp,
    };
    let dest_path_raw = match ctx.validate_path(&req.id, Path::new(destination)) {
        Ok(path) => path,
        Err(resp) => return resp,
    };
    if !source_path_raw.exists() {
        return Response::error(
            &req.id,
            "file_not_found",
            format!("source file not found: {}", file),
        );
    }
    // Canonicalize so the source == destination check below is not fooled by
    // symlinks or `..` segments; fall back to the raw path on failure.
    let source_canon =
        std::fs::canonicalize(&source_path_raw).unwrap_or_else(|_| source_path_raw.clone());
    // The destination may not exist yet: canonicalize its parent directory
    // instead and re-attach the file name.
    let dest_canon = if dest_path_raw.exists() {
        std::fs::canonicalize(&dest_path_raw).unwrap_or_else(|_| dest_path_raw.clone())
    } else if let Some(parent) = dest_path_raw.parent() {
        let canon_parent = std::fs::canonicalize(parent).unwrap_or_else(|_| parent.to_path_buf());
        canon_parent.join(dest_path_raw.file_name().unwrap_or_default())
    } else {
        dest_path_raw.clone()
    };
    let source_path: &Path = &source_canon;
    let dest_path: &Path = &dest_canon;
    if source_path == dest_path {
        return Response::error(
            &req.id,
            "invalid_request",
            "move_symbol: source and destination are the same file",
        );
    }
    // --- The call graph must have been configured beforehand ---
    let mut cg_ref = ctx.callgraph().borrow_mut();
    let graph = match cg_ref.as_mut() {
        Some(g) => g,
        None => {
            return Response::error(
                &req.id,
                "not_configured",
                "move_symbol: project not configured — send 'configure' first",
            );
        }
    };
    // --- Resolve the symbol, disambiguating via scope and LSP hints ---
    let matches = match ctx.provider().resolve_symbol(&source_path, symbol_name) {
        Ok(m) => m,
        Err(e) => {
            return Response::error(&req.id, e.code(), e.to_string());
        }
    };
    // The scope filter is only applied when the name is actually ambiguous.
    let filtered = if matches.len() > 1 {
        if let Some(scope_filter) = scope {
            matches
                .into_iter()
                .filter(|m| {
                    m.symbol.scope_chain.iter().any(|s| s == scope_filter)
                        || m.symbol.parent.as_deref() == Some(scope_filter)
                })
                .collect()
        } else {
            matches
        }
    } else {
        matches
    };
    let filtered = if let Some(hints) = lsp_hints::parse_lsp_hints(req) {
        lsp_hints::apply_lsp_disambiguation(filtered, &hints)
    } else {
        filtered
    };
    if filtered.is_empty() {
        return Response::error(
            &req.id,
            "symbol_not_found",
            format!("symbol '{}' not found in {}", symbol_name, file),
        );
    }
    // Still ambiguous: list the candidates. NOTE(review): this is returned as
    // a *success* payload carrying an error-style "code" field — presumably so
    // clients can prompt the user to pick; confirm the protocol convention.
    if filtered.len() > 1 {
        let candidates: Vec<serde_json::Value> = filtered
            .iter()
            .map(|m| {
                let sym = &m.symbol;
                let qualified = if sym.scope_chain.is_empty() {
                    sym.name.clone()
                } else {
                    format!("{}::{}", sym.scope_chain.join("::"), sym.name)
                };
                // Prefer the serde string rendering of the kind; fall back to
                // a lowercased Debug rendering.
                let kind_str = serde_json::to_value(&sym.kind)
                    .ok()
                    .and_then(|v| v.as_str().map(String::from))
                    .unwrap_or_else(|| format!("{:?}", sym.kind).to_lowercase());
                serde_json::json!({
                    "name": sym.name,
                    "qualified": qualified,
                    "line": sym.range.start_line + 1,
                    "kind": kind_str,
                })
            })
            .collect();
        return Response::success(
            &req.id,
            serde_json::json!({
                "code": "ambiguous_symbol",
                "candidates": candidates,
            }),
        );
    }
    let target = &filtered[0].symbol;
    // Only top-level declarations can be moved safely.
    if !target.scope_chain.is_empty() || target.kind == SymbolKind::Method {
        return Response::error(
            &req.id,
            "invalid_request",
            format!(
                "move_symbol: cannot move non-top-level symbol '{}' (kind: {:?}, scope: [{}]). Only top-level declarations can be moved.",
                symbol_name,
                target.kind,
                target.scope_chain.join(", ")
            ),
        );
    }
    // --- Compute the new contents of the source and destination files ---
    let source_content = match std::fs::read_to_string(source_path) {
        Ok(s) => s,
        Err(e) => {
            return Response::error(&req.id, "file_not_found", format!("{}: {}", file, e));
        }
    };
    let raw_start_byte = edit::line_col_to_byte(
        &source_content,
        target.range.start_line,
        target.range.start_col,
    );
    let end_byte =
        edit::line_col_to_byte(&source_content, target.range.end_line, target.range.end_col);
    let lang = detect_language(source_path);
    // For exported JS/TS symbols the declaration range may begin after the
    // `export` keyword; widen the range so the keyword moves too.
    let start_byte = if target.exported
        && matches!(
            lang,
            Some(LangId::TypeScript) | Some(LangId::Tsx) | Some(LangId::JavaScript)
        ) {
        find_export_keyword_start(&source_content, raw_start_byte).unwrap_or(raw_start_byte)
    } else {
        raw_start_byte
    };
    let symbol_text = match source_content.get(start_byte..end_byte) {
        Some(symbol_text) => symbol_text,
        None => {
            return Response::error(
                &req.id,
                "invalid_request",
                format!(
                    "move_symbol: symbol byte range [{}..{}) is not on UTF-8 boundaries",
                    start_byte, end_byte
                ),
            );
        }
    };
    // The moved symbol is exported from its new module so consumers can
    // import it from there.
    let dest_symbol_text = prepare_exported_symbol(symbol_text);
    let new_source = match remove_symbol_from_source(&source_content, start_byte, end_byte) {
        Ok(s) => s,
        Err(e) => return Response::error(&req.id, e.code(), e.to_string()),
    };
    let dest_content = if dest_path.exists() {
        std::fs::read_to_string(dest_path).unwrap_or_default()
    } else {
        String::new()
    };
    let new_dest = append_symbol_to_dest(&dest_content, &dest_symbol_text);
    // --- Find consumer files whose imports need rewriting ---
    if let Err(e) = graph.build_file(source_path) {
        return Response::error(&req.id, e.code(), e.to_string());
    }
    // Direct callers only (depth 1). ProjectTooLarge is fatal; other errors
    // degrade to "no consumers" rather than failing the whole move.
    let consumers = match graph.callers_of(
        source_path,
        symbol_name,
        1,
        ctx.config().max_callgraph_files,
    ) {
        Ok(result) => result.callers,
        Err(err @ AftError::ProjectTooLarge { .. }) => {
            return Response::error(&req.id, "project_too_large", format!("{}", err));
        }
        Err(_) => Vec::new(),
    };
    let project_root = graph.project_root().to_path_buf();
    let consumer_files: Vec<PathBuf> = consumers
        .iter()
        .map(|cg| {
            let p = PathBuf::from(&cg.file);
            if p.is_absolute() {
                p
            } else {
                project_root.join(&p)
            }
        })
        // The source and destination files themselves are handled separately.
        .filter(|p| p != source_path && p != dest_path)
        .collect();
    // (path, original content, rewritten content) per consumer that changes.
    let mut consumer_rewrites: Vec<(PathBuf, String, String)> = Vec::new();
    for consumer_file in &consumer_files {
        if !consumer_file.exists() {
            continue;
        }
        let consumer_content = match std::fs::read_to_string(consumer_file) {
            Ok(s) => s,
            Err(_) => continue,
        };
        let new_consumer = rewrite_consumer_imports(
            &consumer_content,
            consumer_file,
            source_path,
            dest_path,
            symbol_name,
            lang,
        );
        if let Some(rewritten) = new_consumer {
            consumer_rewrites.push((consumer_file.clone(), consumer_content, rewritten));
        }
    }
    // --- Dry run: return diffs without touching any file ---
    if dry_run {
        let mut diffs: Vec<serde_json::Value> = Vec::new();
        let source_dr = edit::dry_run_diff(&source_content, &new_source, source_path);
        diffs.push(serde_json::json!({
            "file": file,
            "diff": source_dr.diff,
            "syntax_valid": source_dr.syntax_valid,
        }));
        let dest_dr = edit::dry_run_diff(&dest_content, &new_dest, dest_path);
        diffs.push(serde_json::json!({
            "file": destination,
            "diff": dest_dr.diff,
            "syntax_valid": dest_dr.syntax_valid,
        }));
        for (path, original, new_content) in &consumer_rewrites {
            let dr = edit::dry_run_diff(original, new_content, &path);
            diffs.push(serde_json::json!({
                "file": path.display().to_string(),
                "diff": dr.diff,
                "syntax_valid": dr.syntax_valid,
            }));
        }
        return Response::success(
            &req.id,
            serde_json::json!({
                "ok": true,
                "dry_run": true,
                "diffs": diffs,
            }),
        );
    }
    // --- Checkpoint every file we are about to touch, for rollback ---
    let checkpoint_name = format!("move_symbol:{}", symbol_name);
    {
        let mut all_files: Vec<PathBuf> = vec![source_path.to_path_buf()];
        // A not-yet-existing destination cannot be checkpointed; rollback
        // deletes it instead (see cleanup_new_files).
        if dest_path.exists() {
            all_files.push(dest_path.to_path_buf());
        }
        for (path, _, _) in &consumer_rewrites {
            all_files.push(path.clone());
        }
        let backup_store = ctx.backup().borrow();
        let mut cp_store = ctx.checkpoint().borrow_mut();
        if let Err(e) = cp_store.create(req.session(), &checkpoint_name, all_files, &backup_store) {
            return Response::error(&req.id, e.code(), e.to_string());
        }
    }
    // --- Apply the writes: source, then destination, then consumers ---
    let mut written_files: Vec<PathBuf> = Vec::new();
    let mut new_files: Vec<PathBuf> = Vec::new();
    let mut results: Vec<serde_json::Value> = Vec::new();
    let dest_existed = dest_path.exists();
    match edit::write_format_validate(&source_path, &new_source, &ctx.config(), &req.params) {
        Ok(wr) => {
            // Re-read after formatting so the LSP sees the on-disk content.
            if let Ok(final_content) = std::fs::read_to_string(source_path) {
                ctx.lsp_notify_file_changed(source_path, &final_content);
            }
            written_files.push(source_path.to_path_buf());
            results.push(serde_json::json!({
                "file": file,
                "syntax_valid": wr.syntax_valid,
                "formatted": wr.formatted,
            }));
        }
        Err(e) => {
            restore_checkpoint(ctx, req.session(), &checkpoint_name);
            return move_error(
                &req.id,
                file,
                &written_files,
                &new_files,
                &format!("failed to write source: {}", e),
            );
        }
    }
    match edit::write_format_validate(&dest_path, &new_dest, &ctx.config(), &req.params) {
        Ok(wr) => {
            if let Ok(final_content) = std::fs::read_to_string(dest_path) {
                ctx.lsp_notify_file_changed(dest_path, &final_content);
            }
            // Track newly created files separately: rollback deletes them
            // rather than restoring from checkpoint.
            if dest_existed {
                written_files.push(dest_path.to_path_buf());
            } else {
                new_files.push(dest_path.to_path_buf());
            }
            results.push(serde_json::json!({
                "file": destination,
                "syntax_valid": wr.syntax_valid,
                "formatted": wr.formatted,
            }));
        }
        Err(e) => {
            restore_checkpoint(ctx, req.session(), &checkpoint_name);
            cleanup_new_files(&new_files);
            return move_error(
                &req.id,
                destination,
                &written_files,
                &new_files,
                &format!("failed to write destination: {}", e),
            );
        }
    }
    let mut consumers_updated = 0;
    for (path, _original, new_content) in &consumer_rewrites {
        match edit::write_format_validate(&path, new_content, &ctx.config(), &req.params) {
            Ok(wr) => {
                if let Ok(final_content) = std::fs::read_to_string(&path) {
                    ctx.lsp_notify_file_changed(path, &final_content);
                }
                written_files.push(path.clone());
                consumers_updated += 1;
                results.push(serde_json::json!({
                    "file": path.display().to_string(),
                    "syntax_valid": wr.syntax_valid,
                    "formatted": wr.formatted,
                }));
            }
            Err(e) => {
                // Any consumer failure rolls back everything written so far.
                restore_checkpoint(ctx, req.session(), &checkpoint_name);
                cleanup_new_files(&new_files);
                return move_error(
                    &req.id,
                    &path.display().to_string(),
                    &written_files,
                    &new_files,
                    &format!("failed to write consumer: {}", e),
                );
            }
        }
    }
    let files_modified = results.len();
    log::debug!(
        "[aft] move_symbol: {} from {} to {} ({} consumers updated)",
        symbol_name,
        file,
        destination,
        consumers_updated
    );
    Response::success(
        &req.id,
        serde_json::json!({
            "ok": true,
            "files_modified": files_modified,
            "consumers_updated": consumers_updated,
            "checkpoint_name": checkpoint_name,
            "results": results,
        }),
    )
}
/// Build a JS/TS relative import specifier from `from_file` to `to_file`,
/// with the destination's extension stripped (e.g. `../shared/utils`).
pub fn compute_relative_import_path(from_file: &Path, to_file: &Path) -> String {
    let stem = to_file
        .file_stem()
        .unwrap_or_default()
        .to_str()
        .unwrap_or("");
    let rel = compute_relative_dir(
        from_file.parent().unwrap_or(Path::new("")),
        to_file.parent().unwrap_or(Path::new("")),
    );
    // Relative specifiers must start with `./` or `../` to be recognized as
    // local module paths.
    match rel.as_str() {
        "" | "." => format!("./{}", stem),
        r if r.starts_with("..") => format!("{}/{}", r, stem),
        r => format!("./{}/{}", r, stem),
    }
}
/// Express directory `to` relative to directory `from` using `/`-joined
/// segments, e.g. `../shared`. Returns `"."` when the directories coincide.
fn compute_relative_dir(from: &Path, to: &Path) -> String {
    // Keep only the normal (named) components; prefixes, roots, and dot
    // segments do not participate in the comparison.
    let segments = |p: &Path| -> Vec<String> {
        p.components()
            .filter_map(|c| match c {
                std::path::Component::Normal(s) => s.to_str().map(String::from),
                _ => None,
            })
            .collect()
    };
    let from_parts = segments(from);
    let to_parts = segments(to);
    // Length of the shared leading prefix of both paths.
    let common = from_parts
        .iter()
        .zip(&to_parts)
        .take_while(|(a, b)| a == b)
        .count();
    // Climb out of the non-shared part of `from`, then descend into `to`.
    let mut pieces: Vec<&str> = vec![".."; from_parts.len() - common];
    pieces.extend(to_parts[common..].iter().map(String::as_str));
    if pieces.is_empty() {
        ".".to_string()
    } else {
        pieces.join("/")
    }
}
/// Does the relative import specifier `import_module`, written inside
/// `consumer_file`, resolve to `target_file`?
///
/// Tries the bare path, the path with each JS/TS extension, and the
/// directory-index forms (`<path>/index.<ext>`). Non-relative (package)
/// imports never match.
fn import_path_matches_file(import_module: &str, consumer_file: &Path, target_file: &Path) -> bool {
    // Only specifiers starting with `.` refer to project files.
    if !import_module.starts_with('.') {
        return false;
    }
    const EXTS: [&str; 4] = ["ts", "tsx", "js", "jsx"];
    let base = consumer_file
        .parent()
        .unwrap_or(Path::new(""))
        .join(import_module);
    if paths_equivalent(&base, target_file) {
        return true;
    }
    if EXTS
        .iter()
        .any(|ext| paths_equivalent(&base.with_extension(ext), target_file))
    {
        return true;
    }
    let index = base.join("index");
    EXTS.iter()
        .any(|ext| paths_equivalent(&index.with_extension(ext), target_file))
}
/// Two paths are equivalent when they compare equal after `.`/`..`
/// normalization (purely lexical; the filesystem is not consulted).
fn paths_equivalent(a: &Path, b: &Path) -> bool {
    normalize_path(a) == normalize_path(b)
}
/// Lexically normalize a path: drop `.` segments and fold `..` into the
/// preceding named segment where possible. Leading `..` segments that cannot
/// be folded are preserved (they may climb above the given root).
fn normalize_path(path: &Path) -> PathBuf {
    use std::path::Component;
    let mut stack: Vec<Component> = Vec::new();
    for comp in path.components() {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => {
                // Only a named segment can be cancelled by `..`; a root,
                // prefix, or earlier `..` stays put.
                if matches!(stack.last(), Some(Component::Normal(_))) {
                    stack.pop();
                } else {
                    stack.push(comp);
                }
            }
            other => stack.push(other),
        }
    }
    stack.iter().collect()
}
/// Scan backwards from `decl_start` for an `export` (or `export default`)
/// keyword directly preceding the declaration; return its byte offset.
///
/// Only a 200-byte window before the declaration is examined. The keyword
/// must sit on identifier boundaries, and between it and the declaration only
/// whitespace or the single word `default` may appear.
fn find_export_keyword_start(source: &str, decl_start: usize) -> Option<usize> {
    if decl_start == 0 {
        return None;
    }
    let bytes = source.as_bytes();
    let window_start = decl_start.saturating_sub(200);
    let window = &bytes[window_start..decl_start];
    if window.len() < 6 {
        return None;
    }
    // Right-to-left so the `export` nearest the declaration wins.
    for i in (0..=window.len() - 6).rev() {
        if window.get(i..i + 6) != Some(b"export") {
            continue;
        }
        // Reject matches embedded in a longer identifier (`_export`, `reexports`).
        let left_ok = i == 0
            || !matches!(window[i - 1], b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'$');
        let right_ok = window
            .get(i + 6)
            .is_some_and(|&b| matches!(b, b' ' | b'\t' | b'\n' | b'\r'));
        if !(left_ok && right_ok) {
            continue;
        }
        let abs = window_start + i;
        // Between the keyword and the declaration only whitespace or the
        // word `default` is allowed.
        let between = std::str::from_utf8(&bytes[abs + 6..decl_start]).ok()?.trim();
        if between.is_empty() || between == "default" {
            return Some(abs);
        }
    }
    None
}
/// Ensure the moved symbol text is an export declaration in its new module.
///
/// If the trimmed text already begins with the `export` keyword (followed by
/// any whitespace, `{`, or `*` — covering `export default`, re-exports, and
/// star exports), it is returned unchanged; otherwise `export ` is prepended.
///
/// Fix: the previous checks only accepted a single space after `export`
/// (`"export "` etc.), so `export\tfunction f()` or `export{ a }` would be
/// doubled into `export export…`. The first three `starts_with` checks were
/// also fully redundant with the `"export "` check.
fn prepare_exported_symbol(symbol_text: &str) -> String {
    let trimmed = symbol_text.trim();
    match trimmed.strip_prefix("export") {
        // Already an export declaration; identifier-like continuations such
        // as `exporter` fall through and get prefixed.
        Some(rest)
            if rest.starts_with(|c: char| c.is_whitespace())
                || rest.starts_with('{')
                || rest.starts_with('*') =>
        {
            trimmed.to_string()
        }
        _ => format!("export {}", trimmed),
    }
}
/// Delete the byte range of a symbol from `source`, widening the range to
/// swallow surrounding layout: leading whitespace on the symbol's first line,
/// trailing spaces/tabs, and up to two line endings (the symbol's own newline
/// plus one blank separator line).
fn remove_symbol_from_source(
    source: &str,
    start_byte: usize,
    end_byte: usize,
) -> Result<String, crate::error::AftError> {
    let bytes = source.as_bytes();
    // Pull the start back to the beginning of its line, but only when
    // everything before the symbol on that line is whitespace.
    let line_start = source[..start_byte].rfind('\n').map_or(0, |p| p + 1);
    let actual_start = if source[line_start..start_byte].trim().is_empty() {
        line_start
    } else {
        start_byte
    };
    // Push the end forward over trailing spaces and tabs...
    let mut actual_end = end_byte;
    while matches!(bytes.get(actual_end), Some(&(b' ' | b'\t'))) {
        actual_end += 1;
    }
    // ...then over at most two line endings (`\n`, or `\r` optionally
    // followed by `\n`), so a single blank line after the symbol is removed.
    for _ in 0..2 {
        match bytes.get(actual_end) {
            Some(&b'\n') => actual_end += 1,
            Some(&b'\r') => {
                actual_end += 1;
                if bytes.get(actual_end) == Some(&b'\n') {
                    actual_end += 1;
                }
            }
            _ => {}
        }
    }
    edit::replace_byte_range(source, actual_start, actual_end, "")
}
/// Append the moved symbol at the end of the destination file, separated from
/// existing content by one blank line and ending with a trailing newline.
fn append_symbol_to_dest(dest_content: &str, symbol_text: &str) -> String {
    if dest_content.is_empty() {
        return format!("{}\n", symbol_text);
    }
    format!("{}\n\n{}\n", dest_content.trim_end(), symbol_text)
}
/// Rewrite a consumer file's imports after `symbol_name` moved from
/// `source_file` to `dest_file`.
///
/// Returns the rewritten content, or `None` when nothing changes (non-JS/TS
/// language, parse failure, no import of the moved symbol, or byte ranges
/// that do not line up with `consumer_content`).
///
/// Fix: imports are edited in descending byte order, so replacements never
/// shift the (lower) ranges still to be processed. The previous code
/// nevertheless accumulated an `offset_delta` from those high-offset edits
/// and added it to subsequent lower starts — corrupting the ranges whenever
/// more than one import statement matched. The delta is removed entirely.
fn rewrite_consumer_imports(
    consumer_content: &str,
    consumer_file: &Path,
    source_file: &Path,
    dest_file: &Path,
    symbol_name: &str,
    lang: Option<LangId>,
) -> Option<String> {
    let lang = lang?;
    // Import rewriting is only implemented for the JS/TS family.
    if !matches!(lang, LangId::TypeScript | LangId::Tsx | LangId::JavaScript) {
        return None;
    }
    // NOTE(review): imports are parsed from the file on disk; their byte
    // ranges are assumed to line up with `consumer_content`, which was read
    // from the same file — confirm callers never pass stale content.
    let (_source_text, _tree, block) = match imports::parse_file_imports(consumer_file, lang) {
        Ok(r) => r,
        Err(_) => return None,
    };
    let mut matching_imports: Vec<&imports::ImportStatement> = block
        .imports
        .iter()
        .filter(|imp| import_path_matches_file(&imp.module_path, consumer_file, source_file))
        .collect();
    // Bottom-up: each replacement leaves earlier (lower) ranges untouched.
    matching_imports.sort_by(|a, b| b.byte_range.start.cmp(&a.byte_range.start));
    let mut result = consumer_content.to_string();
    let mut made_changes = false;
    for imp in &matching_imports {
        let has_moved_symbol = imp
            .names
            .iter()
            .any(|n| imports::specifier_matches(n, symbol_name))
            || imp.default_import.as_deref() == Some(symbol_name);
        if !has_moved_symbol {
            continue;
        }
        let new_import_path = compute_relative_import_path(consumer_file, dest_file);
        // Specifiers that stay behind, still pointing at the original module.
        let remaining_names: Vec<String> = imp
            .names
            .iter()
            .filter(|n| !imports::specifier_matches(n, symbol_name))
            .cloned()
            .collect();
        let remaining_default = if imp.default_import.as_deref() == Some(symbol_name) {
            None
        } else {
            imp.default_import.clone()
        };
        let type_only = imp.kind == imports::ImportKind::Type;
        let (start, end) = (imp.byte_range.start, imp.byte_range.end);
        // Bail out rather than panic if the parsed range is out of bounds or
        // not on char boundaries of the provided content.
        if result.get(start..end).is_none() {
            return None;
        }
        // Import for the moved symbol at its new location (preserving any alias).
        let moved_import = generate_import_with_alias(
            &imp.raw_text,
            symbol_name,
            &new_import_path,
            type_only,
            lang,
        );
        let replacement = if remaining_names.is_empty() && remaining_default.is_none() {
            // The whole statement referred to the moved symbol.
            moved_import
        } else {
            // Keep the untouched specifiers, then add the redirected import.
            let kept_import = imports::generate_import_line(
                lang,
                &imp.module_path,
                &remaining_names,
                remaining_default.as_deref(),
                type_only,
            );
            format!("{}\n{}", kept_import, moved_import)
        };
        result.replace_range(start..end, &replacement);
        made_changes = true;
    }
    if made_changes {
        Some(result)
    } else {
        None
    }
}
/// Build an import statement for the moved symbol at its new module path,
/// carrying over any `as` alias found in the original raw import text.
fn generate_import_with_alias(
    original_raw: &str,
    symbol_name: &str,
    new_module_path: &str,
    type_only: bool,
    _lang: LangId,
) -> String {
    // Keep the consumer's local name stable by re-using its alias, if any.
    let specifier = match extract_alias(original_raw, symbol_name) {
        Some(alias) => format!("{} as {}", symbol_name, alias),
        None => symbol_name.to_string(),
    };
    let type_prefix = if type_only { "type " } else { "" };
    format!(
        "import {}{{ {} }} from '{}';",
        type_prefix, specifier, new_module_path
    )
}
/// Find the `as` alias for `symbol_name` inside a raw import statement, e.g.
/// `import { formatDate as fmtDate } from './utils'` yields `fmtDate`.
///
/// Fix: the match must start on an identifier boundary. The previous plain
/// substring search let symbol `Date` match inside `formatDate as fd` and
/// return the wrong alias.
fn extract_alias(raw_text: &str, symbol_name: &str) -> Option<String> {
    let pattern = format!("{} as ", symbol_name);
    let is_ident = |c: char| c.is_alphanumeric() || c == '_' || c == '$';
    let mut search_from = 0;
    while let Some(rel) = raw_text[search_from..].find(&pattern) {
        let pos = search_from + rel;
        // Preceding char must not be an identifier char (or there is none).
        let boundary = raw_text[..pos]
            .chars()
            .next_back()
            .map_or(true, |c| !is_ident(c));
        if boundary {
            let alias: String = raw_text[pos + pattern.len()..]
                .chars()
                .take_while(|&c| is_ident(c))
                .collect();
            if !alias.is_empty() {
                return Some(alias);
            }
        }
        // Step past this occurrence and keep scanning for a boundary-aligned one.
        search_from = pos + raw_text[pos..].chars().next().map_or(1, char::len_utf8);
    }
    None
}
/// Best-effort rollback: restore the named checkpoint for the session.
/// Failures are logged at debug level and never propagated — rollback runs
/// while already handling another error.
fn restore_checkpoint(ctx: &AppContext, session: &str, name: &str) {
    let outcome = ctx.checkpoint().borrow().restore(session, name);
    if let Err(e) = outcome {
        log::debug!(
            "[aft] move_symbol rollback: failed to restore checkpoint '{}': {}",
            name,
            e
        );
    }
}
/// Delete files that were newly created by this move, as part of rollback.
/// A checkpoint cannot restore a file that did not exist before, so these
/// are removed instead; failures are logged, not propagated.
fn cleanup_new_files(new_files: &[PathBuf]) {
    for path in new_files.iter().filter(|p| p.exists()) {
        if let Err(e) = std::fs::remove_file(path) {
            log::debug!(
                "[aft] move_symbol rollback: failed to delete new file {}: {}",
                path.display(),
                e
            );
        }
    }
}
/// Build the error response for a failed move, listing every file that was
/// rolled back: previously-written files as "restored", newly created files
/// as "deleted".
fn move_error(
    req_id: &str,
    failed_file: &str,
    written_files: &[PathBuf],
    new_files: &[PathBuf],
    message: &str,
) -> Response {
    let describe = |p: &PathBuf, action: &str| {
        serde_json::json!({
            "file": p.display().to_string(),
            "action": action,
        })
    };
    let rolled_back: Vec<serde_json::Value> = written_files
        .iter()
        .map(|p| describe(p, "restored"))
        .chain(new_files.iter().map(|p| describe(p, "deleted")))
        .collect();
    log::debug!(
        "[aft] move_symbol failed at {}: {} — rolled back {} files",
        failed_file,
        message,
        rolled_back.len()
    );
    Response::error_with_data(
        req_id,
        "move_symbol_failed",
        message,
        serde_json::json!({
            "failed_file": failed_file,
            "rolled_back": rolled_back,
        }),
    )
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- compute_relative_import_path: specifier shapes ---

    #[test]
    fn relative_path_same_directory() {
        let from = Path::new("src/components/Button.ts");
        let to = Path::new("src/components/utils.ts");
        assert_eq!(compute_relative_import_path(from, to), "./utils");
    }

    #[test]
    fn relative_path_parent_directory() {
        let from = Path::new("src/components/Button.ts");
        let to = Path::new("src/utils.ts");
        assert_eq!(compute_relative_import_path(from, to), "../utils");
    }

    #[test]
    fn relative_path_sibling_directory() {
        let from = Path::new("src/components/Button.ts");
        let to = Path::new("src/shared/utils.ts");
        assert_eq!(compute_relative_import_path(from, to), "../shared/utils");
    }

    #[test]
    fn relative_path_deeply_nested() {
        let from = Path::new("src/features/auth/components/Login.ts");
        let to = Path::new("src/lib/helpers.ts");
        assert_eq!(
            compute_relative_import_path(from, to),
            "../../../lib/helpers"
        );
    }

    #[test]
    fn relative_path_child_directory() {
        let from = Path::new("src/index.ts");
        let to = Path::new("src/utils/helpers.ts");
        assert_eq!(compute_relative_import_path(from, to), "./utils/helpers");
    }

    #[test]
    fn relative_path_strips_extension() {
        // The destination's extension must not appear in the specifier.
        let from = Path::new("src/app.tsx");
        let to = Path::new("src/components/Header.tsx");
        assert_eq!(
            compute_relative_import_path(from, to),
            "./components/Header"
        );
    }

    // --- prepare_exported_symbol: export keyword handling ---

    #[test]
    fn prepare_exported_adds_export() {
        let text = "function doStuff() { return 42; }";
        assert_eq!(
            prepare_exported_symbol(text),
            "export function doStuff() { return 42; }"
        );
    }

    #[test]
    fn prepare_exported_preserves_existing() {
        let text = "export function doStuff() { return 42; }";
        assert_eq!(
            prepare_exported_symbol(text),
            "export function doStuff() { return 42; }"
        );
    }

    #[test]
    fn prepare_exported_preserves_export_reexports_and_default() {
        assert_eq!(
            prepare_exported_symbol("export default function doStuff() { return 42; }"),
            "export default function doStuff() { return 42; }"
        );
        assert_eq!(
            prepare_exported_symbol("export { doStuff } from './other';"),
            "export { doStuff } from './other';"
        );
        assert_eq!(
            prepare_exported_symbol("export * from './other';"),
            "export * from './other';"
        );
    }

    // --- extract_alias: `as` alias extraction from raw import text ---

    #[test]
    fn extract_alias_found() {
        let raw = "import { formatDate as fmtDate, other } from './utils';";
        assert_eq!(
            extract_alias(raw, "formatDate"),
            Some("fmtDate".to_string())
        );
    }

    #[test]
    fn extract_alias_not_found() {
        let raw = "import { formatDate, other } from './utils';";
        assert_eq!(extract_alias(raw, "formatDate"), None);
    }

    // --- import_path_matches_file: resolving relative specifiers ---

    #[test]
    fn import_path_matches_same_dir() {
        let consumer = Path::new("src/components/Button.ts");
        let target = Path::new("src/components/utils.ts");
        assert!(import_path_matches_file("./utils", consumer, target));
    }

    #[test]
    fn import_path_matches_parent_dir() {
        let consumer = Path::new("src/components/Button.ts");
        let target = Path::new("src/service.ts");
        assert!(import_path_matches_file("../service", consumer, target));
    }

    #[test]
    fn import_path_no_match_different_file() {
        let consumer = Path::new("src/components/Button.ts");
        let target = Path::new("src/components/utils.ts");
        assert!(!import_path_matches_file("./other", consumer, target));
    }

    #[test]
    fn import_path_no_match_external() {
        // Bare (package) specifiers never match project files.
        let consumer = Path::new("src/components/Button.ts");
        let target = Path::new("src/components/utils.ts");
        assert!(!import_path_matches_file("react", consumer, target));
    }

    // --- normalize_path: lexical `.`/`..` folding ---

    #[test]
    fn normalize_path_with_parent() {
        let p = Path::new("src/components/../utils.ts");
        assert_eq!(normalize_path(p), PathBuf::from("src/utils.ts"));
    }

    #[test]
    fn normalize_path_with_current() {
        let p = Path::new("src/./components/Button.ts");
        assert_eq!(normalize_path(p), PathBuf::from("src/components/Button.ts"));
    }

    // --- remove_symbol_from_source: whitespace/blank-line cleanup ---

    #[test]
    fn remove_symbol_cleans_whitespace() {
        let source = "export function keep() {}\n\nexport function remove() {}\n\nexport function alsoKeep() {}\n";
        let start = source.find("export function remove").unwrap();
        let end = start + "export function remove() {}".len();
        let result = remove_symbol_from_source(source, start, end).unwrap();
        assert!(result.contains("export function keep()"));
        assert!(!result.contains("remove"));
        assert!(result.contains("export function alsoKeep()"));
    }

    // --- append_symbol_to_dest ---

    #[test]
    fn append_to_empty_dest() {
        let result = append_symbol_to_dest("", "export function foo() {}");
        assert_eq!(result, "export function foo() {}\n");
    }

    #[test]
    fn append_to_existing_dest() {
        let result =
            append_symbol_to_dest("export function bar() {}\n", "export function foo() {}");
        assert_eq!(
            result,
            "export function bar() {}\n\nexport function foo() {}\n"
        );
    }

    // --- find_export_keyword_start: locating the `export` keyword ---

    #[test]
    fn find_export_keyword_simple_export() {
        let source = "export function greet() {}\n";
        assert_eq!(find_export_keyword_start(source, 7), Some(0));
    }

    #[test]
    fn find_export_keyword_export_default() {
        let source = "export default function f() {}\n";
        assert_eq!(find_export_keyword_start(source, 15), Some(0));
    }

    #[test]
    fn find_export_keyword_with_jsdoc_above() {
        let source = "/** doc */\nexport function f() {}\n";
        let decl_start = source.find("function").unwrap();
        assert_eq!(find_export_keyword_start(source, decl_start), Some(11));
    }

    #[test]
    fn find_export_keyword_no_export() {
        let source = "function f() {}\n";
        assert_eq!(find_export_keyword_start(source, 0), None);
    }

    #[test]
    fn find_export_keyword_unrelated_word_before() {
        // Non-`export` text before the declaration must not match.
        let source = "let report = 1;\nfunction f() {}\n";
        let decl_start = source.find("function").unwrap();
        assert_eq!(find_export_keyword_start(source, decl_start), None);
    }

    #[test]
    fn find_export_keyword_does_not_match_substring() {
        // `export` embedded in an identifier (`_export`) must not match.
        let source = "let _export = 1;\nfunction f() {}\n";
        let decl_start = source.find("function").unwrap();
        assert_eq!(find_export_keyword_start(source, decl_start), None);
    }

    #[test]
    fn find_export_keyword_at_zero_decl_start() {
        assert_eq!(find_export_keyword_start("function f() {}", 0), None);
    }
}