use std::collections::HashMap;
use std::path::Path;
use normalize_languages::parsers::parse_with_grammar;
use normalize_languages::support_for_path;
use crate::{PlannedEdit, RefactoringContext, RefactoringPlan};
/// Result of planning an "add parameter" refactoring.
pub struct AddParameterOutcome {
    /// The full plan: one edit per touched file (definition file first),
    /// plus any non-fatal warnings gathered while planning.
    pub plan: RefactoringPlan,
    /// Number of caller lines that had the new default argument added.
    pub call_sites_updated: usize,
}
/// Plan an "add parameter" refactoring rooted at `ctx.root`.
///
/// Adds `param_name` (optionally typed via `param_type`) to the definition
/// of `function_name` in `def_rel_path`, then appends `default_value` as a
/// new argument at every call site reported by the reference index.
/// `position` selects the insertion index within the parameter/argument
/// list; `None` appends at the end.
///
/// Returns the combined plan plus a call-site count, or an error string
/// when the definition file cannot be read or its edit cannot be planned.
/// Caller-file problems are downgraded to warnings, never errors.
pub async fn plan_add_parameter(
    ctx: &RefactoringContext,
    def_rel_path: &str,
    function_name: &str,
    param_name: &str,
    param_type: Option<&str>,
    default_value: &str,
    position: Option<usize>,
) -> Result<AddParameterOutcome, String> {
    let def_abs_path = ctx.root.join(def_rel_path);
    // NOTE(review): blocking fs reads inside an async fn — presumably fine
    // for this tool's runtime; confirm no executor starvation concern.
    let def_content = std::fs::read_to_string(&def_abs_path)
        .map_err(|e| format!("Error reading {}: {}", def_rel_path, e))?;
    let def_edit = plan_add_param_in_definition(
        &def_abs_path,
        &def_content,
        function_name,
        param_name,
        param_type,
        position,
    )?;
    // Invariant: the definition edit stays at edits[0]; the merge branch
    // below relies on this when the definition file also contains callers.
    let mut edits: Vec<PlannedEdit> = vec![def_edit];
    let mut warnings: Vec<String> = vec![];
    let refs = crate::actions::find_references(ctx, function_name, def_rel_path).await;
    if ctx.index.is_none() {
        warnings.push(
            "Index not available; only updated definition file. \
             Run `normalize structure rebuild` to enable call-site updates."
                .to_string(),
        );
    }
    // Group caller lines by file so each file is read and edited once.
    let mut callers_by_file: HashMap<String, Vec<usize>> = HashMap::new();
    for caller in &refs.callers {
        callers_by_file
            .entry(caller.file.clone())
            .or_default()
            .push(caller.line);
    }
    let mut call_sites_updated = 0usize;
    for (rel_path, call_lines) in &callers_by_file {
        let abs_path = ctx.root.join(rel_path);
        let content = match std::fs::read_to_string(&abs_path) {
            Ok(c) => c,
            Err(_) => {
                // Unreadable caller files become warnings; planning continues.
                warnings.push(format!("Could not read caller file: {}", rel_path));
                continue;
            }
        };
        match plan_add_arg_in_file(
            &abs_path,
            &content,
            function_name,
            call_lines,
            default_value,
            position,
        ) {
            Ok(Some(edit)) => {
                // NOTE(review): counts the indexed caller lines, not the
                // call expressions actually matched — verify intended.
                call_sites_updated += call_lines.len();
                if abs_path == def_abs_path {
                    // Definition file also calls the function: fold the
                    // call-site edit into the definition edit at edits[0].
                    let merged = merge_edits(&edits[0], &edit)?;
                    edits[0] = merged;
                } else {
                    edits.push(edit);
                }
            }
            Ok(None) => {
                // No matching call expressions on the indexed lines; skip.
            }
            Err(e) => {
                warnings.push(format!("Could not update {}: {}", rel_path, e));
            }
        }
    }
    Ok(AddParameterOutcome {
        plan: RefactoringPlan {
            operation: "add_parameter".to_string(),
            edits,
            warnings,
        },
        call_sites_updated,
    })
}
/// Plan the edit that inserts `param_name` into the parameter list of
/// `function_name` inside `content`.
///
/// Parses `content` with the grammar chosen from `file`'s extension,
/// locates the function's parameter list, and returns a [`PlannedEdit`]
/// whose `new_content` has the language-appropriately formatted parameter
/// inserted at `position` (appended when `None`).
///
/// Errors when the language is unsupported, the grammar is not installed,
/// or the function cannot be found.
fn plan_add_param_in_definition(
    file: &Path,
    content: &str,
    function_name: &str,
    param_name: &str,
    param_type: Option<&str>,
    position: Option<usize>,
) -> Result<PlannedEdit, String> {
    let support = support_for_path(file)
        .ok_or_else(|| format!("No language support for {}", file.display()))?;
    let grammar = support.grammar_name();
    let tree = parse_with_grammar(grammar, content).ok_or_else(|| {
        format!(
            "Grammar '{}' not available — install grammars with `normalize grammars install`",
            grammar
        )
    })?;
    let params_range = find_param_list(&tree.root_node(), content, grammar, function_name)
        .ok_or_else(|| {
            format!(
                "Function '{}' not found in {}",
                function_name,
                file.display()
            )
        })?;
    // Render the parameter per-language (e.g. `name: Type` for Rust/TS).
    let param_text = format_param(grammar, param_name, param_type);
    // Fix: these two arguments were mojibake (`¶ms_range`/`¶m_text`,
    // i.e. `&para…` collapsed into `¶`) and did not compile.
    let new_content = insert_into_list(
        content,
        &params_range,
        &param_text,
        position,
        ListKind::Params,
    );
    Ok(PlannedEdit {
        file: file.to_path_buf(),
        original: content.to_string(),
        new_content,
        description: format!("add parameter '{}' to '{}'", param_name, function_name),
    })
}
/// Plan the edit that appends `default_value` as an argument to every call
/// of `function_name` that starts on one of `call_lines` (1-based) in
/// `content`.
///
/// Returns `Ok(None)` when no matching call expression is found on those
/// lines; errors when language support or the grammar is unavailable.
fn plan_add_arg_in_file(
    file: &Path,
    content: &str,
    function_name: &str,
    call_lines: &[usize],
    default_value: &str,
    position: Option<usize>,
) -> Result<Option<PlannedEdit>, String> {
    let support = support_for_path(file)
        .ok_or_else(|| format!("No language support for {}", file.display()))?;
    let grammar = support.grammar_name();
    let tree = parse_with_grammar(grammar, content).ok_or_else(|| {
        format!(
            "Grammar '{}' not available — install grammars with `normalize grammars install`",
            grammar
        )
    })?;
    let ranges = find_call_arg_lists(
        &tree.root_node(),
        content,
        grammar,
        function_name,
        call_lines,
    );
    if ranges.is_empty() {
        return Ok(None);
    }
    // Apply insertions back-to-front (descending open_paren) so earlier
    // byte offsets remain valid after each splice.
    let mut sorted = ranges;
    sorted.sort_by(|a, b| b.open_paren.cmp(&a.open_paren));
    // NOTE(review): all offsets were computed against the original content.
    // For nested calls to the same function (one range inside another) the
    // inner splice would leave the outer range's close_paren stale —
    // verify whether that case can occur here.
    let mut new_content = content.to_string();
    for r in &sorted {
        let chunk = insert_into_list(&new_content, r, default_value, position, ListKind::Args);
        new_content = chunk;
    }
    Ok(Some(PlannedEdit {
        file: file.to_path_buf(),
        original: content.to_string(),
        new_content,
        description: format!(
            "add argument '{}' to calls of '{}'",
            default_value, function_name
        ),
    }))
}
/// Distinguishes parameter lists from call-argument lists when splicing
/// text into a parenthesized list.
enum ListKind {
    Params,
    Args,
}

/// Byte offsets describing a parenthesized list inside a source string:
/// the `(` and `)` positions, the offset just past each `,`, and how many
/// items the list currently holds.
struct ListRange {
    open_paren: usize,
    close_paren: usize,
    comma_positions: Vec<usize>,
    item_count: usize,
}

/// Return a copy of `content` with `text` spliced into the list described
/// by `range`, as the item at index `position` (appended when `None`).
fn insert_into_list(
    content: &str,
    range: &ListRange,
    text: &str,
    position: Option<usize>,
    kind: ListKind,
) -> String {
    // Both list kinds currently share the same separator.
    let separator = match kind {
        ListKind::Params | ListKind::Args => ", ",
    };
    let mut out = content.to_string();
    // Empty list: the text goes straight after `(`, no separator needed.
    if range.item_count == 0 {
        out.insert_str(range.open_paren + 1, text);
        return out;
    }
    let pos = position.unwrap_or(range.item_count);
    if pos == 0 {
        // New first item: text plus separator before the current first item.
        out.insert_str(range.open_paren + 1, &format!("{}{}", text, separator));
    } else if pos >= range.item_count {
        // New last item: separator plus text just before the `)`.
        out.insert_str(range.close_paren, &format!("{}{}", separator, text));
    } else {
        // Middle: insert after the comma ending item `pos - 1`, skipping
        // the whitespace that follows that comma.
        let after_comma = range.comma_positions[pos - 1];
        let tail = &content[after_comma..];
        let ws = tail.len() - tail.trim_start().len();
        out.insert_str(after_comma + ws, &format!("{}{}", text, separator));
    }
    out
}
/// Depth-first pre-order traversal: apply `visit` to `node`, then recurse
/// into every child in order.
fn walk_tree(node: tree_sitter::Node<'_>, visit: &mut impl FnMut(tree_sitter::Node<'_>)) {
    visit(node);
    let mut walker = node.walk();
    for child in node.children(&mut walker) {
        walk_tree(child, visit);
    }
}
/// Locate the parameter list of the function named `name`.
///
/// Walks the whole tree; the first node whose kind is a function kind for
/// `grammar` and whose name matches wins. Returns the byte range of its
/// parameter-list child, or `None` when no such function exists.
fn find_param_list(
    root: &tree_sitter::Node<'_>,
    content: &str,
    grammar: &str,
    name: &str,
) -> Option<ListRange> {
    let fn_kinds = function_item_kinds(grammar);
    let list_kind = param_list_kind(grammar);
    let mut found: Option<ListRange> = None;
    walk_tree(*root, &mut |node| {
        // Keep only the first match; skip non-function nodes outright.
        if found.is_some() || !fn_kinds.contains(&node.kind()) {
            return;
        }
        if !function_name_matches(&node, content, name) {
            return;
        }
        let mut cursor = node.walk();
        found = node
            .children(&mut cursor)
            .find(|child| child.kind() == list_kind)
            .map(|child| list_range_from_node(&child, content));
    });
    found
}
/// Collect the argument-list ranges of every call to `function_name` that
/// starts on one of `call_lines` (1-based line numbers).
fn find_call_arg_lists(
    root: &tree_sitter::Node<'_>,
    content: &str,
    grammar: &str,
    function_name: &str,
    call_lines: &[usize],
) -> Vec<ListRange> {
    let call = call_kind(grammar);
    let args_kind = arg_list_kind(grammar);
    let mut found: Vec<ListRange> = Vec::new();
    walk_tree(*root, &mut |node| {
        if node.kind() != call {
            return;
        }
        // Tree-sitter rows are 0-based; caller line numbers are 1-based.
        let line = node.start_position().row + 1;
        if !call_lines.contains(&line) || !call_matches_name(&node, content, function_name) {
            return;
        }
        let mut cursor = node.walk();
        if let Some(args) = node
            .children(&mut cursor)
            .find(|child| child.kind() == args_kind)
        {
            found.push(list_range_from_node(&args, content));
        }
    });
    found
}
/// Build a [`ListRange`] from a parameter/argument list node.
///
/// `open_paren` is the node's first byte (the `(`); `end_byte()` is
/// exclusive, so the `)` sits one byte earlier. `comma_positions` holds
/// the offset just past each `,` token.
///
/// Fix: the original carried a dead `let _ = content;` (jammed onto the
/// closing-brace line) purely to silence the unused parameter; the
/// parameter is now named `_content` instead. It is kept so both call
/// sites remain unchanged.
fn list_range_from_node(node: &tree_sitter::Node<'_>, _content: &str) -> ListRange {
    let open_paren = node.start_byte();
    let close_paren = node.end_byte().saturating_sub(1);
    let mut comma_positions: Vec<usize> = vec![];
    let mut item_count = 0usize;
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "(" | ")" => {}
            "," => comma_positions.push(child.end_byte()),
            // NOTE(review): every remaining child counts as an item, which
            // may include comment nodes — verify against the grammars used.
            _ if !child.kind().starts_with('"') => item_count += 1,
            _ => {}
        }
    }
    ListRange {
        open_paren,
        close_paren,
        comma_positions,
        item_count,
    }
}
/// Tree-sitter node kinds that represent a function definition in `grammar`.
fn function_item_kinds(grammar: &str) -> &'static [&'static str] {
    const RUST: &[&str] = &["function_item", "function_signature_item"];
    const PYTHON: &[&str] = &["function_definition"];
    const JS_FAMILY: &[&str] = &[
        "function_declaration",
        "function",
        "method_definition",
        "arrow_function",
    ];
    // Fallback for grammars without a dedicated mapping.
    const DEFAULT: &[&str] = &[
        "function_item",
        "function_declaration",
        "function_definition",
    ];
    match grammar {
        "rust" => RUST,
        "python" => PYTHON,
        "javascript" | "typescript" | "tsx" => JS_FAMILY,
        _ => DEFAULT,
    }
}
/// Tree-sitter node kind of a function's parameter list in `grammar`.
fn param_list_kind(grammar: &str) -> &'static str {
    if matches!(grammar, "javascript" | "typescript" | "tsx") {
        "formal_parameters"
    } else {
        // Python and the default case both use "parameters".
        "parameters"
    }
}
/// Tree-sitter node kind of a call expression in `grammar`.
fn call_kind(grammar: &str) -> &'static str {
    if grammar == "python" {
        "call"
    } else {
        "call_expression"
    }
}
/// Tree-sitter node kind of a call's argument list in `grammar`.
fn arg_list_kind(grammar: &str) -> &'static str {
    if grammar == "python" {
        "argument_list"
    } else {
        "arguments"
    }
}
/// Render a parameter declaration for the given grammar.
///
/// Python and JavaScript parameter lists are untyped here, so `ty` is
/// ignored for them. Every other grammar (Rust, TypeScript/TSX, and the
/// default) uses the `name: Type` form when a type is supplied.
///
/// Fix: the original spelled out three byte-identical typed arms and two
/// identical untyped arms; the duplicates are consolidated. Behavior is
/// unchanged for every input.
fn format_param(grammar: &str, name: &str, ty: Option<&str>) -> String {
    match grammar {
        "python" | "javascript" => name.to_string(),
        _ => match ty {
            Some(t) => format!("{}: {}", name, t),
            None => name.to_string(),
        },
    }
}
/// True when the first identifier-like child of `node` spells exactly
/// `name`. Only that first child is consulted; later identifiers never
/// match (same behavior as the original loop's early return).
fn function_name_matches(node: &tree_sitter::Node<'_>, content: &str, name: &str) -> bool {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .find(|child| matches!(child.kind(), "identifier" | "property_identifier"))
        .map(|child| &content[child.start_byte()..child.end_byte()] == name)
        .unwrap_or(false)
}
/// Decide whether the call expression `node` invokes something named `name`.
///
/// A bare callee identifier decides the answer immediately (true or
/// false). Field/member expressions and Python attributes only decide
/// positively: when none of their identifier parts match, scanning
/// continues with the next child.
fn call_matches_name(node: &tree_sitter::Node<'_>, content: &str, name: &str) -> bool {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "identifier" | "property_identifier" => {
                return &content[child.start_byte()..child.end_byte()] == name;
            }
            "field_expression" | "member_expression" => {
                let mut inner = child.walk();
                let hit = child.children(&mut inner).any(|ic| {
                    matches!(
                        ic.kind(),
                        "field_identifier" | "property_identifier" | "identifier"
                    ) && &content[ic.start_byte()..ic.end_byte()] == name
                });
                if hit {
                    return true;
                }
            }
            "attribute" => {
                let mut inner = child.walk();
                let hit = child.children(&mut inner).any(|ic| {
                    ic.kind() == "identifier"
                        && &content[ic.start_byte()..ic.end_byte()] == name
                });
                if hit {
                    return true;
                }
            }
            _ => {}
        }
    }
    false
}
/// Combine two edits of the same file into one, assuming each differs from
/// the shared original by exactly one contiguous insertion.
///
/// The insertions are applied back-to-front (descending offset) so that
/// the lower offset is still valid after the higher one is spliced in;
/// a stable sort keeps the original's ordering for equal offsets.
fn merge_edits(first: &PlannedEdit, second: &PlannedEdit) -> Result<PlannedEdit, String> {
    if first.file != second.file {
        return Err(format!(
            "Cannot merge edits for different files: {} vs {}",
            first.file.display(),
            second.file.display()
        ));
    }
    let original = &first.original;
    let (def_pos, def_text) = extract_insertion(original, &first.new_content)?;
    let (arg_pos, arg_text) = extract_insertion(original, &second.new_content)?;
    let mut ordered = [(def_pos, def_text), (arg_pos, arg_text)];
    ordered.sort_by(|a, b| b.0.cmp(&a.0));
    let mut new_content = original.clone();
    for (pos, text) in &ordered {
        new_content.insert_str(*pos, text);
    }
    Ok(PlannedEdit {
        file: first.file.clone(),
        original: original.clone(),
        new_content,
        description: format!("{} + {}", first.description, second.description),
    })
}
/// Given `original` and a `new_content` that differs from it by exactly
/// one contiguous inserted span, return `(byte_offset, inserted_text)`.
///
/// Computed via longest common byte prefix, then longest common byte
/// suffix of the remainders. Errors when the diff is not a pure insertion
/// (i.e. some original bytes were removed or replaced).
fn extract_insertion(original: &str, new_content: &str) -> Result<(usize, String), String> {
    let a = original.as_bytes();
    let b = new_content.as_bytes();
    // Longest common prefix, in bytes.
    let prefix_len = a.iter().zip(b).take_while(|(x, y)| x == y).count();
    let a_rest = &a[prefix_len..];
    let b_rest = &b[prefix_len..];
    // Longest common suffix of what remains, in bytes.
    let suffix_len = a_rest
        .iter()
        .rev()
        .zip(b_rest.iter().rev())
        .take_while(|(x, y)| x == y)
        .count();
    if suffix_len != a_rest.len() {
        // Some original bytes are not covered by prefix + suffix: deletion.
        return Err(format!(
            "merge_edits: expected a pure insertion but found deletion of {} bytes at offset {}",
            a_rest.len() - suffix_len,
            prefix_len
        ));
    }
    let inserted = &new_content[prefix_len..prefix_len + (b_rest.len() - suffix_len)];
    Ok((prefix_len, inserted.to_string()))
}
#[cfg(test)]
mod tests {
    use super::*;
    use normalize_edit::Editor;

    /// Minimal context rooted at `root` with no structure index, so
    /// planning updates only the definition file and emits a warning.
    fn make_ctx(root: &Path) -> RefactoringContext {
        RefactoringContext {
            root: root.to_path_buf(),
            editor: Editor::new(),
            index: None,
            loader: normalize_languages::GrammarLoader::new(),
        }
    }

    /// Grammars are optional at test time; tests skip when missing.
    fn grammar_available(name: &str) -> bool {
        normalize_languages::parsers::parser_for(name).is_some()
    }

    #[tokio::test]
    async fn rust_add_param_no_callers() {
        if !grammar_available("rust") {
            eprintln!("skipping: rust grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.rs");
        let content = "fn my_func(a: i32) -> bool {\n true\n}\n";
        std::fs::write(&file, content).unwrap();
        let ctx = make_ctx(dir.path());
        let result = plan_add_parameter(
            &ctx,
            "test.rs",
            "my_func",
            "b",
            Some("String"),
            "String::new()",
            None,
        )
        .await
        .unwrap();
        // No index => no call sites touched, and the warning explains why.
        assert_eq!(result.call_sites_updated, 0);
        let edit = &result.plan.edits[0];
        assert!(
            edit.new_content.contains("b: String"),
            "expected 'b: String' in: {}",
            edit.new_content
        );
        assert!(
            edit.new_content.contains("a: i32"),
            "expected 'a: i32' still present"
        );
        assert!(!result.plan.warnings.is_empty());
    }

    #[tokio::test]
    async fn rust_add_param_at_position_zero() {
        if !grammar_available("rust") {
            eprintln!("skipping: rust grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.rs");
        let content = "fn my_func(a: i32) -> bool {\n true\n}\n";
        std::fs::write(&file, content).unwrap();
        let ctx = make_ctx(dir.path());
        let result = plan_add_parameter(
            &ctx,
            "test.rs",
            "my_func",
            "b",
            Some("String"),
            "String::new()",
            Some(0),
        )
        .await
        .unwrap();
        let edit = &result.plan.edits[0];
        // Position 0 must place the new parameter first in the list.
        let b_pos = edit.new_content.find("b: String").unwrap();
        let a_pos = edit.new_content.find("a: i32").unwrap();
        assert!(b_pos < a_pos, "b should come before a");
    }

    #[tokio::test]
    async fn rust_empty_param_list() {
        if !grammar_available("rust") {
            eprintln!("skipping: rust grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.rs");
        let content = "fn my_func() -> bool {\n true\n}\n";
        std::fs::write(&file, content).unwrap();
        let ctx = make_ctx(dir.path());
        let result = plan_add_parameter(&ctx, "test.rs", "my_func", "x", Some("i32"), "0", None)
            .await
            .unwrap();
        let edit = &result.plan.edits[0];
        // Empty list: no separator should be emitted around the new param.
        assert!(
            edit.new_content.contains("fn my_func(x: i32)"),
            "got: {}",
            edit.new_content
        );
    }

    #[tokio::test]
    async fn python_add_param_no_callers() {
        if !grammar_available("python") {
            eprintln!("skipping: python grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.py");
        let content = "def my_func(a, b):\n return True\n";
        std::fs::write(&file, content).unwrap();
        let ctx = make_ctx(dir.path());
        let result = plan_add_parameter(&ctx, "test.py", "my_func", "c", None, "None", None)
            .await
            .unwrap();
        let edit = &result.plan.edits[0];
        // Python params are untyped: the bare name is appended at the end.
        assert!(
            edit.new_content.contains(", c)"),
            "expected ', c)' in: {}",
            edit.new_content
        );
    }

    #[tokio::test]
    async fn typescript_add_param_with_type() {
        if !grammar_available("typescript") {
            eprintln!("skipping: typescript grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.ts");
        let content = "function myFunc(a: number, b: string): boolean {\n return true;\n}\n";
        std::fs::write(&file, content).unwrap();
        let ctx = make_ctx(dir.path());
        let result = plan_add_parameter(
            &ctx,
            "test.ts",
            "myFunc",
            "c",
            Some("boolean"),
            "false",
            None,
        )
        .await
        .unwrap();
        let edit = &result.plan.edits[0];
        // TypeScript params keep the `name: Type` form.
        assert!(
            edit.new_content.contains("c: boolean"),
            "expected 'c: boolean' in: {}",
            edit.new_content
        );
    }

    #[test]
    fn extract_insertion_middle() {
        let original = "fn f(a: i32) {}";
        let new = "fn f(a: i32, b: String) {}";
        let (pos, text) = extract_insertion(original, new).unwrap();
        // Common prefix "fn f(a: i32" is 11 bytes long.
        assert_eq!(pos, 11);
        assert_eq!(text, ", b: String");
    }

    #[test]
    fn extract_insertion_front() {
        let original = "fn f(a: i32) {}";
        let new = "fn f(b: String, a: i32) {}";
        let (pos, text) = extract_insertion(original, new).unwrap();
        // Common prefix "fn f(" is 5 bytes long.
        assert_eq!(pos, 5);
        assert_eq!(text, "b: String, ");
    }

    #[test]
    fn function_not_found_returns_err() {
        if !grammar_available("rust") {
            eprintln!("skipping: rust grammar not available");
            return;
        }
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.rs");
        std::fs::write(&file, "fn other() {}\n").unwrap();
        let res =
            plan_add_param_in_definition(&file, "fn other() {}\n", "nonexistent", "x", None, None);
        assert!(res.is_err());
        let err = res.err().unwrap();
        assert!(
            err.contains("not found"),
            "expected 'not found' in: {}",
            err
        );
    }
}