mod messages;
mod repl;
use amql_engine::{
diff_annotations, execute_bench_request, execute_transaction, extract_aql_symbols,
find_project_root, format_bench_table, init_project, insert as nav_insert, load_manifest, meta,
nav_select, project_stats, read_source, remove as nav_remove, replace as nav_replace,
run_all_extractors, sidecar_for_colocated, suggest_repairs, unified_query, validate,
AnnotationStore, AqlDef, BaselineDef, BenchRequest, CodeCache, ExtractorRegistry,
InsertPosition, NodeRef, ProjectRoot, QueryOptions, RelativePath, ResolverRegistry, Scope,
TransactionOp,
};
use amql_log::{debug, info};
use clap::{CommandFactory, FromArgMatches, Parser, Subcommand};
use rmcp::ServiceExt;
use std::path::PathBuf;
use std::process::ExitCode;
// Top-level CLI definition. The program name and about text are attached in
// `main` from engine metadata rather than hard-coded here.
// NOTE: plain `//` comments (not `///` doc comments) are used on purpose —
// clap derive turns doc comments into help text, which would change behavior.
#[derive(Parser)]
#[command(version = env!("CARGO_PKG_VERSION"))]
struct Cli {
    // Repeatable -v/--verbose flag; the occurrence count selects the log level
    // (mapped in `main` via `amql_log::from_verbosity`). Global: valid after
    // any subcommand too.
    #[arg(short, long, action = clap::ArgAction::Count, global = true)]
    verbose: u8,
    // When no subcommand is given, `main` starts the interactive REPL.
    #[command(subcommand)]
    command: Option<Command>,
}
// All `aql` subcommands. `//` comments only — doc comments would alter the
// clap-generated help output.
#[derive(Subcommand)]
enum Command {
    // Unified query joining code elements with annotations.
    #[command(
        long_about = "Run a unified query that joins code elements with annotations via binding keys.\n\nExamples:\n aql query 'route[method=\"GET\"]' --scope src/routes/\n aql query 'function[async][export]'\n aql query 'describe > test' --scope src/"
    )]
    Query {
        selector: String,
        // Restrict the query to a path inside the project root.
        #[arg(long)]
        scope: Option<String>,
        #[arg(long)]
        limit: Option<usize>,
        #[arg(long)]
        offset: Option<usize>,
        // allow_hyphen_values lets sort keys like "-line" pass through clap.
        #[arg(long = "sort-by", allow_hyphen_values = true)]
        sort_by: Option<String>,
    },
    // Annotation-only selection; never parses source code.
    #[command(
        long_about = "Select annotations by CSS-like selector. Does not parse source code.\n\nExamples:\n aql select 'route[method=\"POST\"]'\n aql select 'describe > test' --file src/auth.test.ts\n aql select 'middleware[scope=\"global\"]'"
    )]
    Select {
        selector: String,
        // Limit results to a single annotated file.
        #[arg(long)]
        file: Option<String>,
        #[arg(long)]
        scope: Option<String>,
        #[arg(long)]
        limit: Option<usize>,
        #[arg(long)]
        offset: Option<usize>,
        #[arg(long = "sort-by", allow_hyphen_values = true)]
        sort_by: Option<String>,
    },
    // Check all annotations against the manifest; exits non-zero on errors.
    Validate,
    // Suggest fixes for stale/broken annotations (read-only).
    Repair,
    // Print the loaded manifest as JSON.
    Schema,
    // Print "path:line" of the sidecar for a source file.
    Locate {
        source: String,
        // Jump to the line where this binding is declared.
        #[arg(long)]
        word: Option<String>,
    },
    // Run one built-in extractor over a file or directory.
    Extract {
        name: String,
        path: Option<String>,
    },
    // Print project-wide annotation statistics.
    Stats,
    // Create the project schema/marker file in the current directory.
    Init,
    // Diff working-tree annotations against git HEAD.
    Diff,
    // Token-efficiency benchmark versus baseline shell commands.
    #[command(
        long_about = "Compare AQL token efficiency against named baseline approaches.\n\nExplicit AQL operation:\n aql bench --extract test --path src/ # auto-adds cat baseline\n aql bench --extract test --path src/ --baseline 'sg=sg -p ...' # named baselines\n aql bench --nav-select function_item --path src/main.rs\n aql bench --query 'route[method=GET]' --path src/\n\nConfig file:\n aql bench --config .config/aql.bench\n aql bench # auto-loads .config/aql.bench if present"
    )]
    Bench {
        #[arg(long)]
        extract: Option<String>,
        #[arg(long = "nav-select")]
        nav_select: Option<String>,
        #[arg(long)]
        query: Option<String>,
        #[arg(long)]
        path: Option<String>,
        // Repeatable: each occurrence adds one named baseline command.
        #[arg(long = "baseline")]
        baselines: Vec<String>,
        #[arg(long)]
        config: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Atomic multi-op source mutation (all-or-nothing).
    #[command(
        long_about = "Apply an ordered list of mutation ops atomically: all succeed or nothing is written.\n\nOps are a JSON array of TransactionOp objects (see aql nav select/read for NodeRefs).\nWithin a single file, order ops by descending start_byte to avoid offset drift.\n\nExamples:\n aql transact --file ops.json\n echo '[{\"type\":\"replace\",\"node\":{...},\"source\":\"x\"}]' | aql transact\n aql transact --file ops.json --json"
    )]
    Transact {
        // Ops file; stdin is read when omitted.
        #[arg(long)]
        file: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Run the MCP server over stdio.
    Mcp {
        // Explicit project root, overriding marker-file discovery.
        #[arg(long)]
        project: Option<String>,
    },
    // AST-level navigation and mutation (see NavAction).
    #[command(
        long_about = "Navigate and mutate source code using tree-sitter AST nodes.\n\nExamples:\n aql nav select --file src/main.rs function_item\n aql nav read '<node-json>'\n aql nav insert --position before '<node-json>' '// comment'\n aql nav replace '<node-json>' 'new_source'\n aql nav remove '<node-json>'"
    )]
    Nav {
        #[command(subcommand)]
        action: NavAction,
    },
}
// Subcommands of `aql nav`. Node arguments are JSON NodeRefs as printed by
// `aql nav select`; source arguments accept "-" for stdin (see
// `read_source_arg`). `//` comments only, to leave clap help unchanged.
#[derive(Subcommand)]
enum NavAction {
    // List AST nodes in a file matching a tree-sitter selector.
    Select {
        selector: String,
        #[arg(long)]
        file: String,
    },
    // Print the source text of one node verbatim.
    Read {
        node: String,
    },
    // Insert source before/after/into a target node.
    Insert {
        target: String,
        source: String,
        // One of: before, after, into (validated in dispatch_nav).
        #[arg(long, default_value = "before")]
        position: String,
    },
    // Replace a node's source text.
    Replace {
        node: String,
        source: String,
    },
    // Delete a node; also reports annotations detached by the removal.
    Remove {
        node: String,
    },
}
/// Entry point: parse arguments, configure logging, then either dispatch a
/// subcommand or drop into the interactive REPL.
fn main() -> ExitCode {
    // Build the clap command by hand so the name/about strings come from
    // engine metadata and the localized message table at runtime.
    let cli = Cli::command()
        .name(amql_engine::meta::NAME)
        .about(msg!("cli.about", "full_name" => amql_engine::meta::FULL_NAME))
        .get_matches();
    // On parse failure clap prints its own message and exits.
    let cli = Cli::from_arg_matches(&cli).unwrap_or_else(|e| e.exit());
    // Map the -v occurrence count to a log level before any command runs.
    amql_log::from_verbosity(cli.verbose);
    // Best effort: nicer panic/error reports; ignore re-install errors.
    let _ = color_eyre::install();
    match cli.command {
        Some(cmd) => dispatch_command(cmd),
        // No subcommand: interactive mode.
        None => repl::run_repl(),
    }
}
/// Print a labeled error line to stderr, with an ANSI-colored "error" prefix
/// when stderr is an interactive terminal and plain text when piped.
fn print_error(label: &str, msg: &dyn std::fmt::Display) {
    // Pick the prefix once; the rest of the line is identical either way.
    let prefix = if std::io::IsTerminal::is_terminal(&std::io::stderr()) {
        "\x1b[1;31merror\x1b[0m"
    } else {
        "error"
    };
    eprintln!("{prefix}[{label}]: {msg}");
}
/// Route a parsed subcommand to its handler.
///
/// Commands that do not need a loaded manifest (locate, extract, init, diff,
/// bench, transact, mcp, nav) are dispatched by the early returns; the
/// remaining commands share the project-root discovery and manifest loading
/// below, then branch in the final `match`.
fn dispatch_command(cmd: Command) -> ExitCode {
    // --- manifest-free commands: dispatch and return early ---
    if let Command::Locate { source, word } = cmd {
        return dispatch_locate(&source, word.as_deref());
    }
    if let Command::Extract { name, path } = cmd {
        return dispatch_extract(&name, path.as_deref());
    }
    if matches!(cmd, Command::Init) {
        return dispatch_init();
    }
    if matches!(cmd, Command::Diff) {
        return dispatch_diff();
    }
    if let Command::Bench {
        extract,
        nav_select,
        query,
        path,
        baselines,
        config,
        json,
    } = cmd
    {
        return dispatch_bench(BenchArgs {
            extract: extract.as_deref(),
            nav_select: nav_select.as_deref(),
            query: query.as_deref(),
            path: path.as_deref(),
            raw_baselines: &baselines,
            config_path: config.as_deref(),
            json,
        });
    }
    if let Command::Transact { file, json } = cmd {
        return dispatch_transact(file.as_deref(), json);
    }
    if let Command::Mcp { project } = cmd {
        return dispatch_mcp(project.as_deref());
    }
    if let Command::Nav { action } = cmd {
        return dispatch_nav(action);
    }
    // --- shared setup for query/select/validate/repair/schema/stats ---
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let project_root = match find_project_root(&cwd) {
        Some(root) => root,
        None => {
            print_error(
                "config",
                &format!(
                    "no {} found in any parent directory\n hint: create {} to mark the project root",
                    meta::schema_file(), meta::schema_file()
                ),
            );
            return ExitCode::FAILURE;
        }
    };
    info!("project root: {}", project_root.display());
    let manifest = match load_manifest(&project_root) {
        Ok(m) => {
            debug!(
                "manifest: {} tags, {} extractors",
                m.tags.len(),
                m.extractors.len()
            );
            m
        }
        Err(e) => {
            print_error("manifest", &e);
            return ExitCode::FAILURE;
        }
    };
    match cmd {
        Command::Query {
            selector,
            scope,
            limit,
            offset,
            sort_by,
        } => {
            info!("query: {selector}");
            let resolvers = make_resolvers();
            let mut cache = CodeCache::new(&project_root);
            let mut store = AnnotationStore::new(&project_root);
            store.load_all_from_locator();
            load_extractors(&manifest, &project_root, &mut store);
            // Reject a non-existent scope path up front, before any parsing.
            let scope_str = scope.as_deref().unwrap_or("");
            if !scope_str.is_empty() && !project_root.join(scope_str).exists() {
                print_error(
                    "query",
                    &format_args!("scope path does not exist: {scope_str}"),
                );
                return ExitCode::FAILURE;
            }
            let opts = build_query_opts(limit, offset, sort_by);
            match unified_query(
                &selector,
                &Scope::from(scope_str),
                &mut cache,
                &mut store,
                &resolvers,
                opts.as_ref(),
            ) {
                Ok(results) => {
                    info!("{} results", results.len());
                    if results.is_empty() {
                        // Zero results: the selector may be a raw tree-sitter
                        // node kind; hint at the equivalent AQL tag.
                        if let Some(hint) = tree_sitter_kind_hint(&selector) {
                            eprintln!("hint: {hint}");
                        }
                    }
                    println!("{}", serde_json::to_string_pretty(&results).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("query", &e);
                    ExitCode::FAILURE
                }
            }
        }
        Command::Select {
            selector,
            file,
            scope,
            limit,
            offset,
            sort_by,
        } => {
            info!("select: {selector}");
            let mut store = AnnotationStore::new(&project_root);
            store.load_all_from_locator();
            load_extractors(&manifest, &project_root, &mut store);
            // Same scope existence check as Query.
            let scope_str = scope.as_deref().unwrap_or("");
            if !scope_str.is_empty() && !project_root.join(scope_str).exists() {
                print_error(
                    "select",
                    &format_args!("scope path does not exist: {scope_str}"),
                );
                return ExitCode::FAILURE;
            }
            let opts = build_query_opts(limit, offset, sort_by);
            match store.select(&selector, file.as_deref(), scope.as_deref(), opts.as_ref()) {
                Ok(results) => {
                    info!("{} results", results.len());
                    println!("{}", serde_json::to_string_pretty(&results).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("select", &e);
                    ExitCode::FAILURE
                }
            }
        }
        Command::Validate => {
            let mut store = AnnotationStore::new(&project_root);
            store.load_all_from_locator();
            let results = validate(&store, &manifest);
            info!("{} issues", results.len());
            println!("{}", serde_json::to_string_pretty(&results).unwrap());
            // Only Error-level findings fail the command; warnings pass.
            if results
                .iter()
                .any(|r| r.level == amql_engine::ValidationLevel::Error)
            {
                ExitCode::FAILURE
            } else {
                ExitCode::SUCCESS
            }
        }
        Command::Repair => {
            let resolvers = make_resolvers();
            let mut cache = CodeCache::new(&project_root);
            // Warm the code cache over the whole project ("" = root scope) so
            // repair suggestions can reference actual code locations.
            cache.ensure_scope(&Scope::from(""), &resolvers);
            let mut store = AnnotationStore::new(&project_root);
            store.load_all_from_locator();
            let suggestions = suggest_repairs(&store, Some(&cache));
            info!("{} suggestions", suggestions.len());
            println!("{}", serde_json::to_string_pretty(&suggestions).unwrap());
            ExitCode::SUCCESS
        }
        Command::Schema => {
            // The manifest is already loaded above; just print it.
            println!("{}", serde_json::to_string_pretty(&manifest).unwrap());
            ExitCode::SUCCESS
        }
        Command::Stats => {
            let resolvers = make_resolvers();
            let mut store = AnnotationStore::new(&project_root);
            store.load_all_from_locator();
            load_extractors(&manifest, &project_root, &mut store);
            let stats = project_stats(
                &amql_engine::ProjectRoot::from(project_root.as_path()),
                &store,
                &resolvers,
            );
            println!("{}", serde_json::to_string_pretty(&stats).unwrap());
            ExitCode::SUCCESS
        }
        // Every one of these was handled by the early returns above.
        Command::Locate { .. }
        | Command::Extract { .. }
        | Command::Init
        | Command::Diff
        | Command::Bench { .. }
        | Command::Transact { .. }
        | Command::Mcp { .. }
        | Command::Nav { .. } => {
            unreachable!()
        }
    }
}
/// Bundle pagination/sort flags into `QueryOptions`, or `None` when the user
/// supplied no constraint at all (so callers can skip option handling).
fn build_query_opts(
    limit: Option<usize>,
    offset: Option<usize>,
    sort_by: Option<String>,
) -> Option<QueryOptions> {
    // Construct options only when at least one flag was provided.
    let any_set = limit.is_some() || offset.is_some() || sort_by.is_some();
    any_set.then(|| QueryOptions::new(limit, offset, sort_by))
}
/// Run the built-in extractor `name` over `path` (file or directory,
/// defaulting to the current directory) and print every produced annotation
/// as one pretty-printed JSON object.
///
/// Unreadable files are reported to stderr but do not abort the run.
fn dispatch_extract(name: &str, path: Option<&str>) -> ExitCode {
    let registry = ExtractorRegistry::with_defaults();
    let builtin = match registry.get(name) {
        Some(e) => e,
        None => {
            // Unknown name: list the available extractors to help the user.
            print_error(
                "extract",
                &format_args!(
                    "unknown extractor: {name}\navailable: {}",
                    registry.names().join(", ")
                ),
            );
            return ExitCode::FAILURE;
        }
    };
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let target = path.map(|p| cwd.join(p)).unwrap_or_else(|| cwd.clone());
    info!("extractor: {name}, target: {}", target.display());
    // Extractors advertise extensions like ".ts"; strip the dot so they
    // compare equal to `Path::extension()` output.
    let extensions: rustc_hash::FxHashSet<&str> = builtin
        .extensions()
        .iter()
        .map(|e| e.trim_start_matches('.'))
        .collect();
    let mut source_files: Vec<PathBuf> = Vec::new();
    if target.is_file() {
        source_files.push(target);
    } else if target.is_dir() {
        collect_source_files(&target, &extensions, &mut source_files);
    } else {
        print_error(
            "extract",
            &format_args!("path not found: {}", target.display()),
        );
        return ExitCode::FAILURE;
    }
    debug!("{} source files", source_files.len());
    // Extract works without a project marker: fall back to cwd as root.
    let project_root = amql_engine::ProjectRoot::from(
        find_project_root(&cwd)
            .unwrap_or_else(|| cwd.clone())
            .as_path(),
    );
    let mut all_annotations = Vec::new();
    for file in &source_files {
        // Prefer a project-relative path; keep the original path when the
        // file lies outside the project root.
        let relative = file
            .strip_prefix(project_root.as_ref())
            .map(|r| amql_engine::RelativePath::from(r.to_string_lossy().as_ref()))
            .unwrap_or_else(|_| amql_engine::RelativePath::from(file.to_string_lossy().as_ref()));
        let source = match std::fs::read_to_string(file) {
            Ok(s) => s,
            Err(e) => {
                // Best-effort: report and keep going with remaining files.
                print_error(
                    "extract",
                    &format_args!("failed to read {}: {e}", file.display()),
                );
                continue;
            }
        };
        let annotations = builtin.extract(&source, &relative);
        debug!(
            "{}: {} annotations",
            AsRef::<str>::as_ref(&relative),
            annotations.len()
        );
        all_annotations.extend(annotations);
    }
    info!("{} annotations total", all_annotations.len());
    let output = serde_json::json!({ "annotations": all_annotations });
    println!("{}", serde_json::to_string_pretty(&output).unwrap());
    ExitCode::SUCCESS
}
/// True when `s` is a valid baseline name: non-empty, starting with an ASCII
/// letter or underscore, followed only by ASCII alphanumerics, `-`, or `_`.
fn is_identifier(s: &str) -> bool {
    let mut chars = s.chars();
    match chars.next() {
        // First char: letter or underscore (both also satisfy the tail rule).
        Some(first) if first.is_ascii_alphabetic() || first == '_' => {
            chars.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
        }
        // Empty string or an invalid leading character.
        _ => false,
    }
}
/// Parse `--baseline` values into `BaselineDef::Command`s.
///
/// Each value is either `name=command` (when the left side is a valid
/// identifier) or a bare command whose first whitespace-separated word
/// doubles as its name. Fails on an empty name or command.
fn parse_inline_baselines(raw: &[String]) -> Result<Vec<BaselineDef>, String> {
    let mut defs = Vec::with_capacity(raw.len());
    for spec in raw {
        // Accept the explicit `name=cmd` split only for identifier names;
        // anything else is treated as a bare command string.
        let explicit = spec
            .split_once('=')
            .filter(|&(left, _)| is_identifier(left))
            .map(|(left, right)| (left.to_string(), right.to_string()));
        let (name, cmd) = explicit.unwrap_or_else(|| {
            let word = spec.split_whitespace().next().unwrap_or(spec);
            (word.to_string(), spec.clone())
        });
        if name.trim().is_empty() || cmd.trim().is_empty() {
            return Err(format!(
                "invalid baseline '{spec}': name and command must be non-empty"
            ));
        }
        defs.push(BaselineDef::Command { name, cmd });
    }
    Ok(defs)
}
// Borrowed view of the `bench` subcommand's flags, bundled so
// `dispatch_bench` takes one argument instead of seven.
struct BenchArgs<'a> {
    // Extractor name for an --extract op.
    extract: Option<&'a str>,
    // Tree-sitter selector for a --nav-select op.
    nav_select: Option<&'a str>,
    // AQL selector for a --query op.
    query: Option<&'a str>,
    // Path/scope shared by whichever op is chosen.
    path: Option<&'a str>,
    // Raw --baseline values, parsed later by `parse_inline_baselines`.
    raw_baselines: &'a [String],
    // Optional bench config file path.
    config_path: Option<&'a str>,
    // Emit JSON instead of the human-readable table.
    json: bool,
}
/// Run the `bench` subcommand: translate CLI flags into a `BenchRequest`,
/// hand it to the engine, and print either a JSON report or a table.
///
/// At most one of `--extract` / `--nav-select` / `--query` may be given;
/// a config file can supply cases instead. Exits non-zero when any AQL
/// case reports an error.
fn dispatch_bench(args: BenchArgs<'_>) -> ExitCode {
    let BenchArgs {
        extract,
        nav_select,
        query,
        path,
        raw_baselines,
        config_path,
        json,
    } = args;
    // Benchmarks tolerate a missing project marker and fall back to cwd.
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let project_root = find_project_root(&cwd).unwrap_or_else(|| cwd.clone());
    let root = ProjectRoot::from(project_root.as_path());
    let registry = ExtractorRegistry::with_defaults();
    let resolvers = make_resolvers();
    // The three operation flags are mutually exclusive.
    let op_count = [extract.is_some(), nav_select.is_some(), query.is_some()]
        .iter()
        .filter(|&&b| b)
        .count();
    if op_count > 1 {
        print_error(
            "bench",
            &"only one of --extract, --nav-select, --query may be specified",
        );
        return ExitCode::FAILURE;
    }
    // Map whichever flag was given to the engine's AqlDef. `path` doubles as
    // the extract/nav target (default ".") or the query scope (default "").
    let aql_def: Option<AqlDef> = extract
        .map(|extractor| AqlDef::Extract {
            extractor: extractor.to_string(),
            path: path.unwrap_or(".").to_string(),
        })
        .or_else(|| {
            nav_select.map(|selector| AqlDef::NavSelect {
                path: path.unwrap_or(".").to_string(),
                selector: selector.to_string(),
            })
        })
        .or_else(|| {
            query.map(|selector| AqlDef::Query {
                selector: selector.to_string(),
                scope: path.unwrap_or("").to_string(),
            })
        });
    let baselines = match parse_inline_baselines(raw_baselines) {
        Ok(b) => b,
        Err(e) => {
            print_error("bench", &e);
            return ExitCode::FAILURE;
        }
    };
    let req = BenchRequest {
        aql: aql_def,
        path: path.map(|p| p.to_string()),
        baselines,
        config: config_path.map(|p| p.to_string()),
    };
    // FIX: restored `&registry` — the source had the mojibake `®istry`
    // (an HTML-escaped `&reg;` entity), which is not valid Rust.
    match execute_bench_request(&root, req, &registry, &resolvers) {
        Ok(response) => {
            if json {
                println!("{}", serde_json::to_string_pretty(&response).unwrap());
            } else {
                print!(
                    "{}",
                    format_bench_table(&response.project, response.source_files, &response.cases)
                );
            }
            // Any failed AQL case fails the whole bench run.
            let any_aql_error = response.cases.iter().any(|c| c.aql.error.is_some());
            if any_aql_error {
                ExitCode::FAILURE
            } else {
                ExitCode::SUCCESS
            }
        }
        Err(e) => {
            print_error("bench", &e);
            ExitCode::FAILURE
        }
    }
}
/// Print `path:line` of the annotation sidecar colocated with `source`,
/// optionally pointing at the line where binding `word` is declared.
fn dispatch_locate(source: &str, word: Option<&str>) -> ExitCode {
    // The engine computes the sidecar name from the relative source path...
    let sidecar_rel = sidecar_for_colocated(&amql_engine::RelativePath::from(source));
    let source_path = std::path::Path::new(source);
    // ...then rebase only the sidecar's file name next to the actual source
    // file, so relative/absolute source paths both resolve correctly.
    let sidecar_path = source_path
        .parent()
        .map(|p| {
            p.join(
                std::path::Path::new(AsRef::<str>::as_ref(&sidecar_rel))
                    .file_name()
                    .unwrap_or_default(),
            )
        })
        .unwrap_or_else(|| PathBuf::from(AsRef::<str>::as_ref(&sidecar_rel)));
    if !sidecar_path.is_file() {
        print_error(
            "locate",
            &format_args!("no sidecar found: {}", sidecar_path.display()),
        );
        return ExitCode::FAILURE;
    }
    let text = match std::fs::read_to_string(&sidecar_path) {
        Ok(t) => t,
        Err(e) => {
            print_error(
                "locate",
                &format_args!("failed to read {}: {e}", sidecar_path.display()),
            );
            return ExitCode::FAILURE;
        }
    };
    // When no word is given — or it is not found among the sidecar's
    // symbols — fall back to line 0 (top of file).
    let line = word
        .and_then(|w| {
            extract_aql_symbols(&text)
                .iter()
                .find(|s| s.binding.as_ref() == w)
                .map(|s| s.line)
        })
        .unwrap_or(0);
    // Symbol lines are 0-based; editors expect 1-based locations.
    println!("{}:{}", sidecar_path.display(), line + 1);
    ExitCode::SUCCESS
}
/// Run the `diff` subcommand: compare the working tree's annotations with
/// those recorded at git HEAD and print the diff as pretty JSON.
///
/// The JSON diff goes to stdout; a human-readable summary goes to stderr so
/// stdout stays machine-parseable.
fn dispatch_diff() -> ExitCode {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let project_root = match find_project_root(&cwd) {
        Some(root) => root,
        None => {
            print_error(
                "config",
                &format!("no {} found in any parent directory", meta::schema_file()),
            );
            return ExitCode::FAILURE;
        }
    };
    let mut current_store = AnnotationStore::new(&project_root);
    current_store.load_all_from_locator();
    let baseline_annotations = match load_baseline_annotations(&project_root) {
        Ok(anns) => anns,
        Err(e) => {
            print_error("diff", &e);
            return ExitCode::FAILURE;
        }
    };
    let current_annotations = current_store.get_all_annotations();
    // Clone into owned values so both diff sides have matching types.
    let current_owned: Vec<_> = current_annotations.into_iter().cloned().collect();
    // FIX: restored `&current_owned` — the source had the mojibake
    // `¤t_owned` (an HTML-escaped `&curren;` entity), not valid Rust.
    let diff = diff_annotations(&baseline_annotations, &current_owned);
    println!("{}", serde_json::to_string_pretty(&diff).unwrap());
    if diff.added.is_empty() && diff.removed.is_empty() && diff.changed.is_empty() {
        eprintln!("No annotation changes since last commit.");
    } else {
        eprintln!(
            "{} added, {} removed, {} changed",
            diff.added.len(),
            diff.removed.len(),
            diff.changed.len()
        );
    }
    ExitCode::SUCCESS
}
/// Load the annotations recorded at git HEAD.
///
/// Lists committed files via `git ls-tree`, reads every `.aqm` sidecar
/// (skipping generated `.d.aqm` files) through `git show HEAD:<path>`, and
/// parses them into a temporary store. Errors when git cannot run or the
/// listing fails (e.g. not a repository, or no commits yet).
fn load_baseline_annotations(
    project_root: &std::path::Path,
) -> Result<Vec<amql_engine::Annotation>, String> {
    let output = std::process::Command::new("git")
        .args(["ls-tree", "-r", "--name-only", "HEAD"])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("Failed to run git ls-tree: {e}"))?;
    if !output.status.success() {
        return Err("git ls-tree failed — are you in a git repository with commits?".to_string());
    }
    let file_list = String::from_utf8_lossy(&output.stdout);
    let aql_files: Vec<&str> = file_list
        .lines()
        .filter(|l| l.ends_with(".aqm") && !l.ends_with(".d.aqm"))
        .collect();
    let mut baseline_store = AnnotationStore::new(project_root);
    for aql_file in aql_files {
        let show_output = std::process::Command::new("git")
            .args(["show", &format!("HEAD:{aql_file}")])
            .current_dir(project_root)
            .output()
            .map_err(|e| format!("Failed to read {aql_file} from HEAD: {e}"))?;
        if !show_output.status.success() {
            // Listed but unreadable at HEAD: skip rather than failing the
            // whole diff.
            continue;
        }
        let content = String::from_utf8_lossy(&show_output.stdout);
        // Sidecar "<src>.aqm" maps back to the source path "<src>".
        let rel_source =
            amql_engine::RelativePath::from(aql_file.strip_suffix(".aqm").unwrap_or(aql_file));
        // Parse failures in historical sidecars are deliberately ignored.
        let _ = baseline_store.load_xml(&rel_source, &content);
    }
    Ok(baseline_store
        .get_all_annotations()
        .into_iter()
        .cloned()
        .collect())
}
/// Run the MCP server over stdio until the client disconnects.
///
/// `--project` overrides marker-file discovery. All logging is routed to
/// stderr because stdout carries the MCP protocol stream.
fn dispatch_mcp(project: Option<&str>) -> ExitCode {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let project_root = match project {
        Some(p) => PathBuf::from(p),
        None => match find_project_root(&cwd) {
            Some(root) => root,
            None => {
                print_error(
                    "mcp",
                    &format!(
                        "no {} found in any parent directory\n hint: use --project to specify the project root",
                        meta::schema_file()
                    ),
                );
                return ExitCode::FAILURE;
            }
        },
    };
    // Default to WARN; RUST_LOG may raise verbosity. Writer is stderr so the
    // protocol on stdout is never polluted.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive(tracing_subscriber::filter::LevelFilter::WARN.into()),
        )
        .with_writer(std::io::stderr)
        .init();
    let rt = tokio::runtime::Runtime::new().unwrap_or_else(|e| {
        print_error("mcp", &format!("failed to create async runtime: {e}"));
        std::process::exit(1);
    });
    rt.block_on(async {
        // Server construction does blocking work; keep it off the async
        // executor's worker threads.
        let server =
            match tokio::task::block_in_place(|| amql_mcp_server::AqlServer::new(&project_root)) {
                Ok(s) => s,
                Err(e) => {
                    print_error("mcp", &e);
                    return ExitCode::FAILURE;
                }
            };
        match server.serve(rmcp::transport::stdio()).await {
            Ok(service) => {
                // Block until the transport shuts down.
                if let Err(e) = service.waiting().await {
                    print_error("mcp", &e);
                    return ExitCode::FAILURE;
                }
                ExitCode::SUCCESS
            }
            Err(e) => {
                print_error("mcp", &e);
                ExitCode::FAILURE
            }
        }
    })
}
/// Run the `init` subcommand: create the project schema file in the current
/// directory and echo its contents on success.
fn dispatch_init() -> ExitCode {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    // Guard clause: bail out on failure, then print in the happy path.
    let schema = match init_project(&cwd) {
        Ok(s) => s,
        Err(e) => {
            print_error("init", &e);
            return ExitCode::FAILURE;
        }
    };
    println!("Created {}:", meta::schema_file());
    println!("{schema}");
    ExitCode::SUCCESS
}
/// Run the `transact` subcommand: read a JSON array of `TransactionOp`s from
/// `--file` (or stdin when omitted) and apply it atomically — either every
/// op is written or nothing is.
fn dispatch_transact(file: Option<&str>, json_output: bool) -> ExitCode {
    use std::io::Read;
    let raw = match file {
        Some(path) => match std::fs::read_to_string(path) {
            Ok(s) => s,
            Err(e) => {
                print_error("transact", &format!("Failed to read {path}: {e}"));
                return ExitCode::FAILURE;
            }
        },
        None => {
            // No --file: consume the whole of stdin.
            let mut buf = String::new();
            if let Err(e) = std::io::stdin().read_to_string(&mut buf) {
                print_error("transact", &format!("Failed to read stdin: {e}"));
                return ExitCode::FAILURE;
            }
            buf
        }
    };
    let ops: Vec<TransactionOp> = match serde_json::from_str(&raw) {
        Ok(o) => o,
        Err(e) => {
            print_error("transact", &format!("Invalid ops JSON: {e}"));
            return ExitCode::FAILURE;
        }
    };
    // An empty op list is almost certainly a caller mistake; reject it.
    if ops.is_empty() {
        print_error("transact", &"ops array must be non-empty");
        return ExitCode::FAILURE;
    }
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let project_root = match find_project_root(&cwd) {
        Some(root) => root,
        None => {
            print_error(
                "transact",
                &format!("no {} found in any parent directory", meta::schema_file()),
            );
            return ExitCode::FAILURE;
        }
    };
    let project_root = ProjectRoot::from(project_root.as_path());
    match execute_transaction(&project_root, ops) {
        Ok(result) => {
            if json_output {
                println!("{}", serde_json::to_string_pretty(&result).unwrap());
            } else {
                println!(
                    "Transaction committed: {} file(s) modified, {} op(s) applied.",
                    result.files_modified.len(),
                    result.ops_applied
                );
                for f in &result.files_modified {
                    println!(" {f}");
                }
            }
            ExitCode::SUCCESS
        }
        Err(e) => {
            print_error("transact", &e);
            ExitCode::FAILURE
        }
    }
}
/// Resolve a source-text argument: a literal `"-"` means "read all of
/// stdin"; anything else is returned as-is.
fn read_source_arg(arg: &str) -> Result<String, String> {
    // Fast path: ordinary argument, no I/O involved.
    if arg != "-" {
        return Ok(arg.to_string());
    }
    use std::io::Read;
    let mut text = String::new();
    match std::io::stdin().read_to_string(&mut text) {
        Ok(_) => Ok(text),
        Err(e) => Err(format!("Failed to read stdin: {e}")),
    }
}
/// Deserialize a `NodeRef` from the JSON form printed by `aql nav select`,
/// converting the serde error into a user-facing message string.
fn parse_node_ref(json: &str) -> Result<NodeRef, String> {
    match serde_json::from_str::<NodeRef>(json) {
        Ok(node_ref) => Ok(node_ref),
        Err(e) => Err(format!("Invalid node JSON: {e}")),
    }
}
/// Route a `nav` subcommand to the engine's AST navigation/mutation API.
///
/// Node arguments are the JSON `NodeRef`s printed by `aql nav select`;
/// source arguments accept `"-"` to read from stdin (see `read_source_arg`).
fn dispatch_nav(action: NavAction) -> ExitCode {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    // Nav works without a project marker: fall back to cwd as the root.
    let root_path = find_project_root(&cwd).unwrap_or_else(|| cwd.clone());
    let project_root = ProjectRoot::from(root_path.as_path());
    match action {
        NavAction::Select { selector, file } => {
            let rel = RelativePath::from(file.as_str());
            match nav_select(&project_root, &rel, None, &selector) {
                Ok(result) => {
                    println!("{}", serde_json::to_string_pretty(&result).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("nav/select", &e);
                    ExitCode::FAILURE
                }
            }
        }
        NavAction::Read { node } => match parse_node_ref(&node) {
            Ok(node_ref) => match read_source(&project_root, &node_ref) {
                Ok(text) => {
                    // print! (not println!): emit the node text verbatim.
                    print!("{text}");
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("nav/read", &e);
                    ExitCode::FAILURE
                }
            },
            Err(e) => {
                print_error("nav/read", &e);
                ExitCode::FAILURE
            }
        },
        NavAction::Insert {
            target,
            source,
            position,
        } => {
            // Validate --position before doing any parsing or I/O.
            let pos = match position.as_str() {
                "before" => InsertPosition::Before,
                "after" => InsertPosition::After,
                "into" => InsertPosition::Into,
                other => {
                    print_error(
                        "nav/insert",
                        &format_args!("invalid position: {other} (expected: before, after, into)"),
                    );
                    return ExitCode::FAILURE;
                }
            };
            let target_ref = match parse_node_ref(&target) {
                Ok(r) => r,
                Err(e) => {
                    print_error("nav/insert", &e);
                    return ExitCode::FAILURE;
                }
            };
            let text = match read_source_arg(&source) {
                Ok(t) => t,
                Err(e) => {
                    print_error("nav/insert", &e);
                    return ExitCode::FAILURE;
                }
            };
            match nav_insert(&project_root, &target_ref, pos, &text) {
                Ok(result) => {
                    println!("{}", serde_json::to_string_pretty(&result).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("nav/insert", &e);
                    ExitCode::FAILURE
                }
            }
        }
        NavAction::Replace { node, source } => {
            let node_ref = match parse_node_ref(&node) {
                Ok(r) => r,
                Err(e) => {
                    print_error("nav/replace", &e);
                    return ExitCode::FAILURE;
                }
            };
            let text = match read_source_arg(&source) {
                Ok(t) => t,
                Err(e) => {
                    print_error("nav/replace", &e);
                    return ExitCode::FAILURE;
                }
            };
            match nav_replace(&project_root, &node_ref, &text) {
                Ok(result) => {
                    println!("{}", serde_json::to_string_pretty(&result).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("nav/replace", &e);
                    ExitCode::FAILURE
                }
            }
        }
        NavAction::Remove { node } => match parse_node_ref(&node) {
            Ok(node_ref) => match nav_remove(&project_root, &node_ref) {
                Ok((result, detached)) => {
                    // Also surface annotations detached by removing the node.
                    let output = serde_json::json!({
                        "source": result.source,
                        "affected_nodes": result.affected_nodes,
                        "detached": detached,
                    });
                    println!("{}", serde_json::to_string_pretty(&output).unwrap());
                    ExitCode::SUCCESS
                }
                Err(e) => {
                    print_error("nav/remove", &e);
                    ExitCode::FAILURE
                }
            },
            Err(e) => {
                print_error("nav/remove", &e);
                ExitCode::FAILURE
            }
        },
    }
}
/// Single construction point for the resolver registry used by every
/// command, so a future custom-resolver hook only needs one change.
fn make_resolvers() -> ResolverRegistry {
    ResolverRegistry::with_defaults()
}
// Directory names never descended into by `collect_source_files`
// (vendored dependencies, VCS metadata, and build/coverage output).
const SKIP_DIRS: &[&str] = &[
    "node_modules",
    ".git",
    "dist",
    "build",
    "target",
    "coverage",
];
/// Recursively gather files under `dir` whose extension is in `extensions`,
/// appending them to `out` in a deterministic (name-sorted) order.
///
/// Skips the directories in `SKIP_DIRS` and TypeScript declaration files
/// (`.d.ts` / `.d.mts` / `.d.cts`); unreadable directories are ignored.
fn collect_source_files(
    dir: &std::path::Path,
    extensions: &rustc_hash::FxHashSet<&str>,
    out: &mut Vec<PathBuf>,
) {
    // Unreadable directory: silently skip, matching best-effort traversal.
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    // Sort by file name so output order is stable across platforms.
    let mut children: Vec<_> = entries.flatten().collect();
    children.sort_by_key(|c| c.file_name());
    for child in children {
        let path = child.path();
        if path.is_dir() {
            let dir_name = child.file_name();
            if SKIP_DIRS.contains(&dir_name.to_string_lossy().as_ref()) {
                continue;
            }
            collect_source_files(&path, extensions, out);
        } else if path.is_file() {
            let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
            if !extensions.contains(ext) {
                continue;
            }
            let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
            // TypeScript declaration files carry no runtime code.
            let is_declaration = file_name.ends_with(".d.ts")
                || file_name.ends_with(".d.mts")
                || file_name.ends_with(".d.cts");
            if !is_declaration {
                out.push(path);
            }
        }
    }
}
// Raw tree-sitter node kinds mapped to the AQL tag the user probably meant;
// consumed by `tree_sitter_kind_hint` when a query returns no results.
const TS_KIND_HINTS: &[(&str, &str)] = &[
    ("arrow_function", "function[arrow]"),
    ("function_declaration", "function"),
    ("function_expression", "function"),
    ("generator_function_declaration", "function[generator]"),
    ("class_declaration", "class"),
    ("abstract_class_declaration", "class[abstract]"),
    ("method_definition", "method"),
    ("interface_declaration", "interface"),
    ("type_alias_declaration", "type"),
    ("enum_declaration", "enum"),
    ("function_item", "function"),
    ("struct_item", "struct"),
    ("enum_item", "enum"),
    ("trait_item", "trait"),
    ("impl_item", "impl"),
    ("mod_item", "module"),
    ("lexical_declaration", "const"),
    ("variable_declaration", "const"),
];
/// If `selector` starts with a known raw tree-sitter node kind, return a hint
/// suggesting the equivalent AQL tag; `None` when the tag is unrecognized.
fn tree_sitter_kind_hint(selector: &str) -> Option<String> {
    // Strip any attribute filter ("kind[attr=...]") down to the bare tag.
    let tag = selector.split('[').next().unwrap_or(selector).trim();
    for (kind, suggestion) in TS_KIND_HINTS {
        if *kind == tag {
            return Some(format!(
                "'{kind}' is a tree-sitter node kind, not an AQL tag. Try: aql query '{suggestion}'"
            ));
        }
    }
    None
}
/// Run every extractor declared in the manifest and merge any produced
/// annotations into `store`, so queries see code-derived annotations
/// alongside hand-written sidecars.
fn load_extractors(
    manifest: &amql_engine::Manifest,
    project_root: &std::path::Path,
    store: &mut AnnotationStore,
) {
    let project_root = amql_engine::ProjectRoot::from(project_root);
    let registry = ExtractorRegistry::with_defaults();
    // FIX: restored `&registry` — the source had the mojibake `®istry`
    // (an HTML-escaped `&reg;` entity), which is not valid Rust.
    let results = run_all_extractors(manifest, &project_root, &registry);
    for result in results {
        // Skip empty outputs to avoid pointless store churn.
        if !result.annotations.is_empty() {
            store.load_extractor_output(result.annotations);
        }
    }
}