use aft::bash_background::BgTaskRegistry;
use aft::config::Config;
use aft::context::{AppContext, SemanticIndexEvent, SemanticIndexStatus};
use aft::log_ctx;
use aft::lsp::client::LspEvent;
use aft::parser::TreeSitterProvider;
use aft::protocol::{EchoParams, PushFrame, RawRequest, Response};
use std::collections::HashSet;
use std::io::{self, BufRead, Write};
/// Process entry point: run a line-oriented JSON request/response loop
/// over stdin/stdout until stdin closes or a stdout write fails.
///
/// Each input line is parsed as a `RawRequest`, dispatched to a handler,
/// and the `Response` is written back as a single JSON line. Progress
/// frames are pushed out-of-band through the same shared stdout writer.
fn main() {
    // `--version` short-circuits before any logging or context setup.
    if std::env::args().any(|a| a == "--version" || a == "-V") {
        println!("aft {}", env!("CARGO_PKG_VERSION"));
        return;
    }
    // Log records get a prefix that distinguishes LSP plumbing from the
    // rest of the daemon; default level is `info` unless overridden by env.
    env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
        .format(|buf, record| {
            use std::io::Write;
            let prefix = if record.target().starts_with("aft::lsp")
                || record.target().starts_with("aft_lsp")
            {
                "[aft-lsp]"
            } else {
                "[aft]"
            };
            writeln!(buf, "{} {}", prefix, record.args())
        })
        .init();
    log::info!("started, pid {}", std::process::id());
    let ctx = AppContext::new(Box::new(TreeSitterProvider::new()), Config::default());
    install_signal_handler(ctx.bash_background().clone(), ctx.lsp_child_registry());
    {
        // Background bash output is compressed through the shared filter
        // registry, gated at runtime by an atomic flag.
        let filter_registry_handle = ctx.shared_filter_registry();
        let compress_flag = ctx.bash_compress_flag();
        ctx.bash_background()
            .set_compressor(move |command: &str, output: String| {
                if !compress_flag.load(std::sync::atomic::Ordering::Relaxed) {
                    return output;
                }
                // A poisoned lock still yields usable registry state.
                let registry_guard = match filter_registry_handle.read() {
                    Ok(g) => g,
                    Err(poisoned) => poisoned.into_inner(),
                };
                // FIX: was the mojibake `®istry_guard` (mangled `&registry_guard`),
                // which does not compile.
                aft::compress::compress_with_registry(command, &output, &registry_guard)
            });
    }
    // Progress frames share the stdout writer (and its lock) with regular
    // responses so frames and responses never interleave mid-line.
    let stdout_writer = ctx.stdout_writer();
    ctx.set_progress_sender(Some(std::sync::Arc::new(Box::new(
        move |frame: PushFrame| {
            let Ok(mut writer) = stdout_writer.lock() else {
                log::error!("stdout push frame lock poisoned");
                return;
            };
            if let Err(e) = write_push_frame(&mut *writer, &frame) {
                log::error!("stdout push frame write error: {}", e);
            }
        },
    ))));
    let stdin = io::stdin();
    let reader = stdin.lock();
    // Main loop: one JSON request per line, one JSON response per line.
    for line_result in reader.lines() {
        let line = match line_result {
            Ok(l) => l,
            Err(e) => {
                log::error!("stdin read error: {}", e);
                break;
            }
        };
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        let response = match serde_json::from_str::<RawRequest>(trimmed) {
            Ok(req) => {
                // Apply any async results (index builds, watcher events,
                // LSP traffic) before dispatching so handlers see
                // up-to-date caches.
                drain_search_index_events(&ctx);
                drain_semantic_index_events(&ctx);
                drain_watcher_events(&ctx);
                drain_lsp_events(&ctx);
                let session_id = req.session().to_string();
                let command = req.command.clone();
                let session_id_for_log = req.session_id.clone();
                let mut response =
                    log_ctx::with_session(session_id_for_log, || dispatch(req, &ctx));
                // Piggyback finished background-bash results onto the reply.
                attach_bg_completions(&mut response, &ctx, &session_id, &command);
                response
            }
            Err(e) => {
                log::error!("parse error: {} — input: {}", e, trimmed);
                Response::error(
                    "_parse_error",
                    "parse_error",
                    format!("failed to parse request: {}", e),
                )
            }
        };
        if let Err(e) = write_response(&ctx, &response) {
            log::error!("stdout write error: {}", e);
            break;
        }
    }
    // stdin closed (or stdout broke): stop LSP servers, but leave detached
    // background bash tasks running on their own.
    ctx.lsp().shutdown_all();
    ctx.bash_background().detach();
    log::info!("stdin closed, shutting down");
}
#[cfg(unix)]
/// Install a SIGINT/SIGTERM handler thread that detaches background bash
/// tasks, kills LSP child processes, and exits with the conventional
/// death-by-signal status (`128 + signo`).
fn install_signal_handler(
    bg_registry: BgTaskRegistry,
    lsp_children: aft::lsp::child_registry::LspChildRegistry,
) {
    // FIX: the original `let Ok(mut signals) = signals else { if let Err(error)
    // = signals ... }` re-inspected the scrutinee inside the else branch, but
    // `let-else` moves the scrutinee, so that is a use-after-move (E0382).
    // A plain `match` expresses both arms without the move problem.
    let mut signals = match signal_hook::iterator::Signals::new([
        signal_hook::consts::SIGINT,
        signal_hook::consts::SIGTERM,
    ]) {
        Ok(signals) => signals,
        Err(error) => {
            log::error!("failed to install signal handlers: {error}");
            return;
        }
    };
    std::thread::spawn(move || {
        // `forever()` blocks until a signal arrives; we only need the first.
        if let Some(signal) = signals.forever().next() {
            // Detach rather than kill: background bash tasks survive us.
            bg_registry.detach();
            let killed = lsp_children.kill_all();
            if killed > 0 {
                log::info!("signal {}: killed {} LSP child process(es)", signal, killed);
            }
            // Shell convention for a signal-terminated process.
            std::process::exit(128 + signal);
        }
    });
}
#[cfg(not(unix))]
// No-op stub on non-Unix targets: `signal_hook::iterator` is Unix-only, so
// there is nothing to install. Parameters are kept so call sites compile
// identically on every platform.
fn install_signal_handler(
    _bg_registry: BgTaskRegistry,
    _lsp_children: aft::lsp::child_registry::LspChildRegistry,
) {
}
/// Merge any finished background-bash completions for this session into the
/// outgoing response under a `bg_completions` key.
///
/// Skipped for commands that manage background tasks themselves, so their
/// responses are not polluted with piggybacked results.
fn attach_bg_completions(
    response: &mut Response,
    ctx: &AppContext,
    session_id: &str,
    command: &str,
) {
    // These commands already report (or drain) background state directly.
    let manages_bg_itself = command == "configure"
        || command == "bash_status"
        || command == "bash_promote"
        || command == "bash_drain_completions";
    if manages_bg_itself {
        return;
    }
    let finished = ctx
        .bash_background()
        .drain_completions_for_session(Some(session_id));
    if finished.is_empty() {
        return;
    }
    let payload = serde_json::json!(finished);
    // Insert into the existing object, or replace non-object data wholesale.
    if let Some(obj) = response.data.as_object_mut() {
        obj.insert("bg_completions".to_string(), payload);
    } else {
        response.data = serde_json::json!({ "bg_completions": payload });
    }
}
/// Route a parsed request to its handler by command name.
///
/// `ping`, `version`, `echo`, and `snapshot` are handled inline; every
/// other command delegates to a module under `aft::commands`. An
/// unrecognized command yields an `unknown_command` error response.
fn dispatch(req: RawRequest, ctx: &AppContext) -> Response {
    match req.command.as_str() {
        // Liveness / metadata
        "ping" => Response::success(&req.id, serde_json::json!({ "command": "pong" })),
        "version" => Response::success(
            &req.id,
            serde_json::json!({ "version": env!("CARGO_PKG_VERSION") }),
        ),
        "echo" => handle_echo(&req),
        // Bash execution and background-task management
        "bash" => aft::commands::bash::handle(&req, ctx),
        "bash_drain_completions" => aft::commands::bash_drain_completions::handle(&req, ctx),
        "bash_status" => aft::commands::bash_status::handle(&req, ctx),
        "bash_promote" => aft::commands::bash_promote::handle(&req, ctx),
        "bash_kill" => aft::commands::bash_kill::handle(&req, ctx),
        // Reading / navigation
        "outline" => aft::commands::outline::handle_outline(&req, ctx),
        "zoom" => aft::commands::zoom::handle_zoom(&req, ctx),
        "read" => aft::commands::read::handle_read(&req, ctx),
        // History, checkpoints, and undo
        "undo" => aft::commands::undo::handle_undo(&req, ctx),
        "edit_history" => aft::commands::edit_history::handle_edit_history(&req, ctx),
        "checkpoint" => aft::commands::checkpoint::handle_checkpoint(&req, ctx),
        "restore_checkpoint" => {
            aft::commands::restore_checkpoint::handle_restore_checkpoint(&req, ctx)
        }
        "list_checkpoints" => aft::commands::list_checkpoints::handle_list_checkpoints(&req, ctx),
        // File and symbol editing
        "write" => aft::commands::write::handle_write(&req, ctx),
        "delete_file" => aft::commands::delete_file::handle_delete_file(&req, ctx),
        "move_file" => aft::commands::move_file::handle_move_file(&req, ctx),
        "edit_symbol" => aft::commands::edit_symbol::handle_edit_symbol(&req, ctx),
        "edit_match" => aft::commands::edit_match::handle_edit_match(&req, ctx),
        "batch" => aft::commands::batch::handle_batch(&req, ctx),
        "transaction" => aft::commands::transaction::handle_transaction(&req, ctx),
        // Structured code modifications
        "add_import" => aft::commands::add_import::handle_add_import(&req, ctx),
        "add_member" => aft::commands::add_member::handle_add_member(&req, ctx),
        "add_derive" => aft::commands::add_derive::handle_add_derive(&req, ctx),
        "add_decorator" => aft::commands::add_decorator::handle_add_decorator(&req, ctx),
        "add_struct_tags" => aft::commands::add_struct_tags::handle_add_struct_tags(&req, ctx),
        "wrap_try_catch" => aft::commands::wrap_try_catch::handle_wrap_try_catch(&req, ctx),
        "remove_import" => aft::commands::remove_import::handle_remove_import(&req, ctx),
        "organize_imports" => aft::commands::organize_imports::handle_organize_imports(&req, ctx),
        // Configuration and search
        "configure" => aft::commands::configure::handle_configure(&req, ctx),
        "glob" => aft::commands::glob::handle_glob(&req, ctx),
        "grep" => aft::commands::grep::handle_grep(&req, ctx),
        "semantic_search" => aft::commands::semantic_search::handle_semantic_search(&req, ctx),
        "status" => aft::commands::status::handle_status(&req, ctx),
        // Output-filter registry trust management
        "list_filters" => aft::commands::list_filters::handle_list_filters(&req, ctx),
        "trust_filter_project" => {
            aft::commands::trust_filter_project::handle_trust_filter_project(&req, ctx)
        }
        "untrust_filter_project" => {
            aft::commands::untrust_filter_project::handle_untrust_filter_project(&req, ctx)
        }
        // Call-graph analysis
        "call_tree" => aft::commands::call_tree::handle_call_tree(&req, ctx),
        "callers" => aft::commands::callers::handle_callers(&req, ctx),
        "trace_to" => aft::commands::trace_to::handle_trace_to(&req, ctx),
        "impact" => aft::commands::impact::handle_impact(&req, ctx),
        "trace_data" => aft::commands::trace_data::handle_trace_data(&req, ctx),
        // Refactoring
        "move_symbol" => aft::commands::move_symbol::handle_move_symbol(&req, ctx),
        "extract_function" => aft::commands::extract_function::handle_extract_function(&req, ctx),
        "inline_symbol" => aft::commands::inline_symbol::handle_inline_symbol(&req, ctx),
        "git_conflicts" => aft::commands::conflicts::handle_git_conflicts(ctx, &req),
        // AST-level search and replace
        "ast_search" => aft::commands::ast_search::handle_ast_search(&req, ctx),
        "ast_replace" => aft::commands::ast_replace::handle_ast_replace(&req, ctx),
        // LSP-backed commands
        "lsp_diagnostics" => aft::commands::lsp_diagnostics::handle_lsp_diagnostics(&req, ctx),
        "lsp_inspect" => aft::commands::lsp_inspect::handle_lsp_inspect(&req, ctx),
        "lsp_hover" => aft::commands::lsp_hover::handle_lsp_hover(&req, ctx),
        "lsp_goto_definition" => {
            aft::commands::lsp_goto_definition::handle_lsp_goto_definition(&req, ctx)
        }
        "lsp_find_references" => {
            aft::commands::lsp_find_references::handle_lsp_find_references(&req, ctx)
        }
        "lsp_prepare_rename" => {
            aft::commands::lsp_prepare_rename::handle_lsp_prepare_rename(&req, ctx)
        }
        "lsp_rename" => aft::commands::lsp_rename::handle_lsp_rename(&req, ctx),
        "snapshot" => handle_snapshot(&req, ctx),
        _ => {
            log::warn!("unknown command: {}", req.command);
            Response::error(
                &req.id,
                "unknown_command",
                format!("unknown command: {}", req.command),
            )
        }
    }
}
fn handle_echo(req: &RawRequest) -> Response {
match serde_json::from_value::<EchoParams>(req.params.clone()) {
Ok(params) => Response::success(&req.id, serde_json::json!({ "message": params.message })),
Err(e) => Response::error(
&req.id,
"invalid_request",
format!("echo: invalid params: {}", e),
),
}
}
/// Handle the `snapshot` command: take a manual backup of a single file.
///
/// Requires a string `file` param; the path is validated against the
/// context's rules before the snapshot is taken.
fn handle_snapshot(req: &RawRequest, ctx: &AppContext) -> Response {
    let Some(file) = req.params.get("file").and_then(|v| v.as_str()) else {
        return Response::error(
            &req.id,
            "invalid_request",
            "snapshot: missing required param 'file'",
        );
    };
    // Path validation produces a ready-made error response on failure.
    let validated = match ctx.validate_path(&req.id, std::path::Path::new(file)) {
        Ok(p) => p,
        Err(resp) => return resp,
    };
    let outcome = ctx
        .backup()
        .borrow_mut()
        .snapshot(req.session(), validated.as_path(), "manual snapshot");
    match outcome {
        Ok(id) => Response::success(&req.id, serde_json::json!({ "backup_id": id })),
        Err(e) => Response::error(&req.id, e.code(), e.to_string()),
    }
}
/// Serialize `response` as one newline-terminated JSON line on the shared
/// stdout writer and flush it. A poisoned writer lock is surfaced as an
/// I/O error so the caller can terminate the read loop.
fn write_response(ctx: &AppContext, response: &Response) -> io::Result<()> {
    let shared = ctx.stdout_writer();
    let mut out = shared
        .lock()
        .map_err(|_| io::Error::other("stdout writer lock poisoned"))?;
    serde_json::to_writer(&mut *out, response)?;
    out.write_all(b"\n")?;
    out.flush()
}
/// Serialize a push frame as one newline-terminated JSON line and flush.
///
/// The frame is buffered first so the payload and its trailing newline go
/// out in a single `write_all` call.
fn write_push_frame(writer: &mut impl Write, frame: &PushFrame) -> io::Result<()> {
    let mut line = serde_json::to_vec(frame)?;
    line.push(b'\n');
    writer.write_all(&line)?;
    writer.flush()
}
// File extensions treated as source code: watcher changes to these files
// invalidate the call graph and semantic index (see drain_watcher_events).
const SOURCE_EXTENSIONS: &[&str] = &["ts", "tsx", "js", "jsx", "py", "rs", "go"];
/// Returns `true` when a filesystem watcher event should invalidate caches.
///
/// Creates, removes, and content/name modifications invalidate; metadata
/// changes invalidate except for access-time, permission, ownership, and
/// extended-attribute updates (none of which change file contents). Access
/// events and other kinds never invalidate.
// FIX: the parameter type was the mojibake `¬ify::EventKind` (mangled
// `&notify::EventKind`), which does not compile; the tests below call this
// with `&EventKind` values.
pub(crate) fn watcher_event_invalidates(kind: &notify::EventKind) -> bool {
    use notify::event::{MetadataKind, ModifyKind};
    use notify::EventKind;
    match kind {
        EventKind::Create(_) | EventKind::Remove(_) => true,
        EventKind::Modify(ModifyKind::Metadata(meta)) => !matches!(
            meta,
            MetadataKind::AccessTime
                | MetadataKind::Permissions
                | MetadataKind::Ownership
                | MetadataKind::Extended
        ),
        EventKind::Modify(_) => true,
        _ => false,
    }
}
/// Returns `true` when any component of `path` is an infrastructure
/// directory (VCS metadata, tool state, dependency or build output) whose
/// churn should never invalidate caches.
fn watcher_path_is_infra_skip(path: &std::path::Path) -> bool {
    use std::path::Component;
    const SKIP_DIRS: [&str; 6] = [
        ".git",
        ".opencode",
        ".alfonso",
        ".gsd",
        "node_modules",
        "target",
    ];
    for component in path.components() {
        if let Component::Normal(name) = component {
            // Non-UTF-8 names can't match any of the known directories.
            if let Some(name) = name.to_str() {
                if SKIP_DIRS.contains(&name) {
                    return true;
                }
            }
        }
    }
    false
}
/// Returns `true` when the final path component is exactly `.gitignore`.
fn watcher_path_is_gitignore(path: &std::path::Path) -> bool {
    matches!(path.file_name(), Some(name) if name == ".gitignore")
}
/// Canonicalize a batch of raw watcher paths and keep only the ones that
/// should invalidate caches: infrastructure directories and gitignored
/// paths under the matcher's root are dropped.
///
/// If the batch itself contains a `.gitignore`, the matcher is rebuilt
/// first so the new rules apply to sibling paths in the same batch.
fn filter_watcher_raw_paths<I>(ctx: &AppContext, raw_paths: I) -> HashSet<std::path::PathBuf>
where
    I: IntoIterator<Item = std::path::PathBuf>,
{
    let collected: Vec<std::path::PathBuf> = raw_paths.into_iter().collect();
    if collected.iter().any(|p| watcher_path_is_gitignore(p)) {
        log::debug!("watcher: .gitignore changed, rebuilding matcher before filter");
        ctx.rebuild_gitignore();
    }
    let mut kept = HashSet::new();
    for original in collected {
        // Prefer the canonical form; fall back to the raw path if the file
        // vanished or canonicalization otherwise fails.
        let path = std::fs::canonicalize(&original).unwrap_or(original);
        if watcher_path_is_infra_skip(&path) {
            continue;
        }
        let ignored = ctx.gitignore().map_or(false, |matcher| {
            // Only consult the matcher for paths under its root.
            path.starts_with(matcher.path())
                && matcher
                    .matched_path_or_any_parents(&path, path.is_dir())
                    .is_ignore()
        });
        if !ignored {
            kept.insert(path);
        }
    }
    kept
}
/// Drain pending filesystem watcher events and invalidate every affected
/// cache: symbol cache, call graph, search index, and semantic index.
///
/// The receiver borrow is confined to the inner block so the `RefCell`
/// borrows taken during invalidation below cannot conflict with it.
fn drain_watcher_events(ctx: &AppContext) {
    let changed: HashSet<std::path::PathBuf> = {
        let rx_ref = ctx.watcher_rx().borrow();
        let rx = match rx_ref.as_ref() {
            Some(rx) => rx,
            // No watcher configured: nothing to drain.
            None => return, };
        let mut raw_paths = Vec::new();
        // Collect every queued event without blocking; events whose kind
        // cannot change file contents are skipped.
        while let Ok(event_result) = rx.try_recv() {
            if let Ok(event) = event_result {
                if !watcher_event_invalidates(&event.kind) {
                    continue;
                }
                for path in event.paths {
                    raw_paths.push(path);
                }
            }
        }
        // Canonicalize and drop infra/gitignored paths in one pass.
        filter_watcher_raw_paths(ctx, raw_paths)
    };
    if changed.is_empty() {
        return;
    }
    // A poisoned symbol-cache lock is silently skipped (best effort).
    if let Ok(mut symbol_cache) = ctx.symbol_cache().write() {
        for path in &changed {
            symbol_cache.invalidate(path);
        }
    }
    // Call graph only tracks source files; filter by extension.
    let mut graph_ref = ctx.callgraph().borrow_mut();
    if let Some(graph) = graph_ref.as_mut() {
        for path in &changed {
            if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                if SOURCE_EXTENSIONS.contains(&ext) {
                    graph.invalidate_file(path);
                }
            }
        }
    }
    // Search index distinguishes updated files from deleted ones.
    let mut index_ref = ctx.search_index().borrow_mut();
    if let Some(index) = index_ref.as_mut() {
        for path in &changed {
            if path.exists() {
                index.update_file(path);
            } else {
                index.remove_file(path);
            }
        }
    }
    // Semantic index, like the call graph, only covers source extensions.
    let mut semantic_index_ref = ctx.semantic_index().borrow_mut();
    if let Some(index) = semantic_index_ref.as_mut() {
        for path in &changed {
            if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                if SOURCE_EXTENSIONS.contains(&ext) {
                    index.invalidate_file(path);
                }
            }
        }
    }
    log::info!("invalidated {} files", changed.len());
}
/// Adopt the newest completed search index, if the background builder has
/// delivered any since the last drain. Older deliveries in the queue are
/// superseded and discarded.
fn drain_search_index_events(ctx: &AppContext) {
    let newest = {
        let rx_slot = ctx.search_index_rx().borrow();
        match rx_slot.as_ref() {
            // Drain the whole queue without blocking; keep only the last.
            Some(rx) => std::iter::from_fn(|| rx.try_recv().ok()).last(),
            None => None,
        }
    };
    if let Some(built) = newest {
        *ctx.search_index().borrow_mut() = Some(built);
    }
}
fn drain_semantic_index_events(ctx: &AppContext) {
let events = {
let rx_ref = ctx.semantic_index_rx().borrow();
let Some(rx) = rx_ref.as_ref() else {
return;
};
let mut events = Vec::new();
while let Ok(event) = rx.try_recv() {
events.push(event);
}
events
};
if events.is_empty() {
return;
}
let mut keep_receiver = true;
for event in events {
match event {
SemanticIndexEvent::Progress {
stage,
files,
entries_done,
entries_total,
} => {
*ctx.semantic_index_status().borrow_mut() = SemanticIndexStatus::Building {
stage,
files,
entries_done,
entries_total,
};
}
SemanticIndexEvent::Ready(index) => {
*ctx.semantic_index().borrow_mut() = Some(index);
*ctx.semantic_index_status().borrow_mut() = SemanticIndexStatus::Ready;
keep_receiver = false;
}
SemanticIndexEvent::Failed(error) => {
*ctx.semantic_index().borrow_mut() = None;
*ctx.semantic_index_status().borrow_mut() = SemanticIndexStatus::Failed(error);
keep_receiver = false;
}
}
}
if !keep_receiver {
*ctx.semantic_index_rx().borrow_mut() = None;
}
}
fn drain_lsp_events(ctx: &AppContext) {
let events = {
let mut lsp = ctx.lsp();
lsp.drain_events()
};
for event in events {
match event {
LspEvent::Notification {
server_kind,
root,
method,
params,
} => {
log::debug!(
"[aft-lsp] notification {:?} {} {} {}",
server_kind,
root.display(),
method,
params.unwrap_or(serde_json::Value::Null)
);
}
LspEvent::ServerRequest {
server_kind,
root,
id,
method,
params,
} => {
log::debug!(
"[aft-lsp] request {:?} {} {:?} {} {}",
server_kind,
root.display(),
id,
method,
params.unwrap_or(serde_json::Value::Null)
);
}
LspEvent::ServerExited { server_kind, root } => {
log::info!("exited {:?} {}", server_kind, root.display());
}
}
}
}
// Unit tests for the watcher helpers above: which event kinds invalidate
// caches, and how raw watcher paths are filtered against infra directories
// and the gitignore matcher.
#[cfg(test)]
mod watcher_filter_tests {
    use super::{filter_watcher_raw_paths, watcher_event_invalidates};
    use aft::config::Config;
    use aft::context::AppContext;
    use aft::parser::TreeSitterProvider;
    use notify::event::{
        AccessKind, AccessMode, CreateKind, DataChange, MetadataKind, ModifyKind, RemoveKind,
        RenameMode,
    };
    use notify::EventKind;
    use tempfile::TempDir;
    // Builds an AppContext rooted at `root` so gitignore matching in
    // `filter_watcher_raw_paths` resolves against that directory.
    fn make_ctx_with_root(root: &std::path::Path) -> AppContext {
        AppContext::new(
            Box::new(TreeSitterProvider::new()),
            Config {
                project_root: Some(root.to_path_buf()),
                ..Config::default()
            },
        )
    }
    #[test]
    fn create_and_remove_invalidate() {
        assert!(watcher_event_invalidates(&EventKind::Create(
            CreateKind::File
        )));
        assert!(watcher_event_invalidates(&EventKind::Remove(
            RemoveKind::File
        )));
    }
    #[test]
    fn modify_data_invalidates() {
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Data(DataChange::Content)
        )));
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Data(DataChange::Any)
        )));
    }
    #[test]
    fn modify_name_rename_invalidates() {
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Name(RenameMode::To)
        )));
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Name(RenameMode::From)
        )));
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Name(RenameMode::Both)
        )));
    }
    #[test]
    fn modify_metadata_writetime_invalidates() {
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::WriteTime)
        )));
    }
    // Unspecific metadata kinds must invalidate conservatively: they could
    // hide a content-affecting change.
    #[test]
    fn modify_metadata_any_or_other_invalidates() {
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::Any)
        )));
        assert!(watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::Other)
        )));
    }
    #[test]
    fn modify_metadata_access_time_does_not_invalidate() {
        assert!(!watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::AccessTime)
        )));
    }
    #[test]
    fn modify_metadata_permissions_ownership_extended_do_not_invalidate() {
        assert!(!watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::Permissions)
        )));
        assert!(!watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::Ownership)
        )));
        assert!(!watcher_event_invalidates(&EventKind::Modify(
            ModifyKind::Metadata(MetadataKind::Extended)
        )));
    }
    #[test]
    fn access_events_do_not_invalidate() {
        assert!(!watcher_event_invalidates(&EventKind::Access(
            AccessKind::Open(AccessMode::Read)
        )));
        assert!(!watcher_event_invalidates(&EventKind::Access(
            AccessKind::Read
        )));
        assert!(!watcher_event_invalidates(&EventKind::Access(
            AccessKind::Close(AccessMode::Read)
        )));
    }
    #[test]
    fn other_event_kinds_do_not_invalidate() {
        assert!(!watcher_event_invalidates(&EventKind::Other));
        assert!(!watcher_event_invalidates(&EventKind::Any));
    }
    // End-to-end batch test: a .gitignore edit arriving in the same batch
    // as the paths it affects must rebuild the matcher first, so the newly
    // ignored sibling is filtered while unaffected files are kept.
    #[test]
    fn gitignore_write_rebuilds_before_filtering_same_batch_paths() {
        let tmp = TempDir::new().unwrap();
        let root = tmp.path();
        let gitignore = root.join(".gitignore");
        let ignored = root.join("foo.txt");
        let kept = root.join("bar.txt");
        std::fs::write(&ignored, "ignored").unwrap();
        std::fs::write(&kept, "kept").unwrap();
        let ctx = make_ctx_with_root(root);
        ctx.rebuild_gitignore();
        // No .gitignore on disk yet, so no matcher should exist.
        assert!(ctx.gitignore().is_none());
        std::fs::write(&gitignore, "foo.txt\n").unwrap();
        let changed =
            filter_watcher_raw_paths(&ctx, vec![gitignore.clone(), ignored.clone(), kept.clone()]);
        // Compare against canonicalized paths, matching the filter's output.
        let gitignore = std::fs::canonicalize(gitignore).unwrap();
        let ignored = std::fs::canonicalize(ignored).unwrap();
        let kept = std::fs::canonicalize(kept).unwrap();
        assert!(changed.contains(&gitignore));
        assert!(!changed.contains(&ignored));
        assert!(changed.contains(&kept));
    }
}