use crate::cli::leindex::LeIndex;
use crate::cli::mcp::handlers::{all_tool_handlers, ToolHandler};
use crate::cli::mcp::protocol::{JsonRpcRequest, JsonRpcResponse};
use crate::cli::mcp::McpServer;
use crate::cli::registry::{ProjectRegistry, DEFAULT_MAX_PROJECTS};
use crate::phase::{run_phase_analysis, DocsMode, FormatMode, PhaseOptions, PhaseSelection};
use anyhow::Context;
use anyhow::Result as AnyhowResult;
use clap::{error::ErrorKind, Parser, Subcommand};
use serde_json::{Map, Value};
use std::fs;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use std::sync::Arc;
use tracing::{info, warn};
// Environment variable that, when present, disables all post-install actions.
const POST_INSTALL_SKIP_ENV: &str = "LEINDEX_SKIP_POST_INSTALL_HOOK";
// Marker file written under LEINDEX_HOME once the GitHub star attempt/prompt has run.
const POST_INSTALL_STAR_MARKER: &str = ".github-starred";
// Marker file storing the crate version for which post-install actions last completed.
const POST_INSTALL_VERSION_MARKER: &str = ".post-install-version";
// GitHub REST endpoint used with `gh api -X PUT` to star the repository.
const REPO_STAR_ENDPOINT: &str = "user/starred/scooter-lacroix/LeIndex";
// Top-level CLI definition. NOTE: plain `//` comments are used deliberately —
// clap-derive turns `///` doc comments into --help text, which would change output.
#[derive(Parser, Debug)]
#[command(name = "leindex")]
#[command(author = "LeIndex Contributors")]
#[command(version = env!("CARGO_PKG_VERSION"))]
#[command(about = "Index, search, and analyze codebases with semantic understanding", long_about = None)]
#[command(subcommand_required = false)]
#[command(arg_required_else_help = false)]
pub struct Cli {
// Optional project root, available to all subcommands (-p/--project).
#[arg(global = true, long = "project", short = 'p')]
pub project_path: Option<PathBuf>,
// Enables DEBUG-level logging (see init_logging_impl).
#[arg(global = true, long = "verbose", short = 'v')]
pub verbose: bool,
// Top-level --stdio shortcut: forces MCP stdio mode regardless of subcommand.
#[arg(long = "stdio")]
pub stdio: bool,
// Omitting the subcommand defaults to MCP stdio mode (see Cli::run).
#[command(subcommand)]
pub command: Option<Commands>,
}
// All leindex subcommands. `visible_alias` entries mirror the MCP tool names so
// the same verbs work from both surfaces. `//` comments are used (not `///`)
// because clap-derive would turn doc comments into --help text.
#[derive(Subcommand, Debug)]
pub enum Commands {
// Build or rebuild the index for a project directory.
#[command(visible_alias = "leindex_index")]
Index {
#[arg(value_name = "PATH")]
path: PathBuf,
// Re-index even when an up-to-date index exists.
#[arg(long = "force")]
force: bool,
// NOTE(review): accepted but currently unused by cmd_index_impl (`_progress`).
#[arg(long = "progress")]
progress: bool,
},
// Semantic search over an indexed project.
#[command(visible_alias = "leindex_search")]
Search {
#[arg(value_name = "QUERY")]
query: String,
// Maximum number of results to return.
#[arg(long = "top-k", default_value = "10")]
top_k: usize,
},
// Deep analysis of a query with a token budget for assembled context.
#[command(visible_alias = "leindex_deep_analyze")]
Analyze {
#[arg(value_name = "QUERY")]
query: String,
#[arg(long = "tokens", default_value = "2000")]
token_budget: usize,
},
// Fetch context for a specific PDG node by id (delegates to the MCP tool).
#[command(visible_alias = "leindex_context")]
Context {
#[arg(value_name = "NODE_ID")]
node_id: String,
#[arg(long = "tokens", default_value = "2000")]
token_budget: usize,
},
// Phase-based project analysis; exactly one of --phase/--all must be given
// (validated in cmd_phase_impl, not by clap).
#[command(visible_aliases = ["leindex_phase_analysis", "phase_analysis"])]
Phase {
#[arg(long = "phase")]
phase: Option<u8>,
#[arg(long = "all", default_value_t = false)]
all: bool,
// Output verbosity: ultra|balanced|verbose (parsed via FormatMode::parse).
#[arg(long = "mode", default_value = "balanced")]
mode: String,
// Analysis target; falls back to --project, then the current directory.
#[arg(long = "path")]
path: Option<PathBuf>,
#[arg(long = "max-files", default_value = "2000")]
max_files: usize,
#[arg(long = "max-focus-files", default_value = "20")]
max_focus_files: usize,
#[arg(long = "top-n", default_value = "10")]
top_n: usize,
#[arg(long = "max-chars", default_value = "12000")]
max_output_chars: usize,
#[arg(long = "include-docs", default_value_t = false)]
include_docs: bool,
// Documentation inclusion: off|markdown|text|all (parsed via DocsMode::parse).
#[arg(long = "docs-mode", default_value = "off")]
docs_mode: String,
// Negative flag: presence disables the incremental refresh path.
#[arg(long = "no-incremental-refresh", default_value_t = false)]
no_incremental_refresh: bool,
},
// Print index statistics and memory usage for a project.
#[command(visible_alias = "leindex_diagnostics")]
Diagnostics,
// Introspect and invoke MCP tool handlers directly from the CLI.
#[command(disable_help_subcommand = true)]
Tools {
#[command(subcommand)]
command: ToolCommands,
},
// Run the HTTP MCP server.
Serve {
#[arg(long = "host", default_value = "127.0.0.1")]
host: String,
#[arg(long = "port", default_value = "47268")]
port: u16,
},
// Run the MCP server over stdin/stdout (the default when no subcommand is given).
Mcp {
#[arg(long = "stdio")]
stdio: bool,
},
// Launch the web dashboard (requires Bun).
Dashboard {
#[arg(long = "port", default_value = "5173")]
port: u16,
// Build a production bundle instead of starting the dev server.
#[arg(long = "prod")]
prod: bool,
},
}
// Subcommands of `leindex tools` for listing, documenting, and invoking tool handlers.
#[derive(Subcommand, Debug)]
pub enum ToolCommands {
// Print every tool name with its description.
List,
// Print detailed usage for one tool (aliases, arguments, schema).
Help {
name: String,
},
// Print only the JSON argument schema for one tool.
Schema {
name: String,
},
// Execute a tool with JSON arguments plus optional KEY=VALUE overrides.
Run {
name: String,
// Base arguments as a JSON object literal.
#[arg(long = "args", default_value = "{}")]
args_json: String,
// Repeatable KEY=VALUE overrides, merged over --args (see merge_tool_args).
#[arg(long = "set", value_name = "KEY=VALUE")]
set: Vec<String>,
},
}
impl Cli {
/// Runs the parsed CLI: initializes logging, resolves the effective command,
/// fires post-install actions, then dispatches to the matching `cmd_*` impl.
///
/// Command resolution order: a top-level `--stdio` flag forces MCP stdio mode;
/// otherwise a missing subcommand also defaults to MCP (non-stdio flag value).
pub async fn run(self) -> AnyhowResult<()> {
init_logging_impl(self.verbose);
let global_project = self.project_path;
// `--stdio` overrides any subcommand; no subcommand means "act as MCP server".
let command = if self.stdio {
Commands::Mcp { stdio: true }
} else {
self.command.unwrap_or(Commands::Mcp { stdio: false })
};
// Must run before dispatch so first-run messages appear for interactive commands;
// it internally skips MCP mode to keep stdout protocol-clean.
maybe_complete_post_install_actions(&command);
match command {
Commands::Index {
path,
force,
progress,
} => cmd_index_impl(path, force, progress).await,
Commands::Search { query, top_k } => {
cmd_search_impl(query, top_k, global_project).await
}
Commands::Analyze {
query,
token_budget,
} => cmd_analyze_impl(query, token_budget, global_project).await,
Commands::Context {
node_id,
token_budget,
} => cmd_context_impl(node_id, token_budget, global_project).await,
Commands::Phase {
phase,
all,
mode,
path,
max_files,
max_focus_files,
top_n,
max_output_chars,
include_docs,
docs_mode,
no_incremental_refresh,
} => {
cmd_phase_impl(
phase,
all,
mode,
path,
global_project,
max_files,
max_focus_files,
top_n,
max_output_chars,
include_docs,
docs_mode,
no_incremental_refresh,
)
.await
}
Commands::Diagnostics => cmd_diagnostics_impl(global_project).await,
Commands::Tools { command } => cmd_tools_impl(command, global_project).await,
Commands::Serve { host, port } => cmd_serve_impl(host, port).await,
// The stdio flag value is irrelevant here: both forms serve over stdio.
Commands::Mcp { .. } => cmd_mcp_stdio_impl(global_project).await,
Commands::Dashboard { port, prod } => cmd_dashboard_impl(port, prod).await,
}
}
}
/// Installs the global tracing subscriber, logging to stderr so stdout stays
/// free for command output and the JSON-RPC stdio protocol.
fn init_logging_impl(verbose: bool) {
    let max_level = match verbose {
        true => tracing::Level::DEBUG,
        false => tracing::Level::INFO,
    };
    let subscriber = tracing_subscriber::fmt()
        .with_max_level(max_level)
        .with_writer(std::io::stderr)
        .finish();
    // Ignore the error: a subscriber may already be installed (e.g. in tests).
    let _ = tracing::subscriber::set_global_default(subscriber);
}
/// Runs one-time post-install actions (star prompt, legacy cleanup, PATH check)
/// unless explicitly disabled, running in MCP mode, or not the cargo-installed binary.
/// All failures are logged as warnings; nothing here is fatal.
fn maybe_complete_post_install_actions(command: &Commands) {
    // MCP mode is skipped to keep stdout strictly protocol output.
    let skip = std::env::var_os(POST_INSTALL_SKIP_ENV).is_some()
        || matches!(command, Commands::Mcp { .. })
        || !running_from_cargo_bin();
    if skip {
        return;
    }
    let leindex_home = match resolve_leindex_home() {
        Ok(path) => path,
        Err(error) => {
            warn!("Post-install actions skipped: {}", error);
            return;
        }
    };
    // Already completed for this crate version — nothing to do.
    if post_install_is_current(&leindex_home) {
        return;
    }
    if let Err(error) = complete_post_install_actions(command, &leindex_home) {
        warn!("Post-install actions skipped: {}", error);
    }
}
/// Performs the post-install pass: ensures LEINDEX_HOME exists, removes legacy
/// binaries, runs the one-time GitHub star attempt, warns about PATH shadowing,
/// and finally records the current version so the pass is not repeated.
///
/// The version marker is written last on purpose: an early failure leaves the
/// marker absent, so the whole pass retries on the next run.
fn complete_post_install_actions(
command: &Commands,
leindex_home: &std::path::Path,
) -> AnyhowResult<()> {
fs::create_dir_all(leindex_home).context("failed to create LEINDEX_HOME")?;
cleanup_legacy_user_installations(leindex_home);
// The star marker is separate from the version marker: the star prompt runs
// at most once ever, regardless of upgrades.
let marker_path = leindex_home.join(POST_INSTALL_STAR_MARKER);
if !marker_path.exists() {
emit_post_install_message(command, "Thank you for installing LeIndex.");
if try_star_repo() {
emit_post_install_message(command, "Starred scooter-lacroix/LeIndex on GitHub.");
fs::write(&marker_path, b"starred\n").context("failed to persist star marker")?;
} else {
// Record "prompted" so the user is not nagged again even if gh was unavailable.
emit_post_install_message(
command,
"Could not star the GitHub repo automatically. If GitHub CLI is signed in, run: gh api -X PUT user/starred/scooter-lacroix/LeIndex",
);
fs::write(&marker_path, b"prompted\n").context("failed to persist star marker")?;
}
}
warn_if_path_is_shadowed(command);
write_post_install_version_marker(leindex_home)?;
Ok(())
}
/// Resolves the LeIndex home directory: an explicit `LEINDEX_HOME` environment
/// variable wins; otherwise defaults to `~/.leindex`.
///
/// # Errors
/// Fails only when neither `LEINDEX_HOME` nor a home directory is available.
fn resolve_leindex_home() -> AnyhowResult<PathBuf> {
    match std::env::var("LEINDEX_HOME") {
        Ok(path) => Ok(PathBuf::from(path)),
        Err(_) => {
            let home = dirs::home_dir().context("HOME is not available")?;
            Ok(home.join(".leindex"))
        }
    }
}
/// Returns true when the version marker under `leindex_home` matches the
/// current crate version. A missing or unreadable marker counts as stale,
/// which makes the post-install pass run again.
fn post_install_is_current(leindex_home: &std::path::Path) -> bool {
    fs::read_to_string(leindex_home.join(POST_INSTALL_VERSION_MARKER))
        .map(|version| version.trim() == env!("CARGO_PKG_VERSION"))
        .unwrap_or(false)
}
/// Records the current crate version under `leindex_home` so subsequent runs
/// can detect that post-install actions already completed for this version.
fn write_post_install_version_marker(leindex_home: &std::path::Path) -> AnyhowResult<()> {
    let contents = format!("{}\n", env!("CARGO_PKG_VERSION"));
    fs::write(leindex_home.join(POST_INSTALL_VERSION_MARKER), contents)
        .context("failed to persist post-install marker")
}
/// Removes binaries left behind by older install layouts (`~/.local/bin` and
/// `LEINDEX_HOME/bin`) so the `leindex` on PATH resolves to the cargo-managed
/// copy. Strictly best-effort: failures are logged and otherwise ignored.
fn cleanup_legacy_user_installations(leindex_home: &std::path::Path) {
    let Some(home) = dirs::home_dir() else {
        return;
    };
    let binary_name = platform_binary_name("leindex");
    // Both legacy locations get identical remove-and-log treatment.
    remove_legacy_binary(&home.join(".local").join("bin").join(&binary_name));
    remove_legacy_binary(&leindex_home.join("bin").join(binary_name));
}

/// Best-effort removal of one legacy binary; logs the outcome either way.
fn remove_legacy_binary(path: &std::path::Path) {
    if !path.exists() {
        return;
    }
    match fs::remove_file(path) {
        Ok(_) => info!("Removed legacy install at {}", path.display()),
        Err(error) => warn!(
            "Failed to remove legacy install at {}: {}",
            path.display(),
            error
        ),
    }
}
/// Returns true when the running executable is exactly the one cargo installs
/// at `$CARGO_HOME/bin/leindex` (with `.exe` on Windows). Any failure to
/// resolve either path counts as "not cargo-installed".
fn running_from_cargo_bin() -> bool {
    match (std::env::current_exe(), cargo_home_dir()) {
        (Ok(current_exe), Some(cargo_home)) => {
            current_exe == cargo_home.join("bin").join(platform_binary_name("leindex"))
        }
        _ => false,
    }
}
/// Finds the first file named `binary_name` on the PATH, checking each entry
/// in order. On Windows the `.exe`-suffixed name is also tried per entry.
/// Returns `None` when PATH is unset or no entry contains the binary.
fn resolve_path_binary(binary_name: &str) -> Option<PathBuf> {
    let path_var = std::env::var_os("PATH")?;
    std::env::split_paths(&path_var).find_map(|entry| {
        let candidate = entry.join(binary_name);
        if candidate.is_file() {
            return Some(candidate);
        }
        if cfg!(windows) {
            let exe_candidate = entry.join(platform_binary_name(binary_name));
            if exe_candidate.is_file() {
                return Some(exe_candidate);
            }
        }
        None
    })
}
/// Warns the user when `leindex` on PATH resolves to a different binary than
/// the one currently running, suggesting which directory should come earlier
/// in PATH. Silent when resolution fails or the paths already agree.
fn warn_if_path_is_shadowed(command: &Commands) {
    let Ok(current_exe) = std::env::current_exe() else {
        return;
    };
    let Some(resolved) = resolve_path_binary("leindex") else {
        return;
    };
    if resolved == current_exe {
        return;
    }
    // Prefer pointing at cargo's bin dir; fall back to this binary's parent.
    let preferred_dir = cargo_bin_dir().unwrap_or_else(|| {
        current_exe
            .parent()
            .unwrap_or_else(|| std::path::Path::new("."))
            .to_path_buf()
    });
    let message = format!(
        "`leindex` currently resolves to {} instead of {}. Remove the older binary or move {} earlier in PATH.",
        resolved.display(),
        current_exe.display(),
        preferred_dir.display()
    );
    emit_post_install_message(command, &message);
}
/// Returns cargo's home directory: `$CARGO_HOME` when set, otherwise the
/// conventional `~/.cargo`. `None` only when neither can be determined.
fn cargo_home_dir() -> Option<PathBuf> {
    match std::env::var("CARGO_HOME") {
        Ok(value) => Some(PathBuf::from(value)),
        Err(_) => dirs::home_dir().map(|home| home.join(".cargo")),
    }
}
/// Returns the directory where cargo installs binaries: `$CARGO_HOME/bin`.
fn cargo_bin_dir() -> Option<PathBuf> {
    Some(cargo_home_dir()?.join("bin"))
}
/// Appends the `.exe` suffix on Windows; returns the bare name elsewhere.
fn platform_binary_name(binary_name: &str) -> String {
    match cfg!(windows) {
        true => format!("{}.exe", binary_name),
        false => binary_name.to_string(),
    }
}
/// Attempts to star the LeIndex repository using the GitHub CLI.
/// Returns true only when `gh auth status` reports a signed-in session AND the
/// subsequent `gh api -X PUT` succeeds. All output is discarded; a missing
/// `gh` binary simply yields false.
fn try_star_repo() -> bool {
    // Run `gh` with the given args, discarding all output; true on exit 0.
    fn gh_succeeds(args: &[&str]) -> bool {
        Command::new("gh")
            .args(args)
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()
            .map(|status| status.success())
            .unwrap_or(false)
    }
    gh_succeeds(&["auth", "status"])
        && gh_succeeds(&[
            "api",
            "-X",
            "PUT",
            "-H",
            "Accept: application/vnd.github+json",
            REPO_STAR_ENDPOINT,
        ])
}
/// Routes a post-install message to the right sink: long-running server modes
/// log via tracing, while interactive commands print straight to stderr so the
/// user sees the message immediately.
fn emit_post_install_message(command: &Commands, message: &str) {
    match command {
        Commands::Serve { .. } | Commands::Dashboard { .. } => info!("{}", message),
        _ => eprintln!("{}", message),
    }
}
/// Returns the explicitly supplied project path, or the process working
/// directory when none was given. Panics only if the current directory is
/// inaccessible (matches the original behavior).
fn get_project_path(explicit: Option<PathBuf>) -> PathBuf {
    match explicit {
        Some(path) => path,
        None => std::env::current_dir().unwrap(),
    }
}
/// Runs `leindex index`: builds the index for `path` and prints summary stats.
///
/// Indexing runs on the blocking thread pool (`spawn_blocking`) because it is
/// CPU/IO heavy; the `LeIndex` instance is moved into that task.
/// The `_progress` flag is currently accepted but unused.
async fn cmd_index_impl(path: PathBuf, force: bool, _progress: bool) -> AnyhowResult<()> {
let canonical_path = path
.canonicalize()
.context("Failed to canonicalize project path")?;
info!("Indexing project at: {}", canonical_path.display());
let mut leindex = LeIndex::new(&canonical_path).context("Failed to create LeIndex instance")?;
// Double `?`/context: the outer layer is the join error, the inner the indexing error.
let stats = tokio::task::spawn_blocking(move || leindex.index_project(force))
.await
.context("Indexing task failed")?
.context("Indexing failed")?;
println!("\n✓ Indexing complete!");
println!(" Files parsed: {}", stats.files_parsed);
println!(" Successful: {}", stats.successful_parses);
println!(" Failed: {}", stats.failed_parses);
println!(" Signatures: {}", stats.total_signatures);
println!(" PDG nodes: {}", stats.pdg_nodes);
println!(" PDG edges: {}", stats.pdg_edges);
println!(" Indexed nodes: {}", stats.indexed_nodes);
println!(" Time: {}ms", stats.indexing_time_ms);
Ok(())
}
/// Runs `leindex search`: semantic search over an (ideally) indexed project.
///
/// A failed storage load only warns — the search then runs against whatever
/// in-memory state exists, typically producing no results.
///
/// # Errors
/// Fails when the project path cannot be canonicalized, the engine cannot be
/// created, or the search itself errors.
async fn cmd_search_impl(
    query: String,
    top_k: usize,
    project: Option<PathBuf>,
) -> AnyhowResult<()> {
    let project_path = get_project_path(project);
    let canonical_path = project_path
        .canonicalize()
        .context("Failed to canonicalize project path")?;
    info!("Searching for: {}", query);
    let mut leindex = LeIndex::new(&canonical_path).context("Failed to create LeIndex instance")?;
    if let Err(e) = leindex.load_from_storage() {
        warn!("Failed to load from storage: {}", e);
        warn!("Project may not be indexed. Run 'leindex index' first.");
    }
    let results = leindex
        .search(&query, top_k, None)
        .context("Search failed")?;
    if results.is_empty() {
        println!("No results found for: {}", query);
        return Ok(());
    }
    println!("\nFound {} result(s) for: '{}'\n", results.len(), query);
    for (i, result) in results.iter().enumerate() {
        println!("{}. {} ({})", i + 1, result.symbol_name, result.file_path);
        println!(" ID: {}", result.node_id);
        println!(" Overall Score: {:.2}", result.score.overall);
        println!(
            " Explanation: [Semantic: {:.2}, Text: {:.2}, Structural: {:.2}]",
            result.score.semantic, result.score.text_match, result.score.structural
        );
        if let Some(context) = &result.context {
            let context_preview = if context.len() > 100 {
                // BUGFIX: `&context[..100]` panics when byte 100 falls inside a
                // multi-byte UTF-8 character; back up to the nearest char boundary.
                let mut cut = 100;
                while !context.is_char_boundary(cut) {
                    cut -= 1;
                }
                format!("{}...", &context[..cut])
            } else {
                context.clone()
            };
            println!(" Context: {}", context_preview);
        }
        println!();
    }
    Ok(())
}
/// Runs `leindex analyze`: deep analysis of `query` within a token budget,
/// printing a summary header and the assembled context (when present).
///
/// # Errors
/// Fails when the project path cannot be canonicalized, the engine cannot be
/// created, or the analysis itself errors. A failed storage load only warns.
async fn cmd_analyze_impl(
    query: String,
    token_budget: usize,
    project: Option<PathBuf>,
) -> AnyhowResult<()> {
    let canonical_path = get_project_path(project)
        .canonicalize()
        .context("Failed to canonicalize project path")?;
    info!("Analyzing: {}", query);
    let mut engine = LeIndex::new(&canonical_path).context("Failed to create LeIndex instance")?;
    // A failed load usually means the project was never indexed; warn but continue.
    if let Err(e) = engine.load_from_storage() {
        warn!("Failed to load from storage: {}", e);
        warn!("Project may not be indexed. Run 'leindex index' first.");
    }
    let result = engine
        .analyze(&query, token_budget)
        .context("Analysis failed")?;
    println!("\nAnalysis Results for: '{}'", query);
    println!("Found {} entry point(s)", result.results.len());
    println!("Tokens used: {}", result.tokens_used);
    println!("Processing time: {}ms\n", result.processing_time_ms);
    if let Some(context) = &result.context {
        println!("Context:");
        println!("{}", context);
    }
    Ok(())
}
/// Runs `leindex context`: delegates to the `leindex_context` tool handler so
/// the CLI and MCP surfaces share one code path, then pretty-prints the JSON.
async fn cmd_context_impl(
    node_id: String,
    token_budget: usize,
    project: Option<PathBuf>,
) -> AnyhowResult<()> {
    let base_args = serde_json::json!({
        "node_id": node_id,
        "token_budget": token_budget
    });
    // No --set overrides here; merge only injects project_path when needed.
    let args = merge_tool_args(base_args, &[], project.as_ref())?;
    let value = execute_tool_handler("leindex_context", args, project).await?;
    print_json_value(&value)?;
    Ok(())
}
/// Runs `leindex phase`: validates the phase selection, resolves the target
/// path, parses the string modes, and executes the analysis on the blocking
/// thread pool.
///
/// Path precedence: --path, then the global --project, then the current
/// directory. A file target analyzes its parent dir with the file as focus.
///
/// # Errors
/// Fails on invalid flag combinations, unparseable modes, canonicalization
/// failure, or errors from the analysis itself.
#[allow(clippy::too_many_arguments)]
async fn cmd_phase_impl(
phase: Option<u8>,
all: bool,
mode: String,
path: Option<PathBuf>,
project: Option<PathBuf>,
max_files: usize,
max_focus_files: usize,
top_n: usize,
max_output_chars: usize,
include_docs: bool,
docs_mode: String,
no_incremental_refresh: bool,
) -> AnyhowResult<()> {
// Exactly one of --phase / --all must be supplied (clap cannot express this).
if !all && phase.is_none() {
anyhow::bail!("Specify either --phase <1..5> or --all");
}
if all && phase.is_some() {
anyhow::bail!("Use either --phase or --all, not both");
}
let target_path = path
.or(project)
.unwrap_or_else(|| std::env::current_dir().unwrap());
let canonical_path = target_path
.canonicalize()
.context("Failed to canonicalize phase analysis path")?;
// A file target becomes (parent dir, [file]) so the file is the analysis focus.
let (root, focus_files) = if canonical_path.is_file() {
let parent = canonical_path
.parent()
.map(PathBuf::from)
.ok_or_else(|| anyhow::anyhow!("phase analysis file path has no parent directory"))?;
(parent, vec![canonical_path.clone()])
} else {
(canonical_path, Vec::new())
};
let parsed_mode = FormatMode::parse(&mode)
.ok_or_else(|| anyhow::anyhow!("Invalid mode '{}'. Use ultra|balanced|verbose", mode))?;
let parsed_docs_mode = DocsMode::parse(&docs_mode).ok_or_else(|| {
anyhow::anyhow!(
"Invalid docs mode '{}'. Use off|markdown|text|all",
docs_mode
)
})?;
let selection = if all {
PhaseSelection::All
} else {
// Safe: the validation above guarantees `phase` is Some when !all.
let p = phase.unwrap();
PhaseSelection::from_number(p)
.ok_or_else(|| anyhow::anyhow!("Invalid phase '{}'. Use 1..5", p))?
};
let options = PhaseOptions {
root,
focus_files,
mode: parsed_mode,
max_files,
max_focus_files,
top_n,
max_output_chars,
use_incremental_refresh: !no_incremental_refresh,
include_docs,
docs_mode: parsed_docs_mode,
// No CLI flag for keywords yet; reuse the defaults.
hotspot_keywords: PhaseOptions::default().hotspot_keywords,
};
// CPU-heavy analysis runs off the async executor; `??` unwraps join + analysis errors.
let report = tokio::task::spawn_blocking(move || run_phase_analysis(options, selection))
.await
.context("Phase task failed")??;
println!("{}", report.formatted_output);
Ok(())
}
/// Runs `leindex diagnostics`: prints index statistics and memory usage for
/// the selected project. A failed storage load only warns, so the report may
/// show an empty index.
async fn cmd_diagnostics_impl(project: Option<PathBuf>) -> AnyhowResult<()> {
let project_path = get_project_path(project);
let canonical_path = project_path
.canonicalize()
.context("Failed to canonicalize project path")?;
info!("Fetching diagnostics");
let mut leindex = LeIndex::new(&canonical_path).context("Failed to create LeIndex instance")?;
if let Err(e) = leindex.load_from_storage() {
warn!("Failed to load from storage: {}", e);
}
let diag = leindex
.get_diagnostics()
.context("Failed to get diagnostics")?;
println!("\nLeIndex Diagnostics\n");
println!("Project: {}", diag.project_id);
println!("Path: {}", diag.project_path);
println!("\nIndex Statistics:");
println!(" Files parsed: {}", diag.stats.files_parsed);
println!(" Successful: {}", diag.stats.successful_parses);
println!(" Failed: {}", diag.stats.failed_parses);
println!(" Total signatures: {}", diag.stats.total_signatures);
println!(" PDG nodes: {}", diag.stats.pdg_nodes);
println!(" PDG edges: {}", diag.stats.pdg_edges);
println!(" Indexed nodes: {}", diag.stats.indexed_nodes);
println!("\nMemory Usage:");
// Bytes → MiB for display.
println!(
" Current: {:.2} MB",
diag.memory_usage_bytes as f64 / 1024.0 / 1024.0
);
println!(
" Total: {:.2} MB",
diag.total_memory_bytes as f64 / 1024.0 / 1024.0
);
println!(" Usage: {:.1}%", diag.memory_usage_percent);
if diag.memory_threshold_exceeded {
println!(" ⚠️ Memory threshold exceeded!");
}
Ok(())
}
/// Dispatches the `leindex tools` subcommands: list, per-tool help, argument
/// schema, and direct execution with merged JSON/--set arguments.
///
/// # Errors
/// Fails on an unknown tool name, invalid arguments, or a handler error.
async fn cmd_tools_impl(command: ToolCommands, project: Option<PathBuf>) -> AnyhowResult<()> {
    match command {
        ToolCommands::List => {
            // One line per tool: name, tab, description.
            for handler in all_tool_handlers() {
                println!("{}\t{}", handler.name(), handler.description());
            }
            Ok(())
        }
        ToolCommands::Help { name } => match find_tool_handler(&name) {
            Some(handler) => {
                print_tool_help(&handler);
                Ok(())
            }
            None => Err(anyhow::anyhow!("Unknown tool '{}'", name)),
        },
        ToolCommands::Schema { name } => match find_tool_handler(&name) {
            Some(handler) => print_json_value(&handler.argument_schema()),
            None => Err(anyhow::anyhow!("Unknown tool '{}'", name)),
        },
        ToolCommands::Run {
            name,
            args_json,
            set,
        } => {
            // --set overrides are merged over --args; project_path is injected if absent.
            let base = parse_tool_args_json(&args_json)?;
            let args = merge_tool_args(base, &set, project.as_ref())?;
            let value = execute_tool_handler(&name, args, project).await?;
            print_json_value(&value)?;
            Ok(())
        }
    }
}
/// Runs `leindex serve`: starts the HTTP-based MCP server.
///
/// `LEINDEX_PORT` overrides the CLI port when it parses as a u16; otherwise
/// the CLI value stands. The engine is rooted at the current directory.
///
/// # Errors
/// Fails on an invalid host/port, an inaccessible working directory, engine
/// or server construction failure, or a runtime server error.
async fn cmd_serve_impl(host: String, port: u16) -> AnyhowResult<()> {
    let port = if let Ok(env_port) = std::env::var("LEINDEX_PORT") {
        env_port.parse::<u16>().unwrap_or(port)
    } else {
        port
    };
    let addr: SocketAddr = format!("{}:{}", host, port)
        .parse()
        .context("Invalid address or port")?;
    info!("Starting MCP server on {}", addr);
    let current_dir = std::env::current_dir().context("Failed to get current directory")?;
    // BUGFIX: restored `&current_dir` — the argument had been mangled into the
    // HTML entity residue `¤t_dir`, which does not compile.
    let leindex = LeIndex::new(&current_dir).context("Failed to create LeIndex instance")?;
    let server = McpServer::with_address(addr, leindex).context("Failed to create MCP server")?;
    println!("\nLeIndex MCP Server\n");
    println!("Server starting on http://{}\n", addr);
    println!("Available endpoints:");
    println!(" POST /mcp - JSON-RPC 2.0 endpoint");
    println!(" GET /mcp/tools/list - List available tools");
    println!(" GET /health - Health check");
    println!("\nConfiguration:");
    println!(" Port: {} (override with LEINDEX_PORT env var)", port);
    println!("\nPress Ctrl+C to stop the server\n");
    server.run().await.context("Server error")?;
    Ok(())
}
/// Runs the MCP server over stdin/stdout.
///
/// Supports two wire framings, auto-detected per message: newline-delimited
/// JSON, and LSP-style `Content-Length:` framing. Once a framed message has
/// been seen, all subsequent responses are framed too (`use_content_length`
/// latches on). Diagnostics go to stderr; stdout carries only protocol JSON.
async fn cmd_mcp_stdio_impl(project: Option<PathBuf>) -> AnyhowResult<()> {
use crate::cli::mcp::protocol::{JsonRpcError, JsonRpcMessage, JsonRpcResponse};
use std::io::{self, BufRead, Read, Write};
let project_path = get_project_path(project);
let canonical_path = project_path
.canonicalize()
.context("Failed to canonicalize project path")?;
info!(
"Starting LeIndex MCP stdio server for project: {}",
canonical_path.display()
);
let mut leindex = LeIndex::new(&canonical_path).context("Failed to create LeIndex instance")?;
// Best-effort load; the server still starts with an empty index.
let _ = leindex.load_from_storage();
// Shared registry + handler set published via OnceLock-style globals so the
// request handler can reach them.
let registry = Arc::new(ProjectRegistry::with_initial_project(
DEFAULT_MAX_PROJECTS,
leindex,
));
let _ = crate::cli::mcp::server::SERVER_STATE.set(registry.clone());
let _ = crate::cli::mcp::server::HANDLERS.set(all_tool_handlers());
eprintln!("[INFO] LeIndex MCP stdio server starting");
eprintln!("[INFO] Project: {}", canonical_path.display());
eprintln!("[INFO] Reading JSON-RPC from stdin, writing to stdout");
eprintln!("[INFO] Press Ctrl+C to stop\n");
let stdin = io::stdin();
let mut stdout = io::stdout().lock();
let mut reader = io::BufReader::new(stdin.lock());
// Latches to true once any Content-Length-framed message arrives.
let mut use_content_length = false;
loop {
let mut line = String::new();
let bytes = match reader.read_line(&mut line) {
Ok(b) => b,
Err(e) => {
eprintln!("[ERROR] Failed to read stdin: {}", e);
continue;
}
};
// 0 bytes read == EOF: the client closed stdin, shut down cleanly.
if bytes == 0 {
break;
}
let line_trim = line.trim_end();
if line_trim.is_empty() {
continue;
}
// Framing detection: a Content-Length header starts an LSP-style frame;
// anything else is treated as one complete JSON message on the line.
let (json_payload, framed) = if line_trim
.to_ascii_lowercase()
.starts_with("content-length:")
{
let len_str = line_trim.split(':').nth(1).unwrap_or("").trim();
let length: usize = match len_str.parse() {
Ok(v) => v,
Err(e) => {
eprintln!("[ERROR] Invalid Content-Length header: {}", e);
continue;
}
};
// Skip any remaining header lines up to the blank separator line.
loop {
let mut header = String::new();
if reader.read_line(&mut header).unwrap_or(0) == 0 {
break;
}
if header.trim().is_empty() {
break;
}
}
// Read exactly `length` bytes of JSON body.
let mut buf = vec![0u8; length];
if let Err(e) = reader.read_exact(&mut buf) {
eprintln!("[ERROR] Failed to read JSON payload: {}", e);
break;
}
(String::from_utf8_lossy(&buf).to_string(), true)
} else {
(line_trim.to_string(), false)
};
use_content_length = use_content_length || framed;
let message = match JsonRpcMessage::from_json(&json_payload) {
Ok(m) => m,
Err(e) => {
// Parse failure: report a JSON-RPC error with a null id and keep serving.
let error_response = JsonRpcResponse::error(serde_json::Value::Null, e);
let response = serde_json::to_string(&error_response).unwrap_or_default();
if use_content_length {
// NOTE(review): writeln! appends a trailing '\n' beyond the declared
// Content-Length — strict clients may object; verify against consumers.
let _ = writeln!(
stdout,
"Content-Length: {}\r\n\r\n{}",
response.len(),
response
);
} else if writeln!(stdout, "{}", response).is_err() {
break;
}
let _ = stdout.flush();
continue;
}
};
match message {
JsonRpcMessage::Notification(notification) => {
// Notifications carry no id and get no response; just log them.
eprintln!(
"[INFO] Received notification: {} (type: {})",
notification.method,
notification.notification_type()
);
continue;
}
JsonRpcMessage::Request(request) => {
let request_id = request.id.clone().unwrap_or(serde_json::Value::Null);
let response = match handle_mcp_request(request, project_path.clone()).await {
Ok(r) => r,
Err(e) => JsonRpcResponse::error(
request_id,
JsonRpcError::internal_error(e.to_string()),
),
};
let response_json = match serde_json::to_string(&response) {
Ok(j) => j,
Err(e) => {
// Hand-built fallback error payload if serialization itself fails.
format!("{{\"jsonrpc\":\"2.0\",\"id\":null,\"error\":{{\"code\":-32700,\"message\":\"Failed to serialize response: {}\"}}}}", e)
}
};
if use_content_length {
if writeln!(
stdout,
"Content-Length: {}\r\n\r\n{}",
response_json.len(),
response_json
)
.is_err()
{
eprintln!("[ERROR] Failed to write to stdout");
break;
}
} else if writeln!(stdout, "{}", response_json).is_err() {
eprintln!("[ERROR] Failed to write to stdout");
break;
}
// Flush per message so the client is never left waiting on buffered output.
let _ = stdout.flush();
}
}
}
Ok(())
}
/// Parses `--args` as JSON and insists on a top-level object; any other JSON
/// value (array, string, number, …) is rejected.
///
/// # Errors
/// Fails on malformed JSON or a non-object top-level value.
fn parse_tool_args_json(args_json: &str) -> AnyhowResult<Value> {
    let value: Value =
        serde_json::from_str(args_json).context("Tool arguments must be valid JSON")?;
    if value.is_object() {
        Ok(value)
    } else {
        anyhow::bail!("Tool arguments must be a JSON object")
    }
}
fn merge_tool_args(
args: Value,
set_args: &[String],
project: Option<&PathBuf>,
) -> AnyhowResult<Value> {
let mut object = match args {
Value::Object(map) => map,
_ => Map::new(),
};
for entry in set_args {
let (key, raw_value) = entry
.split_once('=')
.ok_or_else(|| anyhow::anyhow!("Invalid --set '{}'. Use KEY=VALUE", entry))?;
let value = serde_json::from_str(raw_value)
.unwrap_or_else(|_| Value::String(raw_value.to_string()));
object.insert(key.to_string(), value);
}
if let Some(project) = project {
if !object.contains_key("project_path") {
let canonical = project.canonicalize().unwrap_or_else(|_| project.clone());
object.insert(
"project_path".to_string(),
Value::String(canonical.display().to_string()),
);
}
}
Ok(Value::Object(object))
}
/// Pretty-prints a JSON value to stdout.
///
/// # Errors
/// Fails only when the value cannot be serialized.
fn print_json_value(value: &Value) -> AnyhowResult<()> {
    let rendered = serde_json::to_string_pretty(value).context("Failed to format JSON output")?;
    println!("{}", rendered);
    Ok(())
}
/// Prints human-readable help for one tool: name, description, accepted
/// aliases, usage examples, a per-argument breakdown derived from the JSON
/// schema, and finally the raw schema itself.
fn print_tool_help(handler: &ToolHandler) {
let schema = handler.argument_schema();
// Alias derivation: normalized full name, its `leindex_`-stripped short form,
// and kebab-case variants of both; duplicates are suppressed below.
let normalized = normalize_tool_name(handler.name());
let short_name = normalized
.strip_prefix("leindex_")
.unwrap_or(normalized.as_str())
.to_string();
let kebab_short = short_name.replace('_', "-");
let kebab_full = normalized.replace('_', "-");
println!("{}", handler.name());
println!("{}", handler.description());
println!();
println!("Aliases:");
println!(" {}", handler.name());
if short_name != handler.name() {
println!(" {}", short_name);
}
if kebab_short != short_name {
println!(" {}", kebab_short);
}
if kebab_full != normalized && kebab_full != kebab_short {
println!(" {}", kebab_full);
}
println!();
println!("Usage:");
println!(" leindex tools help {}", handler.name());
println!(" leindex tools schema {}", handler.name());
println!(
" leindex tools run {} --args '<json-object>'",
handler.name()
);
println!(
" leindex tools run {} --set key=value --set other=true",
handler.name()
);
// Argument breakdown, only when the schema declares `properties`.
if let Some(properties) = schema.get("properties").and_then(|v| v.as_object()) {
println!();
println!("Arguments:");
// The schema's `required` array becomes a lookup set; absent → all optional.
let required = schema
.get("required")
.and_then(|v| v.as_array())
.map(|items| {
items
.iter()
.filter_map(|item| item.as_str())
.collect::<std::collections::HashSet<_>>()
})
.unwrap_or_default();
for (name, property) in properties {
let required_marker = if required.contains(name.as_str()) {
"required"
} else {
"optional"
};
// Type label: explicit `type`, else "multiple" for oneOf unions, else "value".
let property_type = property
.get("type")
.and_then(|v| v.as_str())
.or_else(|| {
property
.get("oneOf")
.and_then(|v| v.as_array())
.map(|_| "multiple")
})
.unwrap_or("value");
let description = property
.get("description")
.and_then(|v| v.as_str())
.unwrap_or("");
let default = property.get("default");
println!(" {} ({}, {})", name, property_type, required_marker);
if !description.is_empty() {
println!(" {}", description);
}
if let Some(default) = default {
println!(" default: {}", default);
}
}
}
println!();
println!("Schema:");
println!(
"{}",
serde_json::to_string_pretty(&schema).unwrap_or_else(|_| "{}".to_string())
);
}
/// Canonicalizes a tool name for comparison: trimmed, ASCII-lowercased, with
/// hyphens converted to underscores.
fn normalize_tool_name(name: &str) -> String {
    let lowered = name.trim().to_ascii_lowercase();
    lowered.replace('-', "_")
}
/// Looks up a tool handler by name, accepting either the canonical handler
/// name or its `leindex_`-stripped short form; matching is case-insensitive
/// and treats hyphens as underscores.
fn find_tool_handler(name: &str) -> Option<ToolHandler> {
    let wanted = normalize_tool_name(name);
    for handler in all_tool_handlers() {
        let full = normalize_tool_name(handler.name());
        let short_matches = full
            .strip_prefix("leindex_")
            .map(|short| short == wanted)
            .unwrap_or(false);
        if full == wanted || short_matches {
            return Some(handler);
        }
    }
    None
}
/// Resolves `name` to a tool handler and executes it with `args` against a
/// fresh registry rooted at `project` (or the current directory).
///
/// # Errors
/// Fails when the tool name is unknown, the registry cannot be built, or the
/// handler itself reports an error.
async fn execute_tool_handler(
    name: &str,
    args: Value,
    project: Option<PathBuf>,
) -> AnyhowResult<Value> {
    let handler =
        find_tool_handler(name).ok_or_else(|| anyhow::anyhow!("Unknown tool '{}'", name))?;
    let registry = build_tool_registry(project)?;
    // BUGFIX: restored `&registry` — the argument had been mangled into the
    // HTML entity residue `®istry`, which does not compile.
    handler
        .execute(&registry, args)
        .await
        .map_err(|error| anyhow::anyhow!("{}", error))
}
/// Builds a one-project registry for a CLI tool invocation: canonicalizes the
/// target (a file target is rooted at its parent directory), constructs the
/// engine, and best-effort loads any existing on-disk index.
///
/// # Errors
/// Fails when the path cannot be canonicalized, a file target has no parent,
/// or the engine cannot be created.
fn build_tool_registry(project: Option<PathBuf>) -> AnyhowResult<Arc<ProjectRegistry>> {
    let initial = get_project_path(project);
    let canonical = initial.canonicalize().with_context(|| {
        format!(
            "Failed to canonicalize project path '{}'",
            initial.display()
        )
    })?;
    let project_root = if canonical.is_file() {
        canonical
            .parent()
            .map(PathBuf::from)
            .ok_or_else(|| anyhow::anyhow!("File path '{}' has no parent", canonical.display()))?
    } else {
        canonical
    };
    let mut leindex =
        LeIndex::new(&project_root).context("Failed to create LeIndex instance for tool run")?;
    // Best-effort: tools still run (against an empty index) if loading fails.
    let _ = leindex.load_from_storage();
    Ok(Arc::new(ProjectRegistry::with_initial_project(
        DEFAULT_MAX_PROJECTS,
        leindex,
    )))
}
/// Runs `leindex dashboard`: locates the dashboard directory, verifies Bun is
/// installed, then either builds a production bundle (`--prod`) or starts the
/// dev server via `bun run`.
///
/// Candidate search order matters: ./dashboard, then dashboard/ in up to five
/// ancestor directories, then LEINDEX_DASHBOARD_DIR, then ~/.leindex/dashboard;
/// the first existing directory wins.
async fn cmd_dashboard_impl(port: u16, prod: bool) -> AnyhowResult<()> {
use std::process::Command;
let current_dir = std::env::current_dir().context("Failed to get current directory")?;
let dashboard_path = {
let mut candidates = Vec::new();
candidates.push(current_dir.join("dashboard"));
// Walk up to five ancestors looking for a sibling `dashboard` directory.
let mut parent = current_dir.as_path();
for _ in 0..5 {
if let Some(next) = parent.parent() {
candidates.push(next.join("dashboard"));
parent = next;
} else {
break;
}
}
if let Ok(explicit) = std::env::var("LEINDEX_DASHBOARD_DIR") {
candidates.push(PathBuf::from(explicit));
}
if let Ok(home) = std::env::var("HOME") {
candidates.push(PathBuf::from(home).join(".leindex").join("dashboard"));
}
candidates
.into_iter()
.find(|path| path.exists() && path.is_dir())
.ok_or_else(|| {
anyhow::anyhow!(
"Dashboard directory not found. Checked current repo paths, LEINDEX_DASHBOARD_DIR, and ~/.leindex/dashboard."
)
})?
};
// Probe for Bun by running `bun --version`; a spawn failure means not installed.
let bun_exists = Command::new("bun")
.arg("--version")
.output()
.map(|o| o.status.success())
.unwrap_or(false);
if !bun_exists {
anyhow::bail!(
"Bun is required to run the dashboard. Please install it first:\n curl -fsSL https://bun.sh/install | bash"
);
}
println!("\nLeIndex Dashboard\n");
println!("Starting dashboard server...\n");
if prod {
// Production: one-shot `bun run build`, then print where the bundle landed.
println!("Building dashboard for production...");
let build_status = Command::new("bun")
.current_dir(&dashboard_path)
.arg("run")
.arg("build")
.status()
.context("Failed to build dashboard")?;
if !build_status.success() {
anyhow::bail!("Dashboard build failed");
}
println!("\nDashboard built successfully!");
println!("Built files: {}/dist", dashboard_path.display());
println!("\nTo serve the production build, use:");
println!(" cd {} && bun run start", dashboard_path.display());
} else {
// Dev: `bun run dev` blocks until the user stops the server.
// NOTE(review): `port` is only printed here — the dev server presumably reads
// its own config; confirm the flag actually affects the listen port.
println!("Dashboard will be available at: http://localhost:{}", port);
println!("Press Ctrl+C to stop the server\n");
let status = Command::new("bun")
.current_dir(&dashboard_path)
.arg("run")
.arg("dev")
.status()
.context("Failed to start dashboard")?;
if !status.success() {
anyhow::bail!("Dashboard server exited with error");
}
}
Ok(())
}
/// Dispatches one JSON-RPC request for the stdio MCP server.
///
/// Handles `initialize`, `notifications/initialized`, `ping`, `tools/call`,
/// and `tools/list`; every other method yields a method-not-found error.
/// Relies on SERVER_STATE/HANDLERS having been set by cmd_mcp_stdio_impl.
async fn handle_mcp_request(
request: JsonRpcRequest,
_project_path: PathBuf,
) -> anyhow::Result<JsonRpcResponse> {
use crate::cli::mcp::server::{handle_tool_call, list_tools_json, HANDLERS, SERVER_STATE};
let method_name = request.method.clone();
// Missing id → null id, per JSON-RPC conventions for error correlation.
let id = request.id.clone().unwrap_or(serde_json::Value::Null);
let state = SERVER_STATE
.get()
.ok_or_else(|| anyhow::anyhow!("Server state not initialized"))?;
let handlers = HANDLERS
.get()
.ok_or_else(|| anyhow::anyhow!("Handlers not initialized"))?;
match method_name.as_str() {
// Capability advertisement for the MCP handshake.
"initialize" => {
Ok(JsonRpcResponse::success(
id,
serde_json::json!({
"protocolVersion": "2024-11-05",
"capabilities": {
"tools": {
"listChanged": true
},
"prompts": {
"listChanged": true
},
"resources": {
"listChanged": true,
"subscribe": false
},
"logging": {},
"progress": true
},
"serverInfo": {
"name": "leindex",
"version": env!("CARGO_PKG_VERSION"),
"description": "LeIndex MCP Server - Semantic code indexing and analysis with PDG-based tools. Provides 18+ specialized tools for code comprehension: semantic search, symbol lookup, impact analysis, structural code queries, and intelligent editing. Uses Program Dependence Graphs for superior code understanding compared to traditional text-based tools."
}
}),
))
}
// Acknowledged with an empty result.
"notifications/initialized" => {
Ok(JsonRpcResponse::success(id, serde_json::json!({})))
}
"ping" => {
Ok(JsonRpcResponse::success(id, serde_json::json!({})))
}
"tools/call" => {
let result = handle_tool_call(state, handlers, &request).await;
Ok(JsonRpcResponse::from_result(id, result))
}
"tools/list" => {
Ok(JsonRpcResponse::success(id, list_tools_json(handlers)))
}
_ => Ok(JsonRpcResponse::error(
id,
crate::cli::mcp::protocol::JsonRpcError::method_not_found(method_name),
)),
}
}
/// CLI entry point: parses arguments and runs the selected command.
///
/// Help/version requests still trigger the post-install pass before clap
/// exits, so first-run messages are not lost to `--help`/`--version`.
pub async fn main() -> AnyhowResult<()> {
    let cli = match Cli::try_parse() {
        Ok(cli) => cli,
        Err(err) => {
            let is_display = matches!(
                err.kind(),
                ErrorKind::DisplayHelp | ErrorKind::DisplayVersion
            );
            if is_display {
                maybe_complete_post_install_actions(&Commands::Diagnostics);
            }
            // Prints the clap message and terminates the process (never returns).
            err.exit()
        }
    };
    cli.run().await
}
// Unit tests exercising argument parsing (clap) and tool-name resolution.
#[cfg(test)]
mod tests {
use super::*;
// Positional path parses into Commands::Index.
#[test]
fn test_cli_parsing() {
let cli = Cli::try_parse_from(["leindex", "index", "/path/to/project"]).unwrap();
assert!(matches!(cli.command, Some(Commands::Index { .. })));
}
#[test]
fn test_mcp_command_parsing() {
let cli = Cli::try_parse_from(["leindex", "mcp"]).unwrap();
assert!(matches!(cli.command, Some(Commands::Mcp { .. })));
}
// Top-level --stdio works without any subcommand.
#[test]
fn test_stdio_flag_parsing() {
let cli = Cli::try_parse_from(["leindex", "--stdio"]).unwrap();
assert!(cli.stdio);
}
// Default --top-k is 10 and the positional query is captured verbatim.
#[test]
fn test_search_command() {
let cli = Cli::try_parse_from(["leindex", "search", "test query"]).unwrap();
match cli.command {
Some(Commands::Search { query, top_k, .. }) => {
assert_eq!(query, "test query");
assert_eq!(top_k, 10);
}
_ => panic!("Expected Search command"),
}
}
#[test]
fn test_phase_command_parsing() {
let cli =
Cli::try_parse_from(["leindex", "phase", "--phase", "2", "--mode", "ultra"]).unwrap();
match cli.command {
Some(Commands::Phase {
phase, all, mode, ..
}) => {
assert_eq!(phase, Some(2));
assert!(!all);
assert_eq!(mode, "ultra");
}
_ => panic!("Expected Phase command"),
}
}
// Dashboard defaults: port 5173, dev mode.
#[test]
fn test_dashboard_command_parsing() {
let cli = Cli::try_parse_from(["leindex", "dashboard"]).unwrap();
match cli.command {
Some(Commands::Dashboard { port, prod }) => {
assert_eq!(port, 5173);
assert!(!prod);
}
_ => panic!("Expected Dashboard command"),
}
}
#[test]
fn test_dashboard_command_with_port() {
let cli = Cli::try_parse_from(["leindex", "dashboard", "--port", "3000"]).unwrap();
match cli.command {
Some(Commands::Dashboard { port, prod }) => {
assert_eq!(port, 3000);
assert!(!prod);
}
_ => panic!("Expected Dashboard command"),
}
}
#[test]
fn test_dashboard_command_prod() {
let cli = Cli::try_parse_from(["leindex", "dashboard", "--prod"]).unwrap();
match cli.command {
Some(Commands::Dashboard { port, prod }) => {
assert_eq!(port, 5173);
assert!(prod);
}
_ => panic!("Expected Dashboard command"),
}
}
#[test]
fn test_tools_help_command_parsing() {
let cli = Cli::try_parse_from(["leindex", "tools", "help", "project_map"]).unwrap();
match cli.command {
Some(Commands::Tools {
command: ToolCommands::Help { name },
}) => assert_eq!(name, "project_map"),
_ => panic!("Expected tools help command"),
}
}
// --args and repeated --set flags are captured separately (merging is tested elsewhere).
#[test]
fn test_tools_run_command_parsing() {
let cli = Cli::try_parse_from([
"leindex",
"tools",
"run",
"project_map",
"--args",
"{\"depth\":1}",
"--set",
"include_symbols=true",
])
.unwrap();
match cli.command {
Some(Commands::Tools {
command:
ToolCommands::Run {
name,
args_json,
set,
},
}) => {
assert_eq!(name, "project_map");
assert_eq!(args_json, "{\"depth\":1}");
assert_eq!(set, vec!["include_symbols=true"]);
}
_ => panic!("Expected tools run command"),
}
}
// Full, short, and kebab-case forms all resolve to the same handler.
#[test]
fn test_find_tool_handler_accepts_short_and_full_names() {
assert!(find_tool_handler("leindex_project_map").is_some());
assert!(find_tool_handler("project_map").is_some());
assert!(find_tool_handler("project-map").is_some());
}
}