use std::collections::{HashMap, HashSet};
use std::io::IsTerminal;
use std::path::{Path, PathBuf};
use std::process;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::Duration;
use clap::{Parser, Subcommand};
use piano::build::{
CargoTarget, build_instrumented, cargo_metadata, clean_stale_piano_files, find_current_package,
find_project_root, find_target, prebuild_runtime, prebuild_runtime_from_path,
};
use piano::error::{Error, io_context};
use piano::report::{
diff_runs, diff_runs_json, find_latest_run_file, find_latest_run_file_since,
find_ndjson_by_run_id, format_json, format_per_thread_tables, format_table, load_latest_run,
load_latest_runs_per_thread, load_ndjson, load_ndjson_per_thread, load_run, load_run_by_id,
load_tagged_run, load_two_latest_runs, relative_time, resolve_tag, reverse_resolve_tag,
save_tag,
};
use piano::resolve::{
ResolveResult, ResolvedTarget, SkippedFunction, TargetSpec, module_prefix, qualify,
resolve_targets,
};
// Top-level CLI parsed by clap.
// NOTE: plain `//` comments are used on clap types in this file (not `///`)
// because clap's derive turns doc comments into user-visible help text.
#[derive(Parser)]
#[command(
    name = "piano",
    about = "Automated instrumentation-based profiling for Rust",
    version,
    after_help = "Workflow: piano profile [OPTIONS] (or: piano build, piano run, piano report)"
)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
// Options shared by `piano build` and `piano profile` (flattened into both).
// NOTE: `//` comments (not `///`) so clap does not treat them as help text.
#[derive(Parser)]
struct BuildOpts {
    // Select functions whose name matches PATTERN (repeatable).
    #[arg(long = "fn", value_name = "PATTERN")]
    fn_patterns: Vec<String>,
    // Exact-match mode for --fn patterns; only meaningful with at least one --fn.
    #[arg(long, requires = "fn_patterns")]
    exact: bool,
    // Select functions by source file (repeatable).
    #[arg(long = "file", value_name = "PATH")]
    file_patterns: Vec<PathBuf>,
    // Select functions by module name (repeatable).
    #[arg(long = "mod", value_name = "NAME")]
    mod_patterns: Vec<String>,
    // Exclude specific functions (bare name or fully qualified).
    #[arg(long = "skip", value_name = "FUNC")]
    skip_patterns: Vec<String>,
    // Reserved: rejected at runtime until call-graph analysis exists.
    #[arg(long, value_name = "N")]
    depth: Option<usize>,
    // Project directory; defaults to the discovered Cargo root.
    #[arg(long)]
    project: Option<PathBuf>,
    // Use a local piano-runtime checkout instead of the bundled one.
    #[arg(long)]
    runtime_path: Option<PathBuf>,
    // Which bin target to build; mutually exclusive with --example.
    #[arg(long, conflicts_with = "example")]
    bin: Option<String>,
    // Which example target to build; mutually exclusive with --bin.
    #[arg(long, conflicts_with = "bin")]
    example: Option<String>,
    // Record CPU time (passed through to the runtime entry-point config).
    #[arg(long)]
    cpu_time: bool,
    // Where run data is written; defaults to target/piano/runs.
    #[arg(long, value_name = "DIR")]
    output_dir: Option<PathBuf>,
    // List functions that were skipped during resolution, then exit.
    #[arg(long)]
    list_skipped: bool,
}
// Subcommand tree. NOTE: `//` comments (not `///`) so clap's derive does not
// surface them as help text.
#[derive(Subcommand)]
enum Commands {
    // Instrument and compile without running.
    Build {
        #[command(flatten)]
        opts: BuildOpts,
    },
    // Run the most recently built instrumented binary.
    Run {
        // Stop the program after this many seconds (SIGTERM).
        #[arg(long, value_name = "SECONDS", value_parser = parse_duration_secs)]
        duration: Option<f64>,
        // Grace period after SIGTERM before a hard kill; 0 waits forever.
        #[arg(long, value_name = "SECONDS", default_value = "10", value_parser = parse_kill_timeout)]
        kill_timeout: f64,
        #[arg(long, value_name = "DIR")]
        output_dir: Option<PathBuf>,
        // Everything after `--` is forwarded to the program.
        #[arg(last = true)]
        args: Vec<String>,
    },
    // Build, run, and report in a single step.
    Profile {
        #[command(flatten)]
        opts: BuildOpts,
        // Show every function instead of the top N.
        #[arg(long, conflicts_with = "top")]
        all: bool,
        // Show only the N hottest functions (default 10).
        #[arg(long, value_name = "N", conflicts_with = "all")]
        top: Option<usize>,
        #[arg(long)]
        json: bool,
        // Per-thread breakdown instead of the aggregated view.
        #[arg(long)]
        threads: bool,
        // Report even when the program exits non-zero.
        #[arg(long)]
        ignore_exit_code: bool,
        #[arg(long, value_name = "SECONDS", value_parser = parse_duration_secs)]
        duration: Option<f64>,
        #[arg(long, value_name = "SECONDS", default_value = "10", value_parser = parse_kill_timeout)]
        kill_timeout: f64,
        #[arg(last = true)]
        args: Vec<String>,
    },
    // Render a saved run; `run` may be a file path or a tag name.
    Report {
        run: Option<PathBuf>,
        #[arg(long, conflicts_with = "top")]
        all: bool,
        #[arg(long, value_name = "N", conflicts_with = "all")]
        top: Option<usize>,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        threads: bool,
        // Skip the correction pass when loading ndjson data (see load_ndjson).
        #[arg(long)]
        uncorrected: bool,
        #[arg(long, value_name = "DIR")]
        output_dir: Option<PathBuf>,
    },
    // Compare two runs; with no arguments, the two most recent runs.
    Diff {
        a: Option<PathBuf>,
        b: Option<PathBuf>,
        #[arg(long, conflicts_with = "top")]
        all: bool,
        #[arg(long, value_name = "N", conflicts_with = "all")]
        top: Option<usize>,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "DIR")]
        output_dir: Option<PathBuf>,
    },
    // Name the latest run, or list saved tags when no name is given.
    Tag {
        name: Option<String>,
        #[arg(long, value_name = "DIR")]
        output_dir: Option<PathBuf>,
    },
}
/// Parses a `--duration` value: strictly positive, finite seconds that are
/// representable as a [`std::time::Duration`].
///
/// The accepted value is later fed to `Duration::from_secs_f64`, which panics
/// on overflow -- so the upper bound must be *inclusive*:
/// `Duration::MAX.as_secs_f64()` rounds up to 2^64, a value that is itself
/// not representable. The old `>` check let exactly that value through.
fn parse_duration_secs(s: &str) -> Result<f64, String> {
    let secs: f64 = s
        .parse()
        .map_err(|e: std::num::ParseFloatError| e.to_string())?;
    if secs.is_nan() || secs.is_infinite() {
        return Err("invalid duration".to_string());
    }
    if secs < 0.0 {
        return Err("duration cannot be negative".to_string());
    }
    // IEEE 754: -0.0 == 0.0, so "-0.0" lands here, not in the negative branch.
    if secs == 0.0 {
        return Err("duration cannot be zero".to_string());
    }
    // `>=` (not `>`): see the doc comment above.
    if secs >= std::time::Duration::MAX.as_secs_f64() {
        return Err("duration is too large".to_string());
    }
    Ok(secs)
}
/// Parses a `--kill-timeout` value: non-negative, finite seconds. Zero is
/// allowed and means "never escalate to a hard kill" (see `wait_or_kill` /
/// `run_child`).
///
/// Bug fix: unlike `parse_duration_secs`, this parser had no upper-bound
/// check, so a huge value (e.g. `--kill-timeout 1e300`) was accepted here and
/// later panicked inside `Duration::from_secs_f64`.
fn parse_kill_timeout(s: &str) -> Result<f64, String> {
    let secs: f64 = s
        .parse()
        .map_err(|e: std::num::ParseFloatError| e.to_string())?;
    if secs.is_nan() || secs.is_infinite() {
        return Err("invalid timeout".to_string());
    }
    if secs < 0.0 {
        return Err("timeout cannot be negative".to_string());
    }
    // Inclusive bound: Duration::MAX.as_secs_f64() rounds up to 2^64, which
    // Duration::from_secs_f64 cannot represent (it panics).
    if secs >= std::time::Duration::MAX.as_secs_f64() {
        return Err("timeout is too large".to_string());
    }
    Ok(secs)
}
/// Default number of rows shown when neither --all nor --top is given.
const DEFAULT_TOP_N: usize = 10;

/// Maps the `--all` / `--top N` flags onto `(show_all, limit, show_footer)`.
///
/// `show_footer` is true only for the implicit default view (neither flag
/// given), so the table can hint that more rows exist.
fn resolve_display_limit(all: bool, top: Option<usize>) -> (bool, Option<usize>, bool) {
    if all {
        return (true, None, false);
    }
    let explicit = top.is_some();
    let limit = top.unwrap_or(DEFAULT_TOP_N);
    (false, Some(limit), !explicit)
}
/// Entry point. The same binary doubles as cargo's RUSTC_WRAPPER: when the
/// piano config env var is set, delegate to the wrapper path and exit with
/// its status; otherwise parse the CLI and dispatch.
fn main() {
    // Wrapper mode: cargo re-invokes this executable for every rustc call.
    if let Ok(config_path) = std::env::var(piano::wrapper::CONFIG_ENV) {
        std::process::exit(piano::wrapper::run_wrapper(&config_path));
    }
    let cli = Cli::parse();
    if let Err(e) = run(cli) {
        eprintln!("error: {e}");
        process::exit(1);
    }
}
/// Dispatches the parsed CLI to the matching subcommand handler.
///
/// The project root is discovered once from the current directory; failure is
/// tolerated (`.ok()`) because some subcommands can still work via
/// `--project`, `--output-dir`, or `PIANO_RUNS_DIR`.
fn run(cli: Cli) -> Result<(), Error> {
    let project_root = find_project_root(&std::env::current_dir()?).ok();
    match cli.command {
        Commands::Build { opts } => cmd_build(opts, &project_root),
        Commands::Run {
            duration,
            kill_timeout,
            output_dir,
            args,
        } => cmd_run(duration, kill_timeout, output_dir, args, &project_root),
        Commands::Profile {
            opts,
            all,
            top,
            json,
            threads,
            ignore_exit_code,
            duration,
            kill_timeout,
            args,
        } => {
            // Normalize --all/--top into the shared display tuple.
            let (show_all, limit, show_footer) = resolve_display_limit(all, top);
            cmd_profile(
                opts,
                &project_root,
                show_all,
                limit,
                show_footer,
                json,
                threads,
                ignore_exit_code,
                duration,
                kill_timeout,
                args,
            )
        }
        Commands::Report {
            run,
            all,
            top,
            json,
            threads,
            uncorrected,
            output_dir,
        } => {
            let (show_all, limit, show_footer) = resolve_display_limit(all, top);
            cmd_report(
                run,
                show_all,
                limit,
                show_footer,
                json,
                threads,
                uncorrected,
                &project_root,
                output_dir,
            )
        }
        Commands::Diff {
            a,
            b,
            all,
            top,
            json,
            output_dir,
        } => {
            let (show_all, limit, show_footer) = resolve_display_limit(all, top);
            cmd_diff(
                a,
                b,
                show_all,
                limit,
                show_footer,
                json,
                &project_root,
                output_dir,
            )
        }
        Commands::Tag { name, output_dir } => cmd_tag(name, &project_root, output_dir),
    }
}
/// Returns the distinct skip reasons, sorted and joined with ", ",
/// for the "N function(s) skipped (...)" warning line.
fn unique_skip_reasons(skipped: &[SkippedFunction]) -> String {
    let mut reasons: Vec<String> = skipped.iter().map(|s| s.reason.to_string()).collect();
    // sort + dedup on adjacent equals reproduces BTreeSet's ordered, unique view.
    reasons.sort();
    reasons.dedup();
    reasons.join(", ")
}
/// Assigns a dense `u32` id to every distinct qualified function name, in
/// first-encounter order, and records the display name chosen for each.
///
/// `main` is deliberately excluded (it is never instrumented by name).
/// Returns `(name -> id, name -> display, next unused id)`.
fn assign_name_ids(
    all_qualified: &[piano::naming::QualifiedFunction],
    display_names: &[String],
) -> (HashMap<String, u32>, HashMap<String, String>, u32) {
    let mut name_ids: HashMap<String, u32> = HashMap::new();
    let mut displays: HashMap<String, String> = HashMap::new();
    let mut next_id: u32 = 0;
    for (qf, display) in all_qualified.iter().zip(display_names.iter()) {
        if qf.minimal == "main" {
            continue;
        }
        // First sighting wins for both the id and the display name.
        if !name_ids.contains_key(&qf.minimal) {
            name_ids.insert(qf.minimal.clone(), next_id);
            next_id += 1;
        }
        if !displays.contains_key(&qf.minimal) {
            displays.insert(qf.minimal.clone(), display.clone());
        }
    }
    (name_ids, displays, next_id)
}
/// Resolves instrumentation targets, writes the wrapper config, and compiles
/// the instrumented binary via the RUSTC_WRAPPER mechanism.
///
/// Returns `Ok(None)` in `--list-skipped` mode (informational only, nothing
/// is built); otherwise `Ok(Some((binary_path, runs_dir, total_fns)))`.
fn build_project(
    opts: BuildOpts,
    project_root: &Option<PathBuf>,
) -> Result<Option<(PathBuf, PathBuf, usize)>, Error> {
    let BuildOpts {
        fn_patterns,
        exact,
        file_patterns,
        mod_patterns,
        skip_patterns,
        depth,
        project,
        runtime_path,
        bin,
        example,
        cpu_time,
        output_dir,
        list_skipped,
    } = opts;
    // --depth is parsed but intentionally unimplemented for now.
    if depth.is_some() {
        return Err(Error::BuildFailed(
            "--depth requires call-graph analysis which is not yet implemented".into(),
        ));
    }
    // Project dir: explicit --project wins, else the discovered Cargo root.
    let project = match project {
        Some(p) => p,
        None => project_root.clone().ok_or_else(|| {
            Error::BuildFailed("could not find Cargo.toml in any parent directory".into())
        })?,
    };
    if !project.exists() {
        return Err(Error::BuildFailed(format!(
            "project directory does not exist: {}",
            project.display()
        )));
    }
    let project = std::fs::canonicalize(&project).map_err(io_context("canonicalize", &project))?;
    // Fold --fn/--file/--mod selectors into one spec list; an empty list
    // means "instrument everything".
    let mut specs: Vec<TargetSpec> = Vec::new();
    for p in fn_patterns {
        specs.push(TargetSpec::Fn(p));
    }
    for p in file_patterns {
        specs.push(TargetSpec::File(p));
    }
    for m in mod_patterns {
        specs.push(TargetSpec::Mod(m));
    }
    let metadata = cargo_metadata(&project)?;
    let workspace_root = metadata.workspace_root.canonicalize().map_err(|e| {
        Error::BuildFailed(format!(
            "failed to canonicalize workspace root {}: {e}",
            metadata.workspace_root.display()
        ))
    })?;
    // Pick the cargo target: --example wins over --bin (they conflict in clap).
    let target_kind = if example.is_some() { "example" } else { "bin" };
    let target_name = example.as_deref().or(bin.as_deref());
    let (package_name, bin_src_path) = if target_name.is_some() || metadata.packages.len() == 1 {
        // Explicit target name or single-package workspace: only filter by
        // package when there is actually a choice to disambiguate.
        let pkg_filter = if metadata.packages.len() == 1 {
            None
        } else {
            find_current_package(&metadata, &project).map(|p| p.name.as_str())
        };
        find_target(&metadata, pkg_filter, target_name, target_kind)?
    } else {
        // Multi-package workspace, no explicit target: build the package the
        // current directory belongs to.
        let pkg = find_current_package(&metadata, &project).ok_or_else(|| {
            Error::BuildFailed(format!(
                "could not determine which package to build in workspace at {}",
                workspace_root.display()
            ))
        })?;
        find_target(&metadata, Some(&pkg.name), None, "bin")?
    };
    let pkg = metadata
        .packages
        .iter()
        .find(|p| p.name == package_name)
        .expect("package must exist: find_bin_target returned it");
    let pkg_root = pkg
        .manifest_path
        .parent()
        .ok_or_else(|| Error::BuildFailed("package manifest has no parent directory".into()))?;
    let pkg_root = pkg_root.canonicalize().map_err(|e| {
        Error::BuildFailed(format!(
            "failed to canonicalize package root {}: {e}",
            pkg_root.display()
        ))
    })?;
    // Source dir to scan: conventionally src/, otherwise fall back to the
    // directory containing the target's entry source file.
    let src_dir = if pkg_root.join("src").is_dir() {
        pkg_root.join("src")
    } else {
        let bin_rel = bin_src_path
            .canonicalize()
            .unwrap_or_else(|_| bin_src_path.clone());
        bin_rel.parent().unwrap_or(&pkg_root).to_path_buf()
    };
    let ResolveResult {
        targets,
        skipped,
        all_functions,
    } = resolve_targets(&src_dir, &specs, exact)?;
    // Apply --skip: drop matching functions (bare last segment or fully
    // qualified name), then drop files left with no functions at all.
    let targets: Vec<ResolvedTarget> = if skip_patterns.is_empty() {
        targets
    } else {
        targets
            .into_iter()
            .filter_map(|mut t| {
                t.functions.retain(|qf| {
                    let bare = qf.minimal.rsplit("::").next().unwrap_or(&qf.minimal);
                    !skip_patterns
                        .iter()
                        .any(|skip| bare == skip.as_str() || qf.minimal == *skip)
                });
                if t.functions.is_empty() {
                    None
                } else {
                    Some(t)
                }
            })
            .collect()
    };
    // --list-skipped: print and bail out before doing any build work.
    if list_skipped {
        if skipped.is_empty() {
            eprintln!("no functions skipped");
        } else {
            for s in &skipped {
                println!("{}: {} ({})", s.path.display(), s.name, s.reason);
            }
        }
        return Ok(None);
    }
    // Only warn about skips when the user explicitly selected targets.
    if !specs.is_empty() && !skipped.is_empty() {
        eprintln!(
            "warning: {} function(s) skipped ({}) -- run piano build --list-skipped to see which",
            skipped.len(),
            unique_skip_reasons(&skipped)
        );
    }
    let total_fns: usize = targets.iter().map(|t| t.functions.len()).sum();
    eprintln!(
        "found {} function(s) across {} file(s)",
        total_fns,
        targets.len()
    );
    // "Instrument everything" with a large function count adds overhead;
    // nudge the user toward narrowing the scope.
    const INSTRUMENT_ALL_WARN_THRESHOLD: usize = 200;
    if specs.is_empty() && total_fns > INSTRUMENT_ALL_WARN_THRESHOLD {
        eprintln!(
            "warning: instrumenting {total_fns} functions may add overhead. \
Use --fn, --file, or --mod to narrow scope"
        );
    }
    // Echo the resolved plan so the user can see what will be instrumented.
    for target in &targets {
        let relative = target.file.strip_prefix(&src_dir).unwrap_or(&target.file);
        eprintln!(" {}:", relative.display());
        for qf in &target.functions {
            eprintln!(" {}", qf.minimal);
        }
    }
    // Package path inside the workspace; None when the package IS the root.
    let member_subdir = if pkg_root != workspace_root {
        Some(
            pkg_root
                .strip_prefix(&workspace_root)
                .map_err(|e| std::io::Error::other(e.to_string()))?
                .to_path_buf(),
        )
    } else {
        None
    };
    let src_rel = src_dir.strip_prefix(&pkg_root).unwrap_or(Path::new("src"));
    // Fully qualify every selected function with its module prefix so names
    // are unique across files.
    let mut all_qualified: Vec<piano::naming::QualifiedFunction> = Vec::new();
    for target in &targets {
        let prefix = module_prefix(target.file.strip_prefix(&src_dir).unwrap_or(&target.file));
        for qf in &target.functions {
            all_qualified.push(piano::naming::QualifiedFunction::new(
                &qualify(&prefix, &qf.minimal),
                &qualify(&prefix, &qf.medium),
                &qualify(&prefix, &qf.full),
            ));
        }
    }
    let display_names = piano::naming::disambiguate(&all_qualified);
    let (global_name_ids, global_display_names, _next_id) =
        assign_name_ids(&all_qualified, &display_names);
    // Set of qualified names actually selected for measurement.
    let measured_names: HashSet<String> = targets
        .iter()
        .flat_map(|t| {
            let prefix = module_prefix(t.file.strip_prefix(&src_dir).unwrap_or(&t.file));
            t.functions
                .iter()
                .map(move |qf| qualify(&prefix, &qf.minimal))
        })
        .collect();
    let target_dir = project.join("target").join("piano");
    eprintln!("pre-building piano-runtime...");
    // Build the runtime rlib once up front so the wrapper can link it.
    let runtime = match runtime_path {
        Some(ref path) => {
            let abs_path = std::fs::canonicalize(path).map_err(io_context("canonicalize", path))?;
            prebuild_runtime_from_path(&abs_path, &project, &target_dir, &[])?
        }
        None => prebuild_runtime(&project, &target_dir, &[])?,
    };
    clean_stale_piano_files(&src_dir)?;
    piano::staging::clean_stale_staging(&target_dir);
    let runs_dir = match &output_dir {
        Some(dir) => dir.clone(),
        None => target_dir.join("runs"),
    };
    std::fs::create_dir_all(&runs_dir).map_err(io_context("create directory", &runs_dir))?;
    // Map each workspace-relative source file to the {minimal name -> id}
    // table for its measured functions -- this is what the wrapper consumes.
    let mut targets_relative: HashMap<PathBuf, HashMap<String, u32>> = HashMap::new();
    for af in &all_functions {
        let relative = af.file.strip_prefix(&src_dir).unwrap_or(&af.file);
        let prefix = module_prefix(relative);
        let measured: HashMap<String, u32> = af
            .functions
            .iter()
            .filter_map(|qf| {
                let qualified = qualify(&prefix, &qf.minimal);
                if measured_names.contains(&qualified) {
                    global_name_ids
                        .get(&qualified)
                        .map(|&id| (qf.minimal.clone(), id))
                } else {
                    None
                }
            })
            .collect();
        // Keys are workspace-relative paths: [member_subdir/]src_rel/relative.
        let ws_relative = if let Some(ref sub) = member_subdir {
            PathBuf::from(sub).join(src_rel).join(relative)
        } else {
            PathBuf::from(src_rel).join(relative)
        };
        targets_relative.insert(ws_relative, measured);
    }
    let bin_src_canonical = bin_src_path.canonicalize().map_err(|e| {
        Error::BuildFailed(format!(
            "failed to canonicalize binary source path {}: {e}",
            bin_src_path.display()
        ))
    })?;
    let bin_entry_relative = bin_src_canonical
        .strip_prefix(&workspace_root)
        .map_err(|_| {
            Error::BuildFailed(format!(
                "binary source {} is outside workspace root {}",
                bin_src_canonical.display(),
                workspace_root.display()
            ))
        })?
        .to_path_buf();
    // id -> display-name table, sorted by id for stable output.
    let mut name_table: Vec<(u32, String)> = global_name_ids
        .iter()
        .map(|(qualified, &id)| {
            let display = global_display_names
                .get(qualified)
                .cloned()
                .unwrap_or_else(|| qualified.clone());
            (id, display)
        })
        .collect();
    name_table.sort_by_key(|(id, _)| *id);
    // Every file the wrapper will modify, including the entry point.
    let mut modified_files: std::collections::HashSet<std::path::PathBuf> =
        targets_relative.keys().cloned().collect();
    modified_files.insert(bin_entry_relative.clone());
    let config = piano::wrapper::WrapperConfig {
        runtime_rlib: runtime.rlib_path,
        runtime_deps_dir: runtime.deps_dir,
        entry_point: piano::wrapper::EntryPointConfig {
            source_path: bin_entry_relative,
            name_table,
            runs_dir: runs_dir.clone(),
            cpu_time,
        },
        targets: targets_relative,
    };
    // Persist the config where the RUSTC_WRAPPER re-invocation finds it
    // (via CONFIG_ENV; see main()).
    let config_path = target_dir.join("config.json");
    let config_json = serde_json::to_string(&config)
        .map_err(|e| Error::BuildFailed(format!("failed to serialize wrapper config: {e}")))?;
    std::fs::write(&config_path, config_json)
        .map_err(io_context("write wrapper config", &config_path))?;
    // cargo needs -p only when building a workspace member.
    let pkg_arg = if member_subdir.is_some() {
        Some(package_name.as_str())
    } else {
        None
    };
    let cargo_target = if let Some(ref ex) = example {
        Some(CargoTarget::Example(ex.as_str()))
    } else {
        bin.as_deref().map(CargoTarget::Bin)
    };
    let binary = build_instrumented(
        &workspace_root,
        &target_dir,
        pkg_arg,
        cargo_target,
        &config_path,
        &modified_files,
    )?;
    Ok(Some((binary, runs_dir, total_fns)))
}
/// `piano build`: instrument and compile, then report where the binary is.
///
/// The path is printed to stdout only when stdout is not a terminal, so
/// scripts can capture it while interactive users just see the summary line.
fn cmd_build(opts: BuildOpts, project_root: &Option<PathBuf>) -> Result<(), Error> {
    let built = build_project(opts, project_root)?;
    let Some((binary, _runs_dir, _total_fns)) = built else {
        // --list-skipped mode: nothing was built.
        return Ok(());
    };
    let display_name = match binary.file_name() {
        Some(n) => n.to_string_lossy().into_owned(),
        None => binary.display().to_string(),
    };
    eprintln!("built: {display_name}");
    let piped = !std::io::stdout().is_terminal();
    if piped {
        println!("{}", binary.display());
    }
    Ok(())
}
/// True when a file extension marks an executable for this platform:
/// only `.exe`, and only on Windows. Elsewhere executables have no
/// extension, so every extension is rejected.
fn is_binary_extension(ext: &std::ffi::OsStr) -> bool {
    cfg!(windows) && ext.to_str().is_some_and(|e| e == "exe")
}
/// Returns the most recently modified instrumented binary under
/// `target/piano/release` (including its `examples/` subdirectory).
///
/// Candidate filter: regular files only; a file with an extension is kept
/// only if `is_binary_extension` accepts it (filters out .d/.rlib/... build
/// artifacts); on Unix at least one execute bit is required.
fn find_latest_binary(project_root: &Option<PathBuf>) -> Result<PathBuf, Error> {
    let project = project_root.as_ref().ok_or(Error::NoBinary)?;
    let release_dir = project.join("target/piano/release");
    if !release_dir.is_dir() {
        return Err(Error::NoBinary);
    }
    let mut dirs = vec![release_dir.clone()];
    let examples_dir = release_dir.join("examples");
    if examples_dir.is_dir() {
        dirs.push(examples_dir);
    }
    // Track the newest candidate by mtime across both directories.
    let mut best: Option<(PathBuf, std::time::SystemTime)> = None;
    for dir in &dirs {
        let entries = std::fs::read_dir(dir).map_err(io_context("read directory", dir))?;
        for entry in entries {
            let entry = entry.map_err(io_context("read directory entry", dir))?;
            let path = entry.path();
            if !path.is_file() {
                continue;
            }
            // Extensionless files fall through (they may be Unix executables).
            if let Some(ext) = path.extension() {
                if !is_binary_extension(ext) {
                    continue;
                }
            }
            let meta = entry
                .metadata()
                .map_err(io_context("read metadata", &path))?;
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                // Require at least one execute bit (user/group/other).
                if meta.permissions().mode() & 0o111 == 0 {
                    continue;
                }
            }
            let mtime = meta
                .modified()
                .map_err(io_context("read modified time", &path))?;
            if best.as_ref().is_none_or(|(_, t)| mtime > *t) {
                best = Some((path, mtime));
            }
        }
    }
    best.map(|(p, _)| p).ok_or(Error::NoBinary)
}
/// Why the supervised child process stopped (see `run_child`).
enum StopReason {
    // Exited on its own.
    Normal,
    // --duration elapsed; we sent SIGTERM and the child exited.
    Duration,
    // Ctrl-C was forwarded to the child.
    Interrupted,
    // Child ignored SIGTERM and was hard-killed after the kill timeout.
    ForceKilled,
}
/// Exit status of the child plus why it stopped.
struct ChildOutcome {
    status: process::ExitStatus,
    stop_reason: StopReason,
}
// Set by the SIGINT handler; polled by the supervision loop in `run_child`.
static INTERRUPTED: AtomicBool = AtomicBool::new(false);
/// Politely asks the child to stop: SIGTERM on Unix, TerminateProcess on
/// Windows. Failures are deliberately ignored -- callers escalate via
/// `wait_or_kill` if the child does not exit.
fn terminate_child(child: &process::Child) {
    // SAFETY: kill(2) with a pid from a live Child handle; signalling an
    // already-exited pid just returns an error, which we ignore.
    #[cfg(unix)]
    unsafe {
        libc::kill(child.id() as libc::pid_t, libc::SIGTERM);
    }
    // SAFETY: hand-declared Win32 FFI (avoids a winapi dependency). The
    // handle is null-checked and closed after use.
    #[cfg(windows)]
    unsafe {
        extern "system" {
            fn OpenProcess(access: u32, inherit: i32, pid: u32) -> *mut std::ffi::c_void;
            fn TerminateProcess(handle: *mut std::ffi::c_void, exit_code: u32) -> i32;
            fn CloseHandle(handle: *mut std::ffi::c_void) -> i32;
        }
        const PROCESS_TERMINATE: u32 = 0x0001;
        let handle = OpenProcess(PROCESS_TERMINATE, 0, child.id());
        if !handle.is_null() {
            TerminateProcess(handle, 1);
            CloseHandle(handle);
        }
    }
}
/// Waits for `child` to exit, hard-killing it if it is still running after
/// `grace`. Returns the exit status and whether a kill was necessary.
///
/// A zero grace means "no deadline": block until the child exits on its own.
fn wait_or_kill(
    child: &mut process::Child,
    grace: Duration,
    binary: &Path,
) -> Result<(process::ExitStatus, bool), Error> {
    if grace == Duration::ZERO {
        let status = child.wait().map_err(|e| {
            Error::RunFailed(format!("failed to wait for {}: {e}", binary.display()))
        })?;
        return Ok((status, false));
    }
    let start = std::time::Instant::now();
    // Poll at 10ms granularity instead of blocking so the deadline is honored.
    loop {
        match child.try_wait() {
            Ok(Some(status)) => return Ok((status, false)),
            Ok(None) => {
                if start.elapsed() >= grace {
                    // Deadline passed: kill (error ignored -- the child may
                    // have just exited) and reap.
                    let _ = child.kill();
                    let status = child.wait().map_err(|e| {
                        Error::RunFailed(format!("failed to wait for {}: {e}", binary.display()))
                    })?;
                    return Ok((status, true));
                }
                std::thread::sleep(Duration::from_millis(10));
            }
            Err(e) => {
                return Err(Error::RunFailed(format!(
                    "failed to wait for {}: {e}",
                    binary.display()
                )));
            }
        }
    }
}
/// Spawns the instrumented binary and supervises it: forwards Ctrl-C as
/// SIGTERM, enforces the optional `--duration` limit, and escalates to a
/// hard kill when the child ignores SIGTERM for longer than `kill_timeout`
/// (zero disables escalation).
///
/// `suppress_stdout` nulls the child's stdout (used in JSON mode so program
/// output does not mix with the report on stdout).
fn run_child(
    binary: &Path,
    args: &[String],
    timeout: Option<Duration>,
    kill_timeout: Duration,
    suppress_stdout: bool,
    env: &[(&str, &str)],
) -> Result<ChildOutcome, Error> {
    // Reset the flag in case a previous run left it set.
    INTERRUPTED.store(false, Ordering::SeqCst);
    #[cfg(unix)]
    {
        extern "C" fn sigint_handler(_sig: i32) {
            INTERRUPTED.store(true, Ordering::SeqCst);
        }
        // SAFETY: installs an async-signal-safe handler that only stores to
        // an atomic. SA_RESETHAND restores the default disposition after the
        // first Ctrl-C, so a second Ctrl-C terminates piano itself.
        unsafe {
            let mut act: libc::sigaction = std::mem::zeroed();
            act.sa_sigaction = sigint_handler as *const () as usize;
            act.sa_flags = libc::SA_RESETHAND;
            libc::sigaction(libc::SIGINT, &act, std::ptr::null_mut());
        }
    }
    let mut cmd = process::Command::new(binary);
    cmd.args(args);
    for &(key, val) in env {
        cmd.env(key, val);
    }
    if suppress_stdout {
        cmd.stdout(process::Stdio::null());
    }
    let mut child = cmd
        .spawn()
        .map_err(|e| Error::RunFailed(format!("failed to run {}: {e}", binary.display())))?;
    if let Some(dur) = timeout {
        eprintln!("will stop after {}s", dur.as_secs_f64());
    }
    let start = std::time::Instant::now();
    let poll_interval = Duration::from_millis(10);
    // Set when --duration fires and we SIGTERM the child: a later exit is
    // then classified as Duration, and the grace timer starts from here.
    let mut sigterm_sent_at: Option<std::time::Instant> = None;
    let outcome = loop {
        match child.try_wait() {
            Ok(Some(status)) => {
                let stop_reason = if sigterm_sent_at.is_some() {
                    StopReason::Duration
                } else if INTERRUPTED.load(Ordering::SeqCst) {
                    StopReason::Interrupted
                } else {
                    StopReason::Normal
                };
                break ChildOutcome {
                    status,
                    stop_reason,
                };
            }
            Ok(None) => {}
            Err(e) => {
                return Err(Error::RunFailed(format!(
                    "failed to wait for {}: {e}",
                    binary.display()
                )));
            }
        }
        // --duration elapsed: ask the child to stop (exactly once).
        if let Some(dur) = timeout {
            if sigterm_sent_at.is_none() && start.elapsed() >= dur {
                terminate_child(&child);
                sigterm_sent_at = Some(std::time::Instant::now());
            }
        }
        // Ctrl-C seen: forward SIGTERM and wait synchronously, escalating to
        // a kill after kill_timeout if needed.
        if INTERRUPTED.load(Ordering::SeqCst) {
            terminate_child(&child);
            let (status, force_killed) = wait_or_kill(&mut child, kill_timeout, binary)?;
            break ChildOutcome {
                status,
                stop_reason: if force_killed {
                    StopReason::ForceKilled
                } else {
                    StopReason::Interrupted
                },
            };
        }
        // SIGTERM grace period expired: hard kill. kill_timeout == 0 means
        // "never escalate".
        if let Some(sent_at) = sigterm_sent_at {
            if kill_timeout > Duration::ZERO && sent_at.elapsed() >= kill_timeout {
                let _ = child.kill();
                let status = child.wait().map_err(|e| {
                    Error::RunFailed(format!("failed to wait for {}: {e}", binary.display()))
                })?;
                break ChildOutcome {
                    status,
                    stop_reason: StopReason::ForceKilled,
                };
            }
        }
        std::thread::sleep(poll_interval);
    };
    // SAFETY: restore the default SIGINT disposition now that supervision is
    // over; resetting to SIG_DFL with a zeroed sigaction is always valid.
    #[cfg(unix)]
    unsafe {
        let mut act: libc::sigaction = std::mem::zeroed();
        act.sa_sigaction = libc::SIG_DFL;
        libc::sigaction(libc::SIGINT, &act, std::ptr::null_mut());
    }
    Ok(outcome)
}
/// `piano run`: execute the most recently built instrumented binary.
///
/// Never returns normally -- always exits the process: 0 after an intentional
/// stop (--duration elapsed or force-killed), 130 after Ctrl-C, otherwise the
/// child's own exit code (1 when terminated by a signal with no code).
fn cmd_run(
    duration: Option<f64>,
    kill_timeout: f64,
    output_dir: Option<PathBuf>,
    args: Vec<String>,
    project_root: &Option<PathBuf>,
) -> Result<(), Error> {
    let binary = find_latest_binary(project_root)?;
    eprintln!("running: {}", binary.display());
    eprintln!("--- program output ---");
    // --output-dir is forwarded to the child via the runtime's env var.
    let output_dir_str = output_dir
        .as_ref()
        .map(|d| d.to_string_lossy().into_owned());
    let env: Vec<(&str, &str)> = match &output_dir_str {
        Some(dir) => vec![("PIANO_RUNS_DIR", dir.as_str())],
        None => vec![],
    };
    let timeout = duration.map(Duration::from_secs_f64);
    let kill_dur = Duration::from_secs_f64(kill_timeout);
    let outcome = run_child(&binary, &args, timeout, kill_dur, false, &env)?;
    if matches!(outcome.stop_reason, StopReason::ForceKilled) {
        eprintln!("warning: program did not respond to SIGTERM -- terminated after --kill-timeout");
    }
    match outcome.stop_reason {
        StopReason::Duration | StopReason::ForceKilled => std::process::exit(0),
        StopReason::Interrupted => std::process::exit(130),
        StopReason::Normal => std::process::exit(outcome.status.code().unwrap_or(1)),
    }
}
/// `piano profile`: build, run, then report -- the one-step workflow.
///
/// Exit-code policy: if the program failed on its own (not stopped by us) and
/// --ignore-exit-code was not given, any report is still printed but piano
/// exits with the child's code.
#[allow(clippy::too_many_arguments)]
fn cmd_profile(
    opts: BuildOpts,
    project_root: &Option<PathBuf>,
    show_all: bool,
    limit: Option<usize>,
    show_footer: bool,
    json: bool,
    threads: bool,
    ignore_exit_code: bool,
    duration: Option<f64>,
    kill_timeout: f64,
    args: Vec<String>,
) -> Result<(), Error> {
    let Some((binary, runs_dir, total_fns)) = build_project(opts, project_root)? else {
        // --list-skipped mode: nothing was built, nothing to run.
        return Ok(());
    };
    let display_name = binary
        .file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_else(|| binary.display().to_string());
    eprintln!("built: {display_name}");
    eprintln!("--- program output ---");
    // Timestamp taken before the run so we can later tell whether the child
    // actually produced a new run file.
    let profile_start_ms = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default()
        .as_millis();
    let child_env: Vec<(&str, &str)> = vec![];
    let timeout = duration.map(Duration::from_secs_f64);
    let kill_dur = Duration::from_secs_f64(kill_timeout);
    // In JSON mode the child's stdout is suppressed so the report is the
    // only thing written to stdout.
    let outcome = run_child(&binary, &args, timeout, kill_dur, json, &child_env)?;
    let intentional_stop = matches!(
        outcome.stop_reason,
        StopReason::Duration | StopReason::Interrupted | StopReason::ForceKilled
    );
    if matches!(outcome.stop_reason, StopReason::ForceKilled) {
        eprintln!("warning: program did not respond to SIGTERM -- terminated after --kill-timeout");
    }
    if !outcome.status.success() && !ignore_exit_code && !intentional_stop {
        if let Some(code) = outcome.status.code() {
            eprintln!(
                "warning: program exited with code {code}; profiling results may be incomplete"
            );
        } else {
            eprintln!("warning: program terminated by signal; profiling results may be incomplete");
        }
    }
    // Make this run's output dir the default for the report step below.
    // SAFETY: this file spawns no threads, so mutating the environment here
    // cannot race with a concurrent reader.
    if std::env::var_os("PIANO_RUNS_DIR").is_none() {
        unsafe { std::env::set_var("PIANO_RUNS_DIR", &runs_dir) };
    }
    let effective_runs_dir = default_runs_dir(project_root)?;
    // No run file newer than the profile start means the child wrote no data.
    // Distinguish "program failed", "nothing instrumented", and
    // "instrumented but silent".
    if find_latest_run_file_since(&effective_runs_dir, profile_start_ms)?.is_none() {
        if !outcome.status.success() && !ignore_exit_code && !intentional_stop {
            return Ok(());
        }
        if total_fns == 0 {
            return Err(Error::NoFunctionsInstrumented);
        }
        return Err(Error::NoDataWritten(effective_runs_dir));
    }
    eprintln!("--- profiling report ---");
    let report_result = match cmd_report(
        None,
        show_all,
        limit,
        show_footer,
        json,
        threads,
        false,
        project_root,
        None,
    ) {
        Ok(()) => Ok(()),
        // Suppress NoRuns when the program itself failed -- the failure (and
        // its exit code below) is the interesting signal.
        Err(Error::NoRuns)
            if !outcome.status.success() && !ignore_exit_code && !intentional_stop =>
        {
            Ok(())
        }
        Err(Error::NoRuns) if total_fns == 0 => {
            Err(Error::NoFunctionsInstrumented)
        }
        Err(Error::NoRuns) => {
            Err(Error::NoDataWritten(runs_dir))
        }
        Err(e) => Err(e),
    };
    report_result?;
    // Propagate the child's failure code after the report has been shown.
    if !outcome.status.success() && !ignore_exit_code && !intentional_stop {
        std::process::exit(outcome.status.code().unwrap_or(1));
    }
    Ok(())
}
/// `piano report`: render a saved run as a table or JSON.
///
/// `run_path` may be an existing file, a tag name, or None (latest run).
/// Dispatch: tag-resolved legacy (non-ndjson) runs print immediately; ndjson
/// files get the full per-thread/uncorrected handling; everything else goes
/// through the legacy `load_run` path.
#[allow(clippy::too_many_arguments)]
fn cmd_report(
    run_path: Option<PathBuf>,
    show_all: bool,
    limit: Option<usize>,
    show_footer: bool,
    json: bool,
    threads: bool,
    uncorrected: bool,
    project_root: &Option<PathBuf>,
    output_dir: Option<PathBuf>,
) -> Result<(), Error> {
    if let Some(ref dir) = output_dir {
        // SAFETY: this file spawns no threads, so mutating the environment
        // cannot race with a concurrent reader.
        unsafe { std::env::set_var("PIANO_RUNS_DIR", dir) };
    }
    let resolved_path = match &run_path {
        // Existing file: use it directly.
        Some(p) if p.exists() => Some(p.clone()),
        // Not a file on disk: treat the argument as a tag name.
        Some(p) => {
            let tag = p.to_string_lossy();
            let tags_dir = default_tags_dir(project_root)?;
            let runs_dir = default_runs_dir(project_root)?;
            let run_id = resolve_tag(&tags_dir, &tag)?;
            match find_ndjson_by_run_id(&runs_dir, &run_id)? {
                Some(ndjson_path) => Some(ndjson_path),
                None => {
                    // Tag points at a legacy run: load, print, return early.
                    // NoRuns is remapped so the error names the tag the user typed.
                    let run = load_run_by_id(&runs_dir, &run_id).map_err(|e| match e {
                        Error::NoRuns => Error::RunNotFound {
                            tag: tag.to_string(),
                        },
                        other => other,
                    })?;
                    if json {
                        println!("{}", format_json(&run, show_all, limit));
                    } else {
                        anstream::print!("{}", format_table(&run, show_all, limit, show_footer));
                    }
                    return Ok(());
                }
            }
        }
        None => {
            let dir = default_runs_dir(project_root)?;
            find_latest_run_file(&dir)?
        }
    };
    // ndjson runs support per-thread views and the uncorrected toggle.
    if let Some(path) = &resolved_path
        && path.extension().and_then(|e| e.to_str()) == Some("ndjson")
    {
        if threads {
            let thread_runs = load_ndjson_per_thread(path, uncorrected)?;
            match thread_runs {
                Some(runs) => {
                    if json {
                        // Wrap each thread's report: {"thread": i, "functions": ...}.
                        // format_json's output is re-parsed to embed it as JSON,
                        // not as an escaped string.
                        let entries: Vec<_> = runs
                            .iter()
                            .enumerate()
                            .map(|(i, run)| {
                                serde_json::json!({
                                    "thread": i + 1,
                                    "functions": serde_json::from_str::<serde_json::Value>(
                                        &format_json(run, show_all, limit)
                                    ).unwrap_or_default()
                                })
                            })
                            .collect();
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&entries)
                                .expect("JSON serialization should not fail")
                        );
                    } else {
                        anstream::print!(
                            "{}",
                            format_per_thread_tables(&runs, show_all, limit, show_footer)
                        );
                    }
                }
                None => {
                    // File predates per-thread tracking: warn, fall back to
                    // the aggregated view.
                    eprintln!(
                        "warning: --threads requires per-thread data; this file predates thread tracking"
                    );
                    eprintln!("warning: showing aggregated view instead");
                    let (run, _completeness) = load_ndjson(path, uncorrected)?;
                    if json {
                        println!("{}", format_json(&run, show_all, limit));
                    } else {
                        anstream::print!("{}", format_table(&run, show_all, limit, show_footer));
                    }
                }
            }
        } else {
            let (run, _completeness) = load_ndjson(path, uncorrected)?;
            if json {
                println!("{}", format_json(&run, show_all, limit));
            } else {
                anstream::print!("{}", format_table(&run, show_all, limit, show_footer));
            }
        }
        return Ok(());
    }
    // Non-ndjson path with --threads: use the latest per-thread runs.
    if threads {
        let dir = default_runs_dir(project_root)?;
        let thread_runs = load_latest_runs_per_thread(&dir)?;
        anstream::print!(
            "{}",
            format_per_thread_tables(&thread_runs, show_all, limit, show_footer)
        );
        return Ok(());
    }
    // Legacy single-run path.
    let run = match resolved_path {
        Some(p) => load_run(&p)?,
        None => {
            let dir = default_runs_dir(project_root)?;
            load_latest_run(&dir)?
        }
    };
    if json {
        println!("{}", format_json(&run, show_all, limit));
    } else {
        anstream::print!("{}", format_table(&run, show_all, limit, show_footer));
    }
    Ok(())
}
/// Derives a short display label for a diff argument: the file stem when the
/// argument names an existing file, otherwise the argument verbatim (it is
/// presumably a tag name).
fn diff_label(arg: &Path) -> String {
    if !arg.exists() {
        return arg.to_string_lossy().into_owned();
    }
    match arg.file_stem() {
        Some(stem) => stem.to_string_lossy().into_owned(),
        None => arg.to_string_lossy().into_owned(),
    }
}
/// `piano diff`: compare two runs, or the two most recent runs when no
/// arguments are given. Exactly zero or two run arguments are valid.
#[allow(clippy::too_many_arguments)]
fn cmd_diff(
    a: Option<PathBuf>,
    b: Option<PathBuf>,
    show_all: bool,
    limit: Option<usize>,
    _show_footer: bool,
    json: bool,
    project_root: &Option<PathBuf>,
    output_dir: Option<PathBuf>,
) -> Result<(), Error> {
    if let Some(ref dir) = output_dir {
        // SAFETY: this file spawns no threads, so mutating the environment
        // cannot race with a concurrent reader.
        unsafe { std::env::set_var("PIANO_RUNS_DIR", dir) };
    }
    match (a, b) {
        (Some(a), Some(b)) => {
            // Labels come from the raw arguments (file stem or tag text).
            let label_a = diff_label(&a);
            let label_b = diff_label(&b);
            let run_a = resolve_run_arg(&a, project_root)?;
            let run_b = resolve_run_arg(&b, project_root)?;
            if json {
                println!("{}", diff_runs_json(&run_a, &run_b, show_all, limit));
            } else {
                anstream::print!(
                    "{}",
                    diff_runs(&run_a, &run_b, &label_a, &label_b, show_all, limit)
                );
            }
        }
        (None, None) => {
            // No arguments: diff the previous run against the latest one.
            let runs_dir = default_runs_dir(project_root)?;
            let tags_dir = default_tags_dir(project_root).ok();
            let (previous, latest) = load_two_latest_runs(&runs_dir)?;
            if json {
                println!("{}", diff_runs_json(&previous, &latest, show_all, limit));
            } else {
                let label_a = resolve_diff_label(&tags_dir, &previous, &runs_dir, "previous");
                let label_b = resolve_diff_label(&tags_dir, &latest, &runs_dir, "latest");
                eprintln!("comparing: {label_a} vs {label_b}");
                anstream::print!(
                    "{}",
                    diff_runs(&previous, &latest, &label_a, &label_b, show_all, limit)
                );
            }
        }
        // Exactly one argument is ambiguous -- reject it.
        _ => {
            return Err(Error::DiffArgCount);
        }
    }
    Ok(())
}
/// Produces a human-friendly label for one side of an implicit diff.
///
/// Preference order: a tag that points at the run's id; then
/// "<role> (<relative mtime>)" derived from the run file on disk; finally
/// "<role> (<timestamp>)" as a last resort.
fn resolve_diff_label(
    tags_dir: &Option<PathBuf>,
    run: &piano::report::Run,
    runs_dir: &Path,
    role: &str,
) -> String {
    if let (Some(tags), Some(run_id)) = (tags_dir, &run.run_id) {
        if let Some(tag) = reverse_resolve_tag(tags, run_id) {
            return tag;
        }
    }
    // Run files are named by millisecond timestamp; try both known formats.
    let stem = run.timestamp_ms.to_string();
    for ext in &["ndjson", "json"] {
        let path = runs_dir.join(format!("{stem}.{ext}"));
        if let Ok(meta) = std::fs::metadata(&path) {
            if let Ok(modified) = meta.modified() {
                let rel = relative_time(modified);
                return format!("{role} ({rel})");
            }
        }
    }
    format!("{role} ({stem})")
}
/// `piano tag`: with a name, bind it to the latest run's id; without one,
/// list saved tags (one per line on stdout).
fn cmd_tag(
    name: Option<String>,
    project_root: &Option<PathBuf>,
    output_dir: Option<PathBuf>,
) -> Result<(), Error> {
    if let Some(ref dir) = output_dir {
        // SAFETY: this file spawns no threads, so mutating the environment
        // cannot race with a concurrent reader.
        unsafe { std::env::set_var("PIANO_RUNS_DIR", dir) };
    }
    // No name: list mode.
    let Some(name) = name else {
        let tags_dir = match default_tags_dir(project_root) {
            Ok(dir) => dir,
            // A missing tags dir just means nothing has been tagged yet.
            Err(Error::NoRuns) => {
                eprintln!("no tags saved");
                return Ok(());
            }
            Err(e) => return Err(e),
        };
        // Tags are stored one file per tag; list the file names.
        let mut entries: Vec<String> = std::fs::read_dir(&tags_dir)
            .map_err(|source| Error::TagReadError {
                path: tags_dir.clone(),
                source,
            })?
            .filter_map(|entry| {
                let entry = entry.ok()?;
                if entry.file_type().ok()?.is_file() {
                    Some(entry.file_name().to_string_lossy().into_owned())
                } else {
                    None
                }
            })
            .collect();
        if entries.is_empty() {
            eprintln!("no tags saved");
            return Ok(());
        }
        entries.sort();
        for tag in &entries {
            println!("{tag}");
        }
        return Ok(());
    };
    // Tagging mode: bind `name` to the most recent run's id.
    let runs_dir = default_runs_dir(project_root)?;
    let tags_dir = default_tags_dir(project_root)?;
    let latest = load_latest_run(&runs_dir)?;
    // A run without an id cannot be tagged.
    let run_id = latest.run_id.ok_or(Error::NoRuns)?;
    save_tag(&tags_dir, &name, &run_id)?;
    eprintln!("tagged '{name}'");
    Ok(())
}
/// Loads a run from a diff argument: an existing file is loaded directly,
/// anything else is resolved as a saved tag name.
fn resolve_run_arg(
    arg: &Path,
    project_root: &Option<PathBuf>,
) -> Result<piano::report::Run, Error> {
    if !arg.exists() {
        // Not a file on disk: treat the argument as a tag.
        let tag = arg.to_string_lossy();
        let tags_dir = default_tags_dir(project_root)?;
        let runs_dir = default_runs_dir(project_root)?;
        return load_tagged_run(&tags_dir, &runs_dir, &tag);
    }
    load_run(arg)
}
/// Resolves where run data lives: the `PIANO_RUNS_DIR` override if set,
/// otherwise `<project>/target/piano/runs` when it exists.
fn default_runs_dir(project_root: &Option<PathBuf>) -> Result<PathBuf, Error> {
    // An explicit env override always wins, whether or not the dir exists.
    match std::env::var("PIANO_RUNS_DIR") {
        Ok(dir) => Ok(PathBuf::from(dir)),
        Err(_) => {
            let project = project_root.as_ref().ok_or(Error::NoRuns)?;
            let local = project.join("target/piano/runs");
            if local.is_dir() {
                Ok(local)
            } else {
                Err(Error::NoRuns)
            }
        }
    }
}
fn default_tags_dir(project_root: &Option<PathBuf>) -> Result<PathBuf, Error> {
let project = project_root.as_ref().ok_or(Error::NoRuns)?;
let local = project.join("target/piano/tags");
if local.is_dir() {
return Ok(local);
}
let runs_local = project.join("target/piano/runs");
if runs_local.is_dir() {
std::fs::create_dir_all(&local).map_err(io_context("create directory", &local))?;
return Ok(local);
}
Err(Error::NoRuns)
}
#[cfg(test)]
mod tests {
    use super::*;
    // On Windows only `.exe` counts as a binary extension; elsewhere none do.
    #[test]
    fn is_binary_extension_exe() {
        let ext = std::ffi::OsStr::new("exe");
        assert_eq!(is_binary_extension(ext), cfg!(windows));
    }
    // Build-artifact extensions must never be mistaken for executables.
    #[test]
    fn is_binary_extension_rejects_non_binary() {
        for name in &["d", "fingerprint", "rmeta", "rlib", "o", "so", "dylib"] {
            let ext = std::ffi::OsStr::new(name);
            assert!(
                !is_binary_extension(ext),
                "extension .{name} should not be treated as binary"
            );
        }
    }
    #[test]
    fn parse_duration_zero() {
        let err = parse_duration_secs("0").unwrap_err();
        assert_eq!(err, "duration cannot be zero");
    }
    #[test]
    fn parse_duration_negative() {
        let err = parse_duration_secs("-1").unwrap_err();
        assert_eq!(err, "duration cannot be negative");
    }
    #[test]
    fn parse_duration_nan() {
        let err = parse_duration_secs("nan").unwrap_err();
        assert_eq!(err, "invalid duration");
    }
    #[test]
    fn parse_duration_inf() {
        let err = parse_duration_secs("inf").unwrap_err();
        assert_eq!(err, "invalid duration");
    }
    #[test]
    fn parse_duration_neg_inf() {
        let err = parse_duration_secs("-inf").unwrap_err();
        assert_eq!(err, "invalid duration");
    }
    #[test]
    fn parse_duration_too_large() {
        let err = parse_duration_secs("1e300").unwrap_err();
        assert_eq!(err, "duration is too large");
    }
    // IEEE 754: -0.0 == 0.0, so "-0.0" hits the zero branch, not the
    // negative branch.
    #[test]
    fn parse_duration_negative_zero() {
        let err = parse_duration_secs("-0.0").unwrap_err();
        assert_eq!(err, "duration cannot be zero");
    }
    #[test]
    fn parse_duration_valid_fractional() {
        let secs = parse_duration_secs("0.5").unwrap();
        assert_eq!(secs, 0.5);
    }
    #[test]
    fn parse_duration_invalid_string() {
        assert!(parse_duration_secs("abc").is_err());
    }
    // `main` is never assigned a name id; all other functions are.
    #[test]
    fn name_table_excludes_main() {
        let all_qualified = vec![
            piano::naming::QualifiedFunction::new("main", "main", "main"),
            piano::naming::QualifiedFunction::new("process", "process", "process"),
            piano::naming::QualifiedFunction::new("db::query", "db::query", "db::query"),
        ];
        let display_names = piano::naming::disambiguate(&all_qualified);
        let (name_ids, display_map, _next_id) = assign_name_ids(&all_qualified, &display_names);
        assert!(
            !name_ids.contains_key("main"),
            "main must not appear in the name table"
        );
        assert!(
            !display_map.contains_key("main"),
            "main must not appear in the display names"
        );
        assert!(
            name_ids.contains_key("process"),
            "non-main functions must be in the name table"
        );
        assert!(
            name_ids.contains_key("db::query"),
            "module-qualified functions must be in the name table"
        );
    }
}