use anyhow::{anyhow, Context, Result};
use aws_sdk_cloudformation::Client as CloudFormationClient;
use aws_sdk_lambda::Client as LambdaClient;
use clap::{crate_authors, crate_description, CommandFactory, Parser, Subcommand, ValueEnum};
use clap_complete::{generate, Shell as ClapShell};
use startled::{
    benchmark::{run_function_benchmark, run_stack_benchmark},
    report::generate_reports,
    telemetry::{init_telemetry, init_tracing},
    types::{EnvVar, StackBenchmarkConfig},
    utils::validate_fs_safe_name,
};
use std::fs;
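// Color theme applied when capturing report screenshots (`--screenshot`).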
#[derive(Debug, Copy, Clone, PartialEq, Eq, ValueEnum)]
pub enum Theme {
Light,
Dark,
}
const USAGE_EXAMPLES: &str = "\
EXAMPLES:
# Benchmark a single Lambda function with 10 concurrent invocations
startled function my-lambda-function -c 10
# Benchmark all functions in a CloudFormation stack matching \"service-a\"
startled stack my-app-stack -s \"service-a\" --output-dir ./benchmark_results
# Benchmark a function with a specific memory size and payload from a file
startled function my-lambda-function --memory 512 --payload-file ./payload.json
# Generate HTML reports from benchmark results in a directory
startled report -d ./benchmark_results -o ./reports --screenshot light \\
--title \"Performance Analysis\" --description \"Comparison of OTEL configurations\"
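# Benchmark functions in a stack, running them in parallel
startled stack my-app-stack -s \"service-a\" --parallel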
# Generate shell completions for bash
startled generate-completions bash";
#[derive(Parser)]
#[command(author = crate_authors!(", "), version, about = crate_description!(), long_about = None, after_help = USAGE_EXAMPLES)]
struct Args {
#[command(subcommand)]
command: Commands,
}
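// Subcommands: benchmark a single function, benchmark functions selected from a
// CloudFormation stack, generate HTML reports from saved results, or emit shell completions.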
#[derive(Subcommand)]
enum Commands {
Function {
function_name: String,
#[arg(short, long)]
memory: i32,
#[arg(short = 'c', long, default_value_t = 1)]
concurrent: u32,
#[arg(short = 'n', long, default_value_t = 1)]
number: u32,
#[arg(short = 'd', long = "dir")]
output_dir: Option<String>,
#[arg(long, conflicts_with = "payload_file")]
payload: Option<String>,
#[arg(long = "payload-file", conflicts_with = "payload")]
payload_file: Option<String>,
#[arg(short = 'e', long = "env", value_parser = clap::value_parser!(EnvVar))]
environment: Vec<EnvVar>,
#[arg(long = "proxy")]
proxy: Option<String>,
},
Stack {
stack_name: String,
#[arg(short = 's', long)]
select: String,
#[arg(long = "select-regex")]
select_regex: Option<String>,
#[arg(long = "select-name")]
select_name: Option<String>,
#[arg(short = 'm', long)]
memory: i32,
#[arg(short = 'c', long, default_value_t = 1)]
concurrent: u32,
#[arg(short = 'n', long, default_value_t = 1)]
number: u32,
#[arg(short = 'd', long = "dir")]
output_dir: Option<String>,
#[arg(long, conflicts_with = "payload_file")]
payload: Option<String>,
#[arg(long = "payload-file", conflicts_with = "payload")]
payload_file: Option<String>,
#[arg(short = 'e', long = "env", value_parser = clap::value_parser!(EnvVar))]
environment: Vec<EnvVar>,
#[arg(long = "proxy")]
proxy: Option<String>,
#[arg(long, default_value_t = false)]
parallel: bool,
},
Report {
#[arg(short = 'd', long = "dir", required = true)]
input_dir: String,
#[arg(short = 'o', long = "output", required = true)]
output_dir: String,
#[arg(long = "title")]
title: Option<String>,
#[arg(long = "description")]
description: Option<String>,
#[arg(long = "suffix", default_value = "html")]
suffix: String,
#[arg(long, value_name = "THEME")]
screenshot: Option<Theme>,
#[arg(long = "template-dir")]
template_dir: Option<String>,
#[arg(long = "readme", value_name = "MARKDOWN_FILE")]
readme_file: Option<String>,
#[arg(long = "base-url", value_name = "URL_PATH")]
base_url: Option<String>,
#[arg(long, default_value_t = false)]
local_browsing: bool,
},
#[command(name = "generate-completions", hide = true)]
GenerateCompletions {
#[arg(value_enum)]
shell: ClapShell,
},
}
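// Entry point: run the CLI and, on failure, print the full error chain before exiting non-zero.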
#[tokio::main]
async fn main() {
if let Err(err) = run().await {
eprintln!("\n❌ Error: {}", err);
if let Some(cause) = err.source() {
eprintln!("\nCaused by:");
let mut current = Some(cause);
let mut i = 0;
while let Some(e) = current {
eprintln!(" {}: {}", i, e);
current = e.source();
i += 1;
}
}
std::process::exit(1);
}
}
async fn run() -> Result<()> {
let args = Args::parse();
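    // Shell completions are generated and printed before any telemetry is set up.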
if let Commands::GenerateCompletions { shell } = args.command {
let mut cmd = Args::command();
let bin_name = cmd.get_name().to_string();
generate(shell, &mut cmd, bin_name, &mut std::io::stdout());
return Ok(());
}
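    // Benchmark commands need the full tracer provider (flushed before exit below);
    // report generation only needs plain tracing output.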
let tracer_provider = match &args.command {
Commands::Function { .. } | Commands::Stack { .. } => Some(init_telemetry().await?),
        Commands::Report { .. } => {
            init_tracing();
            None
        }
Commands::GenerateCompletions { .. } => None,
};
match args.command {
Commands::Function {
function_name,
memory,
concurrent,
number,
output_dir,
payload,
payload_file,
environment,
proxy,
} => {
let config = aws_config::load_from_env().await;
let client = LambdaClient::new(&config);
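            // The payload may come inline or from a file (mutually exclusive via clap);
            // either way it must be valid JSON.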
let payload = if let Some(file) = payload_file {
Some(
fs::read_to_string(&file)
.context(format!("Failed to read payload file: {}", file))?,
)
} else {
payload
};
if let Some(ref p) = payload {
serde_json::from_str::<serde_json::Value>(p).context("Invalid JSON payload")?;
}
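            // Single-function results are nested under a "function/" subdirectory.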
let final_output_dir = output_dir.map(|base_path| {
let mut path = std::path::PathBuf::from(base_path);
path.push("function");
path.to_string_lossy().into_owned()
});
run_function_benchmark(
&client,
&function_name,
memory,
concurrent,
number,
payload.as_deref(),
final_output_dir.as_deref(),
&environment
.iter()
.map(|e| (e.key.as_str(), e.value.as_str()))
.collect::<Vec<_>>(),
true,
proxy.as_deref(),
false,
None,
)
.await
}
Commands::Stack {
stack_name,
select,
select_regex,
select_name,
memory,
concurrent,
number,
output_dir,
payload,
payload_file,
environment,
proxy,
parallel,
} => {
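            // Results for a stack run are grouped under a directory named after the
            // select pattern, unless --select-name overrides it; the name must be
            // filesystem-safe either way.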
let directory_group_name = if let Some(name_override) = &select_name {
validate_fs_safe_name(name_override)
.map_err(|e| anyhow!("Invalid --select-name: {}", e))?;
name_override.clone()
} else {
validate_fs_safe_name(&select)
.map_err(|e| anyhow!("Invalid --select pattern for directory name: {}. Use --select-name to specify a different directory name.", e))?;
select.clone()
};
let final_output_dir_for_benchmark_group: Option<String> =
output_dir.map(|base_path| format!("{}/{}", base_path, directory_group_name));
execute_stack_command(
stack_name,
                select,
                select_regex,
                memory,
concurrent,
number,
final_output_dir_for_benchmark_group,
payload,
payload_file,
environment,
proxy,
parallel,
)
.await
}
Commands::Report {
input_dir,
output_dir,
title,
description,
suffix,
screenshot,
template_dir,
readme_file,
base_url,
local_browsing,
} => {
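            // Convert the theme enum into the string identifier the report generator expects.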
let screenshot_theme = screenshot.map(|theme| match theme {
Theme::Light => "light",
Theme::Dark => "dark",
});
generate_reports(
&input_dir,
&output_dir,
title.as_deref(),
description.as_deref(),
&suffix,
base_url.as_deref(),
screenshot_theme,
template_dir,
readme_file,
local_browsing,
)
.await
}
Commands::GenerateCompletions { .. } => {
                unreachable!(
                    "GenerateCompletions is handled by the early return at the top of run()"
                );
}
}?;
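    // Flush any buffered spans before the process exits.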
if let Some(provider) = tracer_provider {
if let Err(e) = provider.force_flush() {
tracing::error!("Failed to flush spans: {}", e);
}
}
Ok(())
}
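/// Resolves the payload, then assembles a `StackBenchmarkConfig` and runs the
/// benchmark against the matching functions in the stack.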
#[allow(clippy::too_many_arguments)]
async fn execute_stack_command(
stack_name: String,
    select_pattern_arg: String,
    select_regex_arg: Option<String>,
    memory: i32,
concurrent: u32,
number: u32,
    output_dir: Option<String>,
    payload: Option<String>,
payload_file: Option<String>,
environment: Vec<EnvVar>,
proxy: Option<String>,
parallel: bool,
) -> Result<()> {
let config = aws_config::load_from_env().await;
let lambda_client = LambdaClient::new(&config);
let cf_client = CloudFormationClient::new(&config);
    let payload = if payload.is_some() {
        payload
    } else if let Some(file) = payload_file {
        Some(fs::read_to_string(&file).context(format!("Failed to read payload file: {}", file))?)
    } else {
        None
    };
if let Some(ref p) = payload {
serde_json::from_str::<serde_json::Value>(p).context("Invalid JSON payload")?;
}
let config = StackBenchmarkConfig {
stack_name,
select_pattern: select_pattern_arg,
select_regex: select_regex_arg,
memory_size: memory,
concurrent_invocations: concurrent as usize,
number: number as usize,
        output_dir,
        payload,
environment,
client_metrics_mode: true,
proxy_function: proxy,
parallel,
};
run_stack_benchmark(&lambda_client, &cf_client, config).await
}