use crate::crud_flow::{CrudFlowConfig, CrudFlowDetector};
use crate::data_driven::{DataDistribution, DataDrivenConfig, DataDrivenGenerator, DataMapping};
use crate::dynamic_params::{DynamicParamProcessor, DynamicPlaceholder};
use crate::error::{BenchError, Result};
use crate::executor::K6Executor;
use crate::invalid_data::{InvalidDataConfig, InvalidDataGenerator};
use crate::k6_gen::{K6Config, K6ScriptGenerator};
use crate::mock_integration::{
MockIntegrationConfig, MockIntegrationGenerator, MockServerDetector,
};
use crate::owasp_api::{OwaspApiConfig, OwaspApiGenerator, OwaspCategory, ReportFormat};
use crate::parallel_executor::{AggregatedResults, ParallelExecutor};
use crate::parallel_requests::{ParallelConfig, ParallelRequestGenerator};
use crate::param_overrides::ParameterOverrides;
use crate::reporter::TerminalReporter;
use crate::request_gen::RequestGenerator;
use crate::scenarios::LoadScenario;
use crate::security_payloads::{
SecurityCategory, SecurityPayload, SecurityPayloads, SecurityTestConfig, SecurityTestGenerator,
};
use crate::spec_dependencies::{
topological_sort, DependencyDetector, ExtractedValues, SpecDependencyConfig,
};
use crate::spec_parser::SpecParser;
use crate::target_parser::parse_targets_file;
use crate::wafbench::WafBenchLoader;
use mockforge_core::openapi::multi_spec::{
load_specs_from_directory, load_specs_from_files, merge_specs, ConflictStrategy,
};
use mockforge_core::openapi::spec::OpenApiSpec;
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::str::FromStr;
/// Parses a comma-separated list of `Key:Value` pairs into a header map.
///
/// Whitespace around each key and each value is trimmed. Later duplicate
/// keys overwrite earlier ones (standard `HashMap` insert semantics).
///
/// # Errors
/// Returns [`BenchError::Other`] when any comma-separated segment does not
/// contain a `:` separator.
pub fn parse_header_string(input: &str) -> Result<HashMap<String, String>> {
    input
        .split(',')
        .map(|pair| match pair.split_once(':') {
            // Only the first ':' splits; values may themselves contain ':'.
            Some((key, value)) => Ok((key.trim().to_string(), value.trim().to_string())),
            None => Err(BenchError::Other(format!(
                "Invalid header format: '{}'. Expected 'Key:Value'",
                pair
            ))),
        })
        .collect()
}
/// All CLI options for a single `bench` invocation.
///
/// This struct is cloned (field by field) when fanning out to multiple
/// targets, with the multi-target and conformance fields reset.
pub struct BenchCommand {
    // --- Spec inputs ---
    /// Explicit OpenAPI spec file paths (`--spec`, repeatable).
    pub spec: Vec<PathBuf>,
    /// Directory to scan for spec files (`--spec-dir`).
    pub spec_dir: Option<PathBuf>,
    /// Merge conflict strategy name: "first", "last", anything else = error.
    pub merge_conflicts: String,
    /// "sequential" runs multiple specs one after another instead of merging.
    pub spec_mode: String,
    /// Optional cross-spec dependency configuration file.
    pub dependency_config: Option<PathBuf>,
    // --- Target & load shape ---
    /// Base URL of the system under test.
    pub target: String,
    /// CLI override for the API base path (empty string disables it).
    pub base_path: Option<String>,
    /// Test duration string, e.g. "30s", "5m", "1h", or bare seconds.
    pub duration: String,
    /// Maximum virtual users.
    pub vus: u32,
    /// Load scenario name (parsed via `LoadScenario::from_str`).
    pub scenario: String,
    // --- Operation filtering ---
    /// Include filter for operations.
    pub operations: Option<String>,
    /// Exclude filter applied after the include filter.
    pub exclude_operations: Option<String>,
    // --- Auth & headers ---
    /// Value for the `Authorization` header, if any.
    pub auth: Option<String>,
    /// Extra headers as a comma-separated "Key:Value" list.
    pub headers: Option<String>,
    // --- Output & script handling ---
    /// Directory where results (and by default the k6 script) are written.
    pub output: PathBuf,
    /// Generate the k6 script but do not run it.
    pub generate_only: bool,
    /// Optional explicit path for the generated k6 script.
    pub script_output: Option<PathBuf>,
    // --- Thresholds ---
    /// Percentile used for the latency threshold (e.g. "p95").
    pub threshold_percentile: String,
    /// Latency threshold in milliseconds.
    pub threshold_ms: u64,
    /// Maximum tolerated error rate.
    pub max_error_rate: f64,
    pub verbose: bool,
    /// Skip TLS certificate verification in k6.
    pub skip_tls_verify: bool,
    // --- Multi-target mode ---
    /// File listing multiple targets; switches to parallel multi-target mode.
    pub targets_file: Option<PathBuf>,
    /// Max concurrent targets (defaults to 10, capped at target count).
    pub max_concurrency: Option<u32>,
    /// "aggregated", "both", or per-target-only results format.
    pub results_format: String,
    /// Parameter-overrides file for request generation.
    pub params_file: Option<PathBuf>,
    // --- CRUD flow mode ---
    pub crud_flow: bool,
    /// Optional explicit CRUD flow configuration file.
    pub flow_config: Option<PathBuf>,
    /// Comma-separated fields to extract from responses (default: id,uuid).
    pub extract_fields: Option<String>,
    /// Enables parallel create requests with the given count.
    pub parallel_create: Option<u32>,
    // --- Data-driven testing ---
    /// CSV/data file feeding request values.
    pub data_file: Option<PathBuf>,
    /// Data distribution strategy name (falls back to unique-per-VU).
    pub data_distribution: String,
    /// Column-to-parameter mapping expressions.
    pub data_mappings: Option<String>,
    /// Enables per-URI control columns in the data file.
    pub per_uri_control: bool,
    // --- Invalid data injection ---
    /// Fraction of requests to deliberately invalidate; enables the feature.
    pub error_rate: Option<f64>,
    /// Comma-separated invalidation types.
    pub error_types: Option<String>,
    // --- Security testing ---
    pub security_test: bool,
    /// Custom security payloads file.
    pub security_payloads: Option<PathBuf>,
    /// Security categories (defaults to SQLi + XSS when unset).
    pub security_categories: Option<String>,
    /// Comma-separated request fields to target with payloads.
    pub security_target_fields: Option<String>,
    /// Glob/pattern for WAFBench (CRS) YAML test files.
    pub wafbench_dir: Option<String>,
    /// Cycle through all WAFBench payloads instead of random selection.
    pub wafbench_cycle_all: bool,
    // --- Conformance testing ---
    pub conformance: bool,
    pub conformance_api_key: Option<String>,
    pub conformance_basic_auth: Option<String>,
    pub conformance_report: PathBuf,
    pub conformance_categories: Option<String>,
    pub conformance_report_format: String,
    pub conformance_headers: Vec<String>,
    pub conformance_all_operations: bool,
    pub conformance_custom: Option<PathBuf>,
    pub conformance_delay_ms: u64,
    /// Use k6 for conformance runs instead of the built-in runner.
    pub use_k6: bool,
    // --- OWASP API Top 10 testing ---
    pub owasp_api_top10: bool,
    pub owasp_categories: Option<String>,
    pub owasp_auth_header: String,
    pub owasp_auth_token: Option<String>,
    pub owasp_admin_paths: Option<PathBuf>,
    pub owasp_id_fields: Option<String>,
    pub owasp_report: Option<PathBuf>,
    pub owasp_report_format: String,
    pub owasp_iterations: u32,
}
impl BenchCommand {
/// Loads every referenced OpenAPI spec (explicit files plus the optional
/// spec directory) and merges them into a single spec.
///
/// A single loaded spec is returned unchanged. Multiple specs are merged
/// using the strategy named by `merge_conflicts` ("first", "last", anything
/// else = error-on-conflict).
///
/// # Errors
/// Fails when loading fails, when no specs were provided at all, or when
/// merging hits an unresolved conflict.
pub async fn load_and_merge_specs(&self) -> Result<OpenApiSpec> {
    let mut loaded: Vec<(PathBuf, OpenApiSpec)> = Vec::new();
    if !self.spec.is_empty() {
        let from_files = load_specs_from_files(self.spec.clone())
            .await
            .map_err(|e| BenchError::Other(format!("Failed to load spec files: {}", e)))?;
        loaded.extend(from_files);
    }
    if let Some(dir) = &self.spec_dir {
        let from_dir = load_specs_from_directory(dir).await.map_err(|e| {
            BenchError::Other(format!("Failed to load specs from directory: {}", e))
        })?;
        loaded.extend(from_dir);
    }
    match loaded.len() {
        0 => Err(BenchError::Other(
            "No spec files provided. Use --spec or --spec-dir.".to_string(),
        )),
        // Exactly one spec: no merge needed.
        1 => Ok(loaded.into_iter().next().expect("checked len() == 1 above").1),
        _ => {
            let strategy = match self.merge_conflicts.as_str() {
                "first" => ConflictStrategy::First,
                "last" => ConflictStrategy::Last,
                // Any other value (including "error") resolves conflicts by failing.
                _ => ConflictStrategy::Error,
            };
            merge_specs(loaded, strategy)
                .map_err(|e| BenchError::Other(format!("Failed to merge specs: {}", e)))
        }
    }
}
/// Produces a short human-readable description of the spec inputs for the
/// terminal header: the file path for a single spec, a count for several,
/// the directory for `--spec-dir`, or "no specs".
fn get_spec_display_name(&self) -> String {
    match (self.spec.len(), &self.spec_dir) {
        (1, _) => self.spec[0].to_string_lossy().to_string(),
        (n, _) if n > 1 => format!("{} spec files", n),
        (_, Some(dir)) => format!("specs from {}", dir.display()),
        _ => "no specs".to_string(),
    }
}
/// Runs the benchmark end-to-end.
///
/// Dispatches to the dedicated modes first (multi-target, sequential specs,
/// conformance, CRUD flow, OWASP), then falls through to the standard path:
/// load/merge specs -> extract operations -> generate request templates ->
/// generate + validate a k6 script -> optionally execute it with k6.
///
/// # Errors
/// Returns an error when k6 is missing, specs cannot be loaded, no
/// operations remain after filtering, script generation or validation
/// fails, or the k6 run itself fails.
pub async fn execute(&self) -> Result<()> {
    // A targets file switches to the multi-target execution paths entirely.
    if let Some(targets_file) = &self.targets_file {
        if self.conformance {
            return self.execute_multi_target_conformance(targets_file).await;
        }
        return self.execute_multi_target(targets_file).await;
    }
    // Sequential spec mode runs each spec in dependency order instead of
    // merging them into one spec.
    if self.spec_mode == "sequential" && (self.spec.len() > 1 || self.spec_dir.is_some()) {
        return self.execute_sequential_specs().await;
    }
    TerminalReporter::print_header(
        &self.get_spec_display_name(),
        &self.target,
        0, // operation count is not known yet at this point
        &self.scenario,
        Self::parse_duration(&self.duration)?,
    );
    // Fail fast if the k6 binary is not on this machine.
    if !K6Executor::is_k6_installed() {
        TerminalReporter::print_error("k6 is not installed");
        TerminalReporter::print_warning(
            "Install k6 from: https://k6.io/docs/get-started/installation/",
        );
        return Err(BenchError::K6NotFound);
    }
    if self.conformance {
        return self.execute_conformance_test().await;
    }
    TerminalReporter::print_progress("Loading OpenAPI specification(s)...");
    let merged_spec = self.load_and_merge_specs().await?;
    let parser = SpecParser::from_spec(merged_spec);
    if self.spec.len() > 1 || self.spec_dir.is_some() {
        TerminalReporter::print_success(&format!(
            "Loaded and merged {} specification(s)",
            // The directory counts as one extra "source", not per-file.
            self.spec.len() + self.spec_dir.as_ref().map(|_| 1).unwrap_or(0)
        ));
    } else {
        TerminalReporter::print_success("Specification loaded");
    }
    // Mock-server detection only influences script generation further down
    // (setup/teardown/VU-id helpers are injected when detected).
    let mock_config = self.build_mock_config().await;
    if mock_config.is_mock_server {
        TerminalReporter::print_progress("Mock server integration enabled");
    }
    // Dedicated modes short-circuit the standard load-test flow.
    if self.crud_flow {
        return self.execute_crud_flow(&parser).await;
    }
    if self.owasp_api_top10 {
        return self.execute_owasp_test(&parser).await;
    }
    TerminalReporter::print_progress("Extracting API operations...");
    // Include filter first, then the exclude filter on top of the result.
    let mut operations = if let Some(filter) = &self.operations {
        parser.filter_operations(filter)?
    } else {
        parser.get_operations()
    };
    if let Some(exclude) = &self.exclude_operations {
        let before_count = operations.len();
        operations = parser.exclude_operations(operations, exclude)?;
        let excluded_count = before_count - operations.len();
        if excluded_count > 0 {
            TerminalReporter::print_progress(&format!(
                "Excluded {} operations matching '{}'",
                excluded_count, exclude
            ));
        }
    }
    if operations.is_empty() {
        return Err(BenchError::Other("No operations found in spec".to_string()));
    }
    TerminalReporter::print_success(&format!("Found {} operations", operations.len()));
    // Optional per-operation parameter overrides loaded from a file.
    let param_overrides = if let Some(params_file) = &self.params_file {
        TerminalReporter::print_progress("Loading parameter overrides...");
        let overrides = ParameterOverrides::from_file(params_file)?;
        TerminalReporter::print_success(&format!(
            "Loaded parameter overrides ({} operation-specific, {} defaults)",
            overrides.operations.len(),
            // Defaults are reported as a single block (present or not).
            if overrides.defaults.is_empty() { 0 } else { 1 }
        ));
        Some(overrides)
    } else {
        None
    };
    TerminalReporter::print_progress("Generating request templates...");
    // One template per operation; the first failure aborts the whole run.
    let templates: Vec<_> = operations
        .iter()
        .map(|op| {
            let op_overrides = param_overrides.as_ref().map(|po| {
                po.get_for_operation(op.operation_id.as_deref(), &op.method, &op.path)
            });
            RequestGenerator::generate_template_with_overrides(op, op_overrides.as_ref())
        })
        .collect::<Result<Vec<_>>>()?;
    TerminalReporter::print_success("Request templates generated");
    let custom_headers = self.parse_headers()?;
    let base_path = self.resolve_base_path(&parser);
    if let Some(ref bp) = base_path {
        TerminalReporter::print_progress(&format!("Using base path: {}", bp));
    }
    TerminalReporter::print_progress("Generating k6 load test script...");
    let scenario =
        LoadScenario::from_str(&self.scenario).map_err(BenchError::InvalidScenario)?;
    // Security helpers are needed for either explicit security tests or
    // WAFBench payload injection.
    let security_testing_enabled = self.security_test || self.wafbench_dir.is_some();
    let k6_config = K6Config {
        target_url: self.target.clone(),
        base_path,
        scenario,
        duration_secs: Self::parse_duration(&self.duration)?,
        max_vus: self.vus,
        threshold_percentile: self.threshold_percentile.clone(),
        threshold_ms: self.threshold_ms,
        max_error_rate: self.max_error_rate,
        auth_header: self.auth.clone(),
        custom_headers,
        skip_tls_verify: self.skip_tls_verify,
        security_testing_enabled,
    };
    let generator = K6ScriptGenerator::new(k6_config, templates);
    let mut script = generator.generate()?;
    TerminalReporter::print_success("k6 script generated");
    // Advanced features splice extra JavaScript into the base script.
    let has_advanced_features = self.data_file.is_some()
        || self.error_rate.is_some()
        || self.security_test
        || self.parallel_create.is_some()
        || self.wafbench_dir.is_some();
    if has_advanced_features {
        script = self.generate_enhanced_script(&script)?;
    }
    if mock_config.is_mock_server {
        let setup_code = MockIntegrationGenerator::generate_setup(&mock_config);
        let teardown_code = MockIntegrationGenerator::generate_teardown(&mock_config);
        let helper_code = MockIntegrationGenerator::generate_vu_id_helper();
        // Insert the helpers just before the k6 options declaration; if the
        // marker is missing the helpers are silently skipped.
        if let Some(import_end) = script.find("export const options") {
            script.insert_str(
                import_end,
                &format!(
                    "\n// === Mock Server Integration ===\n{}\n{}\n{}\n",
                    helper_code, setup_code, teardown_code
                ),
            );
        }
    }
    TerminalReporter::print_progress("Validating k6 script...");
    let validation_errors = K6ScriptGenerator::validate_script(&script);
    if !validation_errors.is_empty() {
        TerminalReporter::print_error("Script validation failed");
        for error in &validation_errors {
            eprintln!("  {}", error);
        }
        return Err(BenchError::Other(format!(
            "Generated k6 script has {} validation error(s). Please check the output above.",
            validation_errors.len()
        )));
    }
    TerminalReporter::print_success("Script validation passed");
    // Script goes to --script-output when given, else into the output dir.
    let script_path = if let Some(output) = &self.script_output {
        output.clone()
    } else {
        self.output.join("k6-script.js")
    };
    if let Some(parent) = script_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    std::fs::write(&script_path, &script)?;
    TerminalReporter::print_success(&format!("Script written to: {}", script_path.display()));
    if self.generate_only {
        println!("\nScript generated successfully. Run it with:");
        println!("  k6 run {}", script_path.display());
        return Ok(());
    }
    TerminalReporter::print_progress("Executing load test...");
    let executor = K6Executor::new()?;
    std::fs::create_dir_all(&self.output)?;
    let results = executor.execute(&script_path, Some(&self.output), self.verbose).await?;
    let duration_secs = Self::parse_duration(&self.duration)?;
    TerminalReporter::print_summary(&results, duration_secs);
    println!("\nResults saved to: {}", self.output.display());
    Ok(())
}
/// Runs the benchmark against every target listed in `targets_file`, in
/// parallel (bounded by `--max-concurrency`, default 10, capped at the
/// number of targets), then prints and persists aggregated results.
///
/// # Errors
/// Fails when the targets file cannot be parsed, contains no targets, or
/// the parallel execution fails as a whole.
async fn execute_multi_target(&self, targets_file: &Path) -> Result<()> {
    TerminalReporter::print_progress("Parsing targets file...");
    let targets = parse_targets_file(targets_file)?;
    let num_targets = targets.len();
    TerminalReporter::print_success(&format!("Loaded {} targets", num_targets));
    if targets.is_empty() {
        return Err(BenchError::Other("No targets found in file".to_string()));
    }
    let max_concurrency = self.max_concurrency.unwrap_or(10) as usize;
    // Never spawn more workers than there are targets.
    let max_concurrency = max_concurrency.min(num_targets);
    TerminalReporter::print_header(
        &self.get_spec_display_name(),
        &format!("{} targets", num_targets),
        0, // operation count is not known yet at this point
        &self.scenario,
        Self::parse_duration(&self.duration)?,
    );
    // Field-by-field clone of self used as the per-target template. The
    // multi-target fields are reset (targets_file/max_concurrency = None)
    // so each worker runs a plain single-target pass, and conformance mode
    // is force-disabled with default conformance settings.
    let executor = ParallelExecutor::new(
        BenchCommand {
            spec: self.spec.clone(),
            spec_dir: self.spec_dir.clone(),
            merge_conflicts: self.merge_conflicts.clone(),
            spec_mode: self.spec_mode.clone(),
            dependency_config: self.dependency_config.clone(),
            target: self.target.clone(),
            base_path: self.base_path.clone(),
            duration: self.duration.clone(),
            vus: self.vus,
            scenario: self.scenario.clone(),
            operations: self.operations.clone(),
            exclude_operations: self.exclude_operations.clone(),
            auth: self.auth.clone(),
            headers: self.headers.clone(),
            output: self.output.clone(),
            generate_only: self.generate_only,
            script_output: self.script_output.clone(),
            threshold_percentile: self.threshold_percentile.clone(),
            threshold_ms: self.threshold_ms,
            max_error_rate: self.max_error_rate,
            verbose: self.verbose,
            skip_tls_verify: self.skip_tls_verify,
            targets_file: None,
            max_concurrency: None,
            results_format: self.results_format.clone(),
            params_file: self.params_file.clone(),
            crud_flow: self.crud_flow,
            flow_config: self.flow_config.clone(),
            extract_fields: self.extract_fields.clone(),
            parallel_create: self.parallel_create,
            data_file: self.data_file.clone(),
            data_distribution: self.data_distribution.clone(),
            data_mappings: self.data_mappings.clone(),
            per_uri_control: self.per_uri_control,
            error_rate: self.error_rate,
            error_types: self.error_types.clone(),
            security_test: self.security_test,
            security_payloads: self.security_payloads.clone(),
            security_categories: self.security_categories.clone(),
            security_target_fields: self.security_target_fields.clone(),
            wafbench_dir: self.wafbench_dir.clone(),
            wafbench_cycle_all: self.wafbench_cycle_all,
            owasp_api_top10: self.owasp_api_top10,
            owasp_categories: self.owasp_categories.clone(),
            owasp_auth_header: self.owasp_auth_header.clone(),
            owasp_auth_token: self.owasp_auth_token.clone(),
            owasp_admin_paths: self.owasp_admin_paths.clone(),
            owasp_id_fields: self.owasp_id_fields.clone(),
            owasp_report: self.owasp_report.clone(),
            owasp_report_format: self.owasp_report_format.clone(),
            owasp_iterations: self.owasp_iterations,
            conformance: false,
            conformance_api_key: None,
            conformance_basic_auth: None,
            conformance_report: PathBuf::from("conformance-report.json"),
            conformance_categories: None,
            conformance_report_format: "json".to_string(),
            conformance_headers: vec![],
            conformance_all_operations: false,
            conformance_custom: None,
            conformance_delay_ms: 0,
            use_k6: false,
        },
        targets,
        max_concurrency,
    );
    // Wall-clock time for the whole fan-out, reported afterwards.
    let start_time = std::time::Instant::now();
    let aggregated_results = executor.execute_all().await?;
    let elapsed = start_time.elapsed();
    self.report_multi_target_results(&aggregated_results, elapsed)?;
    Ok(())
}
/// Prints the multi-target summary to the terminal and persists results:
/// optionally an aggregated JSON summary (when `results_format` is
/// "aggregated" or "both") and always a per-target CSV file.
///
/// Fix: the CSV write previously discarded its `io::Result` with
/// `let _ = ...` and then announced the CSV path as if the write had
/// succeeded. It now stays best-effort (no hard failure) but prints a
/// warning when the write fails.
///
/// # Errors
/// Fails when serializing or writing the aggregated JSON summary fails.
fn report_multi_target_results(
    &self,
    results: &AggregatedResults,
    elapsed: std::time::Duration,
) -> Result<()> {
    TerminalReporter::print_multi_target_summary(results);
    // Break the wall-clock time into h/m/s and print the shortest form.
    let total_secs = elapsed.as_secs();
    let hours = total_secs / 3600;
    let minutes = (total_secs % 3600) / 60;
    let seconds = total_secs % 60;
    if hours > 0 {
        println!("\n  Total Elapsed Time: {}h {}m {}s", hours, minutes, seconds);
    } else if minutes > 0 {
        println!("\n  Total Elapsed Time: {}m {}s", minutes, seconds);
    } else {
        println!("\n  Total Elapsed Time: {}s", seconds);
    }
    if self.results_format == "aggregated" || self.results_format == "both" {
        let summary_path = self.output.join("aggregated_summary.json");
        let summary_json = serde_json::json!({
            "total_elapsed_seconds": elapsed.as_secs(),
            "total_targets": results.total_targets,
            "successful_targets": results.successful_targets,
            "failed_targets": results.failed_targets,
            "aggregated_metrics": {
                "total_requests": results.aggregated_metrics.total_requests,
                "total_failed_requests": results.aggregated_metrics.total_failed_requests,
                "avg_duration_ms": results.aggregated_metrics.avg_duration_ms,
                "p95_duration_ms": results.aggregated_metrics.p95_duration_ms,
                "p99_duration_ms": results.aggregated_metrics.p99_duration_ms,
                "error_rate": results.aggregated_metrics.error_rate,
                "total_rps": results.aggregated_metrics.total_rps,
                "avg_rps": results.aggregated_metrics.avg_rps,
                "total_vus_max": results.aggregated_metrics.total_vus_max,
            },
            "target_results": results.target_results.iter().map(|r| {
                serde_json::json!({
                    "target_url": r.target_url,
                    "target_index": r.target_index,
                    "success": r.success,
                    "error": r.error,
                    "total_requests": r.results.total_requests,
                    "failed_requests": r.results.failed_requests,
                    "avg_duration_ms": r.results.avg_duration_ms,
                    "min_duration_ms": r.results.min_duration_ms,
                    "med_duration_ms": r.results.med_duration_ms,
                    "p90_duration_ms": r.results.p90_duration_ms,
                    "p95_duration_ms": r.results.p95_duration_ms,
                    "p99_duration_ms": r.results.p99_duration_ms,
                    "max_duration_ms": r.results.max_duration_ms,
                    "rps": r.results.rps,
                    "vus_max": r.results.vus_max,
                    "output_dir": r.output_dir.to_string_lossy(),
                })
            }).collect::<Vec<_>>(),
        });
        std::fs::write(&summary_path, serde_json::to_string_pretty(&summary_json)?)?;
        TerminalReporter::print_success(&format!(
            "Aggregated summary saved to: {}",
            summary_path.display()
        ));
    }
    // Always emit a flat CSV with one row per target.
    let csv_path = self.output.join("all_targets.csv");
    let mut csv = String::from(
        "target_url,success,requests,failed,rps,vus,min_ms,avg_ms,med_ms,p90_ms,p95_ms,p99_ms,max_ms,error\n",
    );
    for r in &results.target_results {
        csv.push_str(&format!(
            "{},{},{},{},{:.1},{},{:.1},{:.1},{:.1},{:.1},{:.1},{:.1},{:.1},{}\n",
            r.target_url,
            r.success,
            r.results.total_requests,
            r.results.failed_requests,
            r.results.rps,
            r.results.vus_max,
            r.results.min_duration_ms,
            r.results.avg_duration_ms,
            r.results.med_duration_ms,
            r.results.p90_duration_ms,
            r.results.p95_duration_ms,
            r.results.p99_duration_ms,
            r.results.max_duration_ms,
            r.error.as_deref().unwrap_or(""),
        ));
    }
    // Best-effort write: a failure is surfaced as a warning instead of
    // aborting after the test itself already completed.
    if let Err(e) = std::fs::write(&csv_path, &csv) {
        TerminalReporter::print_warning(&format!(
            "Failed to write CSV results to {}: {}",
            csv_path.display(),
            e
        ));
    }
    println!("\nResults saved to: {}", self.output.display());
    println!("  - Per-target results: {}", self.output.join("target_*").display());
    println!("  - All targets CSV: {}", csv_path.display());
    if self.results_format == "aggregated" || self.results_format == "both" {
        println!(
            "  - Aggregated summary: {}",
            self.output.join("aggregated_summary.json").display()
        );
    }
    Ok(())
}
/// Parses a duration string into seconds.
///
/// Accepts an `s` (seconds), `m` (minutes), or `h` (hours) suffix; a bare
/// number is interpreted as seconds. Surrounding whitespace is ignored.
///
/// # Errors
/// Returns [`BenchError::Other`] when the numeric portion fails to parse.
pub fn parse_duration(duration: &str) -> Result<u64> {
    let duration = duration.trim();
    // Split the string into its numeric part and a unit multiplier.
    let (digits, multiplier) = if let Some(rest) = duration.strip_suffix('s') {
        (rest, 1)
    } else if let Some(rest) = duration.strip_suffix('m') {
        (rest, 60)
    } else if let Some(rest) = duration.strip_suffix('h') {
        (rest, 3600)
    } else {
        (duration, 1)
    };
    digits
        .parse::<u64>()
        .map(|value| value * multiplier)
        .map_err(|_| BenchError::Other(format!("Invalid duration: {}", duration)))
}
pub fn parse_headers(&self) -> Result<HashMap<String, String>> {
match &self.headers {
Some(s) => parse_header_string(s),
None => Ok(HashMap::new()),
}
}
/// Reads `extracted_values.json` from a spec's output directory into an
/// [`ExtractedValues`] map.
///
/// A missing file (or a JSON root that is not an object) yields an empty
/// set of values rather than an error.
///
/// # Errors
/// Returns [`BenchError::ResultsParseError`] when the file exists but
/// cannot be read or is not valid JSON.
fn parse_extracted_values(output_dir: &Path) -> Result<ExtractedValues> {
    let path = output_dir.join("extracted_values.json");
    if !path.exists() {
        return Ok(ExtractedValues::new());
    }
    let raw =
        std::fs::read_to_string(&path).map_err(|e| BenchError::ResultsParseError(e.to_string()))?;
    let json: serde_json::Value =
        serde_json::from_str(&raw).map_err(|e| BenchError::ResultsParseError(e.to_string()))?;
    let mut values = ExtractedValues::new();
    // Only a top-level JSON object contributes entries; anything else is
    // silently ignored (same behavior as before).
    for (key, value) in json.as_object().into_iter().flatten() {
        values.set(key.clone(), value.clone());
    }
    Ok(values)
}
/// Resolves the API base path: an explicit CLI value wins (with the empty
/// string meaning "no base path"), otherwise the spec's own base path is
/// used.
fn resolve_base_path(&self, parser: &SpecParser) -> Option<String> {
    match self.base_path.as_deref() {
        // An explicitly empty CLI value disables the base path entirely.
        Some("") => None,
        Some(path) => Some(path.to_string()),
        None => parser.get_base_path(),
    }
}
/// Probes the target and returns a mock-server integration config when it
/// is a detectable MockForge instance, otherwise the real-API config.
///
/// Detection errors (or a non-MockForge mock) silently fall back to the
/// real-API config.
async fn build_mock_config(&self) -> MockIntegrationConfig {
    // Cheap URL heuristic first; only then do the network probe.
    if !MockServerDetector::looks_like_mock_server(&self.target) {
        return MockIntegrationConfig::real_api();
    }
    match MockServerDetector::detect(&self.target).await {
        Ok(info) if info.is_mockforge => {
            TerminalReporter::print_success(&format!(
                "Detected MockForge server (version: {})",
                info.version.as_deref().unwrap_or("unknown")
            ));
            MockIntegrationConfig::mock_server()
        }
        _ => MockIntegrationConfig::real_api(),
    }
}
/// Builds the CRUD-flow configuration.
///
/// Returns `None` when CRUD-flow mode is off. When a `--flow-config` file
/// is given and loads successfully, it is used as-is; a load failure only
/// prints a warning and falls through to auto-detection with the
/// `--extract-fields` list (default: `id`, `uuid`).
fn build_crud_flow_config(&self) -> Option<CrudFlowConfig> {
    if !self.crud_flow {
        return None;
    }
    if let Some(path) = &self.flow_config {
        match CrudFlowConfig::from_file(path) {
            Ok(config) => return Some(config),
            Err(e) => TerminalReporter::print_warning(&format!(
                "Failed to load flow config: {}. Using auto-detection.",
                e
            )),
        }
    }
    // Auto-detection config: no explicit flows, just the extraction fields.
    let default_extract_fields = match &self.extract_fields {
        Some(fields) => fields.split(',').map(|s| s.trim().to_string()).collect(),
        None => vec!["id".to_string(), "uuid".to_string()],
    };
    Some(CrudFlowConfig {
        flows: Vec::new(),
        default_extract_fields,
    })
}
/// Builds the data-driven testing configuration, or `None` when no
/// `--data-file` was supplied.
///
/// An unknown distribution name falls back to unique-per-VU; unparsable
/// mappings fall back to the empty default.
fn build_data_driven_config(&self) -> Option<DataDrivenConfig> {
    let data_file = self.data_file.as_ref()?;
    let distribution = DataDistribution::from_str(&self.data_distribution)
        .unwrap_or(DataDistribution::UniquePerVu);
    let mappings = self
        .data_mappings
        .as_ref()
        .and_then(|raw| DataMapping::parse_mappings(raw).ok())
        .unwrap_or_default();
    Some(DataDrivenConfig {
        file_path: data_file.to_string_lossy().to_string(),
        distribution,
        mappings,
        // CSV inputs are assumed to carry a header row.
        csv_has_header: true,
        per_uri_control: self.per_uri_control,
        per_uri_columns: crate::data_driven::PerUriColumns::default(),
    })
}
/// Builds the invalid-data injection configuration, or `None` when no
/// `--error-rate` was supplied (the option doubles as the feature toggle).
///
/// Unparsable `--error-types` fall back to the empty default set.
fn build_invalid_data_config(&self) -> Option<InvalidDataConfig> {
    let error_rate = self.error_rate?;
    let error_types = match self.error_types.as_ref() {
        Some(types) => InvalidDataConfig::parse_error_types(types).unwrap_or_default(),
        None => Default::default(),
    };
    Some(InvalidDataConfig {
        error_rate,
        error_types,
        // No field restriction: invalidation may touch any field.
        target_fields: Vec::new(),
    })
}
/// Builds the security-testing configuration, or `None` when
/// `--security-test` is off.
///
/// With no `--security-categories`, SQL injection and XSS are enabled by
/// default; an unparsable category list falls back to the empty default.
fn build_security_config(&self) -> Option<SecurityTestConfig> {
    if !self.security_test {
        return None;
    }
    let categories = match self.security_categories.as_ref() {
        Some(cats) => SecurityTestConfig::parse_categories(cats).unwrap_or_default(),
        None => {
            // Default category set: SQLi + XSS.
            let mut defaults = HashSet::new();
            defaults.insert(SecurityCategory::SqlInjection);
            defaults.insert(SecurityCategory::Xss);
            defaults
        }
    };
    let target_fields = match self.security_target_fields.as_ref() {
        Some(fields) => fields.split(',').map(|f| f.trim().to_string()).collect(),
        None => Vec::new(),
    };
    let custom_payloads_file =
        self.security_payloads.as_ref().map(|p| p.to_string_lossy().to_string());
    Some(SecurityTestConfig {
        enabled: true,
        categories,
        target_fields,
        custom_payloads_file,
        include_high_risk: false,
    })
}
/// Builds the parallel-request configuration, or `None` when
/// `--parallel-create` was not supplied.
fn build_parallel_config(&self) -> Option<ParallelConfig> {
    self.parallel_create.map(ParallelConfig::new)
}
/// Loads WAFBench (CRS) YAML test files matching `--wafbench-dir` and
/// converts them into security payloads.
///
/// Best-effort: every failure path (no pattern given, load error, zero
/// files matched) prints a warning and returns an empty list instead of
/// erroring, so the surrounding script generation can continue.
fn load_wafbench_payloads(&self) -> Vec<SecurityPayload> {
    // Feature is off unless a pattern was provided.
    let Some(ref wafbench_dir) = self.wafbench_dir else {
        return Vec::new();
    };
    let mut loader = WafBenchLoader::new();
    if let Err(e) = loader.load_from_pattern(wafbench_dir) {
        TerminalReporter::print_warning(&format!("Failed to load WAFBench tests: {}", e));
        return Vec::new();
    }
    let stats = loader.stats();
    if stats.files_processed == 0 {
        TerminalReporter::print_warning(&format!(
            "No WAFBench YAML files found matching '{}'",
            wafbench_dir
        ));
        // Distinguish "nothing matched" from "matched but unparsable".
        if !stats.parse_errors.is_empty() {
            TerminalReporter::print_warning("Some files were found but failed to parse:");
            for error in &stats.parse_errors {
                TerminalReporter::print_warning(&format!("  - {}", error));
            }
        }
        return Vec::new();
    }
    TerminalReporter::print_progress(&format!(
        "Loaded {} WAFBench files, {} test cases, {} payloads",
        stats.files_processed, stats.test_cases_loaded, stats.payloads_extracted
    ));
    // Per-category breakdown for visibility.
    for (category, count) in &stats.by_category {
        TerminalReporter::print_progress(&format!("  - {}: {} tests", category, count));
    }
    // Partial-parse errors are reported but do not abort the load.
    for error in &stats.parse_errors {
        TerminalReporter::print_warning(&format!("  Parse error: {}", error));
    }
    loader.to_security_payloads()
}
/// Augments a generated k6 script with the JavaScript helpers for the
/// advanced features that are enabled: data-driven inputs, invalid-data
/// injection, security payloads (explicit and/or WAFBench), and parallel
/// request batching.
///
/// All helper code is accumulated and spliced in just before the
/// `export const options` marker; if that marker is absent the helpers are
/// silently dropped and the base script is returned unchanged.
///
/// # Errors
/// Propagates failures from the individual code generators.
pub(crate) fn generate_enhanced_script(&self, base_script: &str) -> Result<String> {
    let mut enhanced_script = base_script.to_string();
    let mut additional_code = String::new();
    // --- Data-driven testing ---
    if let Some(config) = self.build_data_driven_config() {
        TerminalReporter::print_progress("Adding data-driven testing support...");
        additional_code.push_str(&DataDrivenGenerator::generate_setup(&config));
        additional_code.push('\n');
        TerminalReporter::print_success("Data-driven testing enabled");
    }
    // --- Invalid-data injection ---
    if let Some(config) = self.build_invalid_data_config() {
        TerminalReporter::print_progress("Adding invalid data testing support...");
        additional_code.push_str(&InvalidDataGenerator::generate_invalidation_logic());
        additional_code.push('\n');
        additional_code
            .push_str(&InvalidDataGenerator::generate_should_invalidate(config.error_rate));
        additional_code.push('\n');
        additional_code
            .push_str(&InvalidDataGenerator::generate_type_selection(&config.error_types));
        additional_code.push('\n');
        TerminalReporter::print_success(&format!(
            "Invalid data testing enabled ({}% error rate)",
            // error_rate is a fraction; displayed as a truncated percentage.
            (self.error_rate.unwrap_or(0.0) * 100.0) as u32
        ));
    }
    // --- Security testing (explicit config and/or WAFBench payloads) ---
    let security_config = self.build_security_config();
    let wafbench_payloads = self.load_wafbench_payloads();
    // "Requested" tracks intent even when nothing could be loaded, so we
    // can still emit stub helpers and warn below.
    let security_requested = security_config.is_some() || self.wafbench_dir.is_some();
    if security_config.is_some() || !wafbench_payloads.is_empty() {
        TerminalReporter::print_progress("Adding security testing support...");
        let mut payload_list: Vec<SecurityPayload> = Vec::new();
        if let Some(ref config) = security_config {
            payload_list.extend(SecurityPayloads::get_payloads(config));
        }
        if !wafbench_payloads.is_empty() {
            TerminalReporter::print_progress(&format!(
                "Loading {} WAFBench attack patterns...",
                wafbench_payloads.len()
            ));
            payload_list.extend(wafbench_payloads);
        }
        let target_fields =
            security_config.as_ref().map(|c| c.target_fields.clone()).unwrap_or_default();
        additional_code.push_str(&SecurityTestGenerator::generate_payload_selection(
            &payload_list,
            self.wafbench_cycle_all,
        ));
        additional_code.push('\n');
        additional_code
            .push_str(&SecurityTestGenerator::generate_apply_payload(&target_fields));
        additional_code.push('\n');
        additional_code.push_str(&SecurityTestGenerator::generate_security_checks());
        additional_code.push('\n');
        let mode = if self.wafbench_cycle_all {
            "cycle-all"
        } else {
            "random"
        };
        TerminalReporter::print_success(&format!(
            "Security testing enabled ({} payloads, {} mode)",
            payload_list.len(),
            mode
        ));
    } else if security_requested {
        // Security was asked for but produced zero payloads: warn, and emit
        // empty stubs so the script's references still resolve.
        TerminalReporter::print_warning(
            "Security testing was requested but no payloads were loaded. \
            Ensure --wafbench-dir points to valid CRS YAML files or add --security-test.",
        );
        additional_code
            .push_str(&SecurityTestGenerator::generate_payload_selection(&[], false));
        additional_code.push('\n');
        additional_code.push_str(&SecurityTestGenerator::generate_apply_payload(&[]));
        additional_code.push('\n');
    }
    // --- Parallel request batching ---
    if let Some(config) = self.build_parallel_config() {
        TerminalReporter::print_progress("Adding parallel execution support...");
        additional_code.push_str(&ParallelRequestGenerator::generate_batch_helper(&config));
        additional_code.push('\n');
        TerminalReporter::print_success(&format!(
            "Parallel execution enabled (count: {})",
            config.count
        ));
    }
    // Splice all accumulated helpers in before the options declaration.
    if !additional_code.is_empty() {
        if let Some(import_end) = enhanced_script.find("export const options") {
            enhanced_script.insert_str(
                import_end,
                &format!("\n// === Advanced Testing Features ===\n{}\n", additional_code),
            );
        }
    }
    Ok(enhanced_script)
}
/// Executes multiple specs one after another in dependency order, carrying
/// extracted response values forward from each spec to the next.
///
/// The order comes from the `--dependency-config` file when it lists an
/// explicit execution order, otherwise from auto-detected cross-spec
/// dependencies (topologically sorted).
///
/// # Errors
/// Fails when no specs load, the dependency config is invalid, a spec in
/// the execution order cannot be matched to a loaded spec, or any single
/// spec execution fails.
async fn execute_sequential_specs(&self) -> Result<()> {
    TerminalReporter::print_progress("Sequential spec mode: Loading specs individually...");
    let mut all_specs: Vec<(PathBuf, OpenApiSpec)> = Vec::new();
    if !self.spec.is_empty() {
        let specs = load_specs_from_files(self.spec.clone())
            .await
            .map_err(|e| BenchError::Other(format!("Failed to load spec files: {}", e)))?;
        all_specs.extend(specs);
    }
    if let Some(spec_dir) = &self.spec_dir {
        let dir_specs = load_specs_from_directory(spec_dir).await.map_err(|e| {
            BenchError::Other(format!("Failed to load specs from directory: {}", e))
        })?;
        all_specs.extend(dir_specs);
    }
    if all_specs.is_empty() {
        return Err(BenchError::Other(
            "No spec files found for sequential execution".to_string(),
        ));
    }
    TerminalReporter::print_success(&format!("Loaded {} spec(s)", all_specs.len()));
    let execution_order = if let Some(config_path) = &self.dependency_config {
        TerminalReporter::print_progress("Loading dependency configuration...");
        let config = SpecDependencyConfig::from_file(config_path)?;
        // NOTE(review): when auto-detect is disabled AND the config lists
        // no execution order, this branch yields an empty order and the
        // loop below runs zero specs — confirm that is intended.
        if !config.disable_auto_detect && config.execution_order.is_empty() {
            self.detect_and_sort_specs(&all_specs)?
        } else {
            config.execution_order.iter().flat_map(|g| g.specs.clone()).collect()
        }
    } else {
        self.detect_and_sort_specs(&all_specs)?
    };
    TerminalReporter::print_success(&format!(
        "Execution order: {}",
        execution_order
            .iter()
            .map(|p| p.file_name().unwrap_or_default().to_string_lossy().to_string())
            .collect::<Vec<_>>()
            .join(" → ")
    ));
    // Values extracted by earlier specs accumulate here for later specs.
    let mut extracted_values = ExtractedValues::new();
    let total_specs = execution_order.len();
    for (index, spec_path) in execution_order.iter().enumerate() {
        let spec_name = spec_path.file_name().unwrap_or_default().to_string_lossy().to_string();
        TerminalReporter::print_progress(&format!(
            "[{}/{}] Executing spec: {}",
            index + 1,
            total_specs,
            spec_name
        ));
        // Match by full path first, then by file name — config entries may
        // be given as bare file names rather than full paths.
        let spec = all_specs
            .iter()
            .find(|(p, _)| {
                p == spec_path
                    || p.file_name() == spec_path.file_name()
                    || p.file_name() == Some(spec_path.as_os_str())
            })
            .map(|(_, s)| s.clone())
            .ok_or_else(|| {
                BenchError::Other(format!("Spec not found: {}", spec_path.display()))
            })?;
        let new_values = self.execute_single_spec(&spec, &spec_name, &extracted_values).await?;
        extracted_values.merge(&new_values);
        TerminalReporter::print_success(&format!(
            "[{}/{}] Completed: {} (extracted {} values)",
            index + 1,
            total_specs,
            spec_name,
            new_values.values.len()
        ));
    }
    TerminalReporter::print_success(&format!(
        "Sequential execution complete: {} specs executed",
        total_specs
    ));
    Ok(())
}
/// Auto-detects cross-spec dependencies and returns the spec paths in a
/// topologically sorted execution order.
///
/// With no detected dependencies, the original file order is kept.
///
/// # Errors
/// Propagates failures from the topological sort (e.g. dependency cycles,
/// if the sorter reports them).
fn detect_and_sort_specs(&self, specs: &[(PathBuf, OpenApiSpec)]) -> Result<Vec<PathBuf>> {
    TerminalReporter::print_progress("Auto-detecting spec dependencies...");
    let mut detector = DependencyDetector::new();
    let dependencies = detector.detect_dependencies(specs);
    if dependencies.is_empty() {
        // Nothing to order by — fall back to the order specs were loaded.
        TerminalReporter::print_progress("No dependencies detected, using file order");
        let file_order = specs.iter().map(|(path, _)| path.clone()).collect();
        return Ok(file_order);
    }
    TerminalReporter::print_progress(&format!(
        "Detected {} cross-spec dependencies",
        dependencies.len()
    ));
    // Log each detected edge for operator visibility.
    for dep in &dependencies {
        let from = dep.dependency_spec.file_name().unwrap_or_default().to_string_lossy();
        let to = dep.dependent_spec.file_name().unwrap_or_default().to_string_lossy();
        TerminalReporter::print_progress(&format!(
            "  {} → {} (via field '{}')",
            from, to, dep.field_name
        ));
    }
    topological_sort(specs, &dependencies)
}
/// Executes one spec within a sequential run, returning any values
/// extracted from responses (for use by subsequent specs).
///
/// Standard (non-CRUD-flow) execution extracts nothing and returns an
/// empty value set. `_external_values` (values from earlier specs) is
/// currently unused.
async fn execute_single_spec(
    &self,
    spec: &OpenApiSpec,
    spec_name: &str,
    _external_values: &ExtractedValues,
) -> Result<ExtractedValues> {
    let parser = SpecParser::from_spec(spec.clone());
    if !self.crud_flow {
        self.execute_standard_spec(&parser, spec_name).await?;
        return Ok(ExtractedValues::new());
    }
    self.execute_crud_flow_with_extraction(&parser, spec_name).await
}
/// Run a CRUD-flow benchmark for one spec and return any values extracted
/// by the flow steps (for consumption by dependent specs).
///
/// Renders the `k6_crud_flow.hbs` template with the detected flows, writes
/// the script under `self.output`, and — unless `generate_only` is set —
/// executes it and parses `extracted_values.json` from the per-spec results
/// directory.
async fn execute_crud_flow_with_extraction(
    &self,
    parser: &SpecParser,
    spec_name: &str,
) -> Result<ExtractedValues> {
    let operations = parser.get_operations();
    let flows = CrudFlowDetector::detect_flows(&operations);
    if flows.is_empty() {
        // No create/read/update/delete sequences found; nothing to run or extract.
        TerminalReporter::print_warning(&format!("No CRUD flows detected in {}", spec_name));
        return Ok(ExtractedValues::new());
    }
    TerminalReporter::print_progress(&format!(
        " {} CRUD flow(s) in {}",
        flows.len(),
        spec_name
    ));
    // Register a `json` helper so the template can embed arbitrary values
    // as JSON literals; serialization failures fall back to "[]".
    let mut handlebars = handlebars::Handlebars::new();
    handlebars.register_helper(
        "json",
        Box::new(
            |h: &handlebars::Helper,
             _: &handlebars::Handlebars,
             _: &handlebars::Context,
             _: &mut handlebars::RenderContext,
             out: &mut dyn handlebars::Output|
             -> handlebars::HelperResult {
                let param = h.param(0).map(|v| v.value()).unwrap_or(&serde_json::Value::Null);
                out.write(&serde_json::to_string(param).unwrap_or_else(|_| "[]".to_string()))?;
                Ok(())
            },
        ),
    );
    let template = include_str!("templates/k6_crud_flow.hbs");
    let output_dir = self.output.join(format!("{}_results", spec_name.replace('.', "_")));
    let custom_headers = self.parse_headers()?;
    let config = self.build_crud_flow_config().unwrap_or_default();
    // Optional request-body overrides loaded from the params file.
    let param_overrides = if let Some(params_file) = &self.params_file {
        let overrides = ParameterOverrides::from_file(params_file)?;
        Some(overrides)
    } else {
        None
    };
    let duration_secs = Self::parse_duration(&self.duration)?;
    let scenario =
        LoadScenario::from_str(&self.scenario).map_err(BenchError::InvalidScenario)?;
    let stages = scenario.generate_stages(duration_secs, self.vus);
    let api_base_path = self.resolve_base_path(parser);
    // Merge the auth header into the header map sent with every request.
    let mut all_headers = custom_headers.clone();
    if let Some(auth) = &self.auth {
        all_headers.insert("Authorization".to_string(), auth.clone());
    }
    let headers_json = serde_json::to_string(&all_headers).unwrap_or_else(|_| "{}".to_string());
    // Collected across all steps; determines which dynamic imports/globals
    // the generated k6 script needs.
    let mut all_placeholders: HashSet<DynamicPlaceholder> = HashSet::new();
    let flows_data: Vec<serde_json::Value> = flows.iter().map(|f| {
        let sanitized_name = K6ScriptGenerator::sanitize_js_identifier(&f.name);
        serde_json::json!({
            "name": sanitized_name.clone(),
            "display_name": f.name,
            "base_path": f.base_path,
            "steps": f.steps.iter().enumerate().map(|(idx, s)| {
                // A step operation is formatted as "METHOD /path".
                let parts: Vec<&str> = s.operation.splitn(2, ' ').collect();
                let method_raw = if !parts.is_empty() {
                    parts[0].to_uppercase()
                } else {
                    "GET".to_string()
                };
                // k6's http module names the DELETE function `del`.
                let method = if !parts.is_empty() {
                    let m = parts[0].to_lowercase();
                    if m == "delete" { "del".to_string() } else { m }
                } else {
                    "get".to_string()
                };
                let raw_path = if parts.len() >= 2 { parts[1] } else { "/" };
                let path = if let Some(ref bp) = api_base_path {
                    format!("{}{}", bp, raw_path)
                } else {
                    raw_path.to_string()
                };
                let is_get_or_head = method == "get" || method == "head";
                let has_body = matches!(method.as_str(), "post" | "put" | "patch");
                // Body comes from overrides when present, otherwise `{}`.
                let body_value = if has_body {
                    param_overrides.as_ref()
                        .map(|po| po.get_for_operation(None, &method_raw, raw_path))
                        .and_then(|oo| oo.body)
                        .unwrap_or_else(|| serde_json::json!({}))
                } else {
                    serde_json::json!({})
                };
                // Expand dynamic placeholders (VU, UUID, ...) inside the body.
                let processed_body = DynamicParamProcessor::process_json_body(&body_value);
                let body_has_extracted_placeholders = processed_body.value.contains("${extracted.");
                let body_is_dynamic = processed_body.is_dynamic || body_has_extracted_placeholders;
                serde_json::json!({
                    "operation": s.operation,
                    "method": method,
                    "path": path,
                    "extract": s.extract,
                    "use_values": s.use_values,
                    "use_body": s.use_body,
                    "merge_body": if s.merge_body.is_empty() { None } else { Some(&s.merge_body) },
                    "inject_attacks": s.inject_attacks,
                    "attack_types": s.attack_types,
                    "description": s.description,
                    "display_name": s.description.clone().unwrap_or_else(|| format!("Step {}", idx)),
                    "is_get_or_head": is_get_or_head,
                    "has_body": has_body,
                    "body": processed_body.value,
                    "body_is_dynamic": body_is_dynamic,
                    // Debug-formatted placeholder names; mapped back to enum
                    // values in the pass below.
                    "_placeholders": processed_body.placeholders.iter().map(|p| format!("{:?}", p)).collect::<Vec<_>>(),
                })
            }).collect::<Vec<_>>(),
        })
    }).collect();
    // Second pass: recover DynamicPlaceholder variants from the Debug
    // strings stashed in each step's "_placeholders" array.
    for flow_data in &flows_data {
        if let Some(steps) = flow_data.get("steps").and_then(|s| s.as_array()) {
            for step in steps {
                if let Some(placeholders_arr) =
                    step.get("_placeholders").and_then(|p| p.as_array())
                {
                    for p_str in placeholders_arr {
                        if let Some(p_name) = p_str.as_str() {
                            match p_name {
                                "VU" => {
                                    all_placeholders.insert(DynamicPlaceholder::VU);
                                }
                                "Iteration" => {
                                    all_placeholders.insert(DynamicPlaceholder::Iteration);
                                }
                                "Timestamp" => {
                                    all_placeholders.insert(DynamicPlaceholder::Timestamp);
                                }
                                "UUID" => {
                                    all_placeholders.insert(DynamicPlaceholder::UUID);
                                }
                                "Random" => {
                                    all_placeholders.insert(DynamicPlaceholder::Random);
                                }
                                "Counter" => {
                                    all_placeholders.insert(DynamicPlaceholder::Counter);
                                }
                                "Date" => {
                                    all_placeholders.insert(DynamicPlaceholder::Date);
                                }
                                "VuIter" => {
                                    all_placeholders.insert(DynamicPlaceholder::VuIter);
                                }
                                _ => {}
                            }
                        }
                    }
                }
            }
        }
    }
    let required_imports = DynamicParamProcessor::get_required_imports(&all_placeholders);
    let required_globals = DynamicParamProcessor::get_required_globals(&all_placeholders);
    let security_testing_enabled = self.wafbench_dir.is_some() || self.security_test;
    // Top-level context for the handlebars render of the k6 script.
    let data = serde_json::json!({
        "base_url": self.target,
        "flows": flows_data,
        "extract_fields": config.default_extract_fields,
        "duration_secs": duration_secs,
        "max_vus": self.vus,
        "auth_header": self.auth,
        "custom_headers": custom_headers,
        "skip_tls_verify": self.skip_tls_verify,
        "stages": stages.iter().map(|s| serde_json::json!({
            "duration": s.duration,
            "target": s.target,
        })).collect::<Vec<_>>(),
        "threshold_percentile": self.threshold_percentile,
        "threshold_ms": self.threshold_ms,
        "max_error_rate": self.max_error_rate,
        "headers": headers_json,
        "dynamic_imports": required_imports,
        "dynamic_globals": required_globals,
        "extracted_values_output_path": output_dir.join("extracted_values.json").to_string_lossy(),
        "security_testing_enabled": security_testing_enabled,
        "has_custom_headers": !custom_headers.is_empty(),
    });
    let mut script = handlebars
        .render_template(template, &data)
        .map_err(|e| BenchError::ScriptGenerationFailed(e.to_string()))?;
    if security_testing_enabled {
        // Layer security payload injection onto the rendered script.
        script = self.generate_enhanced_script(&script)?;
    }
    let script_path =
        self.output.join(format!("k6-{}-crud-flow.js", spec_name.replace('.', "_")));
    std::fs::create_dir_all(self.output.clone())?;
    std::fs::write(&script_path, &script)?;
    if !self.generate_only {
        let executor = K6Executor::new()?;
        std::fs::create_dir_all(&output_dir)?;
        executor.execute(&script_path, Some(&output_dir), self.verbose).await?;
        // The script writes extracted values to disk; parse them so later
        // specs in the dependency chain can consume them.
        let extracted = Self::parse_extracted_values(&output_dir)?;
        TerminalReporter::print_progress(&format!(
            " Extracted {} value(s) from {}",
            extracted.values.len(),
            spec_name
        ));
        return Ok(extracted);
    }
    Ok(ExtractedValues::new())
}
/// Generate (and optionally execute) a standard per-operation k6 benchmark
/// for a single spec: apply include/exclude operation filters, build one
/// request template per operation, render the k6 script, and layer on
/// advanced features (data files, error injection, security tests,
/// parallel creation, WAF bench) when configured.
async fn execute_standard_spec(&self, parser: &SpecParser, spec_name: &str) -> Result<()> {
    // Start from either the filtered or the full operation set.
    let ops = match &self.operations {
        Some(filter) => parser.filter_operations(filter)?,
        None => parser.get_operations(),
    };
    let ops = match &self.exclude_operations {
        Some(exclude) => parser.exclude_operations(ops, exclude)?,
        None => ops,
    };
    if ops.is_empty() {
        TerminalReporter::print_warning(&format!("No operations found in {}", spec_name));
        return Ok(());
    }
    TerminalReporter::print_progress(&format!(" {} operations in {}", ops.len(), spec_name));
    // One request template per operation; abort on the first failure.
    let request_templates = ops
        .iter()
        .map(RequestGenerator::generate_template)
        .collect::<Result<Vec<_>>>()?;
    let custom_headers = self.parse_headers()?;
    let base_path = self.resolve_base_path(parser);
    let scenario =
        LoadScenario::from_str(&self.scenario).map_err(BenchError::InvalidScenario)?;
    let security_testing_enabled = self.security_test || self.wafbench_dir.is_some();
    let k6_config = K6Config {
        target_url: self.target.clone(),
        base_path,
        scenario,
        duration_secs: Self::parse_duration(&self.duration)?,
        max_vus: self.vus,
        threshold_percentile: self.threshold_percentile.clone(),
        threshold_ms: self.threshold_ms,
        max_error_rate: self.max_error_rate,
        auth_header: self.auth.clone(),
        custom_headers,
        skip_tls_verify: self.skip_tls_verify,
        security_testing_enabled,
    };
    let script_gen = K6ScriptGenerator::new(k6_config, request_templates);
    let mut script = script_gen.generate()?;
    // Any of these flags requires post-processing the base script.
    let needs_enhancement = self.data_file.is_some()
        || self.error_rate.is_some()
        || self.security_test
        || self.parallel_create.is_some()
        || self.wafbench_dir.is_some();
    if needs_enhancement {
        script = self.generate_enhanced_script(&script)?;
    }
    let script_path = self.output.join(format!("k6-{}.js", spec_name.replace('.', "_")));
    std::fs::create_dir_all(&self.output)?;
    std::fs::write(&script_path, &script)?;
    if self.generate_only {
        return Ok(());
    }
    // Execute the generated script, storing results next to the script.
    let executor = K6Executor::new()?;
    let results_dir = self.output.join(format!("{}_results", spec_name.replace('.', "_")));
    std::fs::create_dir_all(&results_dir)?;
    executor.execute(&script_path, Some(&results_dir), self.verbose).await?;
    Ok(())
}
/// Generate, validate, and (unless `generate_only`) execute a CRUD-flow k6
/// test for a single spec.
///
/// Flows come from a custom flow configuration when one is provided,
/// otherwise they are auto-detected from the spec's operations. Supports
/// parameter overrides, dynamic placeholders, error injection, and
/// security-test enhancement.
async fn execute_crud_flow(&self, parser: &SpecParser) -> Result<()> {
    let config = self.build_crud_flow_config().unwrap_or_default();
    // Prefer user-supplied flows; fall back to auto-detection.
    let flows = if !config.flows.is_empty() {
        TerminalReporter::print_progress("Using custom flow configuration...");
        config.flows.clone()
    } else {
        TerminalReporter::print_progress("Detecting CRUD operations...");
        let operations = parser.get_operations();
        CrudFlowDetector::detect_flows(&operations)
    };
    if flows.is_empty() {
        return Err(BenchError::Other(
            "No CRUD flows detected in spec. Ensure spec has POST/GET/PUT/DELETE operations on related paths.".to_string(),
        ));
    }
    if config.flows.is_empty() {
        TerminalReporter::print_success(&format!("Detected {} CRUD flow(s)", flows.len()));
    } else {
        TerminalReporter::print_success(&format!("Loaded {} custom flow(s)", flows.len()));
    }
    for flow in &flows {
        TerminalReporter::print_progress(&format!(
            " - {}: {} steps",
            flow.name,
            flow.steps.len()
        ));
    }
    // Register a `json` helper so the template can embed arbitrary values
    // as JSON literals; serialization failures fall back to "[]".
    let mut handlebars = handlebars::Handlebars::new();
    handlebars.register_helper(
        "json",
        Box::new(
            |h: &handlebars::Helper,
             _: &handlebars::Handlebars,
             _: &handlebars::Context,
             _: &mut handlebars::RenderContext,
             out: &mut dyn handlebars::Output|
             -> handlebars::HelperResult {
                let param = h.param(0).map(|v| v.value()).unwrap_or(&serde_json::Value::Null);
                out.write(&serde_json::to_string(param).unwrap_or_else(|_| "[]".to_string()))?;
                Ok(())
            },
        ),
    );
    let template = include_str!("templates/k6_crud_flow.hbs");
    let custom_headers = self.parse_headers()?;
    // Optional request-body overrides loaded from the params file.
    let param_overrides = if let Some(params_file) = &self.params_file {
        TerminalReporter::print_progress("Loading parameter overrides...");
        let overrides = ParameterOverrides::from_file(params_file)?;
        TerminalReporter::print_success(&format!(
            "Loaded parameter overrides ({} operation-specific, {} defaults)",
            overrides.operations.len(),
            if overrides.defaults.is_empty() { 0 } else { 1 }
        ));
        Some(overrides)
    } else {
        None
    };
    let duration_secs = Self::parse_duration(&self.duration)?;
    let scenario =
        LoadScenario::from_str(&self.scenario).map_err(BenchError::InvalidScenario)?;
    let stages = scenario.generate_stages(duration_secs, self.vus);
    let api_base_path = self.resolve_base_path(parser);
    if let Some(ref bp) = api_base_path {
        TerminalReporter::print_progress(&format!("Using base path: {}", bp));
    }
    // Merge the auth header into the header map sent with every request.
    let mut all_headers = custom_headers.clone();
    if let Some(auth) = &self.auth {
        all_headers.insert("Authorization".to_string(), auth.clone());
    }
    let headers_json = serde_json::to_string(&all_headers).unwrap_or_else(|_| "{}".to_string());
    // Collected across all steps; determines which dynamic imports/globals
    // the generated k6 script needs.
    let mut all_placeholders: HashSet<DynamicPlaceholder> = HashSet::new();
    let flows_data: Vec<serde_json::Value> = flows.iter().map(|f| {
        let sanitized_name = K6ScriptGenerator::sanitize_js_identifier(&f.name);
        serde_json::json!({
            "name": sanitized_name.clone(), "display_name": f.name, "base_path": f.base_path,
            "steps": f.steps.iter().enumerate().map(|(idx, s)| {
                // A step operation is formatted as "METHOD /path".
                let parts: Vec<&str> = s.operation.splitn(2, ' ').collect();
                let method_raw = if !parts.is_empty() {
                    parts[0].to_uppercase()
                } else {
                    "GET".to_string()
                };
                // k6's http module names the DELETE function `del`.
                let method = if !parts.is_empty() {
                    let m = parts[0].to_lowercase();
                    if m == "delete" { "del".to_string() } else { m }
                } else {
                    "get".to_string()
                };
                let raw_path = if parts.len() >= 2 { parts[1] } else { "/" };
                let path = if let Some(ref bp) = api_base_path {
                    format!("{}{}", bp, raw_path)
                } else {
                    raw_path.to_string()
                };
                let is_get_or_head = method == "get" || method == "head";
                let has_body = matches!(method.as_str(), "post" | "put" | "patch");
                // Body comes from overrides when present, otherwise `{}`.
                let body_value = if has_body {
                    param_overrides.as_ref()
                        .map(|po| po.get_for_operation(None, &method_raw, raw_path))
                        .and_then(|oo| oo.body)
                        .unwrap_or_else(|| serde_json::json!({}))
                } else {
                    serde_json::json!({})
                };
                // Expand dynamic placeholders (VU, UUID, ...) inside the body.
                let processed_body = DynamicParamProcessor::process_json_body(&body_value);
                let body_has_extracted_placeholders = processed_body.value.contains("${extracted.");
                let body_is_dynamic = processed_body.is_dynamic || body_has_extracted_placeholders;
                serde_json::json!({
                    "operation": s.operation,
                    "method": method,
                    "path": path,
                    "extract": s.extract,
                    "use_values": s.use_values,
                    "use_body": s.use_body,
                    "merge_body": if s.merge_body.is_empty() { None } else { Some(&s.merge_body) },
                    "inject_attacks": s.inject_attacks,
                    "attack_types": s.attack_types,
                    "description": s.description,
                    "display_name": s.description.clone().unwrap_or_else(|| format!("Step {}", idx)),
                    "is_get_or_head": is_get_or_head,
                    "has_body": has_body,
                    "body": processed_body.value,
                    "body_is_dynamic": body_is_dynamic,
                    // Debug-formatted placeholder names; mapped back to enum
                    // values in the pass below.
                    "_placeholders": processed_body.placeholders.iter().map(|p| format!("{:?}", p)).collect::<Vec<_>>(),
                })
            }).collect::<Vec<_>>(),
        })
    }).collect();
    // Second pass: recover DynamicPlaceholder variants from the Debug
    // strings stashed in each step's "_placeholders" array.
    for flow_data in &flows_data {
        if let Some(steps) = flow_data.get("steps").and_then(|s| s.as_array()) {
            for step in steps {
                if let Some(placeholders_arr) =
                    step.get("_placeholders").and_then(|p| p.as_array())
                {
                    for p_str in placeholders_arr {
                        if let Some(p_name) = p_str.as_str() {
                            match p_name {
                                "VU" => {
                                    all_placeholders.insert(DynamicPlaceholder::VU);
                                }
                                "Iteration" => {
                                    all_placeholders.insert(DynamicPlaceholder::Iteration);
                                }
                                "Timestamp" => {
                                    all_placeholders.insert(DynamicPlaceholder::Timestamp);
                                }
                                "UUID" => {
                                    all_placeholders.insert(DynamicPlaceholder::UUID);
                                }
                                "Random" => {
                                    all_placeholders.insert(DynamicPlaceholder::Random);
                                }
                                "Counter" => {
                                    all_placeholders.insert(DynamicPlaceholder::Counter);
                                }
                                "Date" => {
                                    all_placeholders.insert(DynamicPlaceholder::Date);
                                }
                                "VuIter" => {
                                    all_placeholders.insert(DynamicPlaceholder::VuIter);
                                }
                                _ => {}
                            }
                        }
                    }
                }
            }
        }
    }
    let required_imports = DynamicParamProcessor::get_required_imports(&all_placeholders);
    let required_globals = DynamicParamProcessor::get_required_globals(&all_placeholders);
    // Error injection is active when an invalid-data config was built.
    let invalid_data_config = self.build_invalid_data_config();
    let error_injection_enabled = invalid_data_config.is_some();
    let error_rate = self.error_rate.unwrap_or(0.0);
    let error_types: Vec<String> = invalid_data_config
        .as_ref()
        .map(|c| c.error_types.iter().map(|t| format!("{:?}", t)).collect())
        .unwrap_or_default();
    if error_injection_enabled {
        TerminalReporter::print_progress(&format!(
            "Error injection enabled ({}% rate)",
            (error_rate * 100.0) as u32
        ));
    }
    let security_testing_enabled = self.wafbench_dir.is_some() || self.security_test;
    // Top-level context for the handlebars render of the k6 script.
    let data = serde_json::json!({
        "base_url": self.target,
        "flows": flows_data,
        "extract_fields": config.default_extract_fields,
        "duration_secs": duration_secs,
        "max_vus": self.vus,
        "auth_header": self.auth,
        "custom_headers": custom_headers,
        "skip_tls_verify": self.skip_tls_verify,
        "stages": stages.iter().map(|s| serde_json::json!({
            "duration": s.duration,
            "target": s.target,
        })).collect::<Vec<_>>(),
        "threshold_percentile": self.threshold_percentile,
        "threshold_ms": self.threshold_ms,
        "max_error_rate": self.max_error_rate,
        "headers": headers_json,
        "dynamic_imports": required_imports,
        "dynamic_globals": required_globals,
        "extracted_values_output_path": self
            .output
            .join("crud_flow_extracted_values.json")
            .to_string_lossy(),
        "error_injection_enabled": error_injection_enabled,
        "error_rate": error_rate,
        "error_types": error_types,
        "security_testing_enabled": security_testing_enabled,
        "has_custom_headers": !custom_headers.is_empty(),
    });
    let mut script = handlebars
        .render_template(template, &data)
        .map_err(|e| BenchError::ScriptGenerationFailed(e.to_string()))?;
    if security_testing_enabled {
        // Layer security payload injection onto the rendered script.
        script = self.generate_enhanced_script(&script)?;
    }
    // Sanity-check the generated JS before writing/running it.
    TerminalReporter::print_progress("Validating CRUD flow script...");
    let validation_errors = K6ScriptGenerator::validate_script(&script);
    if !validation_errors.is_empty() {
        TerminalReporter::print_error("CRUD flow script validation failed");
        for error in &validation_errors {
            eprintln!(" {}", error);
        }
        return Err(BenchError::Other(format!(
            "CRUD flow script validation failed with {} error(s)",
            validation_errors.len()
        )));
    }
    TerminalReporter::print_success("CRUD flow script generated");
    // Script destination: explicit --script-output, else a default name
    // under the output directory.
    let script_path = if let Some(output) = &self.script_output {
        output.clone()
    } else {
        self.output.join("k6-crud-flow-script.js")
    };
    if let Some(parent) = script_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    std::fs::write(&script_path, &script)?;
    TerminalReporter::print_success(&format!("Script written to: {}", script_path.display()));
    if self.generate_only {
        println!("\nScript generated successfully. Run it with:");
        println!(" k6 run {}", script_path.display());
        return Ok(());
    }
    TerminalReporter::print_progress("Executing CRUD flow test...");
    let executor = K6Executor::new()?;
    std::fs::create_dir_all(&self.output)?;
    let results = executor.execute(&script_path, Some(&self.output), self.verbose).await?;
    let duration_secs = Self::parse_duration(&self.duration)?;
    TerminalReporter::print_summary(&results, duration_secs);
    Ok(())
}
/// Run OpenAPI 3.0.0 conformance tests against the configured target.
///
/// When `generate_only` or `use_k6` is set, a k6 conformance script is
/// generated (and optionally executed via k6); otherwise the checks run
/// through the native in-process executor. Reports land under
/// `self.output` and are re-saved to the user-requested location/format
/// via `save_conformance_report`.
async fn execute_conformance_test(&self) -> Result<()> {
    use crate::conformance::generator::{ConformanceConfig, ConformanceGenerator};
    use crate::conformance::report::ConformanceReport;
    use crate::conformance::spec::ConformanceFeature;
    TerminalReporter::print_progress("OpenAPI 3.0.0 Conformance Testing Mode");
    TerminalReporter::print_progress(
        "Conformance mode runs 1 VU, 1 iteration per endpoint (--vus and -d are ignored)",
    );
    // Map CLI-facing category names to canonical names; unknown names are
    // warned about and dropped.
    let categories = self.conformance_categories.as_ref().map(|cats_str| {
        cats_str
            .split(',')
            .filter_map(|s| {
                let trimmed = s.trim();
                if let Some(canonical) = ConformanceFeature::category_from_cli_name(trimmed) {
                    Some(canonical.to_string())
                } else {
                    TerminalReporter::print_warning(&format!(
                        "Unknown conformance category: '{}'. Valid categories: {}",
                        trimmed,
                        ConformanceFeature::cli_category_names()
                            .iter()
                            .map(|(cli, _)| *cli)
                            .collect::<Vec<_>>()
                            .join(", ")
                    ));
                    None
                }
            })
            .collect::<Vec<String>>()
    });
    // "Name: Value" CLI strings become (name, value) pairs; entries
    // without a ':' are silently skipped.
    let custom_headers: Vec<(String, String)> = self
        .conformance_headers
        .iter()
        .filter_map(|h| {
            let (name, value) = h.split_once(':')?;
            Some((name.trim().to_string(), value.trim().to_string()))
        })
        .collect();
    if !custom_headers.is_empty() {
        TerminalReporter::print_progress(&format!(
            "Using {} custom header(s) for authentication",
            custom_headers.len()
        ));
    }
    if self.conformance_delay_ms > 0 {
        TerminalReporter::print_progress(&format!(
            "Using {}ms delay between conformance requests",
            self.conformance_delay_ms
        ));
    }
    std::fs::create_dir_all(&self.output)?;
    let config = ConformanceConfig {
        target_url: self.target.clone(),
        api_key: self.conformance_api_key.clone(),
        basic_auth: self.conformance_basic_auth.clone(),
        skip_tls_verify: self.skip_tls_verify,
        categories,
        base_path: self.base_path.clone(),
        custom_headers,
        output_dir: Some(self.output.clone()),
        all_operations: self.conformance_all_operations,
        custom_checks_file: self.conformance_custom.clone(),
        request_delay_ms: self.conformance_delay_ms,
    };
    // Spec-driven mode: when a spec was supplied, annotate its operations
    // with conformance features to drive check generation.
    let annotated_ops = if !self.spec.is_empty() {
        TerminalReporter::print_progress("Spec-driven conformance mode: analyzing spec...");
        let parser = SpecParser::from_file(&self.spec[0]).await?;
        let operations = parser.get_operations();
        let annotated =
            crate::conformance::spec_driven::SpecDrivenConformanceGenerator::annotate_operations(
                &operations,
                parser.spec(),
            );
        TerminalReporter::print_success(&format!(
            "Analyzed {} operations, found {} feature annotations",
            operations.len(),
            annotated.iter().map(|a| a.features.len()).sum::<usize>()
        ));
        Some(annotated)
    } else {
        None
    };
    // k6 path: generate the script, optionally stop there, else run it.
    if self.generate_only || self.use_k6 {
        let script = if let Some(annotated) = &annotated_ops {
            let gen = crate::conformance::spec_driven::SpecDrivenConformanceGenerator::new(
                config,
                annotated.clone(),
            );
            let op_count = gen.operation_count();
            let (script, check_count) = gen.generate()?;
            TerminalReporter::print_success(&format!(
                "Conformance: {} operations analyzed, {} unique checks generated",
                op_count, check_count
            ));
            script
        } else {
            let generator = ConformanceGenerator::new(config);
            generator.generate()?
        };
        let script_path = self.output.join("k6-conformance.js");
        std::fs::write(&script_path, &script).map_err(|e| {
            BenchError::Other(format!("Failed to write conformance script: {}", e))
        })?;
        TerminalReporter::print_success(&format!(
            "Conformance script generated: {}",
            script_path.display()
        ));
        if self.generate_only {
            println!("\nScript generated. Run with:");
            println!(" k6 run {}", script_path.display());
            return Ok(());
        }
        if !K6Executor::is_k6_installed() {
            TerminalReporter::print_error("k6 is not installed");
            TerminalReporter::print_warning(
                "Install k6 from: https://k6.io/docs/get-started/installation/",
            );
            return Err(BenchError::K6NotFound);
        }
        TerminalReporter::print_progress("Running conformance tests via k6...");
        let executor = K6Executor::new()?;
        executor.execute(&script_path, Some(&self.output), self.verbose).await?;
        // The k6 script's handleSummary writes the report; it may be
        // missing if the run aborted early.
        let report_path = self.output.join("conformance-report.json");
        if report_path.exists() {
            let report = ConformanceReport::from_file(&report_path)?;
            report.print_report_with_options(self.conformance_all_operations);
            self.save_conformance_report(&report, &report_path)?;
        } else {
            TerminalReporter::print_warning(
                "Conformance report not generated (k6 handleSummary may not have run)",
            );
        }
        return Ok(());
    }
    // Native path: run the checks in-process.
    TerminalReporter::print_progress("Running conformance tests (native executor)...");
    let mut executor = crate::conformance::executor::NativeConformanceExecutor::new(config)?;
    executor = if let Some(annotated) = &annotated_ops {
        executor.with_spec_driven_checks(annotated)
    } else {
        executor.with_reference_checks()
    };
    executor = executor.with_custom_checks()?;
    TerminalReporter::print_success(&format!(
        "Executing {} conformance checks...",
        executor.check_count()
    ));
    let report = executor.execute().await?;
    report.print_report_with_options(self.conformance_all_operations);
    // Failure details are saved best-effort; serialization errors are ignored.
    let failure_details = report.failure_details();
    if !failure_details.is_empty() {
        let details_path = self.output.join("conformance-failure-details.json");
        if let Ok(json) = serde_json::to_string_pretty(&failure_details) {
            let _ = std::fs::write(&details_path, json);
            TerminalReporter::print_success(&format!(
                "Failure details saved to: {}",
                details_path.display()
            ));
        }
    }
    let report_path = self.output.join("conformance-report.json");
    let report_json = serde_json::to_string_pretty(&report.to_json())
        .map_err(|e| BenchError::Other(format!("Failed to serialize report: {}", e)))?;
    std::fs::write(&report_path, &report_json)
        .map_err(|e| BenchError::Other(format!("Failed to write report: {}", e)))?;
    TerminalReporter::print_success(&format!("Report saved to: {}", report_path.display()));
    self.save_conformance_report(&report, &report_path)?;
    Ok(())
}
/// Persist the conformance report at the user-requested location/format.
///
/// In SARIF mode the report is converted and written to the configured
/// destination; otherwise the already-written JSON report is copied there,
/// skipping the copy when source and destination are the same path.
fn save_conformance_report(
    &self,
    report: &crate::conformance::report::ConformanceReport,
    report_path: &Path,
) -> Result<()> {
    let destination = &self.conformance_report;
    if self.conformance_report_format == "sarif" {
        use crate::conformance::sarif::ConformanceSarifReport;
        ConformanceSarifReport::write(report, &self.target, destination)?;
        TerminalReporter::print_success(&format!(
            "SARIF report saved to: {}",
            destination.display()
        ));
        return Ok(());
    }
    // Avoid copying a file onto itself.
    if *destination != *report_path {
        std::fs::copy(report_path, destination)?;
        TerminalReporter::print_success(&format!(
            "Report saved to: {}",
            destination.display()
        ));
    }
    Ok(())
}
async fn execute_multi_target_conformance(&self, targets_file: &Path) -> Result<()> {
use crate::conformance::generator::{ConformanceConfig, ConformanceGenerator};
use crate::conformance::report::ConformanceReport;
use crate::conformance::spec::ConformanceFeature;
TerminalReporter::print_progress("Multi-target OpenAPI 3.0.0 Conformance Testing Mode");
TerminalReporter::print_progress("Parsing targets file...");
let targets = parse_targets_file(targets_file)?;
let num_targets = targets.len();
TerminalReporter::print_success(&format!("Loaded {} targets", num_targets));
if targets.is_empty() {
return Err(BenchError::Other("No targets found in file".to_string()));
}
TerminalReporter::print_progress(
"Conformance mode runs 1 VU, 1 iteration per endpoint (--vus and -d are ignored)",
);
let categories = self.conformance_categories.as_ref().map(|cats_str| {
cats_str
.split(',')
.filter_map(|s| {
let trimmed = s.trim();
if let Some(canonical) = ConformanceFeature::category_from_cli_name(trimmed) {
Some(canonical.to_string())
} else {
TerminalReporter::print_warning(&format!(
"Unknown conformance category: '{}'. Valid categories: {}",
trimmed,
ConformanceFeature::cli_category_names()
.iter()
.map(|(cli, _)| *cli)
.collect::<Vec<_>>()
.join(", ")
));
None
}
})
.collect::<Vec<String>>()
});
let base_custom_headers: Vec<(String, String)> = self
.conformance_headers
.iter()
.filter_map(|h| {
let (name, value) = h.split_once(':')?;
Some((name.trim().to_string(), value.trim().to_string()))
})
.collect();
if !base_custom_headers.is_empty() {
TerminalReporter::print_progress(&format!(
"Using {} base custom header(s) for authentication",
base_custom_headers.len()
));
}
let annotated_ops = if !self.spec.is_empty() {
TerminalReporter::print_progress("Spec-driven conformance mode: analyzing spec...");
let parser = SpecParser::from_file(&self.spec[0]).await?;
let operations = parser.get_operations();
let annotated =
crate::conformance::spec_driven::SpecDrivenConformanceGenerator::annotate_operations(
&operations,
parser.spec(),
);
TerminalReporter::print_success(&format!(
"Analyzed {} operations, found {} feature annotations",
operations.len(),
annotated.iter().map(|a| a.features.len()).sum::<usize>()
));
Some(annotated)
} else {
None
};
std::fs::create_dir_all(&self.output)?;
struct TargetResult {
url: String,
passed: usize,
failed: usize,
elapsed: std::time::Duration,
report_json: serde_json::Value,
owasp_coverage: Vec<crate::conformance::report::OwaspCoverageEntry>,
}
let mut target_results: Vec<TargetResult> = Vec::with_capacity(num_targets);
let total_start = std::time::Instant::now();
for (idx, target) in targets.iter().enumerate() {
tracing::info!(
"Running conformance tests against target {}/{}: {}",
idx + 1,
num_targets,
target.url
);
TerminalReporter::print_progress(&format!(
"\n--- Target {}/{}: {} ---",
idx + 1,
num_targets,
target.url
));
let mut merged_headers = base_custom_headers.clone();
if let Some(ref target_headers) = target.headers {
for (name, value) in target_headers {
if let Some(existing) = merged_headers.iter_mut().find(|(n, _)| n == name) {
existing.1 = value.clone();
} else {
merged_headers.push((name.clone(), value.clone()));
}
}
}
if let Some(ref auth) = target.auth {
if let Some(existing) =
merged_headers.iter_mut().find(|(n, _)| n.eq_ignore_ascii_case("Authorization"))
{
existing.1 = auth.clone();
} else {
merged_headers.push(("Authorization".to_string(), auth.clone()));
}
}
let target_dir = self.output.join(format!("target_{}", idx));
std::fs::create_dir_all(&target_dir)?;
let config = ConformanceConfig {
target_url: target.url.clone(),
api_key: self.conformance_api_key.clone(),
basic_auth: self.conformance_basic_auth.clone(),
skip_tls_verify: self.skip_tls_verify,
categories: categories.clone(),
base_path: self.base_path.clone(),
custom_headers: merged_headers,
output_dir: Some(target_dir.clone()),
all_operations: self.conformance_all_operations,
custom_checks_file: self.conformance_custom.clone(),
request_delay_ms: self.conformance_delay_ms,
};
let target_start = std::time::Instant::now();
let report = if self.use_k6 {
if !K6Executor::is_k6_installed() {
TerminalReporter::print_error("k6 is not installed");
TerminalReporter::print_warning(
"Install k6 from: https://k6.io/docs/get-started/installation/",
);
return Err(BenchError::K6NotFound);
}
let script = if let Some(ref annotated) = annotated_ops {
let gen = crate::conformance::spec_driven::SpecDrivenConformanceGenerator::new(
config.clone(),
annotated.clone(),
);
let (script, _check_count) = gen.generate()?;
script
} else {
let generator = ConformanceGenerator::new(config.clone());
generator.generate()?
};
let script_path = target_dir.join("k6-conformance.js");
std::fs::write(&script_path, &script).map_err(|e| {
BenchError::Other(format!("Failed to write conformance script: {}", e))
})?;
TerminalReporter::print_success(&format!(
"Conformance script generated: {}",
script_path.display()
));
TerminalReporter::print_progress(&format!(
"Running conformance tests via k6 against {}...",
target.url
));
let k6 = K6Executor::new()?;
let api_port = 6565u16.saturating_add(idx as u16);
k6.execute_with_port(&script_path, Some(&target_dir), self.verbose, Some(api_port))
.await?;
let report_path = target_dir.join("conformance-report.json");
if report_path.exists() {
ConformanceReport::from_file(&report_path)?
} else {
TerminalReporter::print_warning(&format!(
"Conformance report not generated for target {} (k6 handleSummary may not have run)",
target.url
));
continue;
}
} else {
let mut executor =
crate::conformance::executor::NativeConformanceExecutor::new(config)?;
executor = if let Some(ref annotated) = annotated_ops {
executor.with_spec_driven_checks(annotated)
} else {
executor.with_reference_checks()
};
executor = executor.with_custom_checks()?;
TerminalReporter::print_success(&format!(
"Executing {} conformance checks against {}...",
executor.check_count(),
target.url
));
executor.execute().await?
};
let target_elapsed = target_start.elapsed();
let report_json = report.to_json();
let passed = report_json["summary"]["passed"].as_u64().unwrap_or(0) as usize;
let failed = report_json["summary"]["failed"].as_u64().unwrap_or(0) as usize;
let total_checks = passed + failed;
let rate = if total_checks == 0 {
0.0
} else {
(passed as f64 / total_checks as f64) * 100.0
};
TerminalReporter::print_success(&format!(
"Target {}: {}/{} passed ({:.1}%) in {:.1}s",
target.url,
passed,
total_checks,
rate,
target_elapsed.as_secs_f64()
));
let target_report_path = target_dir.join("conformance-report.json");
let report_str = serde_json::to_string_pretty(&report_json)
.map_err(|e| BenchError::Other(format!("Failed to serialize report: {}", e)))?;
std::fs::write(&target_report_path, &report_str)
.map_err(|e| BenchError::Other(format!("Failed to write report: {}", e)))?;
let failure_details = report.failure_details();
if !failure_details.is_empty() {
let details_path = target_dir.join("conformance-failure-details.json");
if let Ok(json) = serde_json::to_string_pretty(&failure_details) {
let _ = std::fs::write(&details_path, json);
}
}
let owasp_coverage = report.owasp_coverage_data();
target_results.push(TargetResult {
url: target.url.clone(),
passed,
failed,
elapsed: target_elapsed,
report_json,
owasp_coverage,
});
}
let total_elapsed = total_start.elapsed();
println!("\n{}", "=".repeat(80));
println!(" Multi-Target Conformance Summary");
println!("{}", "=".repeat(80));
println!(
" {:<40} {:>8} {:>8} {:>8} {:>8}",
"Target URL", "Passed", "Failed", "Rate", "Time"
);
println!(" {}", "-".repeat(76));
let mut total_passed = 0usize;
let mut total_failed = 0usize;
for result in &target_results {
let total_checks = result.passed + result.failed;
let rate = if total_checks == 0 {
0.0
} else {
(result.passed as f64 / total_checks as f64) * 100.0
};
let display_url = if result.url.len() > 38 {
format!("{}...", &result.url[..35])
} else {
result.url.clone()
};
println!(
" {:<40} {:>8} {:>8} {:>7.1}% {:>6.1}s",
display_url,
result.passed,
result.failed,
rate,
result.elapsed.as_secs_f64()
);
total_passed += result.passed;
total_failed += result.failed;
}
let grand_total = total_passed + total_failed;
let overall_rate = if grand_total == 0 {
0.0
} else {
(total_passed as f64 / grand_total as f64) * 100.0
};
println!(" {}", "-".repeat(76));
println!(
" {:<40} {:>8} {:>8} {:>7.1}% {:>6.1}s",
format!("TOTAL ({} targets)", num_targets),
total_passed,
total_failed,
overall_rate,
total_elapsed.as_secs_f64()
);
println!("{}", "=".repeat(80));
for result in &target_results {
println!("\n OWASP API Security Top 10 Coverage for {}:", result.url);
for entry in &result.owasp_coverage {
let status = if !entry.tested {
"-"
} else if entry.all_passed {
"pass"
} else {
"FAIL"
};
let via = if entry.via_categories.is_empty() {
String::new()
} else {
format!(" (via {})", entry.via_categories.join(", "))
};
println!(" {:<12} {:<40} {}{}", entry.id, entry.name, status, via);
}
}
let per_target_summaries: Vec<serde_json::Value> = target_results
.iter()
.enumerate()
.map(|(idx, r)| {
let total_checks = r.passed + r.failed;
let rate = if total_checks == 0 {
0.0
} else {
(r.passed as f64 / total_checks as f64) * 100.0
};
let owasp_json: Vec<serde_json::Value> = r
.owasp_coverage
.iter()
.map(|e| {
serde_json::json!({
"id": e.id,
"name": e.name,
"tested": e.tested,
"all_passed": e.all_passed,
"via_categories": e.via_categories,
})
})
.collect();
serde_json::json!({
"target_url": r.url,
"target_index": idx,
"checks_passed": r.passed,
"checks_failed": r.failed,
"total_checks": total_checks,
"pass_rate": rate,
"elapsed_seconds": r.elapsed.as_secs_f64(),
"report": r.report_json,
"owasp_coverage": owasp_json,
})
})
.collect();
let combined_summary = serde_json::json!({
"total_targets": num_targets,
"total_checks_passed": total_passed,
"total_checks_failed": total_failed,
"overall_pass_rate": overall_rate,
"total_elapsed_seconds": total_elapsed.as_secs_f64(),
"targets": per_target_summaries,
});
let summary_path = self.output.join("multi-target-conformance-summary.json");
let summary_str = serde_json::to_string_pretty(&combined_summary)
.map_err(|e| BenchError::Other(format!("Failed to serialize summary: {}", e)))?;
std::fs::write(&summary_path, &summary_str)
.map_err(|e| BenchError::Other(format!("Failed to write summary: {}", e)))?;
TerminalReporter::print_success(&format!(
"Combined summary saved to: {}",
summary_path.display()
));
Ok(())
}
/// Runs the OWASP API Security Top 10 testing mode.
///
/// Builds an `OwaspApiConfig` from the CLI flags, generates a k6 security
/// test script via `OwaspApiGenerator`, writes it to disk, and — unless
/// `--generate-only` was requested — executes it with k6 and prints a
/// summary of the results.
async fn execute_owasp_test(&self, parser: &SpecParser) -> Result<()> {
    TerminalReporter::print_progress("OWASP API Security Top 10 Testing Mode");
    let extra_headers = self.parse_headers()?;
    // Base configuration derived from the shared CLI flags.
    let mut config = OwaspApiConfig::new()
        .with_auth_header(&self.owasp_auth_header)
        .with_verbose(self.verbose)
        .with_insecure(self.skip_tls_verify)
        .with_concurrency(self.vus as usize)
        .with_iterations(self.owasp_iterations as usize)
        .with_base_path(self.base_path.clone())
        .with_custom_headers(extra_headers);
    if let Some(ref token) = self.owasp_auth_token {
        config = config.with_valid_auth_token(token);
    }
    // Optional category filter: parse the comma-separated list, warning on
    // (and skipping) any entries that fail to parse. The filter is only
    // applied when at least one entry parsed successfully.
    if let Some(ref cats_str) = self.owasp_categories {
        let mut selected: Vec<OwaspCategory> = Vec::new();
        for raw in cats_str.split(',') {
            match raw.trim().parse::<OwaspCategory>() {
                Ok(cat) => selected.push(cat),
                Err(e) => TerminalReporter::print_warning(&e),
            }
        }
        if !selected.is_empty() {
            config = config.with_categories(selected);
        }
    }
    // Optional admin-paths file; a load failure is non-fatal (warn only).
    if let Some(ref admin_paths_file) = self.owasp_admin_paths {
        config.admin_paths_file = Some(admin_paths_file.clone());
        if let Err(e) = config.load_admin_paths() {
            TerminalReporter::print_warning(&format!("Failed to load admin paths file: {}", e));
        }
    }
    // Optional override of the ID field names used by the security probes.
    if let Some(ref id_fields_str) = self.owasp_id_fields {
        let fields: Vec<String> = id_fields_str
            .split(',')
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(str::to_string)
            .collect();
        if !fields.is_empty() {
            config = config.with_id_fields(fields);
        }
    }
    if let Some(ref report_path) = self.owasp_report {
        config = config.with_report_path(report_path);
    }
    // An unparseable report format silently keeps the config default.
    if let Ok(format) = self.owasp_report_format.parse::<ReportFormat>() {
        config = config.with_report_format(format);
    }
    let categories = config.categories_to_test();
    let category_names = categories.iter().map(|c| c.cli_name()).collect::<Vec<_>>().join(", ");
    TerminalReporter::print_success(&format!(
        "Testing {} OWASP categories: {}",
        categories.len(),
        category_names
    ));
    if config.valid_auth_token.is_some() {
        TerminalReporter::print_progress("Using provided auth token for baseline requests");
    }
    TerminalReporter::print_progress("Generating OWASP security test script...");
    let generator = OwaspApiGenerator::new(config, self.target.clone(), parser);
    let script = generator.generate()?;
    TerminalReporter::print_success("OWASP security test script generated");
    // An explicit --script-output path wins; otherwise the script lands in
    // the output directory under a conventional name.
    let script_path = self
        .script_output
        .clone()
        .unwrap_or_else(|| self.output.join("k6-owasp-security-test.js"));
    if let Some(parent) = script_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    std::fs::write(&script_path, &script)?;
    TerminalReporter::print_success(&format!("Script written to: {}", script_path.display()));
    if self.generate_only {
        println!("\nOWASP security test script generated. Run it with:");
        println!(" k6 run {}", script_path.display());
        return Ok(());
    }
    TerminalReporter::print_progress("Executing OWASP security tests...");
    let executor = K6Executor::new()?;
    std::fs::create_dir_all(&self.output)?;
    let results = executor.execute(&script_path, Some(&self.output), self.verbose).await?;
    let duration_secs = Self::parse_duration(&self.duration)?;
    TerminalReporter::print_summary(&results, duration_secs);
    println!("\nOWASP security test results saved to: {}", self.output.display());
    Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    /// Builds a `BenchCommand` populated with the defaults shared by the
    /// tests below, varying only the spec list and the raw `headers` string.
    /// Centralizing the construction removes the previously triplicated
    /// ~60-field struct literal and keeps the tests in sync when fields
    /// are added to `BenchCommand`.
    fn test_command(spec: Vec<PathBuf>, headers: Option<String>) -> BenchCommand {
        BenchCommand {
            spec,
            spec_dir: None,
            merge_conflicts: "error".to_string(),
            spec_mode: "merge".to_string(),
            dependency_config: None,
            target: "http://localhost".to_string(),
            base_path: None,
            duration: "1m".to_string(),
            vus: 10,
            scenario: "ramp-up".to_string(),
            operations: None,
            exclude_operations: None,
            auth: None,
            headers,
            output: PathBuf::from("output"),
            generate_only: false,
            script_output: None,
            threshold_percentile: "p(95)".to_string(),
            threshold_ms: 500,
            max_error_rate: 0.05,
            verbose: false,
            skip_tls_verify: false,
            targets_file: None,
            max_concurrency: None,
            results_format: "both".to_string(),
            params_file: None,
            crud_flow: false,
            flow_config: None,
            extract_fields: None,
            parallel_create: None,
            data_file: None,
            data_distribution: "unique-per-vu".to_string(),
            data_mappings: None,
            per_uri_control: false,
            error_rate: None,
            error_types: None,
            security_test: false,
            security_payloads: None,
            security_categories: None,
            security_target_fields: None,
            wafbench_dir: None,
            wafbench_cycle_all: false,
            owasp_api_top10: false,
            owasp_categories: None,
            owasp_auth_header: "Authorization".to_string(),
            owasp_auth_token: None,
            owasp_admin_paths: None,
            owasp_id_fields: None,
            owasp_report: None,
            owasp_report_format: "json".to_string(),
            owasp_iterations: 1,
            conformance: false,
            conformance_api_key: None,
            conformance_basic_auth: None,
            conformance_report: PathBuf::from("conformance-report.json"),
            conformance_categories: None,
            conformance_report_format: "json".to_string(),
            conformance_headers: vec![],
            conformance_all_operations: false,
            conformance_custom: None,
            conformance_delay_ms: 0,
            use_k6: false,
        }
    }

    /// Accepts seconds/minutes/hours suffixes and bare numbers (seconds).
    #[test]
    fn test_parse_duration() {
        assert_eq!(BenchCommand::parse_duration("30s").unwrap(), 30);
        assert_eq!(BenchCommand::parse_duration("5m").unwrap(), 300);
        assert_eq!(BenchCommand::parse_duration("1h").unwrap(), 3600);
        assert_eq!(BenchCommand::parse_duration("60").unwrap(), 60);
    }

    /// Rejects non-numeric input and unknown unit suffixes.
    #[test]
    fn test_parse_duration_invalid() {
        assert!(BenchCommand::parse_duration("invalid").is_err());
        assert!(BenchCommand::parse_duration("30x").is_err());
    }

    /// A comma-separated `Key:Value` list is split into a header map.
    #[test]
    fn test_parse_headers() {
        let cmd = test_command(
            vec![PathBuf::from("test.yaml")],
            Some("X-API-Key:test123,X-Client-ID:client456".to_string()),
        );
        let headers = cmd.parse_headers().unwrap();
        assert_eq!(headers.get("X-API-Key"), Some(&"test123".to_string()));
        assert_eq!(headers.get("X-Client-ID"), Some(&"client456".to_string()));
    }

    /// A single spec shows its file name; multiple specs show a count.
    #[test]
    fn test_get_spec_display_name() {
        let cmd = test_command(vec![PathBuf::from("test.yaml")], None);
        assert_eq!(cmd.get_spec_display_name(), "test.yaml");

        let cmd_multi =
            test_command(vec![PathBuf::from("a.yaml"), PathBuf::from("b.yaml")], None);
        assert_eq!(cmd_multi.get_spec_display_name(), "2 spec files");
    }

    /// `extracted_values.json` in the output dir is parsed into typed JSON
    /// values, preserving strings, numbers, booleans, and nested objects.
    #[test]
    fn test_parse_extracted_values_from_output_dir() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("extracted_values.json");
        std::fs::write(
            &path,
            r#"{
 "pool_id": "abc123",
 "count": 0,
 "enabled": false,
 "metadata": { "owner": "team-a" }
}"#,
        )
        .unwrap();
        let extracted = BenchCommand::parse_extracted_values(dir.path()).unwrap();
        assert_eq!(extracted.get("pool_id"), Some(&serde_json::json!("abc123")));
        assert_eq!(extracted.get("count"), Some(&serde_json::json!(0)));
        assert_eq!(extracted.get("enabled"), Some(&serde_json::json!(false)));
        assert_eq!(extracted.get("metadata"), Some(&serde_json::json!({"owner": "team-a"})));
    }

    /// A missing extracted-values file yields an empty (not erroring) set.
    #[test]
    fn test_parse_extracted_values_missing_file() {
        let dir = tempdir().unwrap();
        let extracted = BenchCommand::parse_extracted_values(dir.path()).unwrap();
        assert!(extracted.values.is_empty());
    }
}