use httpmock::Method::GET;
use httpmock::{Mock, MockRef, MockServer};
use serial_test::serial;
use std::fmt;
mod common;
use goose::prelude::*;
// Paths registered on the mock server.
const INDEX_PATH: &str = "/";
const ERROR_PATH: &str = "/error";
// Indexes into the Vec<MockRef> returned by setup_mock_server_endpoints().
const INDEX_KEY: usize = 0;
const ERROR_KEY: usize = 1;
// Number of Gaggle worker processes the manager expects in distributed tests.
const EXPECT_WORKERS: usize = 2;
/// Which log files a test run enables and subsequently validates.
enum TestType {
    /// Only the metrics log is enabled.
    Metrics,
    /// Only the debug log is enabled.
    Debug,
    /// Both the metrics and the debug logs are enabled.
    MetricsAndDebug,
}
impl fmt::Display for TestType {
    /// Render the test type as the kebab-case token used to build log
    /// file names (e.g. "metrics-and-debug").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TestType::Metrics => write!(f, "metrics"),
            TestType::Debug => write!(f, "debug"),
            TestType::MetricsAndDebug => write!(f, "metrics-and-debug"),
        }
    }
}
/// Goose task: load the index path. The response body is ignored; any
/// request error propagates to Goose via `?`.
pub async fn get_index(user: &GooseUser) -> GooseTaskResult {
    let _response = user.get(INDEX_PATH).await?;
    Ok(())
}
/// Goose task: load the error path and record a failure when the server
/// responds with a non-success status code (the mock returns 503), which
/// generates entries for the debug log.
pub async fn get_error(user: &GooseUser) -> GooseTaskResult {
    let mut goose = user.get(ERROR_PATH).await?;
    if let Ok(response) = goose.response {
        // Clone the headers into an owned value (instead of borrowing a
        // cloned temporary) so they can be attached to the failure record
        // after the status check.
        let headers = response.headers().clone();
        if !response.status().is_success() {
            return user.set_failure(
                "loaded /error and got non-200 message",
                &mut goose.request,
                Some(&headers),
                None,
            );
        }
    }
    Ok(())
}
/// Register the two endpoints the load test hits: the index (200) and an
/// error path (503). Returned in INDEX_KEY / ERROR_KEY order.
fn setup_mock_server_endpoints(server: &MockServer) -> Vec<MockRef> {
    vec![
        // INDEX_KEY: always succeeds.
        Mock::new()
            .expect_method(GET)
            .expect_path(INDEX_PATH)
            .return_status(200)
            .create_on(server),
        // ERROR_KEY: always fails, feeding the debug log.
        Mock::new()
            .expect_method(GET)
            .expect_path(ERROR_PATH)
            .return_status(503)
            .create_on(server),
    ]
}
/// Build the task set shared by every test: one task loads the index and
/// one loads the error path.
fn get_tasks() -> GooseTaskSet {
    let task_set = taskset!("LoadTest");
    task_set
        .register_task(task!(get_index))
        .register_task(task!(get_error))
}
/// Confirm every file in `log_files` exists and that together they contain
/// at least one line of logged data.
fn validate_log_files(log_files: &[String]) {
    assert!(!log_files.is_empty());
    let mut total_lines = 0;
    for log_file in log_files {
        assert!(std::path::Path::new(log_file).exists());
        total_lines += common::file_length(log_file);
    }
    // In Gaggle mode any single worker's file may be empty, but collectively
    // the files must contain log entries.
    assert!(total_lines > 0);
}

/// Validate that the load test hit both mock endpoints, ran for the
/// configured duration, and wrote the log files enabled by `test_type`.
fn validate_test(
    goose_metrics: GooseMetrics,
    mock_endpoints: &[MockRef],
    test_type: &TestType,
    metrics_files: &[String],
    debug_files: &[String],
) {
    // Both endpoints must have been requested at least once.
    assert!(mock_endpoints[INDEX_KEY].times_called() > 0);
    assert!(mock_endpoints[ERROR_KEY].times_called() > 0);
    // Every test is launched with `--run-time 2`.
    assert_eq!(goose_metrics.duration, 2);
    // Only validate the log files this test variant actually enabled.
    match test_type {
        TestType::Debug => validate_log_files(debug_files),
        TestType::Metrics => validate_log_files(metrics_files),
        TestType::MetricsAndDebug => {
            validate_log_files(debug_files);
            validate_log_files(metrics_files);
        }
    }
}
/// Run a single-process load test with the log flags selected by
/// `test_type`, then validate and remove the resulting log files.
fn run_standalone_test(test_type: TestType, format: &str) {
    // Name log files after the test type and format so runs don't collide.
    let metrics_file = format!("{}-metrics-log.{}", test_type, format);
    let debug_file = format!("{}-debug-log.{}", test_type, format);

    let server = MockServer::start();
    let mock_endpoints = setup_mock_server_endpoints(&server);

    // Enable only the logging flags this test variant exercises.
    let mut flags = match test_type {
        TestType::Debug => vec!["--debug-file", &debug_file, "--debug-format", format],
        TestType::Metrics => vec!["--metrics-file", &metrics_file, "--metrics-format", format],
        TestType::MetricsAndDebug => vec![
            "--metrics-file",
            &metrics_file,
            "--metrics-format",
            format,
            "--debug-file",
            &debug_file,
            "--debug-format",
            format,
        ],
    };
    // Common load parameters: 4 users hatched at 4/sec, running for 2 seconds.
    flags.extend(vec!["--users", "4", "--hatch-rate", "4", "--run-time", "2"]);

    let configuration = common::build_configuration(&server, flags);
    let goose_metrics = common::run_load_test(
        common::build_load_test(configuration, &get_tasks(), None, None),
        None,
    );

    validate_test(
        goose_metrics,
        &mock_endpoints,
        &test_type,
        &[metrics_file.clone()],
        &[debug_file.clone()],
    );

    common::cleanup_files(vec![&metrics_file, &debug_file]);
}
/// Run a distributed (Gaggle) load test with EXPECT_WORKERS worker threads
/// and a manager, then validate and remove the per-worker log files.
fn run_gaggle_test(test_type: TestType, format: &str) {
    // Base log file names; each worker appends its index (see loop below).
    let metrics_file = test_type.to_string() + "-gaggle-metrics-log." + format;
    let debug_file = test_type.to_string() + "-gaggle-debug-log." + format;
    let server = MockServer::start();
    let mock_endpoints = setup_mock_server_endpoints(&server);
    let mut worker_handles = Vec::new();
    // Collect every per-worker log file so validate_test() can check them all.
    let mut metrics_files = Vec::new();
    let mut debug_files = Vec::new();
    for i in 0..EXPECT_WORKERS {
        // Give each worker unique file names so workers don't clobber
        // each other's logs.
        let worker_metrics_file = metrics_file.clone() + &i.to_string();
        let worker_debug_file = debug_file.clone() + &i.to_string();
        metrics_files.push(worker_metrics_file.clone());
        debug_files.push(worker_debug_file.clone());
        // Enable only the log flags relevant to this test variant.
        let worker_configuration_flags = match test_type {
            TestType::Debug => vec![
                "--worker",
                "--debug-file",
                &worker_debug_file,
                "--debug-format",
                format,
            ],
            TestType::Metrics => vec![
                "--worker",
                "--metrics-file",
                &worker_metrics_file,
                "--metrics-format",
                format,
            ],
            TestType::MetricsAndDebug => vec![
                "--worker",
                "--metrics-file",
                &worker_metrics_file,
                "--metrics-format",
                format,
                "--debug-file",
                &worker_debug_file,
                "--debug-format",
                format,
            ],
        };
        let worker_configuration = common::build_configuration(&server, worker_configuration_flags);
        let worker_goose_attack =
            common::build_load_test(worker_configuration.clone(), &get_tasks(), None, None);
        // Each worker runs in its own thread; the manager run joins them later.
        worker_handles.push(std::thread::spawn(move || {
            common::run_load_test(worker_goose_attack, None);
        }));
    }
    // The manager coordinates the workers and aggregates their metrics; the
    // user/hatch-rate/run-time flags apply to the distributed test as a whole.
    let manager_configuration = common::build_configuration(
        &server,
        vec![
            "--manager",
            "--expect-workers",
            &EXPECT_WORKERS.to_string(),
            "--users",
            "4",
            "--hatch-rate",
            "4",
            "--run-time",
            "2",
        ],
    );
    let manager_goose_attack =
        common::build_load_test(manager_configuration, &get_tasks(), None, None);
    let goose_metrics = common::run_load_test(manager_goose_attack, Some(worker_handles));
    validate_test(
        goose_metrics,
        &mock_endpoints,
        &test_type,
        &metrics_files,
        &debug_files,
    );
    // Remove the per-worker log files created by this test.
    for file in metrics_files {
        common::cleanup_files(vec![&file]);
    }
    for file in debug_files {
        common::cleanup_files(vec![&file]);
    }
}
// Standalone: metrics log written in JSON format.
#[test]
fn test_metrics_logs_json() {
    run_standalone_test(TestType::Metrics, "json");
}
// Gaggle: metrics log written in JSON format. Runs serially via #[serial]
// and only when the "gaggle" feature is enabled.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_metrics_logs_json_gaggle() {
    run_gaggle_test(TestType::Metrics, "json");
}
// Standalone: metrics log written in CSV format.
#[test]
fn test_metrics_logs_csv() {
    run_standalone_test(TestType::Metrics, "csv");
}
// Gaggle: metrics log written in CSV format. Serialized; gaggle feature only.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_metrics_logs_csv_gaggle() {
    run_gaggle_test(TestType::Metrics, "csv");
}
// Standalone: metrics log written in raw format.
#[test]
fn test_metrics_logs_raw() {
    run_standalone_test(TestType::Metrics, "raw");
}
// Gaggle: metrics log written in raw format. Serialized; gaggle feature only.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_metrics_logs_raw_gaggle() {
    run_gaggle_test(TestType::Metrics, "raw");
}
// Standalone: debug log written in raw format.
#[test]
fn test_debug_logs_raw() {
    run_standalone_test(TestType::Debug, "raw");
}
// Gaggle: debug log written in raw format. Serialized; gaggle feature only.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_debug_logs_raw_gaggle() {
    run_gaggle_test(TestType::Debug, "raw");
}
// Standalone: debug log written in JSON format.
#[test]
fn test_debug_logs_json() {
    run_standalone_test(TestType::Debug, "json");
}
// Gaggle: debug log written in JSON format. Serialized; gaggle feature only.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_debug_logs_json_gaggle() {
    run_gaggle_test(TestType::Debug, "json");
}
// Standalone: both metrics and debug logs, raw format.
#[test]
fn test_metrics_and_debug_logs() {
    run_standalone_test(TestType::MetricsAndDebug, "raw");
}
// Gaggle: both metrics and debug logs, raw format. Serialized; gaggle
// feature only.
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_metrics_and_debug_logs_gaggle() {
    run_gaggle_test(TestType::MetricsAndDebug, "raw");
}