use httpmock::{Method::GET, MockRef, MockServer};
use serial_test::serial;
use std::fmt;
mod common;
use swanling::prelude::*;
const INDEX_PATH: &str = "/";
const ERROR_PATH: &str = "/error";
const INDEX_KEY: usize = 0;
const ERROR_KEY: usize = 1;
const EXPECT_WORKERS: usize = 2;
/// Which log stream(s) a test run enables and then validates.
enum TestType {
    /// Only the request log.
    Requests,
    /// Only the task log.
    Tasks,
    /// Only the error log.
    Error,
    /// Only the debug log.
    Debug,
    /// All four logs at once.
    All,
}
/// The log file paths produced by one test run, grouped by log kind.
/// Standalone runs have one file per kind; gaggle runs have one per worker.
struct LogFiles<'a> {
    request_logs: &'a [String],
    task_logs: &'a [String],
    error_logs: &'a [String],
    debug_logs: &'a [String],
}
impl fmt::Display for TestType {
    /// Renders the test type as its lower-case name, which is used as the
    /// prefix for the log file names each test creates.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(match self {
            TestType::Requests => "requests",
            TestType::Tasks => "tasks",
            TestType::Error => "error",
            TestType::Debug => "debug",
            TestType::All => "all",
        })
    }
}
/// Task that loads the index path; the response itself is not inspected.
pub async fn get_index(user: &SwanlingUser) -> SwanlingTaskResult {
    user.get(INDEX_PATH).await?;
    Ok(())
}
/// Task that loads the error path, which the mock server always answers
/// with a 503, and flags the request as a failure so it is written to the
/// error (and debug) logs.
pub async fn get_error(user: &SwanlingUser) -> SwanlingTaskResult {
    let mut swanling = user.get(ERROR_PATH).await?;
    // `swanling.response` is moved out here; `swanling.request` stays
    // available for the `&mut` borrow passed to `set_failure` below.
    if let Ok(r) = swanling.response {
        // Clone the headers before `r` is consumed so they can be attached
        // to the failure report.
        let headers = &r.headers().clone();
        let status_code = r.status();
        if !status_code.is_success() {
            return user.set_failure(
                "loaded /error and got non-200 message",
                &mut swanling.request,
                Some(headers),
                None,
            );
        }
    }
    Ok(())
}
/// Registers the two endpoints the load test hits and returns their mocks
/// in a fixed order: `INDEX_KEY` then `ERROR_KEY`.
fn setup_mock_server_endpoints(server: &MockServer) -> Vec<MockRef> {
    // The index endpoint always succeeds.
    let index_endpoint = server.mock(|when, then| {
        when.method(GET).path(INDEX_PATH);
        then.status(200);
    });
    // The error endpoint always fails with a 503, exercising the error path.
    let error_endpoint = server.mock(|when, then| {
        when.method(GET).path(ERROR_PATH);
        then.status(503);
    });
    vec![index_endpoint, error_endpoint]
}
/// Builds the task set used by every test: one task that loads the index
/// page and one that loads the always-failing error page.
fn get_tasks() -> SwanlingTaskSet {
    taskset!("LoadTest")
        .register_task(task!(get_index))
        .register_task(task!(get_error))
}
fn validate_test(
swanling_metrics: SwanlingMetrics,
mock_endpoints: &[MockRef],
test_type: &TestType,
log_files: &LogFiles,
) {
assert!(mock_endpoints[INDEX_KEY].hits() > 0);
assert!(mock_endpoints[ERROR_KEY].hits() > 0);
assert!(swanling_metrics.duration == 2);
match test_type {
TestType::Debug => {
assert!(!log_files.debug_logs.is_empty());
let mut debug_file_lines = 0;
for debug_file in log_files.debug_logs {
assert!(std::path::Path::new(debug_file).exists());
debug_file_lines += common::file_length(debug_file);
}
assert!(debug_file_lines > 0);
}
TestType::Requests => {
assert!(!log_files.request_logs.is_empty());
let mut requests_file_lines = 0;
for request_log in log_files.request_logs {
assert!(std::path::Path::new(request_log).exists());
requests_file_lines += common::file_length(request_log);
}
assert!(requests_file_lines > 0);
}
TestType::Tasks => {
assert!(!log_files.task_logs.is_empty());
let mut tasks_file_lines = 0;
for tasks_file in log_files.task_logs {
assert!(std::path::Path::new(tasks_file).exists());
tasks_file_lines += common::file_length(tasks_file);
}
assert!(tasks_file_lines > 0);
}
TestType::Error => {
assert!(!log_files.error_logs.is_empty());
let mut error_file_lines = 0;
for error_file in log_files.error_logs {
assert!(std::path::Path::new(error_file).exists());
error_file_lines += common::file_length(error_file);
}
assert!(error_file_lines > 0);
}
TestType::All => {
assert!(!log_files.debug_logs.is_empty());
assert!(!log_files.error_logs.is_empty());
assert!(!log_files.request_logs.is_empty());
assert!(!log_files.task_logs.is_empty());
let mut debug_file_lines = 0;
for debug_log in log_files.debug_logs {
assert!(std::path::Path::new(debug_log).exists());
debug_file_lines += common::file_length(debug_log);
}
assert!(debug_file_lines > 0);
let mut error_file_lines = 0;
for error_log in log_files.error_logs {
assert!(std::path::Path::new(error_log).exists());
error_file_lines += common::file_length(error_log);
}
assert!(error_file_lines > 0);
let mut requests_file_lines = 0;
for request_log in log_files.request_logs {
assert!(std::path::Path::new(request_log).exists());
requests_file_lines += common::file_length(request_log);
}
assert!(requests_file_lines > 0);
let mut tasks_file_lines = 0;
for tasks_log in log_files.task_logs {
assert!(std::path::Path::new(tasks_log).exists());
tasks_file_lines += common::file_length(tasks_log);
}
assert!(tasks_file_lines > 0);
}
}
}
/// Runs a standalone (non-gaggle) load test writing the log file(s)
/// selected by `test_type` in the given `format` ("json", "csv" or "raw"),
/// validates the results, and removes the files.
fn run_standalone_test(test_type: TestType, format: &str) {
    // Log names embed the test type and format so concurrent tests in the
    // same directory never collide.
    let request_log = format!("{}-request-log.{}", test_type, format);
    let task_log = format!("{}-task-log.{}", test_type, format);
    let debug_log = format!("{}-debug-log.{}", test_type, format);
    let error_log = format!("{}-error-log.{}", test_type, format);
    let server = MockServer::start();
    let mock_endpoints = setup_mock_server_endpoints(&server);
    // Enable only the log flags relevant to the test type being validated.
    let mut configuration_flags = match test_type {
        TestType::Debug => vec!["--debug-log", &debug_log, "--debug-format", format],
        TestType::Error => vec!["--error-log", &error_log, "--error-format", format],
        TestType::Requests => vec!["--request-log", &request_log, "--request-format", format],
        TestType::Tasks => vec!["--task-log", &task_log, "--task-format", format],
        TestType::All => vec![
            "--request-log",
            &request_log,
            "--request-format",
            format,
            "--task-log",
            &task_log,
            "--task-format",
            format,
            "--error-log",
            &error_log,
            "--error-format",
            format,
            "--debug-log",
            &debug_log,
            "--debug-format",
            format,
        ],
    };
    configuration_flags.extend(vec!["--users", "4", "--hatch-rate", "4", "--run-time", "2"]);
    let configuration = common::build_configuration(&server, configuration_flags);
    let swanling_metrics = common::run_load_test(
        common::build_load_test(configuration, &get_tasks(), None, None),
        None,
    );
    // A standalone run produces exactly one file per log kind; bind the
    // single-element arrays so the `LogFiles` borrows have a clear owner.
    let request_logs = [request_log.clone()];
    let task_logs = [task_log.clone()];
    let error_logs = [error_log.clone()];
    let debug_logs = [debug_log.clone()];
    let log_files = LogFiles {
        request_logs: &request_logs,
        task_logs: &task_logs,
        error_logs: &error_logs,
        debug_logs: &debug_logs,
    };
    validate_test(swanling_metrics, &mock_endpoints, &test_type, &log_files);
    common::cleanup_files(vec![&request_log, &task_log, &error_log, &debug_log]);
}
/// Runs a distributed (gaggle) load test with one manager and
/// `EXPECT_WORKERS` worker threads, each worker writing the log file(s)
/// selected by `test_type` in the given `format`, then validates the
/// results and removes every per-worker file.
fn run_gaggle_test(test_type: TestType, format: &str) {
    // Base names; each worker appends its index for a unique file name.
    let requests_file = test_type.to_string() + "-gaggle-request-log." + format;
    let tasks_file = test_type.to_string() + "-gaggle-task-log." + format;
    let error_file = test_type.to_string() + "-gaggle-error-log." + format;
    let debug_file = test_type.to_string() + "-gaggle-debug-log." + format;
    let server = MockServer::start();
    let mock_endpoints = setup_mock_server_endpoints(&server);
    let mut worker_handles = Vec::new();
    // Per-worker log file names, collected for validation and cleanup.
    let mut requests_files = Vec::new();
    let mut tasks_files = Vec::new();
    let mut error_files = Vec::new();
    let mut debug_files = Vec::new();
    for i in 0..EXPECT_WORKERS {
        let worker_requests_file = requests_file.clone() + &i.to_string();
        let worker_tasks_file = tasks_file.clone() + &i.to_string();
        let worker_error_file = error_file.clone() + &i.to_string();
        let worker_debug_file = debug_file.clone() + &i.to_string();
        requests_files.push(worker_requests_file.clone());
        tasks_files.push(worker_tasks_file.clone());
        error_files.push(worker_error_file.clone());
        debug_files.push(worker_debug_file.clone());
        // Enable only the log flags relevant to the test type being validated.
        let worker_configuration_flags = match test_type {
            TestType::Debug => vec![
                "--worker",
                "--debug-log",
                &worker_debug_file,
                "--debug-format",
                format,
            ],
            TestType::Error => vec![
                "--worker",
                "--error-log",
                &worker_error_file,
                "--error-format",
                format,
            ],
            TestType::Requests => vec![
                "--worker",
                "--request-log",
                &worker_requests_file,
                "--request-format",
                format,
            ],
            TestType::Tasks => vec![
                "--worker",
                "--task-log",
                &worker_tasks_file,
                "--task-format",
                format,
            ],
            TestType::All => vec![
                "--worker",
                "--request-log",
                &worker_requests_file,
                "--request-format",
                format,
                "--task-log",
                &worker_tasks_file,
                "--task-format",
                format,
                "--error-log",
                &worker_error_file,
                "--error-format",
                format,
                "--debug-log",
                &worker_debug_file,
                "--debug-format",
                format,
            ],
        };
        let worker_configuration = common::build_configuration(&server, worker_configuration_flags);
        // The configuration is moved in directly; the original code cloned
        // it even though it was never used again.
        let worker_swanling_attack =
            common::build_load_test(worker_configuration, &get_tasks(), None, None);
        // Each worker runs in its own thread; the manager joins them below.
        worker_handles.push(std::thread::spawn(move || {
            common::run_load_test(worker_swanling_attack, None);
        }));
    }
    // The manager coordinates the workers; the log flags live on the workers.
    let manager_configuration = common::build_configuration(
        &server,
        vec![
            "--manager",
            "--expect-workers",
            &EXPECT_WORKERS.to_string(),
            "--users",
            "4",
            "--hatch-rate",
            "4",
            "--run-time",
            "2",
        ],
    );
    let manager_swanling_attack =
        common::build_load_test(manager_configuration, &get_tasks(), None, None);
    let swanling_metrics = common::run_load_test(manager_swanling_attack, Some(worker_handles));
    let log_files = LogFiles {
        request_logs: &requests_files,
        task_logs: &tasks_files,
        error_logs: &error_files,
        debug_logs: &debug_files,
    };
    validate_test(swanling_metrics, &mock_endpoints, &test_type, &log_files);
    // Remove every per-worker log file in a single pass.
    for file in requests_files
        .iter()
        .chain(&tasks_files)
        .chain(&error_files)
        .chain(&debug_files)
    {
        common::cleanup_files(vec![file]);
    }
}
// Request-log tests: the request log must be written in each supported
// format, both standalone and in gaggle mode. Gaggle variants run only when
// the `gaggle` feature is enabled, and `#[serial]` keeps them from
// overlapping other gaggle tests.
#[test]
fn test_requests_logs_json() {
    run_standalone_test(TestType::Requests, "json");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_requests_logs_json_gaggle() {
    run_gaggle_test(TestType::Requests, "json");
}
#[test]
fn test_requests_logs_csv() {
    run_standalone_test(TestType::Requests, "csv");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_requests_logs_csv_gaggle() {
    run_gaggle_test(TestType::Requests, "csv");
}
#[test]
fn test_requests_logs_raw() {
    run_standalone_test(TestType::Requests, "raw");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_requests_logs_raw_gaggle() {
    run_gaggle_test(TestType::Requests, "raw");
}
// Task-log tests: same matrix as the request-log tests (json/csv/raw,
// standalone and gaggle) but exercising `--task-log`/`--task-format`.
#[test]
fn test_tasks_logs_json() {
    run_standalone_test(TestType::Tasks, "json");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_tasks_logs_json_gaggle() {
    run_gaggle_test(TestType::Tasks, "json");
}
#[test]
fn test_tasks_logs_csv() {
    run_standalone_test(TestType::Tasks, "csv");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_tasks_logs_csv_gaggle() {
    run_gaggle_test(TestType::Tasks, "csv");
}
#[test]
fn test_tasks_logs_raw() {
    run_standalone_test(TestType::Tasks, "raw");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_tasks_logs_raw_gaggle() {
    run_gaggle_test(TestType::Tasks, "raw");
}
// Error-log tests: the get_error task always fails (503), so the error log
// must be populated in every format, standalone and gaggle.
#[test]
fn test_error_logs_raw() {
    run_standalone_test(TestType::Error, "raw");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_error_logs_raw_gaggle() {
    run_gaggle_test(TestType::Error, "raw");
}
#[test]
fn test_error_logs_json() {
    run_standalone_test(TestType::Error, "json");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_error_logs_json_gaggle() {
    run_gaggle_test(TestType::Error, "json");
}
#[test]
fn test_error_logs_csv() {
    run_standalone_test(TestType::Error, "csv");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_error_logs_csv_gaggle() {
    run_gaggle_test(TestType::Error, "csv");
}
// Debug-log tests: failures recorded via set_failure must appear in the
// debug log in every format, standalone and gaggle.
#[test]
fn test_debug_logs_raw() {
    run_standalone_test(TestType::Debug, "raw");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_debug_logs_raw_gaggle() {
    run_gaggle_test(TestType::Debug, "raw");
}
#[test]
fn test_debug_logs_json() {
    run_standalone_test(TestType::Debug, "json");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_debug_logs_json_gaggle() {
    run_gaggle_test(TestType::Debug, "json");
}
#[test]
fn test_debug_logs_csv() {
    run_standalone_test(TestType::Debug, "csv");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_debug_logs_csv_gaggle() {
    run_gaggle_test(TestType::Debug, "csv");
}
// All-logs tests: enable all four logs in a single run (raw format only)
// and verify each one is written, standalone and gaggle.
#[test]
fn test_all_logs_raw() {
    run_standalone_test(TestType::All, "raw");
}
#[test]
#[serial]
#[cfg_attr(not(feature = "gaggle"), ignore)]
fn test_all_logs_raw_gaggle() {
    run_gaggle_test(TestType::All, "raw");
}